prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
import datetime
import csv

# Load the list of valid solar-system names (first CSV column) once at
# import time; IntelEntry validates its `system` argument against it.
# newline='' is the documented way to open files for the csv module.
with open('SYSTEMS.csv', newline='') as f:
    reader = csv.reader(f)
    # Skip blank rows so an empty line in the CSV cannot raise IndexError.
    ALLOWED_SYSTEMS = [row[0] for row in reader if row]
class IntelEntry:
    """A single intel timer entry, validated at construction time.

    Raises ValueError when any field is missing/invalid, when the system
    is not in ALLOWED_SYSTEMS, or when the parsed timestamp is in the past.
    """

    # Canonical field names ("date" is folded into the parsed `time`
    # attribute and is not emitted by to_dict()).
    KEYS = ["timer_name", "alliance", "system", "time", "date", "location"]

    def __init__(self, timer_name="", alliance="", system="", time="", date="", location=""):
        if timer_name != "":
            self.timer_name = timer_name
        else:
            raise ValueError("Provided timer not valid.")
        if alliance != "":
            self.alliance = alliance.strip()
        else:
            raise ValueError("Provided alliance not valid.")
        # System names are stored upper-case and checked against the CSV list.
        system = system.upper()
        if system in ALLOWED_SYSTEMS:
            self.system = system
        else:
            raise ValueError("Provided solar system not valid.")
        self.location = location
        if time != "":
            # Combine the separate date and time strings into one timestamp.
            self.time = datetime.datetime.strptime(' '.join([date, time]), '%m/%d/%y %H:%M')
            if self.time < datetime.datetime.now():
                raise ValueError("Provided date/time not valid. Time must be in the future.")
        else:
            raise ValueError("Provided date/time not valid. Time must be in format '%m/%d/%y %H:%M'.")

    def to_dict(self):
        """Return the entry as a plain dict (no "date" key; see KEYS note)."""
        return { "timer_name": self.timer_name,
                 "alliance": self.alliance,
                 "system": self.system,
                 "location": self.location,
                 "time": self.time }
|
e = sz
sz = self.get_young_var_basesize(self.min_nursery_size)
self.lb_young_var_basesize = sz
def setup(self):
    """Set up the generational GC state on top of SemiSpaceGC.

    Creates the bookkeeping stacks/dict used by the write barrier and
    minor collections, then allocates the initial nursery.
    """
    self.old_objects_pointing_to_young = self.AddressStack()
    # ^^^ a list of addresses inside the old objects space; it
    # may contain static prebuilt objects as well. More precisely,
    # it lists exactly the old and static objects whose
    # GCFLAG_NO_YOUNG_PTRS bit is not set.
    self.young_objects_with_weakrefs = self.AddressStack()
    self.last_generation_root_objects = self.AddressStack()
    self.young_objects_with_id = self.AddressDict()
    SemiSpaceGC.setup(self)
    self.set_nursery_size(self.initial_nursery_size)
    # the GC is fully setup now. The rest can make use of it.
    if self.auto_nursery_size:
        # Optionally override the nursery size from the environment.
        newsize = nursery_size_from_env()
        #if newsize <= 0:
        #    ---disabled--- just use the default value.
        #    newsize = env.estimate_best_nursery_size()
        if newsize > 0:
            self.set_nursery_size(newsize)
    self.reset_nursery()
def _teardown(self):
    """Tear down the GC: run a full collection first so that flags on
    last-generation objects are restored, then defer to the base class."""
    self.collect() # should restore last gen objects flags
    SemiSpaceGC._teardown(self)
def reset_nursery(self):
    """Clear the nursery pointers; a new nursery will be set up lazily."""
    self.nursery      = NULL
    self.nursery_top  = NULL
    self.nursery_free = NULL
def set_nursery_size(self, newsize):
    """Resize the nursery to `newsize` bytes (clamped to
    [min_nursery_size, space_size // 2]) and recompute the derived
    bounds; forces a semispace collection to discard the old nursery."""
    debug_start("gc-set-nursery-size")
    if newsize < self.min_nursery_size:
        newsize = self.min_nursery_size
    if newsize > self.space_size // 2:
        newsize = self.space_size // 2

    # Compute the new bounds for how large young objects can be
    # (larger objects are allocated directly old). XXX adjust
    self.nursery_size = newsize
    self.largest_young_fixedsize = self.get_young_fixedsize(newsize)
    self.largest_young_var_basesize = self.get_young_var_basesize(newsize)
    # `scale` is the largest shift such that
    # (min_nursery_size << scale) <= newsize.
    scale = 0
    while (self.min_nursery_size << (scale+1)) <= newsize:
        scale += 1
    self.nursery_scale = scale
    debug_print("nursery_size =", newsize)
    debug_print("largest_young_fixedsize =",
                self.largest_young_fixedsize)
    debug_print("largest_young_var_basesize =",
                self.largest_young_var_basesize)
    debug_print("nursery_scale =", scale)
    # we get the following invariant:
    assert self.nursery_size >= (self.min_nursery_size << scale)

    # Force a full collect to remove the current nursery whose size
    # no longer matches the bounds that we just computed. This must
    # be done after changing the bounds, because it might re-create
    # a new nursery (e.g. if it invokes finalizers).
    self.semispace_collect()
    debug_stop("gc-set-nursery-size")
@staticmethod
def get_young_fixedsize(nursery_size):
return nursery_size // 2 - 1
@staticmethod
def get_young_var_basesize(nursery_size):
return nursery_size // 4 - 1
@classmethod
def JIT_max_size_of_young_obj(cls):
    """Upper size bound the JIT may assume for nursery allocations,
    derived from the translation-time minimal nursery size."""
    min_nurs_size = cls.TRANSLATION_PARAMS['min_nursery_size']
    return cls.get_young_fixedsize(min_nurs_size)
def is_in_nursery(self, addr):
    """Return True if `addr` points inside the current nursery.
    `addr` must be a real (untagged) pointer."""
    ll_assert(llmemory.cast_adr_to_int(addr) & 1 == 0,
              "odd-valued (i.e. tagged) pointer unexpected here")
    return self.nursery <= addr < self.nursery_top
def appears_to_be_in_nursery(self, addr):
    # same as is_in_nursery(), but may return True accidentally if
    # 'addr' is a tagged pointer with just the wrong value.
    if not self.translated_to_c:
        # Untranslated: we can afford an exact validity check.
        if not self.is_valid_gc_object(addr):
            return False
    return self.nursery <= addr < self.nursery_top
def malloc_fixedsize_clear(self, typeid, size,
                           has_finalizer=False,
                           is_finalizer_light=False,
                           contains_weakptr=False):
    """Allocate a zeroed fixed-size object, in the nursery when possible,
    otherwise falling back to the SemiSpaceGC allocator.

    (Fix: removed dataset join-artifact '|' characters that corrupted a
    comment and the fallback call's argument list.)
    """
    if (has_finalizer or
        (raw_malloc_usage(size) > self.lb_young_fixedsize and
         raw_malloc_usage(size) > self.largest_young_fixedsize)):
        # ^^^ we do two size comparisons; the first one appears redundant,
        #     but it can be constant-folded if 'size' is a constant; then
        #     it almost always folds down to False, which kills the
        #     second comparison as well.
        ll_assert(not contains_weakptr, "wrong case for mallocing weakref")
        # "non-simple" case or object too big: don't use the nursery
        return SemiSpaceGC.malloc_fixedsize_clear(self, typeid, size,
                                                  has_finalizer,
                                                  is_finalizer_light,
                                                  contains_weakptr)
    size_gc_header = self.gcheaderbuilder.size_gc_header
    totalsize = size_gc_header + size
    result = self.nursery_free
    if raw_malloc_usage(totalsize) > self.nursery_top - result:
        # Nursery full: collect it, which returns a fresh allocation point.
        result = self.collect_nursery()
    llarena.arena_reserve(result, totalsize)
    # GCFLAG_NO_YOUNG_PTRS is never set on young objs
    self.init_gc_object(result, typeid, flags=0)
    self.nursery_free = result + totalsize
    if contains_weakptr:
        self.young_objects_with_weakrefs.append(result + size_gc_header)
    return llmemory.cast_adr_to_ptr(result+size_gc_header, llmemory.GCREF)
def malloc_varsize_clear(self, typeid, length, size, itemsize,
                         offset_to_length):
    """Allocate a zeroed variable-sized object, in the nursery when both
    the base size and the item count are small enough; otherwise fall
    back to the SemiSpaceGC allocator."""
    # Only use the nursery if there are not too many items.
    if not raw_malloc_usage(itemsize):
        too_many_items = False
    else:
        # The following line is usually constant-folded because both
        # min_nursery_size and itemsize are constants (the latter
        # due to inlining).
        maxlength_for_minimal_nursery = (self.min_nursery_size // 4 //
                                         raw_malloc_usage(itemsize))
        # The actual maximum length for our nursery depends on how
        # many times our nursery is bigger than the minimal size.
        # The computation is done in this roundabout way so that
        # only the only remaining computation is the following
        # shift.
        maxlength = maxlength_for_minimal_nursery << self.nursery_scale
        too_many_items = length > maxlength

    if (too_many_items or
        (raw_malloc_usage(size) > self.lb_young_var_basesize and
         raw_malloc_usage(size) > self.largest_young_var_basesize)):
        # ^^^ we do two size comparisons; the first one appears redundant,
        #     but it can be constant-folded if 'size' is a constant; then
        #     it almost always folds down to False, which kills the
        #     second comparison as well.
        return SemiSpaceGC.malloc_varsize_clear(self, typeid, length, size,
                                                itemsize, offset_to_length)
    # with the above checks we know now that totalsize cannot be more
    # than about half of the nursery size; in particular, the + and *
    # cannot overflow
    size_gc_header = self.gcheaderbuilder.size_gc_header
    totalsize = size_gc_header + size + itemsize * length
    result = self.nursery_free
    if raw_malloc_usage(totalsize) > self.nursery_top - result:
        # Nursery full: collect it, which returns a fresh allocation point.
        result = self.collect_nursery()
    llarena.arena_reserve(result, totalsize)
    # GCFLAG_NO_YOUNG_PTRS is never set on young objs
    self.init_gc_object(result, typeid, flags=0)
    (result + size_gc_header + offset_to_length).signed[0] = length
    self.nursery_free = result + llarena.round_up_for_allocation(totalsize)
    return llmemory.cast_adr_to_ptr(result+size_gc_header, llmemory.GCREF)
# override the init_gc_object methods to change the default value of 'flags',
# used by objects that are directly created outside the nursery by the Se |
"""Support for the Roku remote."""
import requests.exceptions
from homeassistant.components import remote
from homeassistant.const import CONF_HOST
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Roku remote platform from discovery information."""
    # Entities are only created via discovery; bail out otherwise.
    if not discovery_info:
        return
    entity = RokuRemote(discovery_info[CONF_HOST])
    async_add_entities([entity], True)
class RokuRemote(remote.RemoteDevice):
    """Device that sends commands to a Roku.

    (Fix: removed dataset join-artifact '|' characters that corrupted the
    `unique_id` and `is_on` property definitions.)
    """

    def __init__(self, host):
        """Initialize the Roku device."""
        from roku import Roku

        self.roku = Roku(host)
        # Starts empty; update() replaces it with roku.device_info, whose
        # attributes (user_device_name, serial_num) the properties below
        # rely on -- assumes an attribute-style object, TODO confirm.
        self._device_info = {}

    def update(self):
        """Retrieve latest state."""
        try:
            self._device_info = self.roku.device_info
        except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
            # Best effort: keep the previous info when the device is unreachable.
            pass

    @property
    def name(self):
        """Return the name of the device."""
        if self._device_info.user_device_name:
            return self._device_info.user_device_name
        return f"Roku {self._device_info.serial_num}"

    @property
    def unique_id(self):
        """Return a unique ID."""
        return self._device_info.serial_num

    @property
    def is_on(self):
        """Return true if device is on."""
        return True

    @property
    def should_poll(self):
        """No polling needed for Roku."""
        return False

    def send_command(self, command, **kwargs):
        """Send a command to one device, skipping unknown command names."""
        for single_command in command:
            if not hasattr(self.roku, single_command):
                continue
            getattr(self.roku, single_command)()
|
# # # WARNING # # #
# This list must also be updated in doc/_templates/autosummary/class.rst if it
# is changed here!
# Dunder methods whose documentation should be included when autosummarizing
# classes.
_doc_special_members = ('__contains__', '__getitem__', '__iter__', '__len__',
                        '__add__', '__sub__', '__mul__', '__div__',
                        '__neg__', '__hash__')
from ._bunch import Bunch, BunchConst, BunchConstNamed
from .check import (check_fname, check_version, check_random_state,
_check_fname, _check_subject, _check_pandas_installed,
_check_pandas_index_arguments,
_check_event_id, _check_ch_locs, _check_compensation_grade,
_check_if_nan, _is_numeric, _ensure_int, _check_preload,
_validate_type, _check_info_inv,
_check_channels_spatial_filter, _check_one_ch_type,
_check_rank, _check_option, _check_depth, _check_combine,
_path_like, _check_src_normal, _check_stc_units,
_check_pyqt5_version, _check_sphere, _check_time_format,
_check_freesurfer_home, _suggest, _require_version,
_on_missing, _check_on_missing, int_like, _safe_input,
_check_all_same_channel_names, path_like, _ensure_events,
_check_eeglabio_installed, _check_dict_keys,
_check_edflib_installed, _to_rgb, _soft_import,
_import_h5py, _import_h5io_funcs,
_import_pymatreader_funcs)
from .config import (set_config, get_config, get_config_path, set_cache_dir,
set_memmap_min_size, get_subjects_dir, _get_stim_channel,
sys_info, _get_extra_data_path, _get_root_dir,
_get_numpy_libs)
from .docs import (copy_function_doc_to_method_doc, copy_doc, linkcode_resolve,
open_docs, deprecated, fill_doc, deprecated_alias,
copy_base_doc_to_subclass_doc, docdict as _docdict)
from .fetching import _url_to_local_path
from ._logging import (verbose, logger, set_log_level, set_log_file,
use_log_level, catch_logging, warn, filter_out_warnings,
wrapped_stdout, _get_call_line, _record_warnings,
ClosingStringIO, _VerboseDep)
from .misc import (run_subprocess, _pl, _clean_names, pformat, _file_like,
_explain_exception, _get_argvalues, sizeof_fmt,
running_subprocess, _DefaultEventParser,
_assert_no_instances, _resource_path)
from .progressbar import ProgressBar
# (Fix: removed dataset join-artifact '|' characters that split the
# `assert_snr` and `_click_ch_name` names.)
from ._testing import (run_command_if_main, requires_sklearn,
                       requires_version, requires_nibabel, requires_mne,
                       requires_good_network, requires_pandas, requires_h5py,
                       ArgvSetter, SilenceStdout, has_freesurfer, has_mne_c,
                       _TempDir, has_nibabel, buggy_mkl_svd,
                       requires_numpydoc, requires_vtk, requires_freesurfer,
                       requires_nitime, requires_dipy,
                       requires_neuromag2ft, requires_pylsl,
                       assert_object_equal, assert_and_remove_boundary_annot,
                       _raw_annot, assert_dig_allclose, assert_meg_snr,
                       assert_snr, assert_stcs_equal, modified_env,
                       _click_ch_name)
from .numerics import (hashfunc, _compute_row_norms,
_reg_pinv, random_permutation, _reject_data_segments,
compute_corr, _get_inst_data, array_split_idx,
sum_squared, split_list, _gen_events, create_slices,
_time_mask, _freq_mask, grand_average, object_diff,
object_hash, object_size, _apply_scaling_cov,
_undo_scaling_cov, _apply_scaling_array,
_undo_scaling_array, _scaled_array, _replace_md5, _PCA,
_mask_to_onsets_offsets, _array_equal_nan,
_julian_to_cal, _cal_to_julian, _dt_to_julian,
_julian_to_dt, _dt_to_stamp, _stamp_to_dt,
_check_dt, _ReuseCycle, _arange_div, _hashable_ndarray,
_custom_lru_cache)
from .mixin import (SizeMixin, GetEpochsMixin, _prepare_read_metadata,
_prepare_write_metadata, _FakeNoPandas, ShiftTimeMixin)
from .linalg import (_svd_lwork, _repeated_svd, _sym_mat_pow, sqrtm_sym, eigh,
_get_blas_funcs)
from .dataframe import (_set_pandas_dtype, _scale_dataframe_data,
_convert_times, _build_data_frame)
|
': 'A new password, when changing the current one.',
},
'code': {
'location': 'json',
'help': 'A temporary code used to reset the password.',
},
'email': {
'location': 'json',
'help': 'The email.',
},
'description': {
'location': 'json',
'help': 'The user description.',
},
})
@api.route('/login/external/manual/<string:backend>')
class LoginExtManAPI(Resource):
    def get(self, backend):
        '''Asks the URL that should be used to login with a specific backend
        (like Facebook).'''
        redirect_url = get_auth_url(backend, 'loginextmanapi')
        return {'redirect': redirect_url}
@api.route('/complete/manual/<string:backend>')
class CompleteLoginExtManAPI(Resource):
    def post(self, backend):
        '''Completes the login with a specific backend.'''
        # Resolve the authenticated username, then issue our own tokens.
        username = get_username(backend, redirect_uri='/')
        return create_tokens(username)
# @api.route('/login/external/automatic/<string:backend>')
# class StartLoginExtAutoAPI(Resource):
# def get(self, backend):
# '''Asks the URL that should be used to login with a specific backend
# (like Facebook).'''
# print('AUTH-GET')
# print(get_auth_url(backend, 'completeloginautoapi'))
# return {'redirect': get_auth_url(backend, 'completeloginautoapi')}
# # return redirect(get_auth_url(backend, 'completeloginautoapi'))
# @api.route('/complete/automatic/<string:backend>')
# class CompleteLoginAutoAPI(Resource):
# def get(self, backend):
# '''Completes the login with a specific backend.'''
# print('COMPLETE-GET')
# username = get_username(backend,
# url_for('completeloginautoapi',
# backend='facebook'))
# tokens = create_tokens(username)
# response = redirect("http://localhost:5001/")
# # import IPython; IPython.embed()
# return response
# # return create_tokens(username)
@api.route('/login/local')
class LoginLocalAPI(Resource):
    @api.doc(parser=api.create_parser('username', 'password'))
    def post(self):
        '''Login using local DB, not a third-party service.'''
        args = api.general_parse()
        username = args['username']
        password = args['password']
        try:
            if User.verify_user_password(username, password):
                # Correct credentials: issue the main + micro tokens.
                return create_tokens(username)
            else:
                api.abort_with_msg(400, 'Wrong password...', ['password'])
        except NoResultFound:
            # Raised when no user row matches `username`.
            api.abort_with_msg(400,
                               'Username seems not registered...',
                               ['username'])
@api.route('/renew_micro_token')
class RenewMicroToken(Resource):
    @api.doc(parser=api.create_parser('token'))
    def post(self):
        '''Get a new micro token to be used with the other microservices.'''
        args = api.general_parse()
        decoded = decode_token(args['token'])
        if decoded['type'] != 'main':
            # This seems not to be a main token. It must be main for security
            # reasons, for only main ones can be invalidated at logout.
            # Allowing micro tokens would allow infinite renew by a
            # compromised token
            api.abort_with_msg(400, 'Must use a main token', ['token'])
        # BUG FIX: the original line ended with a stray trailing comma,
        # which made `token` a 1-element tuple instead of the token string.
        token = create_token(decoded['username'])
        return {
            'microToken': token,
            'microTokenValidPeriod': api.app.config[
                'MICRO_TOKEN_VALID_PERIOD'],
        }
@api.route('/reset_password')
class ResetPassword(Resource):
    @api.doc(parser=api.create_parser('username', 'email'))
    def post(self):
        '''Sends an email to the user with a code to reset password.'''
        args = api.general_parse()
        user = get_user(args['username'])
        # Presumably aborts when the email does not match the account's
        # email -- TODO confirm check_user_email's behavior.
        check_user_email(user, args['email'])
        msg = Message(
            api.app.config['MAIL_SUBJECT'],
            sender=api.app.config['SENDER_NAME'],
            recipients=[user.email])
        # 15-character one-time code, stored with an expiry of `exp` seconds.
        code = passlib.utils.generate_password(15)
        exp = api.app.config['TIME_RESET_PASSWORD']
        user.set_temp_password(code, exp)
        db.session.commit()
        # The email template shows the validity period in minutes.
        msg.body = (api.app.config['EMAIL_TEMPLATE']
                    .format(code=code, exp_min=exp/60))
        api.mail.send(msg)
        return {
            'message': 'Check email!',
            'exp': exp,
        }

    @api.doc(parser=api.create_parser('username', 'email', 'code', 'password'))
    def put(self):
        '''Change the password of a user using a temporary code.'''
        args = api.general_parse()
        password = args['password']
        validate_password(password)
        username = args['username']
        user = get_user(username)
        check_user_email(user, args['email'])
        if not user.check_temp_password(args['code']):
            api.abort_with_msg(400, 'Invalid code', ['code'])
        user.hash_password(password)
        # Commit is done by create_tokens
        return create_tokens(username)
@api.route('/logout')
class Logout(Resource):
    @api.doc(parser=api.create_parser('token'))
    def post(self):
        '''Invalidates the main token.'''
        args = api.general_parse()
        payload = decode_token(args['token'])
        # Invalidates all main tokens: zeroing the stored expiry makes
        # every outstanding main token for this user invalid.
        user = get_user(payload['username'])
        user.last_token_exp = 0
        db.session.commit()
        return {}
@api.route('/users/<string:username>')
class UserAPI(Resource):
    """Read and edit a single user profile.

    (Fix: removed dataset join-artifact '|' characters that corrupted the
    404-abort line and the "Add email" comment.)
    """

    @api.doc(parser=api.create_parser('token'))
    def get(self, username):
        '''Get information about an user.'''
        args = api.general_parse()
        try:
            user = User.get_user(username)
        except NoResultFound:
            api.abort_with_msg(404, 'User not found', ['username'])
        resp = {
            'username': user.username,
            'description': user.description,
        }
        # Add email if this is the owner of the account
        token = args['token']
        if token:
            decoded = decode_token(token)
            if decoded['username'] == username:
                resp['email'] = user.email
        return resp

    @api.doc(parser=api.create_parser('token', 'description',
                                      'email', 'password', 'new_password'))
    def put(self, username):
        '''Edit information about an user.'''
        args = api.general_parse()
        decoded = decode_token(args['token'])
        if username == decoded['username']:
            user = get_user(decoded['username'])
            changed = False
            password = args.get('password')
            # If is changing password
            if password:
                new_password = args['new_password']
                if user.verify_password(password):
                    validate_password(new_password, 'new_password')
                    user.hash_password(new_password)
                    changed = True
                else:
                    api.abort_with_msg(400, 'Wrong password...', ['password'])
            # If is changing description
            if args['description']:
                # Strip disallowed HTML to prevent stored XSS.
                user.description = bleach.clean(args['description'],
                                                strip=True)
                changed = True
            email = args.get('email')
            # If is changing email
            if email:
                validate_email(email)
                user.email = email
                changed = True
            # If some data seems to have changed, commit
            if changed:
                db.session.commit()
            return {
                'username': user.username,
                'description': user.description,
                'email': user.email,
            }
        else:
            api.abort_with_msg(550, 'Editing other user profile...',
                               ['username', 'token'])
@api.route('/users')
class ListUsers(Resource):
def get(self):
'''List registered users.'''
users = db.session.query(User.username).all()
return {
'users': [u[0] fo |
"""
Author: Junhong Chen
"""
from Bio import SeqIO
import gzip
import sys
import os
pe1 = []
pe2 = []
pname = []

# Walk the input directory (sys.argv[1]) and collect one pair of gzipped
# FASTQ paths per unique file prefix.  The prefix is the file name before
# the first dot with its trailing mate marker (1/2) removed.
for dirName, subdirList, fileList in os.walk(sys.argv[1]):
    for fname in fileList:
        prefix = fname.split(".")[0][:-1]
        if prefix not in pname:
            pname.append(prefix)
            pe1.append(dirName + "/" + prefix + "1.fq.gz")
            pe2.append(dirName + "/" + prefix + "2.fq.gz")
def concat(name, file_list):
    """Concatenate several gzipped FASTQ files into one plain FASTQ file.

    name      -- path of the output file (overwritten if it exists)
    file_list -- paths of the input .fq.gz files, appended in order

    (Fixes: removed dataset join-artifact '|' characters; replaced the
    Python-2-only print statement with the function form; gzip.open does
    not accept 'rU' on Python 3, so use text mode 'rt' instead.)
    """
    with open(name, 'w') as w_file:
        for filen in file_list:
            print('working with ' + filen)
            with gzip.open(filen, 'rt') as o_file:
                seq_records = SeqIO.parse(o_file, 'fastq')
                SeqIO.write(seq_records, w_file, 'fastq')
#print pe1
#print pe2
# Merge all first mates into <out>-pe1.fq and all second mates into
# <out>-pe2.fq, where <out> is sys.argv[2].
concat(sys.argv[2]+"-pe1.fq", pe1)
concat(sys.argv[2]+"-pe2.fq", pe2)
|
zes from settings or fallback to the module constants
"""
page_size = AGNOCOMPLETE_DEFAULT_PAGESIZE
settings_page_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_PAGESIZE', None)
page_size = settings_page_size or page_size
page_size_min = AGNOCOMPLETE_MIN_PAGESIZE
settings_page_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_PAGESIZE', None)
page_size_min = settings_page_size_min or page_size_min
page_size_max = AGNOCOMPLETE_MAX_PAGESIZE
settings_page_size_max = getattr(
settings, 'AGNOCOMPLETE_MAX_PAGESIZE', None)
page_size_max = settings_page_size_max or page_size_max
# Query sizes
query_size = AGNOCOMPLETE_DEFAULT_QUERYSIZE
settings_query_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_QUERYSIZE', None)
query_size = settings_query_size or query_size
query_size_min = AGNOCOMPLETE_MIN_QUERYSIZE
settings_query_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_QUERYSIZE', None)
query_size_min = settings_query_size_min or query_size_min
return (
page_size, page_size_min, page_size_max,
query_size, query_size_min,
)
class AgnocompleteBase(with_metaclass(ABCMeta, object)):
    """
    Base class for Agnocomplete tools.

    (Fix: removed dataset join-artifact '|' characters that corrupted the
    ``get_page_size`` signature and the ``get_query_size`` docstring.)
    """

    # To be overridden by settings, or constructor arguments
    page_size = None
    page_size_max = None
    page_size_min = None
    query_size = None
    query_size_min = None
    url = None

    def __init__(self, user=None, page_size=None, url=None):
        # Loading the user context
        self.user = user

        # Load from settings or fallback to constants
        settings_page_size, settings_page_size_min, settings_page_size_max, \
            query_size, query_size_min = load_settings_sizes()

        # Use the class attributes or fallback to settings
        self._conf_page_size = self.page_size or settings_page_size
        self._conf_page_size_min = self.page_size_min or settings_page_size_min
        self._conf_page_size_max = self.page_size_max or settings_page_size_max

        # Use instance constructor parameters to eventually override defaults
        page_size = page_size or self._conf_page_size
        if page_size > self._conf_page_size_max \
                or page_size < self._conf_page_size_min:
            page_size = self._conf_page_size
        # Finally set this as the wanted page_size
        self._page_size = page_size

        # set query sizes
        self._query_size = self.query_size or query_size
        self._query_size_min = self.query_size_min or query_size_min

        # Eventual custom URL
        self._url = url

    def set_agnocomplete_field(self, field):
        self.agnocomplete_field = field

    @classproperty
    def slug(cls):
        """
        Return the key used in the register, used as a slug for the URL.

        You can override this by adding a class property.
        """
        return cls.__name__

    def get_url(self):
        return self._url or self.url

    def get_page_size(self):
        """
        Return the computed page_size

        It takes into account:

        * class variables
        * constructor arguments,
        * settings
        * fallback to the module constants if needed.
        """
        return self._page_size

    def get_query_size(self):
        """
        Return the computed default query size

        It takes into account:

        * class variables
        * settings,
        * fallback to the module constants
        """
        return self._query_size

    def get_query_size_min(self):
        """
        Return the computed minimum query size

        It takes into account:

        * class variables
        * settings,
        * fallback to the module constants
        """
        return self._query_size_min

    @abstractmethod
    def get_choices(self):
        pass

    @abstractmethod
    def items(self, query=None, **kwargs):
        pass

    @abstractmethod
    def selected(self, ids):
        """
        Return the values (as a tuple of pairs) for the ids provided
        """
        pass

    def is_valid_query(self, query):
        """
        Return True if the search query is valid.

        e.g.:

        * not empty,
        * not too short,
        """
        # No query, no item
        if not query:
            return False
        # Query is too short, no item
        if len(query) < self.get_query_size_min():
            return False
        return True
class AgnocompleteChoices(AgnocompleteBase):
    """
    Agnocomplete backed by a static ``choices`` tuple of (value, label)
    pairs, searched by case-insensitive label prefix.

    Usage Example::

        class AgnocompleteColor(AgnocompleteChoices):
            choices = (
                ('red', 'Red'),
                ('green', 'Green'),
                ('blue', 'Blue'),
            )

    """
    choices = ()

    def get_choices(self):
        return self.choices

    def item(self, current_item):
        # Render one (value, label) pair as the serializable dict form.
        value, label = current_item
        return dict(value=value, label=label)

    def items(self, query=None, **kwargs):
        if not self.is_valid_query(query):
            return []
        result = copy(self.choices)
        if query:
            result = tuple(
                pair for pair in result if pair[1].lower().startswith(query)
            )
        # Slicing before rendering
        page = result[:self.get_page_size()]
        return [self.item(pair) for pair in page]

    def selected(self, ids):
        """
        Return the selected options as a list of tuples
        """
        candidates = copy(self.choices)
        return [pair for pair in candidates if pair[0] in ids]
class AgnocompleteModelBase(with_metaclass(ABCMeta, AgnocompleteBase)):
    """Abstract base for model-backed Agnocompletes."""
    # Django model class served by this agnocomplete; may instead be
    # inferred from get_queryset() (see get_model()).
    model = None
    # When True the view layer should require a logged-in user
    # (enforced elsewhere -- TODO confirm).
    requires_authentication = False

    @abstractmethod
    def get_queryset(self):
        pass

    @property
    def fields(self):
        raise NotImplementedError(
            "Integrator: You must have a `fields` property")

    def get_model(self):
        """
        Return the class Model used by this Agnocomplete
        """
        if hasattr(self, 'model') and self.model:
            return self.model
        # Give me a "none" queryset
        try:
            none = self.get_queryset().none()
            return none.model
        except Exception:
            raise ImproperlyConfigured(
                "Integrator: Unable to determine the model with this queryset."
                " Please add a `model` property")

    def get_model_queryset(self):
        """
        Return an unfiltered complete model queryset.

        To be used for the select Input initialization
        """
        return self.get_model().objects.all()
    # The abstract get_choices() is satisfied by the full queryset.
    get_choices = get_model_queryset

    def get_field_name(self):
        """
        Return the model field name to be used as a value, or 'pk' if unset
        """
        if hasattr(self, 'agnocomplete_field') and \
                hasattr(self.agnocomplete_field, 'to_field_name'):
            return self.agnocomplete_field.to_field_name or 'pk'
        return 'pk'
class AgnocompleteModel(AgnocompleteModelBase):
"""
Example::
class AgnocompletePeople(AgnocompleteModel):
model = People
fields = ['first_name', 'last_name']
class AgnocompletePersonQueryset(AgnocompleteModel):
fields = ['first_name', 'last_name']
def get_queryset(self):
return People.objects.filter(email__contains='example.com')
"""
def __init__(self, *args, **kwargs):
    """Initialize the base and reset the cached final queryset.

    Note: the double-underscore attribute is name-mangled to
    ``_AgnocompleteModel__final_queryset``.
    """
    super(AgnocompleteModel, self).__init__(*args, **kwargs)
    self.__final_queryset = None
def _construct_qs_filter(self, field_name):
"""
Using a field name optionnaly prefixed by `^`, `=`, `@`, return a
case-insensitive filter condition name usable as a queryset `filter()`
keyword argument.
"""
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % |
def revertdigits(item):
    """Return `item` with its three decimal digits reversed (e.g. 314 -> 413).

    Assumes a non-negative integer of at most three digits -- TODO confirm
    intended domain.  (Fixes: removed dataset join-artifact '|' characters;
    replaced `int(x/10)` float division with exact integer division.)
    """
    ones = item % 10
    tens = (item // 10) % 10
    hundreds = item // 100
    return ones * 100 + tens * 10 + hundreds
# Demo: sort the numbers by the value of their reversed digits.
# (Fix: removed a trailing dataset join-artifact '|' after the print call.)
numlist = [314, 315, 642, 246, 129, 999]
numlist.sort(key=revertdigits)
print(numlist)
#!/usr/bin/python3
#
# Copyright (c) 2014-2022 The Voxie Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import numpy as np
import voxie
# (Fix: removed dataset join-artifact '|' characters that corrupted a
# debug comment and the `sizeOrig[2]` index.)
args = voxie.parser.parse_args()
context = voxie.VoxieContext(args)
instance = context.createInstance()

if args.voxie_action != 'RunFilter':
    raise Exception('Invalid operation: ' + args.voxie_action)

with context.makeObject(context.bus, context.busName, args.voxie_operation, ['de.uni_stuttgart.Voxie.ExternalOperationRunFilter']).ClaimOperationAndCatch() as op:
    filterPath = op.FilterObject
    pars = op.Parameters
    # print (pars)
    properties = pars[filterPath._objectPath]['Properties'].getValue('a{sv}')
    # print (properties)
    inputPath = properties['de.uni_stuttgart.Voxie.Input'].getValue('o')
    inputDataPath = pars[inputPath]['Data'].getValue('o')
    inputData = context.makeObject(context.bus, context.busName, inputDataPath, [
        'de.uni_stuttgart.Voxie.VolumeDataVoxel'])
    outputPath = properties['de.uni_stuttgart.Voxie.Output'].getValue('o')
    factor = properties['de.uni_stuttgart.Voxie.Filter.Downsample.Factor'].getValue(
        'x')

    origin = inputData.VolumeOrigin
    sizeOrig = inputData.ArrayShape
    spacingOrig = np.array(inputData.GridSpacing)
    # print (origin, sizeOrig, spacingOrig)

    # TODO: Don't cut away data at the end
    # size = ((int(sizeOrig[0]) + factor - 1) // factor,
    #         (int(sizeOrig[1]) + factor - 1) // factor,
    #         (int(sizeOrig[2]) + factor - 1) // factor)
    # Output voxel count: trailing voxels that don't fill a whole
    # factor-sized block are currently dropped (see TODO above).
    size = (int(sizeOrig[0]) // factor,
            int(sizeOrig[1]) // factor,
            int(sizeOrig[2]) // factor)
    spacing = spacingOrig * factor

    with inputData.GetBufferReadonly() as bufferOld:
        arrayOld = bufferOld.array
        # Trim each dimension to a multiple of `factor`, then expose the
        # factor-sized blocks as separate axes via a reshaped view.
        arrayOld2 = arrayOld[:size[0] * factor,
                             :size[1] * factor, :size[2] * factor]
        arrayOld3 = arrayOld2.view()
        arrayOld3.shape = size[0], factor, size[1], factor, size[2], factor

        dataType = ('float', 32, 'native')  # TODO?
        with instance.CreateVolumeDataVoxel(size, dataType, origin, spacing) as data:
            with data.CreateUpdate() as update, data.GetBufferWritable(update) as buffer:
                buffer[:] = 0
                zCount = arrayOld3.shape[4]
                for z in range(zCount):
                    # Average over the factor^3 block for one output z-slice.
                    buffer[:, :, z] = np.mean(
                        arrayOld3[:, :, :, :, z, :], axis=(1, 3, 4))
                    op.SetProgress((z + 1) / zCount)
                version = update.Finish()
    result = {}
    result[outputPath] = {
        'Data': voxie.Variant('o', data._objectPath),
        'DataVersion': voxie.Variant('o', version._objectPath),
    }
    op.Finish(result)
|
'''
Created on Jun 6, 2014
@author: rtermondt
'''
from django.conf import settings
def global_settings(request):
    """Django context processor exposing INVITATION_SYSTEM as a boolean.

    Only an explicit ``True`` setting enables the invitation system; any
    other value (missing, falsy, or merely truthy) disables it.

    (Fix: removed a dataset join-artifact '|' before the return statement.)
    """
    invitation_system_setting = getattr(settings, 'INVITATION_SYSTEM', None)
    if invitation_system_setting == True:
        invite_system = True
    else:
        invite_system = False
    return {
        'INVITATION_SYSTEM': invite_system
    }
| |
'''Michael Lange <klappnase (at) freakmail (dot) de>
The ToolTip class provides a flexible tooltip widget for Tkinter; it is based on IDLE's ToolTip
module which unfortunately seems to be broken (at least the version I saw).
INITIALIZATION OPTIONS:
anchor : where the text should be positioned inside the widget, must be on of "n", "s", "e", "w", "nw" and so on;
default is "center"
bd : borderwidth of the widget; default is 1 (NOTE: don't use "borderwidth" here)
bg : background color to use for the widget; default is "lightyellow" (NOTE: don't use "background")
delay : time in ms that it takes for the widget to appear on the screen when the mouse pointer has
entered the parent widget; default is 800
fg : foreground (i.e. text) color to use; default is "black" (NOTE: don't use "foreground")
follow_mouse : if set to 1 the tooltip will follow the mouse pointer instead of being displayed
outside of the parent widget; this may be useful if you want to use tooltips for
large widgets like listboxes or canvases; default is 0
font : font to use for the widget; default is system specific
justify : how multiple lines of text will be aligned, must be "left", "right" or "center"; default is "left"
padx : extra space added to the left and right within the widget; default is 4
pady : extra space above and below the text; default is 2
relief : one of "flat", "ridge", "groove", "raised", "sunken" or "solid"; default is "solid"
state : must be "normal" or "disabled"; if set to "disabled" the tooltip will not appear; default is "normal"
text : the text that is displayed inside the widget
textvariable : if set to an instance of Tkinter.StringVar() the variable's value will be used as text for the widget
width : width of the widget; the default is 0, which means that "wraplength" will be used to limit the widgets width
wraplength : limits the number of characters in each line; default is 150
WIDGET METHODS:
configure(**opts) : change one or more of the widget's options as described above; the changes will take effect the
next time the tooltip shows up; NOTE: follow_mouse cannot be changed after widget initialization
Other widget methods that might be useful if you want to subclass ToolTip:
enter() : callback when the mouse pointer enters the parent widget
leave() : called when the mouse pointer leaves the parent widget
motion() : is called when the mouse pointer moves inside the parent widget if follow_mouse is set to 1 and the
tooltip has shown up to continually update the coordinates of the tooltip window
coords() : calculates the screen coordinates of the tooltip window
create_contents() : creates the contents of the tooltip window (by default a Tkinter.Label)
'''
# Ideas gleaned from PySol
import Tkinter
class ToolTip:
    '''Attach a pop-up tooltip to an arbitrary Tkinter widget.

    Binds <Enter>, <Leave> and <ButtonPress> on *master* (with add='+' so
    the caller's own bindings survive) and shows a borderless Toplevel
    holding a Label after `delay` milliseconds.  See the module docstring
    for the full list of supported options.
    '''
    def __init__(self, master, text='Your text here', delay=800, **opts):
        self.master = master
        # Defaults for every supported option; configure() rejects any key
        # not present in this dict.
        self._opts = {'anchor': 'center', 'bd': 1, 'bg': 'lightyellow',
                      'delay': delay, 'fg': 'black', 'follow_mouse': 0,
                      'font': None, 'justify': 'left', 'padx': 4, 'pady': 2,
                      'relief': 'solid', 'state': 'normal', 'text': text,
                      'textvariable': None, 'width': 0, 'wraplength': 150}
        self.configure(**opts)
        self._tipwindow = None
        self._id = None
        self._id1 = self.master.bind("<Enter>", self.enter, '+')
        self._id2 = self.master.bind("<Leave>", self.leave, '+')
        self._id3 = self.master.bind("<ButtonPress>", self.leave, '+')
        self._follow_mouse = 0
        if self._opts['follow_mouse']:
            self._id4 = self.master.bind("<Motion>", self.motion, '+')
            self._follow_mouse = 1

    def configure(self, **opts):
        '''Update known options; raise KeyError for an unknown option.

        BUG FIX: the original rebound the local name ``KeyError`` to a
        plain string and then raised that string -- raising a string is
        illegal on Python 2.6+ and never produced a real KeyError.
        Also replaced the Python-2-only dict.has_key() with ``in``.
        '''
        for key in opts:
            if key in self._opts:
                self._opts[key] = opts[key]
            else:
                raise KeyError('Unknown option: "%s"' % key)

    ##----these methods handle the callbacks on "<Enter>", "<Leave>" and "<Motion>"---------------##
    ##----events on the parent widget; override them if you want to change the widget's behavior--##

    def enter(self, event=None):
        self._schedule()

    def leave(self, event=None):
        self._unschedule()
        self._hide()

    def motion(self, event=None):
        # Keep the tip glued to the pointer while it moves over the parent.
        if self._tipwindow and self._follow_mouse:
            x, y = self.coords()
            self._tipwindow.wm_geometry("+%d+%d" % (x, y))

    ##------the methods that do the work:---------------------------------------------------------##

    def _schedule(self):
        # (Re)arm the delayed show; a disabled tooltip never fires.
        self._unschedule()
        if self._opts['state'] == 'disabled':
            return
        self._id = self.master.after(self._opts['delay'], self._show)

    def _unschedule(self):
        # Renamed the local from 'id' to avoid shadowing the builtin.
        timer_id = self._id
        self._id = None
        if timer_id:
            self.master.after_cancel(timer_id)

    def _show(self):
        if self._opts['state'] == 'disabled':
            self._unschedule()
            return
        if not self._tipwindow:
            self._tipwindow = tw = Tkinter.Toplevel(self.master)
            # hide the window until we know the geometry
            tw.withdraw()
            tw.wm_overrideredirect(1)
            if tw.tk.call("tk", "windowingsystem") == 'aqua':
                # Mac OS X ("aqua"): style the window as an undecorated help balloon.
                tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w, "help", "none")
            self.create_contents()
            tw.update_idletasks()
            x, y = self.coords()
            tw.wm_geometry("+%d+%d" % (x, y))
            tw.deiconify()

    def _hide(self):
        tw = self._tipwindow
        self._tipwindow = None
        if tw:
            tw.destroy()

    ##----these methods might be overridden in derived classes:----------------------------------##

    def coords(self):
        # The tip window must be completely outside the master widget;
        # otherwise when the mouse enters the tip window we get
        # a leave event and it disappears, and then we get an enter
        # event and it reappears, and so on forever :-(
        # or we take care that the mouse pointer is always outside the tipwindow :-)
        tw = self._tipwindow
        twx, twy = tw.winfo_reqwidth(), tw.winfo_reqheight()
        w, h = tw.winfo_screenwidth(), tw.winfo_screenheight()
        # calculate the y coordinate:
        if self._follow_mouse:
            y = tw.winfo_pointery() + 20
            # make sure the tipwindow is never outside the screen:
            if y + twy > h:
                y = y - twy - 30
        else:
            y = self.master.winfo_rooty() + self.master.winfo_height() + 3
            if y + twy > h:
                y = self.master.winfo_rooty() - twy - 3
        # we can use the same x coord in both cases:
        x = tw.winfo_pointerx() - twx / 2
        if x < 0:
            x = 0
        elif x + twx > w:
            x = w - twx
        return x, y

    def create_contents(self):
        # Everything except our bookkeeping options maps 1:1 onto Label options.
        opts = self._opts.copy()
        for opt in ('delay', 'follow_mouse', 'state'):
            del opts[opt]
        label = Tkinter.Label(self._tipwindow, **opts)
        label.pack()
##---------demo code-----------------------------------##
def demo():
    '''Tiny manual test: a listbox with a follow-mouse tip and a quit button.'''
    root = Tkinter.Tk(className='ToolTip-demo')
    listbox = Tkinter.Listbox(root)
    listbox.insert('end', "I'm a listbox")
    listbox.pack(side='top')
    listbox_tip = ToolTip(listbox, follow_mouse=1, text="I'm a tooltip with follow_mouse set to 1, so I won't be placed outside my parent")
    quit_button = Tkinter.Button(root, text='Quit', command=root.quit)
    quit_button.pack(side='bottom')
    button_tip = ToolTip(quit_button, text='Enough of this')
    root.mainloop()


if __name__ == '__main__':
    demo()
|
#!/usr/bin/env python
# angelus.py - John Burnett & Will Johnson (c)2015
#
# Angelus does the following:
# -FFT analysis
# -Partial tracking
# -Modal analysis
# -Resynthesis
#
# Angelus will eventually do the following:
# -FFT Analysis -> Notation
# -Modal Analysis -> 3D mesh (and reverse?)
from FFT_Analyzer import FFT_Analyzer
from writeRObU import writeRObU
from Synthesizer import Synthesizer
import sys
def main():
    # Usage: angelus.py <audio-file-name>; the file is read from ../audio/
    # and the results are written under ../build/.
    fname = sys.argv[1]
    title = parse_fname(fname)
    infile = "../audio/" + fname
    outfile = "../build/" + title + ".ro"
    # Run the FFT / partial-tracking / modal analysis pipeline.
    analysis = FFT_Analyzer(infile)
    analysis.perform_analysis()
    analysis.stft(20)  # NOTE(review): 20 presumably an STFT window/frame count -- confirm
    analysis.get_modal_data(30)  # NOTE(review): 30 presumably the number of modes kept -- confirm
    # Serialize the modal model, then resynthesize audio from the analysis.
    out = writeRObU(outfile, analysis.modal_model)
    out.write()
    synth = Synthesizer(analysis, title)
    synth.write_wav()
    #synth.write_residual()
def parse_fname(fname):
    """Return *fname* stripped of its extension (text before the first '.').

    BUG FIX: the original loop returned None for names containing no '.',
    which made main() crash while building the output path; a name without
    an extension is now returned unchanged.
    """
    return fname.partition('.')[0]


# BUG FIX: guard the entry point so importing this module does not run main().
if __name__ == '__main__':
    main()
|
# -*- cod | ing: utf-8 -*-
im | port common_sale_contract
import test_sale_contract
|
import unittest
from streamlink.strea | m import StreamIOIterWrapper
class TestPluginStream(unittest.TestCase):
    """Exercises StreamIOIterWrapper.read() across chunk boundaries."""

    def test_iter(self):
        def chunks():
            # Three chunks: 8192 + 4096 + 2048 bytes.
            yield b"1" * 8192
            yield b"2" * 4096
            yield b"3" * 2048

        wrapped = StreamIOIterWrapper(chunks())
        # Drain the first chunk in three partial reads.
        self.assertEqual(wrapped.read(4096), b"1" * 4096)
        self.assertEqual(wrapped.read(2048), b"1" * 2048)
        self.assertEqual(wrapped.read(2048), b"1" * 2048)
        # Cross into the second chunk.
        self.assertEqual(wrapped.read(1), b"2")
        self.assertEqual(wrapped.read(4095), b"2" * 4095)
        # Partial read of the last chunk, then read() drains the remainder.
        self.assertEqual(wrapped.read(1536), b"3" * 1536)
        self.assertEqual(wrapped.read(), b"3" * 512)
|
from django.conf import settings
from django.conf.urls.defaults import *
from models import *
from django.views.generic import date_based, list_detail
from django.contrib.auth.decorators import login_required
# Number of random images from the gallery to display.
# NOTE(review): the value is a template slice-spec string like ":8",
# not an int -- confirm the templates consume it with the |slice filter.
SAMPLE_SIZE = ":%s" % getattr(settings, 'GALLERY_SAMPLE_SIZE', 8)
# galleries
# NOTE(review): patterns() and the function-based generic views used below
# only exist in very old Django versions (pre-1.3 style URLconf).
# Shared kwargs for the date-based gallery archive views; every gallery view
# is wrapped in login_required and restricted to public galleries.
gallery_args = {'date_field': 'date_added', 'allow_empty': True, 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}
urlpatterns = patterns('django.views.generic.date_based',
    url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/(?P<slug>[\-\d\w]+)/$', login_required(date_based.object_detail), {'date_field': 'date_added', 'slug_field': 'title_slug', 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery-detail'),
    url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/$', login_required(date_based.archive_day), gallery_args, name='pl-gallery-archive-day'),
    url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/$', login_required(date_based.archive_month), gallery_args, name='pl-gallery-archive-month'),
    url(r'^gallery/(?P<year>\d{4})/$', login_required(date_based.archive_year), gallery_args, name='pl-gallery-archive-year'),
    url(r'^gallery/?$', login_required(date_based.archive_index), gallery_args, name='pl-gallery-archive'),
)
# Slug/paginated gallery list views.
urlpatterns += patterns('django.views.generic.list_detail',
    url(r'^gallery/(?P<slug>[\-\d\w]+)/$', login_required(list_detail.object_detail), {'slug_field': 'title_slug', 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery'),
    url(r'^gallery/page/(?P<page>[0-9]+)/$', login_required(list_detail.object_list), {'queryset': Gallery.objects.filter(is_public=True), 'allow_empty': True, 'paginate_by': 8, 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery-list'),
)
# photographs
# Same structure as above, but for individual public photos.
photo_args = {'date_field': 'date_added', 'allow_empty': True, 'queryset': Photo.objects.filter(is_public=True)}
urlpatterns += patterns('django.views.generic.date_based',
    url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/(?P<slug>[\-\d\w]+)/$', login_required(date_based.object_detail), {'date_field': 'date_added', 'slug_field': 'title_slug', 'queryset': Photo.objects.filter(is_public=True)}, name='pl-photo-detail'),
    url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/$', login_required(date_based.archive_day), photo_args, name='pl-photo-archive-day'),
    url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/$', login_required(date_based.archive_month), photo_args, name='pl-photo-archive-month'),
    url(r'^photo/(?P<year>\d{4})/$', login_required(date_based.archive_year), photo_args, name='pl-photo-archive-year'),
    url(r'^photo/$', login_required(date_based.archive_index), photo_args, name='pl-photo-archive'),
)
urlpatterns += patterns('django.views.generic.list_detail',
    url(r'^photo/(?P<slug>[\-\d\w]+)/$', login_required(list_detail.object_detail), {'slug_field': 'title_slug', 'queryset': Photo.objects.filter(is_public=True)}, name='pl-photo'),
    url(r'^photo/page/(?P<page>[0-9]+)/$', login_required(list_detail.object_list), {'queryset': Photo.objects.filter(is_public=True), 'allow_empty': True, 'paginate_by': 20}, name='pl-photo-list'),
)
|
#!/usr/bin/python -*- coding:utf-8 -*-
__Author__ = "Riyaz Ahmad Bhat"
__Email__ = "riyaz.ah.bhat@gmail.com"
import re
from collections import namedtuple
from sanity_checker import SanityChecker
class DefaultList(list):
    """A list that yields a default value for out-of-range indices instead
    of raising IndexError (the IndexError analogue of a defaultdict)."""

    def __init__(self, default=None):
        super(DefaultList, self).__init__()
        self.default = default

    def __getitem__(self, index):
        try:
            return super(DefaultList, self).__getitem__(index)
        except IndexError:
            return self.default
class SSFReader (SanityChecker):
    """Parses one sentence in SSF (Shakti Standard Format) into node tuples.

    Chunk lines (ids like "1") carry the chunk-level feature structure;
    token lines (ids like "1.2") become either the chunk's head node or one
    of its children.  NOTE: this is Python 2 code -- it calls str.decode /
    str.encode on the raw lines.
    """
    def __init__ (self, sentence):
        super(SSFReader, self).__init__()
        self.id_ = int()                # running token id within the sentence
        self.nodeList = list()          # one node namedtuple per chunk head
        self.chunk_word = dict()        # chunkId -> head word name
        self.sentence = sentence
        self.modifierModified = dict()  # chunkId -> parent chunkId (from drel)
        self.node = namedtuple('node',
            ('id', 'head', 'children', 'pos', 'poslcat', 'af', 'vpos', 'name','drel','parent',
            'chunkId', 'chunkType', 'mtype', 'troot', 'coref', 'stype','voicetype', 'posn'))
        self.features = namedtuple('features',
            ('lemma','cat','gen','num','per','case','vib','tam'))

    def getAnnotations (self):
        """Walk the SSF lines, building head nodes and their child lists.

        Returns self so calls can be chained.
        """
        children_ = list()
        for line in self.sentence.split("\n"):
            nodeInfo = line.decode("utf-8").split("\t")
            if nodeInfo[0].isdigit():
                # Chunk line: remember its feature structure; h() looks up
                # head/chunk-level attributes for the token lines below.
                assert len(nodeInfo) == 4 # no need to process trash! FIXME
                attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
                attributes = self.updateFSValues(attributeValue_pairs)
                h = attributes.get #NOTE h -> head node attributes
            elif nodeInfo[0].replace(".",'',1).isdigit():
                # Token line: either the chunk head or one of its children.
                assert (len(nodeInfo) == 4) and (nodeInfo[1] and nodeInfo[2] != '') # FIXME
                self.id_ += 1
                # Strip non-ASCII from the POS tag.
                pos_ = nodeInfo[2].encode("utf-8").decode("ascii",'ignore').encode("ascii")
                wordForm_ = nodeInfo[1]
                attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
                if attributeValue_pairs['name'] == h('head_'):# NOTE head word of the chunk
                    self.nodeList.append(self.node(str(self.id_),wordForm_,children_,pos_,h('poslcat_'),
                        self.features(h('lemma_') if h('lemma_') else wordForm_ ,h('cat_'),h('gen_'), h('num_'),
                        h('per_'),h('case_'),h('vib_'),h('tam_')),h('vpos_'),h('head_'),h('drel_'),
                        h('parent_'),h('chunkId_'),":".join(('head',h('chunkId_'))),h('mtype_'),h('troot_'),
                        h('coref_'),h('stype_'),h('voicetype_'),h('posn_')))
                    self.modifierModified[h('chunkId_')] = h('parent_')
                    self.chunk_word[h('chunkId_')] = h('head_')
                else:
                    attributes = self.updateFSValues(attributeValue_pairs)
                    c = attributes.get #NOTE c -> child node attributes
                    children_.append(self.node(str(self.id_),wordForm_,[],pos_,c('poslcat_'),self.features(c('lemma_') \
                        if c('lemma_') else wordForm_ ,c('cat_'),c('gen_'),c('num_'),c('per_'),c('case_'),c('vib_'),
                        c('tam_')),c('vpos_'),c('name_'),"_","_",None,":".join(('child',h('chunkId_'))),c('mtype_'),
                        c('troot_'),c('coref_'),None, None, c('posn_')))
            else: children_ = list()
        return self

    def FSPairs (self, FS) :
        """Split a feature-structure string into an attribute->value dict."""
        feats = dict()
        for feat in FS.split():
            if "=" not in feat:continue
            # Normalize quoting of af= and the dmrel= alias of drel=.
            feat = re.sub("af='+","af='",feat.replace("dmrel=",'drel='))
            assert len(feat.split("=")) == 2
            attribute,value = feat.split("=")
            feats[attribute] = value
        return feats

    def morphFeatures (self, AF):
        "LEMMA,CAT,GEN,NUM,PER,CASE,VIB,TAM"
        assert len(AF[:-1].split(",")) == 8 # no need to process trash! FIXME
        lemma_,cat_,gen_,num_,per_,case_,vib_,tam_ = AF.split(",")
        if len(lemma_) > 1: lemma_ = lemma_.strip("'")
        # lemma and tam carry the surrounding quotes of the af= value.
        return lemma_.strip("'"),cat_,gen_,num_,per_,case_,vib_,tam_.strip("'")

    def updateFSValues (self, attributeValue_pairs):
        """Expand an attribute/value dict into the flat *_-suffixed fields."""
        # Defaults: None for structural attributes, '' for morph features.
        attributes = dict(zip(['head_','poslcat_','af_','vpos_','name_','drel_','parent_','mtype_','troot_','chunkId_',\
            'coref_','stype_','voicetype_','posn_'], [None] * 14))
        attributes.update(dict(zip(['lemma_','cat_','gen_','num_','per_','case_','vib_','tam_'], [''] * 8)))
        for key,value in attributeValue_pairs.items():
            if key == "af":
                # af= packs the eight morph features in a fixed order.
                attributes['lemma_'],attributes['cat_'],attributes['gen_'],attributes['num_'],\
                attributes['per_'],attributes['case_'],attributes['vib_'],attributes['tam_'] = \
                self.morphFeatures (value)
            elif key == "drel":
                # drel='relation:parentChunkId'
                assert len(value.split(":")) == 2 # no need to process trash! FIXME
                attributes['drel_'], attributes['parent_'] = re.sub("'|\"",'',value).split(":")
                assert attributes['drel_'] and attributes['parent_'] != "" # no need to process trash! FIXME
            else:
                variable = str(key) + "_"
                if variable == "name_": attributes['chunkId_'] = re.sub("'|\"",'',value)
                attributes[variable] = re.sub("'|\"",'',value)
        return attributes
|
i | mport nspkg1.foo
| |
'''
Tree from:
http://www.quesucede.com/page/show/id/python-3-tree-implementation
'''
from urllib.parse import urlparse
import os
(_ROOT, _DEPTH, _BREADTH) = range(3)
class Node:
    """A tree node: an identifier plus the identifiers of its children."""

    def __init__(self, identifier):
        self._name = identifier
        self._child_ids = []

    @property
    def identifier(self):
        """Read-only node identifier."""
        return self._name

    @property
    def children(self):
        """Identifiers of this node's children, in insertion order."""
        return self._child_ids

    def add_child(self, identifier):
        """Register *identifier* as a child of this node."""
        self._child_ids.append(identifier)
class Tree:
    """Simple dict-backed tree keyed by node identifier."""

    def __init__(self):
        self.__nodes = {}

    @property
    def nodes(self):
        """Mapping of identifier -> Node."""
        return self.__nodes

    def add_node(self, identifier, parent=None):
        """Create a node and attach it to *parent* (if given); return it.

        BUG FIX: dropped the leftover debug print, which also crashed for
        non-string identifiers/parents ("identifier: " + identifier).
        """
        node = Node(identifier)
        self[identifier] = node
        if parent is not None:
            self[parent].add_child(identifier)
        return node

    def display(self, identifier, depth=_ROOT):
        """Pretty-print the subtree rooted at *identifier*, one level per indent.

        BUG FIX: the original also printed the parent's identifier once per
        child before every recursive call, duplicating each level of output.
        """
        children = self[identifier].children
        if depth == _ROOT:
            print("{0}".format(identifier))
        else:
            print("\t" * depth, "{0}".format(identifier))
        depth += 1
        for child in children:
            self.display(child, depth)  # recursive call

    def traverse(self, identifier, mode=_DEPTH):
        """Yield identifiers depth-first (default) or breadth-first."""
        yield identifier
        queue = self[identifier].children
        while queue:
            yield queue[0]
            expansion = self[queue[0]].children
            if mode == _DEPTH:
                queue = expansion + queue[1:]  # depth-first
            elif mode == _BREADTH:
                queue = queue[1:] + expansion  # width-first

    def __getitem__(self, key):
        return self.__nodes[key]

    def __setitem__(self, key, item):
        self.__nodes[key] = item
'''
tree = Tree()
t = print("{0}".format("palestras"))
tree.add_node("Harry") # root node
tree.add_node("Jane", t)
tree.add_node("Bill", "Harry")
tree.add_node("Joe", "Jane")
tree.add_node("Diane", "Jane")
tree.add_node("George", "Diane")
tree.add_node("Mary", "Diane")
tree.add_node("Jill", "George")
tree.add_node("Carol", "Jill")
tree.add_node("Grace", "Bill")
tree.add_node("Mark", "Jane")
tree.display("Harry")
print("***** DEPTH-FIRST ITERATION *****")
for node in tree.traverse("Harry"):
print(node)
print("***** BREADTH-FIRST ITERATION *****")
for node in tree.traverse("Harry", mode=_BREADTH):
print(node)
''' |
#!/usr/bin/python
import sys, commands, struct, operator, subprocess, os
# Command line: <program> <core>  (Python 2 script: print statements,
# the removed 'commands' module).
if len(sys.argv) != 3:
    print 'usage:',sys.argv[0],'<program> <core>'
    sys.exit(1)
prog, core = sys.argv[1:]
# finds out the size of void*/size_t. could be hardcoded for speed...
try:
    cell = int(commands.getoutput('gdb '+prog+r''' -ex 'printf "cell %d\n", sizeof(void*)' -ex q | grep cell''').split()[1])
except:
    # gdb missing or the binary unreadable; fall back to 32-bit pointers.
    print 'gdb failed to open',prog,core,'- assuming a 32b pointer'
    cell = 4
# struct format character for one pointer-sized little-endian word.
fmt = {4:'I',8:'Q'}[cell]
def gdb_sym_info(addrs,exe):
    """Resolve each address via gdb's 'info symbol' / 'list *addr'.

    Returns (info, found): info maps addr -> descriptive string ('UNKNOWN'
    if unresolved), found counts the resolved addresses.
    NOTE(review): the *exe* parameter is unused -- the module-level ``prog``
    and ``core`` are used instead; confirm intent.
    """
    gdb = subprocess.Popen(['gdb',prog,core], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    info = {}
    found = 0
    for addr in addrs:
        if addr:
            gdb.stdin.write('info symbol 0x%x\n'%addr)
            gdb.stdin.write('list *0x%x\n'%addr)
            # Sentinel line so we know gdb finished answering both commands.
            gdb.stdin.write('printf "\\ndone\\n"\n')
            gdb.stdin.flush()
            line = ''
            lineinfo = None
            syminfo = 'UNKNOWN'
            while line != 'done':
                line = gdb.stdout.readline().strip()
                if 'is in' in line: lineinfo = line.split('is in ')[1]
                if 'in section' in line: syminfo = line.split('(gdb) ')[1]
            # Prefer file:line info over the raw symbol/section description.
            if lineinfo:
                info[addr] = lineinfo
            else:
                info[addr] = syminfo
            found += int(info[addr] != 'UNKNOWN')
    return info, found
def addr2line_sym_info(addrs,exe):
    """Resolve addresses with ``addr2line -f -e <exe>``.

    Returns a dict addr -> 'function file:line' (addr2line prints two lines
    per query: the function name, then the source location).
    """
    addr2line = subprocess.Popen('addr2line -f -e'.split()+[exe], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    info = {}
    for addr in addrs:
        if addr:
            addr2line.stdin.write('0x%x\n'%addr)
            addr2line.stdin.flush()
            info[addr] = addr2line.stdout.readline().strip()+' '+addr2line.stdout.readline().strip()
    return info
def sym_info(addrs, exe):
    """Resolve *addrs* to symbol/line info, preferring gdb over addr2line.

    Setting HEAPPROF_ADDR2LINE in the environment skips gdb entirely.
    NOTE(review): the body passes the module-level ``prog`` rather than
    the ``exe`` parameter -- preserved as-is; confirm whether intended.
    """
    gdb_found = 0
    syminfo = None
    if 'HEAPPROF_ADDR2LINE' not in os.environ:
        syminfo, gdb_found = gdb_sym_info(addrs, prog)
    if gdb_found < 1:
        # gdb didn't manage to find anything (or was skipped) - perhaps the
        # core dump is in a custom format; fall back to addr2line.
        syminfo = addr2line_sym_info(addrs, prog)
    return syminfo
# a silly guard against "non-blocks" - occurrences of HeaP and ProF
# in code instead of data
def is_block(s,e):
    # Plausible metadata: span divisible by the pointer size and a stack of
    # fewer than 100 frames (Python 2: '/' is integer division here).
    return (e-s)%cell == 0 and (e-s)/cell < 100
class Block:
    # One heap block reconstructed from the core image: its size word
    # followed by the call-stack return addresses, all cell-sized words.
    def __init__(self, metadata):
        self.size = struct.unpack(fmt, metadata[0:cell])[0]
        # Everything after the size word is the allocation call stack.
        self.stack = struct.unpack('%d'%(len(metadata)/cell - 1)+fmt, metadata[cell:])
def find_blocks(bytes):
    """Scan the raw core image for metadata bracketed by 'HeaP'...'ProF'.

    Python 2: *bytes* is the str contents of the core file (the parameter
    shadows the builtin name).  Implausible spans (see is_block) restart
    the search just past the candidate 'HeaP' magic.
    """
    blocks = []
    end_index = 0
    while True:
        start_index = bytes.find('HeaP',end_index)
        end_index = bytes.find('ProF',start_index)
        if not is_block(start_index, end_index):
            end_index = start_index + cell # search again
        else:
            if min(start_index, end_index) < 0:
                # One of the magic strings was not found: no more blocks.
                break
            blocks.append(Block(bytes[start_index+cell:end_index])) # this assumes little endian...
    return blocks
def code_addrs(blocks):
    """Return the distinct code addresses referenced by any block's stack.

    BUG FIX: the original used ``reduce(operator.or_, [...])`` with no
    initial value, which raises TypeError when *blocks* is empty (and
    relies on the ``reduce`` builtin that Python 3 removed).
    """
    addrs = set()
    for block in blocks:
        addrs.update(block.stack)
    return list(addrs)
def report(blocks, syminfo):
    """Print blocks grouped by allocation stack, largest total size first.

    Python 2 code (print statements, dict.iteritems).  Each group line shows
    the group's share of the heap, its total size and the individual block
    sizes, followed by the resolved call stack.
    """
    stack2sizes = {}
    for block in blocks:
        stack2sizes.setdefault(block.stack,list()).append(block.size)
    # Sort groups by total allocated size (ascending), then print descending.
    total = sorted([(sum(sizes), stack) for stack, sizes in stack2sizes.iteritems()])
    heapsize = sum([size for size, stack in total])
    for size, stack in reversed(total):
        print '%d%% %d %s'%(int(100.*size/heapsize), size, stack2sizes[stack])
        for addr in stack:
            if addr:
                print ' 0x%x'%addr, syminfo[addr]
# Entry point: parse the core image, resolve symbols, print the report.
blocks = find_blocks(open(core,'rb').read())
if not blocks:
    print 'no heap blocks found in the core dump (searched for metadata enclosed in the magic string HeaP...ProF)'
    sys.exit(1)
syminfo = sym_info(code_addrs(blocks), prog)
report(blocks, syminfo)
|
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
from PyQt5 import uic
from . import guiStart
from . import guiCompileSuccess
# sys.path.insert(1, 'C:/Users/GuSan/Desktop/powerOverWhelming/project/src/comp_exec')
from ..comp_exec import validation
from . import guiErrorCode
class GuiSelectCode(QtWidgets.QMainWindow) :
    """Window showing three candidate code snippets; the user picks one via
    a radio button, compiles it, and a success or error window opens."""

    def setupUi(self, SelectCode):
        # Qt-Designer-style widget construction (geometry values in pixels).
        SelectCode.setObjectName("SelectCode")
        SelectCode.resize(1300, 1100)
        self.centralwidget = QtWidgets.QWidget(SelectCode)
        self.centralwidget.setObjectName("centralwidget")
        self.opt_select_code_3 = QtWidgets.QRadioButton(self.centralwidget)
        self.opt_select_code_3.setGeometry(QtCore.QRect(970, 100, 21, 22))
        self.opt_select_code_3.setText("")
        self.opt_select_code_3.setObjectName("opt_select_code_3")
        self.txt_select_code_1 = QtWidgets.QPlainTextEdit(self.centralwidget)
        self.txt_select_code_1.setGeometry(QtCore.QRect(150, 140, 320, 721))
        self.txt_select_code_1.setObjectName("txt_select_code_1")
        self.opt_select_code_1 = QtWidgets.QRadioButton(self.centralwidget)
        self.opt_select_code_1.setGeometry(QtCore.QRect(310, 100, 21, 22))
        self.opt_select_code_1.setText("")
        self.opt_select_code_1.setObjectName("opt_select_code_1")
        self.txt_select_code_3 = QtWidgets.QPlainTextEdit(self.centralwidget)
        self.txt_select_code_3.setGeometry(QtCore.QRect(810, 140, 320, 721))
        self.txt_select_code_3.setObjectName("txt_select_code_3")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(560, 40, 201, 41))
        font = QtGui.QFont()
        font.setPointSize(16)
        font.setBold(True)
        font.setWeight(75)
        self.label.setFont(font)
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setObjectName("label")
        self.btn_compile_start = QtWidgets.QPushButton(self.centralwidget)
        self.btn_compile_start.setGeometry(QtCore.QRect(980, 890, 151, 51))
        self.btn_compile_start.setObjectName("btn_compile_start")
        self.btn_return_search = QtWidgets.QPushButton(self.centralwidget)
        self.btn_return_search.setGeometry(QtCore.QRect(980, 970, 151, 51))
        self.btn_return_search.setObjectName("btn_return_search")
        self.opt_select_code_2 = QtWidgets.QRadioButton(self.centralwidget)
        self.opt_select_code_2.setGeometry(QtCore.QRect(640, 100, 21, 22))
        self.opt_select_code_2.setText("")
        self.opt_select_code_2.setObjectName("opt_select_code_2")
        self.txt_select_code_2 = QtWidgets.QPlainTextEdit(self.centralwidget)
        self.txt_select_code_2.setGeometry(QtCore.QRect(480, 140, 320, 721))
        self.txt_select_code_2.setObjectName("txt_select_code_2")
        self.progress = QtWidgets.QProgressBar(self.centralwidget)
        self.progress.setGeometry(QtCore.QRect(150, 910, 791, 31))
        self.progress.setProperty("value", 0)
        self.progress.setObjectName("progress")
        SelectCode.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(SelectCode)
        self.statusbar.setObjectName("statusbar")
        SelectCode.setStatusBar(self.statusbar)
        self.retranslateUi(SelectCode)
        QtCore.QMetaObject.connectSlotsByName(SelectCode)

    def retranslateUi(self, SelectCode):
        # User-visible strings, routed through Qt's translation machinery.
        _translate = QtCore.QCoreApplication.translate
        SelectCode.setWindowTitle(_translate("SelectCode", "Select Code"))
        self.label.setText(_translate("SelectCode", "Select Code"))
        self.btn_compile_start.setText(_translate("SelectCode", "Compile!"))
        self.btn_return_search.setText(_translate("SelectCode", "Return to Search"))

    def __init__(self):
        QtWidgets.QMainWindow.__init__(self)
        self.setupUi(self)
        self.initUi()

    def initUi(self) :
        # Wire up the buttons; code slot 1 is pre-selected.
        self.btn_compile_start.clicked.connect(self.compile_click)
        self.opt_select_code_1.setChecked(True)
        self.btn_return_search.clicked.connect(self.return_search)
        #window_start = guiStart.GuiStart(self)
        #self.txt_select_code_1.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[0])
        #self.txt_select_code_2.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[1])
        #self.txt_select_code_3.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[2])

    def return_search(self) :
        # Close this window and reopen the search window (kept alive via a
        # module-level global so it isn't garbage-collected).
        global window_search_code
        self.close()
        window_search_code = guiStart.GuiStart()
        window_search_code.show()

    def compile_click(self) :
        """Compile the selected snippet and open the success/error window.

        NOTE(review): the while-loop below fakes progress by counting to 100
        in 0.001 steps -- 100000 processEvents iterations of busy-waiting;
        QProgressBar.setValue also expects an int.  Consider stepping by
        whole percents or driving it from real compile progress.
        """
        global window_compile_success
        global window_compile_fail
        window_compile_success = guiCompileSuccess.GuiCompileSuccess()
        window_compile_fail = guiErrorCode.GuiErrorCode()
        self.completed = 0
        while self.completed<100 :
            self.completed+=0.001
            self.progress.setValue(self.completed)
            QtWidgets.QApplication.processEvents()
        # tupleCompile: (compiler output text, status); status 1 appears to
        # signal a compile error -- confirm against validation.validation.
        tupleCompile = validation.validation(self.loadText(), 'cpp')
        print(tupleCompile[0])
        if(tupleCompile[1]==1) :
            msg = QtWidgets.QMessageBox()
            msg.setText("컴파일 에러")
            msg.setWindowTitle("컴파일 에러")
            msg.show()
            msg.exec_()
            window_compile_fail.txt_error_code.setPlainText(self.loadText())
            window_compile_fail.txt_error_context.setPlainText(tupleCompile[0])
            window_compile_fail.show()
            return window_compile_fail
        else :
            window_compile_success.txt_code_complete.setPlainText(self.loadText())
            window_compile_success.txt_output_test.setPlainText(tupleCompile[0])
            window_compile_success.show()
            return window_compile_success

    def loadText(self) :
        # Return the text of whichever code slot's radio button is checked
        # (slot 3 is the fallback when neither 1 nor 2 is checked).
        if(self.opt_select_code_1.isChecked()) :
            print("radioButton 1 is toggled")
            return self.txt_select_code_1.toPlainText()
        elif(self.opt_select_code_2.isChecked()) :
            print("radioButton 2 is toggled")
            return self.txt_select_code_2.toPlainText()
        else :
            print("radioButton 3 is toggled")
            return self.txt_select_code_3.toPlainText()
|
nguage.getLanguage()[:2] # getLanguage returns e.g. "fi_FI" for "language_country"
PackageInfoHandler.__init__(self, self.statusCallback, blocking = False, neededTag = 'ALL_TAGS', neededFlag = self.ImageVersion)
self.directory = resolveFilename(SCOPE_METADIR)
self.hardware_info = HardwareInfo()
self.list = List([])
self.NotifierCallback = None
self.Console = Console()
self.UpdateConsole = Console()
self.cmdList = []
self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', '-src')
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
def statusCallback(self, status, progress):
    # Progress hook required by PackageInfoHandler; intentionally a no-op.
    pass
def startSoftwareTools(self, callback = None):
    """Entry point: remember *callback*, then probe the network state;
    checkNetworkCB continues the update chain."""
    if callback is not None:
        self.NotifierCallback = callback
    iNetwork.checkNetworkState(self.checkNetworkCB)
def checkNetworkCB(self, data):
    """Record network availability, then fetch the package lists.

    NOTE(review): ``data <= 2`` presumably encodes the "link up" states
    reported by iNetwork.checkNetworkState -- confirm.
    """
    if data is not None:
        if data <= 2:
            self.NetworkConnectionAvailable = True
            self.getUpdates()
        else:
            self.NetworkConnectionAvailable = False
            self.getUpdates()
def getUpdates(self, callback = None):
    """Refresh the ipkg package catalogue (CMD_UPDATE).

    self.list_updating guards against re-entry; without a network
    connection the pending callback is answered with False.
    NOTE(review): the two outer branches are nearly identical copy/paste,
    differing only when already updating while offline.
    """
    if self.lastDownloadDate is None:
        # First refresh since startup.
        if self.NetworkConnectionAvailable:
            self.lastDownloadDate = time()
            if self.list_updating is False and callback is None:
                self.list_updating = True
                self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
            elif self.list_updating is False and callback is not None:
                self.list_updating = True
                self.NotifierCallback = callback
                self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
            elif self.list_updating is True and callback is not None:
                # An update is already running; just queue the callback.
                self.NotifierCallback = callback
        else:
            # No network: report failure to whoever is waiting.
            self.list_updating = False
            if callback is not None:
                callback(False)
            elif self.NotifierCallback is not None:
                self.NotifierCallback(False)
    else:
        if self.NetworkConnectionAvailable:
            self.lastDownloadDate = time()
            if self.list_updating is False and callback is None:
                self.list_updating = True
                self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
            elif self.list_updating is False and callback is not None:
                self.list_updating = True
                self.NotifierCallback = callback
                self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
            elif self.list_updating is True and callback is not None:
                self.NotifierCallback = callback
        else:
            if self.list_updating and callback is not None:
                # Offline but mid-update: reuse the cached list instead.
                self.NotifierCallback = callback
                self.startIpkgListAvailable()
            else:
                self.list_updating = False
                if callback is not None:
                    callback(False)
                elif self.NotifierCallback is not None:
                    self.NotifierCallback(False)
def ipkgCallback(self, event, param):
    """IpkgComponent event hook: abort on error, continue the chain on done."""
    if event == IpkgComponent.EVENT_ERROR:
        self.list_updating = False
        if self.NotifierCallback is not None:
            self.NotifierCallback(False)
    elif event == IpkgComponent.EVENT_DONE:
        if self.list_updating:
            self.startIpkgListAvailable()
    pass
def startIpkgListAvailable(self, callback = None):
    """Run ``ipkg list``; the output is parsed in IpkgListAvailableCB."""
    if callback is not None:
        self.list_updating = True
    if self.list_updating:
        if not self.UpdateConsole:
            self.UpdateConsole = Console()
        cmd = self.ipkg.ipkg + " list"
        self.UpdateConsole.ePopen(cmd, self.IpkgListAvailableCB, callback)
def IpkgListAvailableCB(self, result, retval, extra_args = None):
    """Parse ``ipkg list`` output into self.available_packetlist.

    Each output line looks like "name - version - description"; packages
    whose name ends in one of self.unwanted_extensions are skipped.
    """
    (callback) = extra_args or None
    if result:
        if self.list_updating:
            self.available_packetlist = []
            for x in result.splitlines():
                tokens = x.split(' - ')
                name = tokens[0].strip()
                if not any(name.endswith(x) for x in self.unwanted_extensions):
                    l = len(tokens)
                    version = l > 1 and tokens[1].strip() or ""
                    descr = l > 2 and tokens[2].strip() or ""
                    self.available_packetlist.append([name, version, descr])
            if callback is None:
                self.startInstallMetaPackage()
            else:
                # Only report success once all console children have exited.
                if self.UpdateConsole:
                    if len(self.UpdateConsole.appContainers) == 0:
                        callback(True)
    else:
        self.list_updating = False
        if self.UpdateConsole:
            if len(self.UpdateConsole.appContainers) == 0:
                if callback is not None:
                    callback(False)
def startInstallMetaPackage(self, callback = None):
    """Install the enigma2 meta packages (online) or skip ahead (offline)."""
    if callback is not None:
        self.list_updating = True
    if self.list_updating:
        if self.NetworkConnectionAvailable:
            if not self.UpdateConsole:
                self.UpdateConsole = Console()
            cmd = self.ipkg.ipkg + " install enigma2-meta enigma2-plugins-meta enigma2-skins-meta"
            self.UpdateConsole.ePopen(cmd, self.InstallMetaPackageCB, callback)
        else:
            # Offline: pretend the install succeeded so the chain continues.
            self.InstallMetaPackageCB(True)
def InstallMetaPackageCB(self, result, retval = None, extra_args = None):
    """After the meta-package install, refresh the index and continue."""
    (callback) = extra_args or None
    if result:
        self.fillPackagesIndexList()
        if callback is None:
            self.startIpkgListInstalled()
        else:
            # Only report success once all console children have exited.
            if self.UpdateConsole:
                if len(self.UpdateConsole.appContainers) == 0:
                    callback(True)
    else:
        self.list_updating = False
        if self.UpdateConsole:
            if len(self.UpdateConsole.appContainers) == 0:
                if callback is not None:
                    callback(False)
def startIpkgListInstalled(self, callback = None):
    """Run ``ipkg list_installed``; parsed in IpkgListInstalledCB."""
    if callback is not None:
        self.list_updating = True
    if self.list_updating:
        if not self.UpdateConsole:
            self.UpdateConsole = Console()
        cmd = self.ipkg.ipkg + " list_installed"
        self.UpdateConsole.ePopen(cmd, self.IpkgListInstalledCB, callback)
def IpkgListInstalledCB(self, result, retval, extra_args = None):
    """Parse ``ipkg list_installed`` and prune self.packagesIndexlist.

    Removes index entries whose prerequisites fail and entries marked as
    "internal", then continues with countUpdates().
    """
    (callback) = extra_args or None
    if result:
        self.installed_packetlist = {}
        for x in result.splitlines():
            tokens = x.split(' - ')
            name = tokens[0].strip()
            if not any(name.endswith(x) for x in self.unwanted_extensions):
                l = len(tokens)
                version = l > 1 and tokens[1].strip() or ""
                self.installed_packetlist[name] = version
        # Iterate over copies ([:]) because we remove entries while looping.
        for package in self.packagesIndexlist[:]:
            if not self.verifyPrerequisites(package[0]["prerequisites"]):
                self.packagesIndexlist.remove(package)
        for package in self.packagesIndexlist[:]:
            attributes = package[0]["attributes"]
            if "packagetype" in attributes:
                if attributes["packagetype"] == "internal":
                    self.packagesIndexlist.remove(package)
        if callback is None:
            self.countUpdates()
        else:
            if self.UpdateConsole:
                if len(self.UpdateConsole.appContainers) == 0:
                    callback(True)
    else:
        self.list_updating = False
        if self.UpdateConsole:
            if len(self.UpdateConsole.appContainers) == 0:
                if callback is not None:
                    callback(False)
def countUpdates(self, callback = None):
    """Count installed index packages whose available version differs,
    collecting their names in self.available_updatelist."""
    self.available_updates = 0
    self.available_updatelist = []
    for package in self.packagesIndexlist[:]:
        attributes = package[0]["attributes"]
        packagename = attributes["packagename"]
        for x in self.available_packetlist:
            if x[0] == packagename:
                if packagename in self.installed_packetlist:
                    # Version mismatch between installed and available.
                    if self.installed_packetlist[packagename] != x[1]:
                        self.available_updates +=1
                        self.available_updatelist.append([packagename])
    self.list_updating = False
    if self.UpdateConsole:
        if len(self.UpdateConsole.appContainers) == 0:
            # Answer whichever callback is pending, then clear it.
            if callback is not None:
                callback(True)
                callback = None
            elif self.NotifierCallback is not None:
                self.NotifierCallback(True)
                self.NotifierCallback = None
def startIpkgUpdate(self, callback = None):
    """Run ``ipkg update`` on the general-purpose console."""
    if not self.Console:
        self.Console = Console()
    cmd = self.ipkg.ipkg + " update"
    self.Console.ePopen(cmd, self.IpkgUpdateCB, callback)
def IpkgUpdateCB(self, result, retval, extra_args = None):
    """Report ``ipkg update`` completion once the console is idle."""
    (callback) = extra_args or None
    if result:
        if self.Console:
            if len(self.Console.appContainers) == 0:
                if callback is not None:
                    callback(True)
                    callback = None
def cleanupSoftwareTools(self):
    """Stop all running ipkg/console activity and drop pending callbacks."""
    self.list_updating = False
    if self.NotifierCallback is not None:
        self.NotifierCallback = None
    self.ipkg.stop()
    # Kill any still-running child processes on both consoles.
    if self.Console is not None:
        if len(self.Console.appContainers):
            for name in self.Console.appContainers.keys():
                self.Console.kill(name)
    if self.UpdateConsole is not None:
        if len(self.UpdateConsole.appContainers):
            for name in self.UpdateConsole.appContainers.keys():
                self.UpdateConsole.kill(name)
def verifyPrerequisites(self, prerequisites):
if "hardware" in pre |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
import os
import sys
from optparse import OptionParser, BadOptionError
from celery import __version__
from celery.platforms im | port EX_FAILURE, detached
from celery.utils.log import get_logger
from celery.bin.base import daemon_options, Option
logger = get_logger(__name__)

# Daemonization options (pidfile, logfile, uid, ...) plus our own --fake
# flag, which skips the actual detach for debugging.
OPTION_LIST = daemon_options(default_pidfile="celeryd.pid") + (
    Option("--fake",
        default=False, action="store_true", dest="fake",
        help="Don't fork (for debugging purposes)"), )
def detach(path, argv, logfile=None, pidfile=None, uid=None,
           gid=None, umask=0, working_directory=None, fake=False, ):
    """Detach from the controlling terminal and exec *path* with *argv*.

    On success os.execv never returns; if the exec fails the error is
    logged (logging is set up first, since the detached child has none)
    and EX_FAILURE is returned.
    """
    with detached(logfile, pidfile, uid, gid, umask, working_directory, fake):
        try:
            os.execv(path, [path] + argv)
        except Exception:
            from celery import current_app
            current_app.log.setup_logging_subsystem("ERROR", logfile)
            logger.critical("Can't exec %r", " ".join([path] + argv),
                            exc_info=True)
            return EX_FAILURE
class PartialOptionParser(OptionParser):
    """An OptionParser that tolerates unknown options.

    Instead of erroring out, unrecognized arguments are collected in
    ``self.leftovers`` so they can be forwarded to another program.
    """

    def __init__(self, *args, **kwargs):
        self.leftovers = []  # unrecognized arguments, preserved verbatim
        OptionParser.__init__(self, *args, **kwargs)

    def _process_long_opt(self, rargs, values):
        raw = rargs.pop(0)
        had_explicit_value = "=" in raw
        if had_explicit_value:
            opt, attached_value = raw.split("=", 1)
            rargs.insert(0, attached_value)
        else:
            opt = raw
        try:
            opt = self._match_long_opt(opt)
            option = self._long_opt.get(opt)
        except BadOptionError:
            option = None
        if not option:
            # Unknown long option: keep the raw argument for later.
            self.leftovers.append(raw)
            return
        if option.takes_value():
            nargs = option.nargs
            if len(rargs) < nargs:
                if nargs == 1:
                    self.error("%s option requires an argument" % opt)
                else:
                    self.error("%s option requires %d arguments" % (
                        opt, nargs))
            elif nargs == 1:
                value = rargs.pop(0)
            else:
                value = tuple(rargs[:nargs])
                del rargs[:nargs]
        elif had_explicit_value:
            self.error("%s option does not take a value" % opt)
        else:
            value = None
        option.process(opt, value, values, self)

    def _process_short_opts(self, rargs, values):
        first = rargs[0]
        try:
            OptionParser._process_short_opts(self, rargs, values)
        except BadOptionError:
            # Keep the unknown short option, and (if present) the value
            # that immediately follows it.
            self.leftovers.append(first)
            if rargs and rargs[0][0] != "-":
                self.leftovers.append(rargs.pop(0))
class detached_celeryd(object):
    """Parses daemonization options, then re-executes celeryd detached,
    forwarding every unrecognized argument to the worker."""
    option_list = OPTION_LIST
    usage = "%prog [options] [celeryd options]"
    version = __version__
    description = ("Detaches Celery worker nodes. See `celeryd --help` "
                   "for the list of supported worker arguments.")
    command = sys.executable
    execv_path = sys.executable
    execv_argv = ["-m", "celery.bin.celeryd"]

    def Parser(self, prog_name):
        """Build the forgiving option parser for this program."""
        return PartialOptionParser(
            prog=prog_name,
            option_list=self.option_list,
            usage=self.usage,
            description=self.description,
            version=self.version)

    def parse_options(self, prog_name, argv):
        """Parse *argv*; daemon options are re-appended to the leftovers
        so the re-executed worker receives them too."""
        parser = self.Parser(prog_name)
        options, values = parser.parse_args(argv)
        for name in ("logfile", "pidfile"):
            value = getattr(options, name)
            if value:
                parser.leftovers.append("--%s=%s" % (name, value))
        return options, values, parser.leftovers

    def execute_from_commandline(self, argv=None):
        """Entry point: detach and exec celeryd (never returns normally)."""
        argv = sys.argv if argv is None else argv
        # Everything from the first "--" (inclusive) is passed through
        # verbatim to the re-executed worker.
        config = []
        passthrough = False
        for arg in argv:
            if passthrough:
                config.append(arg)
            elif arg == "--":
                passthrough = True
                config.append(arg)
        prog_name = os.path.basename(argv[0])
        options, values, leftovers = self.parse_options(prog_name, argv[1:])
        sys.exit(detach(path=self.execv_path,
                        argv=self.execv_argv + leftovers + config,
                        **vars(options)))
def main():
    """Console entry point for ``celeryd-detach``."""
    detached_celeryd().execute_from_commandline()
if __name__ == "__main__": # pragma: no cover
    main()
|
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sqlalchemy as sa
metadata = sa.MetaData()
# Queues: one row per message queue, unique per (project, name);
# 'metadata' stores the queue's attributes as an opaque binary blob.
Queues = sa.Table('Queues', metadata,
                  sa.Column('id', sa.INTEGER, primary_key=True),
                  sa.Column('project', sa.String(64)),
                  sa.Column('name', sa.String(64)),
                  sa.Column('metadata', sa.LargeBinary),
                  sa.UniqueConstraint('project', 'name'),
                  )
# Pools: storage backends keyed by name, each with a unique URI, a
# required weight, optional options blob and an optional flavor.
Pools = sa.Table('Pools', metadata,
                 sa.Column('name', sa.String(64), primary_key=True),
                 sa.Column('uri', sa.String(255),
                           unique=True, nullable=False),
                 sa.Column('weight', sa.INTEGER, nullable=False),
                 sa.Column('options', sa.Text()),
                 sa.Column('flavor', sa.String(64), nullable=True))
# NOTE(gengchc2): Modify pool_group define: turn NOT NULL into DEFAULT NULL:
# [alter table Flavors change column pool_group pool_group varchar(64)
# default null;]
Flavors = sa.Table('Flavors', metadata,
                   sa.Column('name', sa.String(64), primary_key=True),
                   sa.Column('project', sa.String(64)),
                   sa.Column('capabilities', sa.Text()))
# Catalogue: maps a (project, queue) pair to the pool that stores it;
# rows disappear when the referenced pool is deleted (ON DELETE CASCADE).
Catalogue = sa.Table('Catalogue', metadata,
                     sa.Column('pool', sa.String(64),
                               sa.ForeignKey('Pools.name',
                                             ondelete='CASCADE')),
                     sa.Column('project', sa.String(64)),
                     sa.Column('queue', sa.String(64), nullable=False),
                     sa.UniqueConstraint('project', 'queue'))
|
rom constants import *
from helpers import OrderedAttrDict, utc
"""
The AS types and their FLV representations.
"""
log = logging.getLogger('flvlib.astypes')
class MalformedFLV(Exception):
    """Raised when the stream violates the expected FLV/AMF0 layout."""
    pass
# Number
def get_number(f, max_offset=None):
    # An AS Number is a 64-bit double.
    return get_double(f)
def make_number(num):
    # Serialize an AS Number as a 64-bit double.
    return make_double(num)
# Boolean
def get_boolean(f, max_offset=None):
    """Read one UI8; any non-zero byte is True."""
    return get_ui8(f) != 0
def make_boolean(value):
    """Serialize truthiness as a single UI8: 1 for true, 0 for false."""
    return make_ui8(1 if value else 0)
# String
def get_string(f, max_offset=None):
    # A short string: UI16 byte length followed by the raw bytes.
    # First 16 bits are the string's length
    length = get_ui16(f)
    # Then comes the string itself
    ret = f.read(length)
    return ret
def make_string(string):
    # Serialize a short string: UI16 byte length + raw bytes.
    if isinstance(string, unicode):  # Python 2 'unicode' type
        # We need a blob, not unicode. Arbitrarily choose UTF-8
        string = string.encode('UTF-8')
    length = make_ui16(len(string))
    return length + string
# Longstring
def get_longstring(f, max_offset=None):
    # A long string: UI32 byte length followed by the raw bytes.
    # First 32 bits are the string's length
    length = get_ui32(f)
    # Then comes the string itself
    ret = f.read(length)
    return ret
def make_longstring(string):
    # Serialize a long string: UI32 byte length + raw bytes.
    if isinstance(string, unicode):  # Python 2 'unicode' type
        # We need a blob, not unicode. Arbitrarily choose UTF-8
        string = string.encode('UTF-8')
    length = make_ui32(len(string))
    return length + string
# ECMA Array
class ECMAArray(OrderedAttrDict):
    """An AS ECMA (associative) array: an ordered name -> value mapping."""
    pass
def get_ecma_array(f, max_offset=None):
    # An ECMA array is a UI32 approximate element count, then name/value
    # pairs until the 0x000009 end marker (or until max_offset is hit).
    length = get_ui32(f)
    log.debug("The ECMA array has approximately %d elements", length)
    array = ECMAArray()
    while True:
        if max_offset and (f.tell() == max_offset):
            log.debug("Prematurely terminating reading an ECMA array")
            break
        marker = get_ui24(f)
        if marker == 9:
            log.debug("Marker!")
            break
        else:
            # Not the end marker: rewind the 3 peeked bytes and read a pair.
            f.seek(-3, os.SEEK_CUR)
            name, value = get_script_data_variable(f, max_offset=max_offset)
            array[name] = value
    return array
def make_ecma_array(d):
    # UI32 element count, serialized name/value pairs, 0x000009 end marker.
    length = make_ui32(len(d))
    rest = ''.join([make_script_data_variable(name, value)
                    for name, value in d.iteritems()])  # Python 2 dict API
    marker = make_ui24(9)
    return length + rest + marker
# Strict Array
def get_strict_array(f, max_offset=None):
    # A strict array: UI32 count followed by exactly that many values.
    length = get_ui32(f)
    log.debug("The length is %d", length)
    elements = [get_script_data_value(f, max_offset=max_offset)
                for _ in xrange(length)]  # xrange: Python 2
    return elements
def make_strict_array(l):
    # UI32 element count, then each value serialized in order.
    ret = make_ui32(len(l))
    rest = ''.join([make_script_data_value(value) for value in l])
    return ret + rest
# Date
def get_date(f, max_offset=None):
    # The wire value is milliseconds since the epoch; convert to seconds.
    timestamp = get_number(f) / 1000.0
    # From the following document:
    # http://opensource.adobe.com/wiki/download/
    #                    attachments/1114283/amf0_spec_121207.pdf
    #
    # Section 2.13 Date Type
    #
    # (...) While the design of this type reserves room for time zone offset
    # information, it should not be filled in, nor used (...)
    _ignored = get_si16(f)
    return datetime.datetime.fromtimestamp(timestamp, utc)
def make_date(date):
    # Serialize as milliseconds since the epoch in UTC, plus a zero
    # timezone offset (the spec says the offset should not be used).
    if date.tzinfo:
        utc_date = date.astimezone(utc)
    else:
        # assume it's UTC
        utc_date = date.replace(tzinfo=utc)
    ret = make_number(calendar.timegm(utc_date.timetuple()) * 1000)
    offset = 0
    return ret + make_si16(offset)
# Null
def get_null(f, max_offset=None):
    """An AS Null has no payload; nothing is read from *f*."""
    return None
def make_null(none):
    """An AS Null serializes to zero bytes."""
    return ''
# Object
class FLVObject(OrderedAttrDict):
    """A generic AS object: an ordered name -> value mapping."""
    pass
def get_object(f, max_offset=None):
    # An object is a sequence of name/value pairs terminated by the
    # 0x000009 end marker (or cut short at max_offset).
    ret = FLVObject()
    while True:
        if max_offset and (f.tell() == max_offset):
            log.debug("Prematurely terminating reading an object")
            break
        marker = get_ui24(f)
        if marker == 9:
            log.debug("Marker!")
            break
        else:
            # Not the end marker: rewind the 3 peeked bytes and read a pair.
            f.seek(-3, os.SEEK_CUR)
            name, value = get_script_data_variable(f)
            setattr(ret, name, value)
    return ret
def make_object(obj):
    # If the object is iterable, serialize keys/values. If not, fall
    # back on iterating over __dict__.
    # This makes sure that make_object(get_object(StringIO(blob))) == blob
    try:
        iterator = obj.iteritems()  # Python 2 dict-like API
    except AttributeError:
        iterator = obj.__dict__.iteritems()
    ret = ''.join([make_script_data_variable(name, value)
                   for name, value in iterator])
    marker = make_ui24(9)  # 0x000009 object end marker
    return ret + marker
# MovieClip
class MovieClip(object):
    """An AS MovieClip reference, stored as its path string."""
    def __init__(self, path):
        self.path = path
    def __eq__(self, other):
        return isinstance(other, MovieClip) and self.path == other.path
    def __ne__(self, other):
        # Python 2 does not derive != from ==; without this, != would
        # silently fall back to identity comparison.
        return not self.__eq__(other)
    def __repr__(self):
        return "<MovieClip at %s>" % self.path
def get_movieclip(f, max_offset=None):
    # A movie clip is serialized as its path (a short string).
    ret = get_string(f)
    return MovieClip(ret)
def make_movieclip(clip):
    # Serialize the movie clip as its path (a short string).
    return make_string(clip.path)
# Undefined
class Undefined(object):
    """The AS Undefined value; all instances compare equal."""
    def __eq__(self, other):
        return isinstance(other, Undefined)
    def __ne__(self, other):
        # Python 2 does not derive != from ==; without this, != would
        # silently fall back to identity comparison.
        return not self.__eq__(other)
    def __repr__(self):
        return '<Undefined>'
def get_undefined(f, max_offset=None):
    # Undefined has no payload; nothing is read from f.
    return Undefined()
def make_undefined(undefined):
    """An AS Undefined serializes to zero bytes."""
    return ''
# Reference
class Reference(object):
    """An AS reference to a previously read complex value, by index."""
    def __init__(self, ref):
        self.ref = ref
    def __eq__(self, other):
        return isinstance(other, Reference) and self.ref == other.ref
    def __ne__(self, other):
        # Python 2 does not derive != from ==; without this, != would
        # silently fall back to identity comparison.
        return not self.__eq__(other)
    def __repr__(self):
        return "<Reference to %d>" % self.ref
def get_reference(f, max_offset=None):
    # A reference is a UI16 table index.
    ret = get_ui16(f)
    return Reference(ret)
def make_reference(reference):
    # Serialize the reference as its UI16 table index.
    return make_ui16(reference.ref)
# Dispatch table: AS type tag -> (deserializer, serializer) pair.
as_type_to_getter_and_maker = {
    VALUE_TYPE_NUMBER: (get_number, make_number),
    VALUE_TYPE_BOOLEAN: (get_boolean, make_boolean),
    VALUE_TYPE_STRING: (get_string, make_string),
    VALUE_TYPE_OBJECT: (get_object, make_object),
    VALUE_TYPE_MOVIECLIP: (get_movieclip, make_movieclip),
    VALUE_TYPE_NULL: (get_null, make_null),
    VALUE_TYPE_UNDEFINED: (get_undefined, make_undefined),
    VALUE_TYPE_REFERENCE: (get_reference, make_reference),
    VALUE_TYPE_ECMA_ARRAY: (get_ecma_array, make_ecma_array),
    VALUE_TYPE_STRICT_ARRAY: (get_strict_array, make_strict_array),
    VALUE_TYPE_DATE: (get_date, make_date),
    VALUE_TYPE_LONGSTRING: (get_longstring, make_longstring)
}
# Maps Python types to the AS type tag used when serializing; anything
# not listed here is serialized as a generic object (see
# make_script_data_value).
type_to_as_type = {
    bool: VALUE_TYPE_BOOLEAN,
    int: VALUE_TYPE_NUMBER,
    long: VALUE_TYPE_NUMBER,
    float: VALUE_TYPE_NUMBER,
    # WARNING: not supporting Longstrings here.
    # With a max length of 65535 chars, noone will notice.
    str: VALUE_TYPE_STRING,
    unicode: VALUE_TYPE_STRING,
    list: VALUE_TYPE_STRICT_ARRAY,
    dict: VALUE_TYPE_ECMA_ARRAY,
    ECMAArray: VALUE_TYPE_ECMA_ARRAY,
    datetime.datetime: VALUE_TYPE_DATE,
    Undefined: VALUE_TYPE_UNDEFINED,
    MovieClip: VALUE_TYPE_MOVIECLIP,
    Reference: VALUE_TYPE_REFERENCE,
    type(None): VALUE_TYPE_NULL
}
# SCRIPTDATAVARIABLE
def get_script_data_variable(f, max_offset=None):
    # A SCRIPTDATAVARIABLE is a short-string name followed by a value.
    name = get_string(f)
    log.debug("The name is %s", name)
    value = get_script_data_value(f, max_offset=max_offset)
    log.debug("The value is %r", value)
    return (name, value)
def make_script_data_variable(name, value):
    # Serialize a name/value pair: short-string name + tagged value.
    log.debug("The name is %s", name)
    log.debug("The value is %r", value)
    ret = make_string(name) + make_script_data_value(value)
    return ret
# SCRIPTDATAVALUE
def get_script_data_value(f, max_offset=None):
    # A SCRIPTDATAVALUE is a one-byte type tag followed by the payload,
    # dispatched through as_type_to_getter_and_maker.
    # Raises MalformedFLV on an unknown type tag.
    value_type = get_ui8(f)
    log.debug("The value type is %r", value_type)
    try:
        get_value = as_type_to_getter_and_maker[value_type][0]
    except KeyError:
        # Bug fix: the message was passed as a second constructor argument
        # and never %-formatted into the string.
        raise MalformedFLV("Invalid script data value type: %d" % value_type)
    log.debug("The getter function is %r", get_value)
    value = get_value(f, max_offset=max_offset)
    return value
def make_script_data_value(value):
value_type = type_to_as_type.get(value.__class__, VALUE_TYPE_OBJECT)
log.debug("The value type is %r", value_type)
# KeyError can't happen here, because we always fall back on
# VALUE_TYPE_OBJECT when determining value_type
make_value = as_type_to_getter_and_maker[value_type][1]
log.debug("The maker function is %r", make_value)
type_tag = make_ui |
ep_GS: str
irrep_ES: str
irrep_trans: str
edtm_length: np.ndarray
f_length: float
edtm_velocity: np.ndarray
f_velocity: float
mdtm: np.ndarray
R_length: float
R_velocity: float
spin_mult: str
R_eigvec: Union[core.Matrix, List[core.Matrix]]
L_eigvec: Union[core.Matrix, List[core.Matrix]]
def _solve_loop(wfn,
                ptype,
                solve_function,
                states_per_irrep: List[int],
                maxiter: int,
                restricted: bool = True,
                spin_mult: str = "singlet") -> List[_TDSCFResults]:
    """Converge the requested excited states irrep by irrep and compute
    spectroscopic observables for each converged root.

    References
    ----------
    For the expression of the transition moments in length and velocity gauges:
    - T. B. Pedersen, A. E. Hansen, "Ab Initio Calculation and Display of the
    Rotary Strength Tensor in the Random Phase Approximation. Method and Model
    Studies." Chem. Phys. Lett., 246, 1 (1995)
    - P. J. Lestrange, F. Egidi, X. Li, "The Consequences of Improperly
    Describing Oscillator Strengths beyond the Electric Dipole Approximation."
    J. Chem. Phys., 143, 234103 (2015)
    """
    core.print_out("\n ==> Requested Excitations <==\n\n")
    for nstate, state_sym in zip(states_per_irrep, wfn.molecule().irrep_labels()):
        core.print_out(f" {nstate} {spin_mult} states with {state_sym} symmetry\n")
    # construct the engine
    if restricted:
        if spin_mult == "triplet":
            engine = TDRSCFEngine(wfn, ptype=ptype.lower(), triplet=True)
        else:
            engine = TDRSCFEngine(wfn, ptype=ptype.lower(), triplet=False)
    else:
        engine = TDUSCFEngine(wfn, ptype=ptype.lower())
    # collect results and compute some spectroscopic observables
    mints = core.MintsHelper(wfn.basisset())
    results = []
    irrep_GS = wfn.molecule().irrep_labels()[engine.G_gs]
    for state_sym, nstates in enumerate(states_per_irrep):
        if nstates == 0:
            continue
        irrep_ES = wfn.molecule().irrep_labels()[state_sym]
        core.print_out(f"\n\n ==> Seeking the lowest {nstates} {spin_mult} states with {irrep_ES} symmetry")
        engine.reset_for_state_symm(state_sym)
        # Over-size the guess space (4x the requested roots) to help convergence.
        guess_ = engine.generate_guess(nstates * 4)
        # ret = {"eigvals": ee, "eigvecs": (rvecs, rvecs), "stats": stats} (TDA)
        # ret = {"eigvals": ee, "eigvecs": (rvecs, lvecs), "stats": stats} (RPA)
        ret = solve_function(engine, nstates, guess_, maxiter)
        # check whether all roots converged
        if not ret["stats"][-1]["done"]:
            # raise error
            raise TDSCFConvergenceError(maxiter, wfn, f"singlet excitations in irrep {irrep_ES}", ret["stats"][-1])
        # flatten dictionary: helps with sorting by energy
        # also append state symmetry to return value
        for e, (R, L) in zip(ret["eigvals"], ret["eigvecs"]):
            # XOR of irrep indices gives the transition symmetry (abelian groups).
            irrep_trans = wfn.molecule().irrep_labels()[engine.G_gs ^ state_sym]
            # length-gauge electric dipole transition moment
            edtm_length = engine.residue(R, mints.so_dipole())
            # length-gauge oscillator strength
            f_length = ((2 * e) / 3) * np.sum(edtm_length**2)
            # velocity-gauge electric dipole transition moment
            edtm_velocity = engine.residue(L, mints.so_nabla())
            ## velocity-gauge oscillator strength
            f_velocity = (2 / (3 * e)) * np.sum(edtm_velocity**2)
            # length gauge magnetic dipole transition moment
            # 1/2 is the Bohr magneton in atomic units
            mdtm = 0.5 * engine.residue(L, mints.so_angular_momentum())
            # NOTE The signs for rotatory strengths are opposite WRT the cited paper.
            # This is because Psi4 defines length-gauge dipole integral to include the electron charge (-1.0)
            # length gauge rotatory strength
            R_length = np.einsum("i,i", edtm_length, mdtm)
            # velocity gauge rotatory strength
            R_velocity = -np.einsum("i,i", edtm_velocity, mdtm) / e
            results.append(
                _TDSCFResults(e, irrep_GS, irrep_ES, irrep_trans, edtm_length, f_length, edtm_velocity, f_velocity,
                              mdtm, R_length, R_velocity, spin_mult, R, L))
    return results
def _states_per_irrep(states, nirrep):
"""Distributes states into nirrep"""
spi = [states // nirrep] * nirrep
for i in range(states % nirrep):
spi[i] += 1
return spi
def _validate_tdscf(*, wfn, states, triplets, guess) -> None:
    """Validate TDSCF user options, raising ValidationError on bad input."""
    # ``states`` is either a total count or a per-irrep list.
    if not isinstance(states, (int, list)):
        raise ValidationError("TDSCF: Number of states must be either an integer or a list of integers")
    if isinstance(states, list) and len(states) != wfn.nirrep():
        raise ValidationError(f"TDSCF: States requested ({states}) do not match number of irreps ({wfn.nirrep()})")
    if triplets not in ("NONE", "ALSO", "ONLY"):
        raise ValidationError(
            f"TDSCF: Triplet option ({triplets}) unrecognized. Must be one of 'NONE', 'ALSO' or 'ONLY'")
    restricted = wfn.same_a_b_orbs()
    do_triplets = triplets != "NONE"
    if (not restricted) and do_triplets:
        raise ValidationError("TDSCF: Cannot compute triplets with an unrestricted reference")
    # Restricted Vx kernels are only spin-adapted for singlet excitations.
    if restricted and wfn.functional().needs_xc() and do_triplets:
        raise ValidationError("TDSCF: Restricted Vx kernel only spin-adapted for singlets")
    gga_or_meta = wfn.functional().is_gga() or wfn.functional().is_meta()
    if (not restricted) and gga_or_meta:
        raise ValidationError("TDSCF: Unrestricted Kohn-Sham Vx kernel currently limited to SVWN functional")
    if guess != "DENOMINATORS":
        raise ValidationError(f"TDSCF: Guess type {guess} is not valid")
def tdscf_excitations(wfn,
*,
states: Union[int, List[int]],
triplets: str = "NONE",
tda: bool = False,
r_convergence: float = 1.0e-4,
maxiter: int = 60,
guess: str = "DENOMINATORS",
verbose: int = 1):
"""Compute excitations from a SCF(HF/KS) wavefunction
Parameters
-----------
wfn : :py:class:`psi4.core.Wavefunction`
The reference wavefunction
states : Union[int, List[int]]
How many roots (excited states) should the solver seek to converge?
This function accepts either an integer or a list of integers:
- The list has :math:`n_{\mathrm{irrep}}` elements and is only
acceptable if the system has symmetry. It tells the solver how many
states per irrep to calculate.
- If an integer is given _and_ the system has symmetry, the states
will be distributed among irreps.
For example, ``states = 10`` for a D2h system will compute 10 states
distributed as ``[2, 2, 1, 1, 1, 1, 1, 1]`` among irreps.
triplets : {"NONE", "ONLY", "ALSO"}
Should the solver seek to converge states of triplet symmetry?
Default is `none`: do not seek to converge triplets.
Valid options are:
- `NONE`. Do not seek to converge triplets.
- `ONLY`. Only seek to converge triplets.
- `ALSO`. Seek to converge both triplets and singlets. This choice is
only valid for restricted reference wavefunction.
The number of states given will be apportioned roughly 50-50 between
singlet and triplet states, preferring the former. For example:
given ``state = 5, triplets = "ALSO"``, the solver will seek to
converge 3 states of singlet spin symmetry and 2 of triplet spin
symmetry. When asking for ``states = [3, 3, 3, 3], triplets =
"ALSO"`` states (C2v symmetry), ``[2, 2, 2, 2]`` will be of singlet
spin symmetry and ``[1, 1, 1, 1]``` will be of triplet spin
symmetry.
tda : bool, |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is a doctest example with Numpy arrays.
For more information about doctest, see
https://docs.python.org/3/library/doctest.html (reference)
and
www.fil.univ-lille1.fr/~L1S2API/CoursTP/tp_doctest.html (nice examples in
French).
To run doctest, execute this script (thanks to the
`if __name__ == "__main__": import doctest ; doctest.testmod()` directives)
or execute the following command in a terminal::
python3 -m doctest datapipe/io/images.py
"""
import numpy as np
def example1():
    """A very basic doctest example.
    Notes
    -----
    The numpy module is imported at the end of this file, in the test::
        if __name__ == "__main__":
            import doctest
            import numpy
            doctest.testmod()
    Examples
    --------
    >>> numpy.array([1, 2, 3])
    array([1, 2, 3])
    """
    # The doctest in the docstring is the whole point; nothing to execute.
    pass
def example2():
    """A very basic doctest example to test values returned by this function.
    Examples
    --------
    >>> example2()
    array([1, 2, 3])
    """
    # NOTE(review): 'numpy' (not the module-level alias 'np') is only bound
    # by the __main__ guard, so calling this outside doctest raises NameError.
    return numpy.array([1, 2, 3])
def example3(a):
    """A very basic example.
    Examples
    --------
    >>> a = numpy.array([3, 1, 2])
    >>> example3(a)
    >>> a
    array([1, 2, 3])
    """
    # In-place sort; returns None, hence no output line after the call above.
    a.sort()
def example4(a):
    """Replace *in-place* `NaN` values in `a` by zeros.
    Replace `NaN` ("Not a Number") values in `a` by zeros.
    Parameters
    ----------
    image : array_like
        The image to process. `NaN` values are replaced **in-place** thus this
        function changes the provided object.
    Returns
    -------
    array_like
        Returns a boolean mask array indicating whether values in `a`
        initially contained `NaN` values (`True`) of not (`False`). This array
        is defined by the instruction `np.isnan(a)`.
    Notes
    -----
    `NaN` values are replaced **in-place** in the provided `a`
    parameter.
    Examples
    --------
    >>> a = numpy.array([1., 2., numpy.nan])
    >>> a
    array([ 1., 2., nan])
    >>> example4(a)
    array([False, False, True], dtype=bool)
    Be careful with white space! The following will work...
    >>> a
    array([ 1., 2., 0.])
    but this one would't
    # >>> a
    # array([ 1., 2., 0.])
    As an alternative, the `doctest: +NORMALIZE_WHITESPACE` can be used (see
    https://docs.python.org/3/library/doctest.html#doctest.NORMALIZE_WHITESPACE
    and http://www.fil.univ-lille1.fr/~L1S2API/CoursTP/tp_doctest.html)
    >>> a
    ... # doctest: +NORMALIZE_WHITESPACE
    array([ 1., 2., 0.])
    but the space before the '1' is still required...
    """
    # Boolean-mask assignment writes through to the caller's array.
    mask = np.isnan(a)
    a[mask] = 0
    return mask
if __name__ == "__main__":
    import doctest
    import numpy  # the doctests reference the full module name "numpy"
    doctest.testmod()
|
import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission
class UserModelTestCase(unittest.TestCase):
    """Unit tests for the User model: password hashing, token-based
    confirmation/reset/email-change flows, roles, timestamps, gravatars.

    Each test runs against a fresh in-memory 'testing' app and database
    (created in setUp, dropped in tearDown).
    """
    def setUp(self):
        self.app = create_app('testing')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()
        Role.insert_roles()
    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
    def test_password_setter(self):
        u = User(password='cat')
        self.assertTrue(u.password_hash is not None)
    def test_no_password_getter(self):
        # The plaintext password must not be readable back.
        u = User(password='cat')
        with self.assertRaises(AttributeError):
            u.password
    def test_password_verification(self):
        u = User(password='cat')
        self.assertTrue(u.verify_password('cat'))
        self.assertFalse(u.verify_password('dog'))
    def test_password_salts_are_random(self):
        # Same password, different users -> different hashes (random salt).
        u = User(password='cat')
        u2 = User(password='cat')
        self.assertTrue(u.password_hash != u2.password_hash)
    def test_valid_confirmation_token(self):
        u = User(password='cat')
        db.session.add(u)
        db.session.commit()
        token = u.generate_confirmation_token()
        self.assertTrue(u.confirm(token))
    def test_invalid_confirmation_token(self):
        # A token generated for one user must not confirm another.
        u1 = User(password='cat')
        u2 = User(password='dog')
        db.session.add(u1)
        db.session.add(u2)
        db.session.commit()
        token = u1.generate_confirmation_token()
        self.assertFalse(u2.confirm(token))
    def test_expired_confirmation_token(self):
        u = User(password='cat')
        db.session.add(u)
        db.session.commit()
        # 1-second expiry; sleep past it so confirm() must fail.
        token = u.generate_confirmation_token(1)
        time.sleep(2)
        self.assertFalse(u.confirm(token))
    def test_valid_reset_token(self):
        u = User(password='cat')
        db.session.add(u)
        db.session.commit()
        token = u.generate_reset_token()
        self.assertTrue(u.reset_password(token, 'dog'))
        self.assertTrue(u.verify_password('dog'))
    def test_invalid_reset_token(self):
        u1 = User(password='cat')
        u2 = User(password='dog')
        db.session.add(u1)
        db.session.add(u2)
        db.session.commit()
        token = u1.generate_reset_token()
        self.assertFalse(u2.reset_password(token, 'horse'))
        self.assertTrue(u2.verify_password('dog'))
    def test_valid_email_change_token(self):
        u = User(email='john@example.com', password='cat')
        db.session.add(u)
        db.session.commit()
        token = u.generate_email_change_token('susan@example.org')
        self.assertTrue(u.change_email(token))
        self.assertTrue(u.email == 'susan@example.org')
    def test_invalid_email_change_token(self):
        u1 = User(email='john@example.com', password='cat')
        u2 = User(email='susan@example.org', password='dog')
        db.session.add(u1)
        db.session.add(u2)
        db.session.commit()
        token = u1.generate_email_change_token('david@example.net')
        self.assertFalse(u2.change_email(token))
        self.assertTrue(u2.email == 'susan@example.org')
    def test_duplicate_email_change_token(self):
        # Changing to an address that is already taken must fail.
        u1 = User(email='john@example.com', password='cat')
        u2 = User(email='susan@example.org', password='dog')
        db.session.add(u1)
        db.session.add(u2)
        db.session.commit()
        token = u2.generate_email_change_token('john@example.com')
        self.assertFalse(u2.change_email(token))
        self.assertTrue(u2.email == 'susan@example.org')
    def test_roles_and_permissions(self):
        u = User(email='john@example.com', password='cat')
        self.assertTrue(u.can(Permission.WRITE_ARTICLES))
        self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
    def test_anonymous_user(self):
        u = AnonymousUser()
        self.assertFalse(u.can(Permission.FOLLOW))
    def test_timestamps(self):
        u = User(password='cat')
        db.session.add(u)
        db.session.commit()
        self.assertTrue(
            (datetime.utcnow() - u.member_since).total_seconds() < 3)
        self.assertTrue(
            (datetime.utcnow() - u.last_seen).total_seconds() < 3)
    def test_ping(self):
        u = User(password='cat')
        db.session.add(u)
        db.session.commit()
        time.sleep(2)
        last_seen_before = u.last_seen
        u.ping()
        self.assertTrue(u.last_seen > last_seen_before)
    def test_gravatar(self):
        # The hash below is md5('john@example.com').
        u = User(email='john@example.com', password='cat')
        with self.app.test_request_context('/'):
            gravatar = u.gravatar()
            gravatar_256 = u.gravatar(size=256)
            gravatar_pg = u.gravatar(rating='pg')
            gravatar_retro = u.gravatar(default='retro')
        with self.app.test_request_context('/', base_url='https://example.com'):
            gravatar_ssl = u.gravatar()
        self.assertTrue('http://www.gravatar.com/avatar/' +
                        'd4c74594d841139328695756648b6bd6'in gravatar)
        self.assertTrue('s=256' in gravatar_256)
        self.assertTrue('r=pg' in gravatar_pg)
        self.assertTrue('d=retro' in gravatar_retro)
        self.assertTrue('https://secure.gravatar.com/avatar/' +
                        'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
|
import numpy as np
import torch
from PIL import Image
from argparse import ArgumentParser
from torch.optim import SGD, Adam
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision.transforms import Compose, CenterCrop, Normalize
from torchvision.transforms import ToTensor, ToPILImage
from piwise.dataset import VOC12
from piwise.network import FCN8, FCN16, FCN32, UNet, PSPNet, SegNet
from piwise.criterion import CrossEntropyLoss2d
from piwise.transform import Relabel, ToLabel, Colorize
from piwise.visualize import Dashboard
NUM_CHANNELS = 3
NUM_CLASSES = 22  # 21 labels + the void label remapped to 21 below
color_transform = Colorize()      # label map -> color image for the dashboard
image_transform = ToPILImage()
# Inputs: fixed 256x256 center crop, normalized with the usual ImageNet stats.
input_transform = Compose([
    CenterCrop(256),
    ToTensor(),
    Normalize([.485, .456, .406], [.229, .224, .225]),
])
# Targets: same crop, label tensor; void pixels (255) become class 21.
target_transform = Compose([
    CenterCrop(256),
    ToLabel(),
    Relabel(255, 21),
])
def train(args, model):
    """Train *model* on VOC12 data, periodically plotting, logging and
    checkpointing according to args.steps_plot/steps_loss/steps_save."""
    model.train()
    # Class weights: ignore class 0 (weight zero) in the loss.
    weight = torch.ones(22)
    weight[0] = 0
    loader = DataLoader(VOC12(args.datadir, input_transform, target_transform),
        num_workers=args.num_workers, batch_size=args.batch_size, shuffle=True)
    if args.cuda:
        criterion = CrossEntropyLoss2d(weight.cuda())
    else:
        criterion = CrossEntropyLoss2d(weight)
    # Default optimizer is Adam; FCN/PSP/Seg models override it with SGD
    # using model-specific learning rates.
    optimizer = Adam(model.parameters())
    if args.model.startswith('FCN'):
        optimizer = SGD(model.parameters(), 1e-4, .9, 2e-5)
    if args.model.startswith('PSP'):
        optimizer = SGD(model.parameters(), 1e-2, .9, 1e-4)
    if args.model.startswith('Seg'):
        optimizer = SGD(model.parameters(), 1e-3, .9)
    if args.steps_plot > 0:
        board = Dashboard(args.port)
    for epoch in range(1, args.num_epochs+1):
        epoch_loss = []
        for step, (images, labels) in enumerate(loader):
            if args.cuda:
                images = images.cuda()
                labels = labels.cuda()
            inputs = Variable(images)
            targets = Variable(labels)
            outputs = model(inputs)
            optimizer.zero_grad()
            loss = criterion(outputs, targets[:, 0])
            loss.backward()
            optimizer.step()
            epoch_loss.append(loss.data[0])
            if args.steps_plot > 0 and step % args.steps_plot == 0:
                # Undo the input normalization before display.
                image = inputs[0].cpu().data
                image[0] = image[0] * .229 + .485
                image[1] = image[1] * .224 + .456
                image[2] = image[2] * .225 + .406
                board.image(image,
                    f'input (epoch: {epoch}, step: {step})')
                board.image(color_transform(outputs[0].cpu().max(0)[1].data),
                    f'output (epoch: {epoch}, step: {step})')
                board.image(color_transform(targets[0].cpu().data),
                    f'target (epoch: {epoch}, step: {step})')
            if args.steps_loss > 0 and step % args.steps_loss == 0:
                average = sum(epoch_loss) / len(epoch_loss)
                print(f'loss: {average} (epoch: {epoch}, step: {step})')
            if args.steps_save > 0 and step % args.steps_save == 0:
                filename = f'{args.model}-{epoch:03}-{step:04}.pth'
                torch.save(model.state_dict(), filename)
                # NOTE(review): prints the literal '(unknown)' although
                # `filename` is available here — possibly mangled; verify.
                print(f'save: (unknown) (epoch: {epoch}, step: {step})')
def evaluate(args, model):
    """Segment the image at args.image and save the colorized label map
    to args.label."""
    model.eval()
    image = input_transform(Image.open(args.image))
    # volatile=True: inference only, no autograd bookkeeping (old PyTorch API).
    label = model(Variable(image, volatile=True).unsqueeze(0))
    # argmax over the class dimension, then map class indices to colors.
    label = color_transform(label[0].data.max(0)[1])
    image_transform(label).save(args.label)
def main(args):
    """Build the requested network, optionally load weights, then run the
    selected subcommand (train or eval)."""
    # Dispatch table replaces the previous if-chain, which also contained a
    # duplicated `if args.model == 'fcn32'` branch.
    nets = {
        'fcn8': FCN8,
        'fcn16': FCN16,
        'fcn32': FCN32,
        'unet': UNet,
        'pspnet': PSPNet,
        'segnet': SegNet,
    }
    Net = nets.get(args.model)
    assert Net is not None, f'model {args.model} not available'
    model = Net(NUM_CLASSES)
    if args.cuda:
        model = model.cuda()
    if args.state:
        try:
            model.load_state_dict(torch.load(args.state))
        except AssertionError:
            # Checkpoint saved on GPU but loaded on CPU: remap storages.
            model.load_state_dict(torch.load(args.state,
                map_location=lambda storage, loc: storage))
    if args.mode == 'eval':
        evaluate(args, model)
    elif args.mode == 'train':
        train(args, model)
if __name__ == '__main__':
    parser = ArgumentParser()
    # Global options apply to both subcommands.
    parser.add_argument('--cuda', action='store_true')
    parser.add_argument('--model', required=True)
    parser.add_argument('--state')
    # Two subcommands: 'eval' (single image) and 'train' (full training run).
    subparsers = parser.add_subparsers(dest='mode')
    subparsers.required = True
    parser_eval = subparsers.add_parser('eval')
    parser_eval.add_argument('image')
    parser_eval.add_argument('label')
    parser_train = subparsers.add_parser('train')
    parser_train.add_argument('--port', type=int, default=80)
    parser_train.add_argument('--datadir', required=True)
    parser_train.add_argument('--num-epochs', type=int, default=32)
    parser_train.add_argument('--num-workers', type=int, default=4)
    parser_train.add_argument('--batch-size', type=int, default=1)
    parser_train.add_argument('--steps-loss', type=int, default=50)
    parser_train.add_argument('--steps-plot', type=int, default=0)
    parser_train.add_argument('--steps-save', type=int, default=500)
    main(parser.parse_args())
# read_hadamard_file.py
# Reads data from a text file to create a 3D
# version of a given Hadamard Matrix.
# Created by Rick Henderson
# Created on June 4, 2015
# Completed June 5, 2015
# Note: A "Hadamard File" is a text file containing rows
# rows of + and - where the + indicates a 1 or a cube
# and the - represents a 0 or a space.
import bpy
# Set the order (size) of the matrix
nOrder = 12  # NOTE(review): not referenced below; the file contents drive the loop
# You can also change these values if you want to alter the offset between the cubes
xOffset = 1.0
yOffset = 1.0
zOffset = 0 # You would have to alter the code more if you want a 3D array of cubes
xpos = 0  # NOTE(review): xpos/ypos appear unused in this script
ypos = 0
char_number = 0
# Open the file to read from
# Modified technique from DiveIntoPython3.net/files.html
line_number = 0
with open('c:/had12.txt', encoding='utf-8') as a_file:
    for each_row in a_file:
        line_number += 1
        # Just print the current row to the console as a test
        print(each_row.rstrip())
        for a_char in each_row:
            # The trailing newline also bumps char_number, but it never
            # matches '+' so no cube is placed for it.
            char_number += 1
            # If the current character is +, generate a cube then position it
            if a_char == '+':
                bpy.ops.mesh.primitive_cube_add(radius=0.5)
                bpy.context.object.location[0] = line_number * xOffset
                bpy.context.object.location[1] = char_number * yOffset

        # Now an entire row has been read, so reset char_number to 0
        char_number = 0
# Program Ends
|
from numpy.distutils.core import setup, Extension
#f | rom setuptools import setup, Extension
setup(
name = "Infer", version = "1.0",
description='Python version of MCMC, plus other inference codes under development',
author='Neale Gibson',
author_email='ngibs | on@eso.org',
packages=['Infer'],
package_dir={'Infer':'src'},
#and extension package for solving toeplitz matrices...
ext_modules = [
Extension("Infer.LevinsonTrenchZoharSolve",sources=["src/LevinsonTrenchZoharSolve.c"],),
]
)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @ | author: Emanuel Cino < | ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import communication_config
from . import communication_job
from . import communication_attachment
from . import res_partner
from . import email
from . import crm_phonecall
from . import ir_attachment
from . import mail_template
from . import communication_dashboard
from . import report_with_omr |
# -*- coding: utf-8 -*-
###############################################################################
#
# ListPlaylistsByID
# Returns a collection of playlists that match the provided IDs.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListPlaylistsByID(Choreography):
    # Generated Temboo binding for /Library/YouTube/Playlists/ListPlaylistsByID.
    # Execution is driven by the Choreography base class; this subclass only
    # wires up the Library path and the concrete InputSet/ResultSet/Execution
    # types.

    def __init__(self, temboo_session):
        """
        Create a new instance of the ListPlaylistsByID Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(ListPlaylistsByID, self).__init__(temboo_session, '/Library/YouTube/Playlists/ListPlaylistsByID')

    def new_input_set(self):
        # Factory for the typed input container used when executing this Choreo.
        return ListPlaylistsByIDInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result in the typed ResultSet.
        return ListPlaylistsByIDResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Handle used to track an in-flight execution of this Choreo.
        return ListPlaylistsByIDChoreographyExecution(session, exec_id, path)
class ListPlaylistsByIDInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the ListPlaylistsByID
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    Authentication is either simple API access (set_APIKey) or OAuth
    (set_AccessToken, or set_ClientID + set_ClientSecret + set_RefreshToken
    to mint a new access token). Only set_PlaylistID is required.
    """
    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((optional, string) The API Key provided by Google for simple API access when you do not need to access user data.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('APIKey', value)
    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required for OAuth authentication unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('AccessToken', value)
    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required for OAuth authentication unless providing a valid AccessToken.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('ClientID', value)
    def set_ClientSecret(self, value):
        """
        Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required for OAuth authentication unless providing a valid AccessToken.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('ClientSecret', value)
    def set_Fields(self, value):
        """
        Set the value of the Fields input for this Choreo. ((optional, string) Allows you to specify a subset of fields to include in the response using an xpath-like syntax (i.e. items/snippet/title).)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('Fields', value)
    def set_MaxResults(self, value):
        """
        Set the value of the MaxResults input for this Choreo. ((optional, integer) The maximum number of results to return.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('MaxResults', value)
    def set_PageToken(self, value):
        """
        Set the value of the PageToken input for this Choreo. ((optional, string) The "nextPageToken" found in the response which is used to page through results.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('PageToken', value)
    def set_Part(self, value):
        """
        Set the value of the Part input for this Choreo. ((optional, string) Specifies a comma-separated list of playlist resource properties that the API response will include. Part names that you can pass are: id, snippet, and status.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('Part', value)
    def set_PlaylistID(self, value):
        """
        Set the value of the PlaylistID input for this Choreo. ((required, string) A comma-separated list of the YouTube playlist ID(s) for the resource(s) that are being retrieved.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('PlaylistID', value)
    def set_RefreshToken(self, value):
        """
        Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required for OAuth authentication unless providing a valid AccessToken.)
        """
        super(ListPlaylistsByIDInputSet, self)._set_input('RefreshToken', value)
class ListPlaylistsByIDResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the ListPlaylistsByID Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, json_string):
        """Deserialize *json_string* (a JSON document) and return the result.

        The parameter was renamed from ``str``, which shadowed the builtin;
        this method is invoked positionally, so the rename is call-compatible.
        """
        return json.loads(json_string)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from YouTube.)
        """
        return self._output.get('Response', None)

    def get_NewAccessToken(self):
        """
        Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
        """
        return self._output.get('NewAccessToken', None)
class ListPlaylistsByIDChoreographyExecution(ChoreographyExecution):
    # Tracks an asynchronous execution of the ListPlaylistsByID Choreo and
    # wraps its eventual response in the typed ResultSet.
    def _make_result_set(self, response, path):
        return ListPlaylistsByIDResultSet(response, path)
|
#!/usr/bin/env python
# "manhole" entry point, friendlier ipython startup to remote container
__author__ = 'Dave Foster <dfoster@asascience.com>'

# NOTE: this script is Python 2 only (print >> statement syntax).

def main():
    # Attach an IPython console to a running container's "manhole" kernel.
    # A manhole file is a JSON kernel connection file named manhole-<pid>.json.
    import sys, os, re, errno, json, socket
    from pkg_resources import load_entry_point

    r = re.compile('manhole-(\d+).json')

    if len(sys.argv) == 2:
        # Explicit manhole file given on the command line.
        mh_file = sys.argv[1]
    else:
        # find manhole file in local dir
        mh_files = [f for f in os.listdir(os.getcwd()) if r.search(f) is not None]
        if len(mh_files) == 0:
            print >>sys.stderr, "No manhole files detected, specify it manually"
            sys.exit(1)
        elif len(mh_files) > 1:
            def legal_manhole_file(f):
                """
                Helper method to check if a process exists and is likely a manhole-able container.
                @return True/False if is a likely container.
                """
                mh_pid = int(r.search(f).group(1))
                try:
                    # getpgid raises ESRCH if no process has that pid.
                    os.getpgid(mh_pid)
                except OSError as e:
                    if e.errno == errno.ESRCH:
                        return False
                    raise # unexpected, just re-raise

                # the pid seems legal, now check status of sockets - the pid may be reused
                with open(f) as ff:
                    mh_doc = json.load(ff)

                # If we can bind the kernel's shell port ourselves, nothing is
                # listening there, so the manhole is not actually live.
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                try:
                    s.bind((mh_doc['ip'], mh_doc['shell_port']))
                except socket.error as e:
                    if e.errno == errno.EADDRINUSE:
                        return True
                    raise # unexpected, re-raise
                finally:
                    s.close()

                return False

            # try to see if these are active processes
            legal_mh_files = filter(legal_manhole_file, mh_files)
            if len(legal_mh_files) > 1:
                print >>sys.stderr, "Multiple legal manhole files detected, specify it manually:", legal_mh_files
                sys.exit(1)

            # we found a single legal file, use it
            # NOTE(review): if *no* file survived the filter this raises
            # IndexError instead of printing a friendly error - confirm intended.
            mh_file = legal_mh_files[0]

            # perform cleanup of stale files
            dead_mh_files = [x for x in mh_files if x not in legal_mh_files]
            for df in dead_mh_files:
                print >>sys.stderr, "Cleaning up stale manhole file", df
                os.unlink(df)
        else:
            mh_file = mh_files[0]

    if not os.access(mh_file, os.R_OK):
        print >>sys.stderr, "Manhole file (%s) does not exist" % mh_file
        sys.exit(1)

    # pid is embedded in the file name; used only for the banner below.
    mhpid = r.search(mh_file).group(1)

    # configure branding
    manhole_logo = """
 __ __ _______ __ _ __ __ _______ ___ _______
| |_| || _ || | | || | | || || | | |
| || |_| || |_| || |_| || _ || | | ___|
| || || || || | | || | | |___
| || || _ || || |_| || |___ | ___|
| ||_|| || _ || | | || _ || || || |___
|_| |_||__| |__||_| |__||__| |__||_______||_______||_______|
"""

    # manipulate argv!  IPython's console entry point reads sys.argv directly.
    sys.argv = [sys.argv[0], "console", "--existing", mh_file,
                "--PromptManager.in_template=>o> ",
                "--PromptManager.in2_template=... ",
                "--PromptManager.out_template=--> ",
                "--TerminalInteractiveShell.banner1=%s" % manhole_logo,
                "--TerminalInteractiveShell.banner2=SciON Container Manhole, connected to %s\n(press Ctrl-D to detach, quit() to exit container)\n" % mhpid]

    # HACK: Mock out client shutdown to avoid default shutdown on Ctrl-D
    from mock import patch
    with patch("IPython.kernel.client.KernelClient.shutdown"):
        ipy_entry = load_entry_point('ipython', 'console_scripts', 'ipython')()
    sys.exit(ipy_entry)

if __name__ == '__main__':
    main()
|
Time()
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, 'Invalid')
  def testFormatDateTimeWithoutDynamicTime(self):
    """Tests the _FormatDateTime function without dynamic time.

    With dynamic time disabled, invalid or missing date/time values are
    rendered as a zeroed ISO 8601 string instead of 'Invalid'.
    """
    output_mediator = self._CreateOutputMediator(dynamic_time=False)
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # Test with event.date_time
    date_time_string = test_helper._FormatDateTime(
        event, event_data, event_data_stream)
    self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
    # Changing the output time zone shifts the formatted value accordingly.
    output_mediator.SetTimezone('Europe/Amsterdam')
    date_time_string = test_helper._FormatDateTime(
        event, event_data, event_data_stream)
    self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00')
    output_mediator.SetTimezone('UTC')
    # An invalid date_time is formatted as all zeros (not 'Invalid').
    event.date_time = dfdatetime_semantic_time.InvalidTime()
    date_time_string = test_helper._FormatDateTime(
        event, event_data, event_data_stream)
    self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
    # Test with event.timestamp
    event.date_time = None
    date_time_string = test_helper._FormatDateTime(
        event, event_data, event_data_stream)
    self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
    # A 0 or minimal (sentinel) timestamp is also rendered as all zeros.
    event.timestamp = 0
    date_time_string = test_helper._FormatDateTime(
        event, event_data, event_data_stream)
    self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
    event.timestamp = -9223372036854775808
    date_time_string = test_helper._FormatDateTime(
        event, event_data, event_data_stream)
    self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
  def testFormatDisplayName(self):
    """Tests the _FormatDisplayName function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # Expected value combines the path specification type with the path.
    display_name_string = test_helper._FormatDisplayName(
        event, event_data, event_data_stream)
    self.assertEqual(display_name_string, 'FAKE:log/syslog.1')
  def testFormatFilename(self):
    """Tests the _FormatFilename function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # Unlike the display name, the filename omits the type indicator prefix.
    filename_string = test_helper._FormatFilename(
        event, event_data, event_data_stream)
    self.assertEqual(filename_string, 'log/syslog.1')
  def testFormatHostname(self):
    """Tests the _FormatHostname function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # The hostname of the first test event is 'ubuntu'.
    hostname_string = test_helper._FormatHostname(
        event, event_data, event_data_stream)
    self.assertEqual(hostname_string, 'ubuntu')
  def testFormatInode(self):
    """Tests the _FormatInode function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # The test event has no inode value; this is rendered as '-'.
    inode_string = test_helper._FormatInode(
        event, event_data, event_data_stream)
    self.assertEqual(inode_string, '-')
  def testFormatMACB(self):
    """Tests the _FormatMACB function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # '..C.' is the MACB representation of a metadata-change timestamp.
    macb_string = test_helper._FormatMACB(event, event_data, event_data_stream)
    self.assertEqual(macb_string, '..C.')
  def testFormatMessage(self):
    """Tests the _FormatMessage function."""
    output_mediator = self._CreateOutputMediator()
    # Message formatting requires the message formatter definitions from the
    # test data directory to be loaded first.
    formatters_directory_path = self._GetTestFilePath(['formatters'])
    output_mediator.ReadMessageFormattersFromDirectory(
        formatters_directory_path)
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    message_string = test_helper._FormatMessage(
        event, event_data, event_data_stream)
    expected_message_string = (
        'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed '
        'for user root)')
    self.assertEqual(message_string, expected_message_string)
  def testFormatMessageShort(self):
    """Tests the _FormatMessageShort function.

    For this event the short message is identical to the long message.
    """
    output_mediator = self._CreateOutputMediator()
    formatters_directory_path = self._GetTestFilePath(['formatters'])
    output_mediator.ReadMessageFormattersFromDirectory(
        formatters_directory_path)
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    message_short_string = test_helper._FormatMessageShort(
        event, event_data, event_data_stream)
    expected_message_short_string = (
        'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed '
        'for user root)')
    self.assertEqual(message_short_string, expected_message_short_string)
  def testFormatSource(self):
    """Tests the _FormatSource function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # Long form of the event source.
    source_string = test_helper._FormatSource(
        event, event_data, event_data_stream)
    self.assertEqual(source_string, 'Test log file')
  def testFormatSourceShort(self):
    """Tests the _FormatSourceShort function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    event, event_data, event_data_stream = (
        containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
    # Short (abbreviated) form of the event source.
    source_short_string = test_helper._FormatSourceShort(
        event, event_data, event_data_stream)
    self.assertEqual(source_short_string, 'FILE')
  def testFormatTag(self):
    """Tests the _FormatTag function."""
    output_mediator = self._CreateOutputMediator()
    test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
    # A missing event tag is rendered as '-'.
    tag_string = test_helper._FormatTag(None)
    self.assertEqual(tag_string, '-')
    # Multiple labels are joined with a single space.
    event_tag = events.EventTag()
    event_tag.AddLabel('one')
    event_tag.AddLabel('two')
    tag_string = test_helper._FormatTag(event_tag)
    self.assertEqual(tag_string, 'one two')
def testFormatTime(self):
"""Tests the _FormatTime function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
# Test with event.date_time
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '18:17:01')
output_mediator.SetTimezone('Europe/Amsterdam')
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '20:17:01')
output_mediator.SetTimezone('UTC')
# Test with event.timestamp
event.date_time = None
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '18:17:01')
event.timestamp = 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY | KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import url
from openstack_dashboard.dashboards.admin.hypervisors.compute import views
# Per-compute-host admin actions; <compute_host> is captured from the URL and
# passed to the view as a keyword argument.
urlpatterns = [
    url(r'^(?P<compute_host>[^/]+)/evacuate_host$',
        views.EvacuateHostView.as_view(),
        name='evacuate_host'),
    url(r'^(?P<compute_host>[^/]+)/disable_service$',
        views.DisableServiceView.as_view(),
        name='disable_service'),
    url(r'^(?P<compute_host>[^/]+)/migrate_host$',
        views.MigrateHostView.as_view(),
        name='migrate_host'),
]
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# | #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have rec | eived a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import res_partner
import oehealth_annotation
import oehealth_professional_category
import oehealth_professional
import oehealth_tag
import oehealth_event_participant
import oehealth_specialty
|
tory.')
transform_parser.add_argument('--output', required=True,
help='path of output directory.')
transform_parser.add_argument(
'--prefix', required=True, metavar='NAME',
help='The prefix of the output file name. The output files will be like '
'NAME_00000_of_00005.tar.gz')
transform_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run transform in cloud or local.')
transform_parser.add_argument('--shuffle', action='store_true', default=False,
help='whether to shuffle the training data in output.')
transform_parser.add_argument('--batch_size', type=int, default=100,
help='number of instances in a batch to process once. '
'Larger batch is more efficient but may consume more memory.')
transform_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
transform_parser.add_cell_argument(
'training_data',
required=True,
help=textwrap.dedent("""\
Training data. A dict containing one of the following:
csv (example: "csv: file.csv"), or
bigquery_table (example: "bigquery_table: project.dataset.table"), or
bigquery_sql (example: "bigquery_sql: select * from table where num1 > 1.0")"""))
transform_parser.add_cell_argument(
'cloud_config',
help=textwrap.dedent("""\
A dictionary of cloud config. All of them are optional.
num_workers: Dataflow number of workers. If not set, DataFlow
service will determine the number.
worker_machine_type: a machine name from
https://cloud.google.com/compute/docs/machine-types
If not given, the service uses the default machine type.
project_id: id of the project to use for DataFlow service. If not set,
Datalab's default project (set by %%datalab project set) is used.
job_name: Unique name for a Dataflow job to use. If not set, a
random name will be used."""))
transform_parser.set_defaults(func=_transform)
train_parser = parser.subcommand(
'train',
formatter_class=argparse.RawTextHelpFormatter,
help='Train a model.',
epilog=textwrap.dedent("""\
Example usage:
%%ml train --cloud
analysis: path/to/analysis_output
output: path/to/dir
training_data:
transformed: path/to/transformed/train
evaluation_data:
tranaformed: path/to/transformed/eval
model_args:
model: linear_regression
cloud_config:
region: us-central1"""))
train_parser.add_argument('--analysis', required=True,
help='path of analysis output directory.')
train_parser.add_argument('--output', required=True,
help='path of trained model directory.')
train_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run training in cloud or local.')
train_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
train_parser.add_cell_argument(
'training_data',
required=True,
help=textwrap.dedent("""\
Training data. It is either raw csv file pattern, or transformed file pattern.
For example:
"training_data:
csv: /path/to/csv/mycsv*.csv"
or
"training_data:
transformed: /path/to/transformed-*" """))
train_parser.add_cell_argument('evaluation_data', required=True,
help='same as training_data.')
package_model_help = subprocess.Popen(
['python', '-m', 'trainer.task', '--datalab-help'],
cwd=MLTOOLBOX_CODE_PATH,
stdout=subprocess.PIPE).communicate()[0]
package_model_help = ('model_args: a dictionary of model specific args, including:\n\n' +
package_model_help.decode())
train_parser.add_cell_argument('model_args', help=package_model_help)
train_parser.add_cell_argument(
'cloud_config',
help=textwrap.dedent("""\
A dictionary of cloud training config, including:
job_id: the name of the job. If not provided, a default job name is created.
region: see {url}
runtime_version: see "region". Must be a string like '1.2'.
scale_tier: see "region".""".format(
url='https://cloud.google.com/sdk/gcloud/reference/ml-engine/jobs/submit/training')))
train_parser.set_defaults(func=_train)
predict_parser = parser.subcommand(
'predict',
formatter_class=argparse.RawTextHelpFormatter,
help='Predict with local or deployed models. (Good for small datasets).',
epilog=textwrap.dedent("""\
Example usage:
%%ml predict
headers: key,num
model: path/to/model
prediction_data:
- key1,value1
- key2,value2
Or, in another cell, define a list of dict:
my_data = [{'key': 1, 'num': 1.2}, {'key': 2, 'num': 2.8}]
Then:
%%ml predict
headers: key,num
model: path/to/model
prediction_data: $my_data"""))
predict_parser.add_argument('--model', required=True,
help='The model path if not --cloud, or the id in '
'the form of model.version if --cloud.')
predict_parser.add_argument('--headers',
help='Online models only. ' +
'The c | omma seperated headers of the prediction data. ' +
'Order must match the training order.')
predict_parser.add_argument('--image_columns',
help='Online models only. ' +
'Comma seperated headers of image URL columns. ' +
'Required if prediction data contains image URL columns.')
predict_parser.add_argument('--no_show_image', action='store_true', default=F | alse,
help='If not set, add a column of images in output.')
predict_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run prediction in cloud or local.')
predict_parser.add_cell_argument(
'prediction_data',
required=True,
help=textwrap.dedent("""\
Prediction data can be
1) CSV lines in the input cell in yaml format or
2) a local variable which is one of
a) list of dict
b) list of strings of csv lines
c) a Pandas DataFrame"""))
predict_parser.set_defaults(func=_predict)
batch_predict_parser = parser.subcommand(
'batch_predict',
formatter_class=argparse.RawTextHelpFormatter,
help='Batch prediction with local or deployed models. (Good for large datasets)',
epilog=textwrap.dedent("""\
Example usage:
%%ml batch_predict [--cloud]
model: path/to/model
output: path/to/output
format: csv
prediction_data:
csv: path/to/file_pattern"""))
batch_predict_parser.add_argument('--model', required=True,
help='The model path if not --cloud, or the id in '
'the form of model.version if --cloud.')
batch_predict_parser.add_argument('--output', required=True,
help='The path of output directory with prediction results. '
'If --cloud, it h |
from flask_admin import expose
from listenbrainz.webserver.a | dmin import AdminIndexView
class HomeView(AdminIndexView):
    # Landing page of the admin interface.
    @expose('/')
    def index(self):
        # Render the static dashboard template; no extra context is needed.
        return self.render('admin/home.html')
|
"""
A p | ythonic interface to the Zabbix API.
"""
from .api import Api, ApiException
from .objects.host import Host
from .objects.hostgroup import HostGroup
from .objects.item import Item
from .objects.trigger import Trigger
from .objects.itservice import ItServi | ce
|
from decimal import Decimal
class Integer:
    """Wrap a value coerced to ``int`` (stored in ``val``).

    ``Integer()`` with no argument raises TypeError because ``int(None)``
    is invalid; the ``val=None`` default is kept for signature compatibility.
    """

    def __init__(self, val=None):
        self.val = int(val)

    def __repr__(self):
        # Bug fix: __repr__ must return a str; returning the raw int made
        # repr(Integer(...)) raise TypeError.
        return str(self.val)
class Text:
    """Wrap a value coerced to ``str`` (stored in ``val``)."""

    def __init__(self, val=None):
        # Coerce eagerly so that .val is always a plain string.
        self.val = str(val)

    def __repr__(self):
        # The wrapped value is already a str, so it satisfies the repr protocol.
        return self.val
class Bool:
    """Wrap a value coerced to ``bool`` (stored in ``val``).

    Beware string truthiness: ``Bool('False')`` stores True because any
    non-empty string is truthy.
    """

    def __init__(self, val=None):
        self.val = bool(val)

    def __repr__(self):
        # Bug fix: __repr__ must return a str; returning the raw bool made
        # repr(Bool(...)) raise TypeError.
        return str(self.val)
class Real:
    """Wrap a value coerced to ``decimal.Decimal`` (stored in ``val``)."""

    def __init__(self, val=None):
        # Decimal(None) raises TypeError, matching the other wrappers'
        # behavior for a missing value.
        self.val = Decimal(val)

    def __repr__(self):
        # Bug fix: __repr__ must return a str; returning the raw Decimal made
        # repr(Real(...)) raise TypeError.
        return str(self.val)
class Date:
    # Placeholder type: date support is not implemented yet.
    pass
|
import codecs
import pathlib
import re
import sys
from distutils.command.build_ext import build_ext
from distutils.errors import (CCompilerError, DistutilsExecError,
DistutilsPlatformError)
from setuptools import Extension, setup
# Fail fast with a clear message on unsupported interpreters.
if sys.version_info < (3, 5, 3):
    raise RuntimeError("aiohttp 3.x requires Python 3.5.3+")

# Cython is optional: build from the .pyx sources when it is available,
# otherwise fall back to the pre-generated .c files shipped with the sdist.
try:
    from Cython.Build import cythonize
    USE_CYTHON = True
except ImportError:
    USE_CYTHON = False

ext = '.pyx' if USE_CYTHON else '.c'

# Optional C accelerator modules; building them is allowed to fail
# (see ve_build_ext below), in which case the pure-Python code is used.
extensions = [Extension('aiohttp._websocket', ['aiohttp/_websocket' + ext]),
              Extension('aiohttp._http_parser',
                        ['aiohttp/_http_parser' + ext,
                         'vendor/http-parser/http_parser.c',
                         'aiohttp/_find_header.c'],
                        define_macros=[('HTTP_PARSER_STRICT', 0)],
                        ),
              Extension('aiohttp._frozenlist',
                        ['aiohttp/_frozenlist' + ext]),
              Extension('aiohttp._helpers',
                        ['aiohttp/_helpers' + ext]),
              Extension('aiohttp._http_writer',
                        ['aiohttp/_http_writer' + ext])]

if USE_CYTHON:
    extensions = cythonize(extensions)
class BuildFailed(Exception):
    # Raised by ve_build_ext when the optional C extensions cannot be
    # compiled; setup() then retries without them (see the bottom of the file).
    pass
class ve_build_ext(build_ext):
    # This class allows C extension building to fail.

    def run(self):
        try:
            build_ext.run(self)
        except (DistutilsPlatformError, FileNotFoundError):
            # No usable compiler/toolchain: signal a recoverable failure.
            raise BuildFailed()

    def build_extension(self, ext):
        try:
            build_ext.build_extension(self, ext)
        except (DistutilsExecError,
                DistutilsPlatformError, ValueError):
            # Compilation of this specific extension failed.
            raise BuildFailed()
here = pathlib.Path(__file__).parent

# Extract __version__ from aiohttp/__init__.py textually, without importing
# the package (importing could require the not-yet-built C extensions).
txt = (here / 'aiohttp' / '__init__.py').read_text('utf-8')
try:
    version = re.findall(r"^__version__ = '([^']+)'\r?$",
                         txt, re.M)[0]
except IndexError:
    raise RuntimeError('Unable to determine version.')

install_requires = [
    'attrs>=17.3.0',
    'chardet>=2.0,<4.0',
    'multidict>=4.0,<5.0',
    'async_timeout>=3.0,<4.0',
    'yarl>=1.0,<2.0',
    'idna-ssl>=1.0; python_version<"3.7"',
]
def read(f):
    """Return the stripped UTF-8 contents of *f*, resolved next to setup.py."""
    path = here / f
    return path.read_text('utf-8').strip()
# Only pull in pytest-runner when a test command is actually being run.
NEEDS_PYTEST = {'pytest', 'test'}.intersection(sys.argv)
pytest_runner = ['pytest-runner'] if NEEDS_PYTEST else []

tests_require = ['pytest', 'gunicorn',
                 'pytest-timeout', 'async-generator']

# Keep the setup() arguments in a dict so a failed C build can retry below
# with the extension-related entries removed.
args = dict(
    name='aiohttp',
    version=version,
    description='Async http client/server framework (asyncio)',
    long_description='\n\n'.join((read('README.rst'), read('CHANGES.rst'))),
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Development Status :: 5 - Production/Stable',
        'Operating System :: POSIX',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Topic :: Internet :: WWW/HTTP',
        'Framework :: AsyncIO',
    ],
    author='Nikolay Kim',
    author_email='fafhrd91@gmail.com',
    maintainer=', '.join(('Nikolay Kim <fafhrd91@gmail.com>',
                          'Andrew Svetlov <andrew.svetlov@gmail.com>')),
    maintainer_email='aio-libs@googlegroups.com',
    url='https://github.com/aio-libs/aiohttp',
    project_urls={
        'Chat: Gitter': 'https://gitter.im/aio-libs/Lobby',
        'CI: AppVeyor': 'https://ci.appveyor.com/project/aio-libs/aiohttp',
        'CI: Circle': 'https://circleci.com/gh/aio-libs/aiohttp',
        'CI: Shippable': 'https://app.shippable.com/github/aio-libs/aiohttp',
        'CI: Travis': 'https://travis-ci.com/aio-libs/aiohttp',
        'Coverage: codecov': 'https://codecov.io/github/aio-libs/aiohttp',
        'Docs: RTD': 'https://docs.aiohttp.org',
        'GitHub: issues': 'https://github.com/aio-libs/aiohttp/issues',
        'GitHub: repo': 'https://github.com/aio-libs/aiohttp',
    },
    license='Apache 2',
    packages=['aiohttp'],
    python_requires='>=3.5.3',
    install_requires=install_requires,
    tests_require=tests_require,
    setup_requires=pytest_runner,
    include_package_data=True,
    ext_modules=extensions,
    cmdclass=dict(build_ext=ve_build_ext),
)

# First attempt builds the C accelerators; on BuildFailed fall back to the
# pure-Python implementation so installation still succeeds.
try:
    setup(**args)
except BuildFailed:
    print("************************************************************")
    print("Cannot compile C accelerator module, use pure python version")
    print("************************************************************")
    del args['ext_modules']
    del args['cmdclass']
    setup(**args)
|
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from edc_registration.models import RegisteredSubject
from .base_maternal_clinical_measurements import BaseMaternalClinicalMeasurements
class MaternalClinicalMeasurementsOne(BaseMaternalClinicalMeasurements):
    # Mother's height in centimeters; the 134-195 cm validator bounds guard
    # against data-entry errors.
    height = models.DecimalField(
        max_digits=5,
        decimal_places=2,
        verbose_name="Mother's height? ",
        validators=[MinValueValidator(134), MaxValueValidator(195), ],
        help_text="Measured in Centimeters (cm)")

    class Meta:
        app_label = 'td_maternal'
        verbose_name = 'Maternal Clinical Measurements One'
        verbose_name_plural = 'Maternal Clinical Measurements One'
#!/usr/bin/env python3
import os
import sys
# Make the sibling "helper" directory importable regardless of the working
# directory this test is launched from.
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
    # Attach the named basis set to the system under *bslabel*.
    # NOTE(review): relies on the module-level `psr` name provided by the
    # star imports above - confirm it resolves in this environment.
    return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
    """Return True when *EgyIn* matches the reference CP energy within 1e-5."""
    reference = -224.89287653924677
    return abs(EgyIn - reference) < 0.00001
def CompareGrad(GradIn):
    """Return True when *GradIn* matches the reference CP gradient element-wise.

    Bug fix: the original compared the signed difference
    (CorrectGrad[i] - GradIn[i] < 0.00001), which accepted arbitrarily wrong
    values whenever the difference was negative. Compare the absolute
    difference instead.
    """
    CorrectGrad=[
    -0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
    0.018237358511, -0.002547005771, -0.030731839919000005,
    -0.02344281975, -0.0062568701740000005, -0.025360880303,
    -0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
    0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
    0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
    0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
    -0.026869925129, -0.022975320699000004, 0.005627050168,
    0.004610985953999999, -0.011942635934, 0.032991124551000006]
    for i in range(len(CorrectGrad)):
        # Explicit indexing (not zip) so a too-short GradIn still raises
        # IndexError, as the original loop did.
        if abs(CorrectGrad[i] - GradIn[i]) >= 0.00001:
            return False
    return True
def Run(mm):
    """Run the Boys-Bernardi counterpoise (CP) test using module manager *mm*.

    Builds a water trimer, runs PSI4 SCF through the PSR_CP module, and
    checks energies/gradients via Deriv(), Energy() and Gradient().
    """
    # Fix: traceback is used in the except handler but this module only
    # imports os/sys plus wildcard imports, which may not provide it.
    import traceback
    try:
        tester = psr.testing.Tester("Testing Boys and Bernardi CP")
        tester.print_header()
        pulsar_psi4.pulsar_psi4_setup(mm)
        LoadDefaultModules(mm)
        mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
        mm.change_option("PSR_CP","METHOD","PSI4_SCF")
        mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
        mm.change_option("PSI4_SCF","PRINT",0)
        mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
        mol = ApplyBasis(mol,"sto-3g","sto-3g")
        wfn=psr.datastore.Wavefunction()
        wfn.system=mol
        MyMod=mm.get_module("PSR_CP",0)
        # Deriv(0) returns a tuple-like energy container; Energy() a scalar.
        NewWfn,Egy=MyMod.deriv(0,wfn)
        tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
        NewWfn,Egy=MyMod.energy(wfn)
        tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
        # Egy is reused to hold the gradient for the derivative checks.
        NewWfn,Egy=MyMod.deriv(1,wfn)
        tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
        NewWfn,Egy=MyMod.gradient(wfn)
        tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
        tester.print_results()
    except Exception as e:
        psr.output.Output("Caught exception in main handler\n")
        traceback.print_exc()
# Drive the test inside a scoped ModuleAdministrator so loaded modules are
# torn down before the framework is finalized.
with psr.ModuleAdministrator() as mm:
    Run(mm)
psr.finalize()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Lic | ense is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import Axon
from Kamaelia.Chassis.ConnectedServ | er import ServerCore
class RequestResponseComponent(Axon.Component.component):
    """Axon component that echoes every "inbox" message to "outbox".

    Runs until a message arrives on "control", then forwards that
    shutdown message on "signal".
    """
    def main(self):
        # Generator main loop; the Axon scheduler resumes us at each yield.
        while not self.dataReady("control"):
            for msg in self.Inbox("inbox"):
                self.send(msg, "outbox")
            self.pause()  # sleep until new data arrives
            yield 1
        # Propagate the shutdown/control message downstream.
        self.send(self.recv("control"), "signal")
# Serve the echo protocol on TCP port 1599; run() blocks forever.
ServerCore(protocol=RequestResponseComponent,
           port=1599).run()
|
from models.sampler import DynamicBlockGibbsSampler
from models.distribution import DynamicBernoulli
from models.optimizer import DynamicSGD
from utils.utils import prepare_frames
from scipy import io as matio
from data.gwtaylor.path import *
import ipdb
import numpy as np
# Training hyper-parameters (Python 2 script: note the print statement below).
SIZE_BATCH = 10
EPOCHS = 100
SIZE_HIDDEN = 50
SIZE_VISIBLE = 150
# CRBM Constants
M_LAG_VISIBLE = 2
N_LAG_HIDDEN = 2
# Frames per training window: the current frame plus the longest lag.
SIZE_LAG = max(M_LAG_VISIBLE, N_LAG_HIDDEN)+1
# load and prepare dataset from .mat
mat = matio.loadmat(MOCAP_SAMPLE)
dataset = mat['batchdatabinary']
# generate batches
batch_idx_list = prepare_frames(len(dataset), SIZE_LAG, SIZE_BATCH)
# load distribution
bernoulli = DynamicBernoulli(SIZE_VISIBLE, SIZE_HIDDEN, m_lag_visible=M_LAG_VISIBLE, n_lag_hidden=N_LAG_HIDDEN)
gibbs_sampler = DynamicBlockGibbsSampler(bernoulli, sampling_steps=1)
sgd = DynamicSGD(bernoulli)
for epoch in range(EPOCHS):
    error = 0.0
    for chunk_idx_list in batch_idx_list:
        # get batch data set
        # data is (batch, visible, lag): slot 0 holds the current frame,
        # the remaining slots the conditioning history.
        data = np.zeros(shape=(SIZE_BATCH, SIZE_VISIBLE, SIZE_LAG))
        for idx, (start, end) in enumerate(chunk_idx_list):
            data[idx, :, :] = dataset[start:end, :].T
        hidden_0_probs, hidden_0_states, \
        hidden_k_probs, hidden_k_states, \
        visible_k_probs, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
        # compute deltas
        d_weight_update, d_bias_hidden_update, \
        d_bias_visible_update, d_vis_vis, d_vis_hid = sgd.optimize(data[:, :, 0], hidden_0_states, hidden_0_probs, hidden_k_probs,
                                                                   hidden_k_states, visible_k_probs, visible_k_states, data[:, :, 1:])
        # update model values
        bernoulli.weights += d_weight_update
        bernoulli.bias_hidden += d_bias_hidden_update
        bernoulli.bias_visible += d_bias_visible_update
        bernoulli.vis_vis_weights += d_vis_vis
        bernoulli.vis_hid_weights += d_vis_hid
        # compute reconstruction error
        # (mean absolute difference between a fresh sample and the data frame)
        _, _, \
        _, _, \
        _, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
        error += np.mean(np.abs(visible_k_states - data[:, :, 0]))
    # Average the per-batch error over the epoch and report it.
    error = 1./len(batch_idx_list) * error;
    print error
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEA | SE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the tangible deed object for the static Lambda-shuttle event perk."""
    result = Tangible()
    result.template = "object/tangible/deed/event_perk/shared_lambda_shuttle_static_deed.iff"
    result.attribute_template_id = 2
    result.stfName("event_perk","lambda_shuttle_static_deed_name")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from unittest import mock
from configman.dotdict import DotDict
from socorro.lib.task_manager import TaskManager, default_task_func
class TestTaskManager:
    """Unit tests for socorro.lib.task_manager.TaskManager."""
    def test_constuctor1(self):
        # NOTE(review): method name has a typo ("constuctor"); kept so the
        # test id stays stable.
        config = DotDict()
        config.quit_on_empty_queue = False
        tm = TaskManager(config)
        assert tm.config == config
        assert tm.task_func == default_task_func
        assert tm.quit is False
    def test_get_iterator(self):
        config = DotDict()
        config.quit_on_empty_queue = False
        # A plain iterable works as the job source...
        tm = TaskManager(config, job_source_iterator=range(1))
        assert list(tm._get_iterator()) == [0]
        # ...as does a generator function...
        def an_iter(self):
            yield from range(5)
        tm = TaskManager(config, job_source_iterator=an_iter)
        assert list(tm._get_iterator()) == [0, 1, 2, 3, 4]
        # ...and an arbitrary iterable object.
        class X:
            def __init__(self, config):
                self.config = config
            def __iter__(self):
                yield from self.config
        tm = TaskManager(config, job_source_iterator=X(config))
        assert list(tm._get_iterator()) == list(config.keys())
    def test_blocking_start(self):
        config = DotDict()
        config.idle_delay = 1
        config.quit_on_empty_queue = False
        class MyTaskManager(TaskManager):
            def _responsive_sleep(self, seconds, wait_log_interval=0, wait_reason=""):
                # Break out of blocking_start after a few sleeps: the first
                # call hits AttributeError (no .count yet) and primes the
                # counter; the third raises KeyboardInterrupt.
                try:
                    if self.count >= 2:
                        raise KeyboardInterrupt
                    self.count += 1
                except AttributeError:
                    self.count = 0
        tm = MyTaskManager(config, task_func=mock.Mock())
        waiting_func = mock.Mock()
        tm.blocking_start(waiting_func=waiting_func)
        # 10 jobs expected from the default job source -- TODO confirm
        assert tm.task_func.call_count == 10
        assert waiting_func.call_count == 0
    def test_blocking_start_with_quit_on_empty(self):
        config = DotDict()
        config.idle_delay = 1
        config.quit_on_empty_queue = True
        tm = TaskManager(config, task_func=mock.Mock())
        waiting_func = mock.Mock()
        tm.blocking_start(waiting_func=waiting_func)
        assert tm.task_func.call_count == 10
        assert waiting_func.call_count == 0
|
'pmap_index', 0)
self.fmap_index = self._get_metadata('fmap_index', 0)
self.lmap_index = self._get_metadata('lmap_index', 0)
self.omap_index = self._get_metadata('omap_index', 0)
self.rmap_index = self._get_metadata('rmap_index', 0)
self.nmap_index = self._get_metadata('nmap_index', 0)
self.db_is_open = True
    def _close(self):
        """
        Close database backend.

        Must be implemented by each concrete backend.
        """
        raise NotImplementedError
    def close(self, update=True, user=None):
        """
        Close the database.

        if update is False, don't change access times, etc.
        """
        if self._directory != ":memory:":
            if update and not self.readonly:
                # This is just a dummy file to indicate last modified time of
                # the database for gramps.cli.clidbman:
                filename = os.path.join(self._directory, "meta_data.db")
                touch(filename)
                # Save metadata
                self._set_metadata('name_formats', self.name_formats)
                self._set_metadata('researcher', self.owner)
                # Bookmarks
                self._set_metadata('bookmarks', self.bookmarks.get())
                self._set_metadata('family_bookmarks',
                                   self.family_bookmarks.get())
                self._set_metadata('event_bookmarks', self.event_bookmarks.get())
                self._set_metadata('place_bookmarks', self.place_bookmarks.get())
                self._set_metadata('repo_bookmarks', self.repo_bookmarks.get())
                self._set_metadata('source_bookmarks',
                                   self.source_bookmarks.get())
                self._set_metadata('citation_bookmarks',
                                   self.citation_bookmarks.get())
                self._set_metadata('media_bookmarks', self.media_bookmarks.get())
                self._set_metadata('note_bookmarks', self.note_bookmarks.get())
                # Custom type values, sets
                self._set_metadata('event_names', self.event_names)
                self._set_metadata('fattr_names', self.family_attributes)
                self._set_metadata('pattr_names', self.individual_attributes)
                self._set_metadata('sattr_names', self.source_attributes)
                self._set_metadata('marker_names', self.marker_names)
                self._set_metadata('child_refs', self.child_ref_types)
                self._set_metadata('family_rels', self.family_rel_types)
                self._set_metadata('event_roles', self.event_role_names)
                self._set_metadata('name_types', self.name_types)
                self._set_metadata('origin_types', self.origin_types)
                self._set_metadata('repo_types', self.repository_types)
                self._set_metadata('note_types', self.note_types)
                self._set_metadata('sm_types', self.source_media_types)
                self._set_metadata('url_types', self.url_types)
                self._set_metadata('mattr_names', self.media_attributes)
                self._set_metadata('eattr_names', self.event_attributes)
                self._set_metadata('place_types', self.place_types)
                # Save misc items:
                if self.has_changed:
                    self.save_gender_stats(self.genderStats)
                # Indexes:
                self._set_metadata('cmap_index', self.cmap_index)
                self._set_metadata('smap_index', self.smap_index)
                self._set_metadata('emap_index', self.emap_index)
                self._set_metadata('pmap_index', self.pmap_index)
                self._set_metadata('fmap_index', self.fmap_index)
                self._set_metadata('lmap_index', self.lmap_index)
                self._set_metadata('omap_index', self.omap_index)
                self._set_metadata('rmap_index', self.rmap_index)
                self._set_metadata('nmap_index', self.nmap_index)
            # Close the backend and release the lock file even when no
            # metadata was written (readonly / update=False).
            self._close()
            try:
                clear_lock_file(self.get_save_path())
            except IOError:
                pass
        self.db_is_open = False
        self._directory = None
def is_open(self):
return self.db_is_open
def get_dbid(self):
"""
We use the file directory name as the unique ID for
this database on this computer.
"""
return self.brief_name
def get_dbname(self):
"""
In DbGeneric, the database is in a text file at the path
"""
name = None
if self._directory:
filepath = os.path.join(self._directory, "name.txt")
try:
with open(filepath, "r") as name_file:
| name = name_file.readline().strip()
except (OSError, IOError) as msg:
LOG.error(str(msg))
return name
def version_supported(self):
| """Return True when the file has a supported version."""
return True
def _get_table_func(self, table=None, func=None):
"""
Private implementation of get_table_func.
"""
if table is None:
return list(self.__tables.keys())
elif func is None:
return self.__tables[table] # dict of functions
elif func in self.__tables[table].keys():
return self.__tables[table][func]
else:
return None
    def _txn_begin(self):
        """
        Lowlevel interface to the backend transaction.
        Executes a db BEGIN;

        No-op here; backends override when they need explicit transactions.
        """
        pass
    def _txn_commit(self):
        """
        Lowlevel interface to the backend transaction.
        Executes a db END;

        No-op here; backends override when they need explicit transactions.
        """
        pass
    def _txn_abort(self):
        """
        Lowlevel interface to the backend transaction.
        Executes a db ROLLBACK;

        No-op here; backends override when they need explicit transactions.
        """
        pass
def transaction_begin(self, transaction):
"""
Transactions are handled automatically by the db layer.
"""
self.transaction = transaction
return transaction
    def _get_metadata(self, key, default=[]):
        """
        Get an item from the database.

        Default is an empty list, which is a mutable and
        thus a bad default (pylint will complain).
        However, it is just used as a value, and not altered, so
        its use here is ok.

        Must be implemented by the concrete backend.
        """
        raise NotImplementedError
    def _set_metadata(self, key, value):
        """
        key: string
        value: item, will be serialized here

        Must be implemented by the concrete backend.
        """
        raise NotImplementedError
################################################################
#
# set_*_id_prefix methods
#
################################################################
@staticmethod
def _validated_id_prefix(val, default):
if isinstance(val, str) and val:
try:
str_ = val % 1
except TypeError: # missing conversion specifier
prefix_var = val + "%d"
except ValueError: # incomplete format
prefix_var = default+"%04d"
else:
prefix_var = val # OK as given
else:
prefix_var = default+"%04d" # not a string or empty string
return prefix_var
@staticmethod
def __id2user_format(id_pattern):
"""
Return a method that accepts a Gramps ID and adjusts it to the users
format.
"""
pattern_match = re.match(r"(.*)%[0 ](\d+)[diu]$", id_pattern)
if pattern_match:
str_prefix = pattern_match.group(1)
#nr_width = int(pattern_match.group(2))
def closure_func(gramps_id):
if gramps_id and gramps_id.startswith(str_prefix):
id_number = gramps_id[len(str_prefix):]
if id_number.isdigit():
id_value = int(id_number, 10)
#if len(str(id_value)) > nr_width:
# # The ID to be imported is too large to fit in the
# # users format. For now just create a new ID,
|
from djblets.cache.backend import cache_memoize
class BugTracker(object):
    """An interface to a bug tracker.

    BugTracker subclasses are used to enable interaction with different
    bug trackers.
    """
    def get_bug_info(self, repository, bug_id):
        """Get the information for the specified bug.

        This should return a dictionary with 'summary', 'description', and
        'status' keys.

        Results are cached for 60 seconds, which keeps repeated infobox
        loads fast while still serving relatively fresh data.
        """
        cache_key = self.make_bug_cache_key(repository, bug_id)
        fetch = lambda: self.get_bug_info_uncached(repository, bug_id)
        return cache_memoize(cache_key, fetch, expiration=60)
    def get_bug_info_uncached(self, repository, bug_id):
        """Get the information for the specified bug (implementation).

        Subclasses implement this and return a dictionary with 'summary',
        'description', and 'status' keys; unsupported fields are returned
        as empty strings.
        """
        return dict(summary='', description='', status='')
    def make_bug_cache_key(self, repository, bug_id):
        """Return the cache key for a (repository, bug) pair."""
        key = 'repository-%s-bug-%s' % (repository.pk, bug_id)
        return key
|
'''
Task Coach - Your friendly task manager
Copyright (C) 2004-2013 Task Coach developers <developers@taskcoach.org>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from notebook import Notebook, BookPage
from frame import AuiManagedFrameWithDynamicCenterPane
from dialog import Dialog, NotebookDialog, HTMLDialog, Attac | hmentSelector
from itemctrl import Column
from listctrl import VirtualListCtrl
from checklistbox import CheckListBox
from treectrl import CheckTreeCtrl, TreeListCtrl
from squaremap import SquareMap
from timeline import Timeline
from datectrl import DateTimeCtrl, TimeEntry
from textctrl import SingleLineTextCtrl, MultiLineTextCtrl, StaticTextWithToolTip
from panel import PanelWithBoxSizer, BoxWithFlexGridSizer, BoxWithBoxSizer
from searchctrl import SearchCtrl
from spinctrl im | port SpinCtrl
from tooltip import ToolTipMixin, SimpleToolTip
from dirchooser import DirectoryChooser
from fontpicker import FontPickerCtrl
from syncmlwarning import SyncMLWarningDialog
from calendarwidget import Calendar
from calendarconfig import CalendarConfigDialog
from password import GetPassword
import masked
from wx.lib import sized_controls
|
#!/usr/bin/env python
"""simple thread pool
@author: dn13(dn13@gmail.com)
@author: Fibrizof(dfang84@gmail.com)
"""
import threading
import Queue
import new
class WorkerPoolError(Exception):
    """Raised for worker-pool misuse (submit after join, duplicate names).

    Fix: the original declared this with ``def``, creating a *function*
    named WorkerPoolError instead of an exception class, so every
    ``raise WorkerPoolError, ...`` site in this module would have failed
    with a TypeError instead of raising the intended error.
    """
    pass
class Task(threading.Thread):
    """Worker thread: runs callables taken from *queue* and pushes their
    return values onto *result_queue*.

    A None item is a wake-up sentinel (sent by cancel()); exceptions from
    a job are deliberately swallowed, matching the pool's best-effort
    contract.
    """
    def __init__(self, queue, result_queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.result_queue = result_queue
        self.running = True
    def cancel(self):
        """Ask the thread to stop and unblock its pending queue.get()."""
        self.running = False
        self.queue.put(None)
    def run(self):
        while self.running:
            job = self.queue.get()
            if job:
                try:
                    self.result_queue.put(job())
                except:
                    pass
            self.queue.task_done()
class WorkerPool( object ):
    """Thread pool running submitted callables on *threadnum* daemon threads.

    Python 2 module: uses the old ``raise Cls, msg`` statement syntax.
    """
    def __init__( self, threadnum ):
        self.threadnum = threadnum
        self.q = Queue.Queue()
        self.result_q = Queue.Queue()
        self.ts = [ Task(self.q, self.result_q) for i in range(threadnum) ]
        self._registfunctions = {}
        self.is_in_join = False
        for t in self.ts :
            t.setDaemon(True)
            t.start()
    def __del__(self):
        try:
            # cancel() is called twice on purpose: the first pass flips every
            # thread's running flag to False, the second feeds each thread one
            # more queue item so blocked get() calls wake up and exit.
            # (Kept as one method instead of two separate interfaces.)
            for t in self.ts:
                t.cancel()
            for t in self.ts:
                t.cancel()
        except:
            pass
    def __call__( self, work ):
        # Submit *work* (a zero-argument callable), unless join() is running.
        if not self.is_in_join:
            self.q.put( work )
        else:
            raise WorkerPoolError, 'Pool has been joined'
    def join( self ):
        # Block until every queued job has been processed.
        self.is_in_join = True
        self.q.join()
        self.is_in_join = False
        return
    def runwithpool( self, _old ):
        # Decorator: make calls to the wrapped function run on the pool.
        def _new( *args, **kwargs ):
            self.q.put( lambda : _old( *args, **kwargs ) )
        return _new
    def registtopool( self, _old ):
        # Decorator: expose the function as an attribute of the pool
        # (resolved by __getattr__ below); the function itself is unchanged.
        if _old.__name__ in self._registfunctions :
            raise WorkerPoolError, 'function name exists'
        self._registfunctions[_old.__name__] = _old
        return _old
    def get_all_result(self):
        # Drain the result queue (best effort: returns what is available now).
        result_list = []
        while True:
            try:
                result_list.append(self.result_q.get_nowait())
            except Exception as e:
                if 0 == self.result_q.qsize():
                    break
                else:
                    continue
        return result_list
    def __getattr__( self, name ):
        # Delegate unknown attributes to functions registered via registtopool.
        if name in self._registfunctions :
            return self._registfunctions[name]
        raise AttributeError, '%s not found' % name
if __name__ == '__main__' :
    # Smoke test (Python 2): exercise both submission styles.
    import thread
    p = WorkerPool(5)
    @p.runwithpool
    def foo( a ):
        print 'foo>', thread.get_ident(), '>', a
        return
    @p.registtopool
    def bar( b ):
        print 'bar>', thread.get_ident(), '>', b
    for i in range(10):
        foo(i)            # async via the pool
        p.bar(i+100)      # direct call through the registered attribute
        p( lambda : bar(200) )  # async via __call__
    p.join()
|
from ..utils.command import BaseCommand
from ..utils.tabulate import tabulate
from ..utils.info import get_packages, Sources
class Colors:
    """ANSI terminal escape codes used to colorize the "outdated" table."""
    PURPLE = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    UNDERLINE = '\033[4m'
    ENDC = '\033[0m'  # reset attributes
class Command(BaseCommand):
    """``outdated`` command: list packages whose installed version differs
    from the wanted (package.json) or latest available version."""
    name = "outdated"
    @classmethod
    def run(cls, args):
        cls.  _run() if False else cls._run()
    @classmethod
    def _run(cls):
        packages = get_packages((Sources.required, Sources.installed))
        packages = list(filter(lambda p: p.wanted_rule, packages)) # filter out ones with no wanted version (not in package.json)
        packages_to_display = []
        for package in packages:
            # presumably populates package.wanted_version as a side effect
            # (return value is unused) -- TODO confirm
            package.get_wanted_version()
            # Outdated when not installed, or when the installed version
            # disagrees with either the latest or the wanted version.
            if not package.version or (
                package.version != package.latest_version or
                package.version != package.wanted_version):
                packages_to_display.append(package)
        cls.display_outdated(packages_to_display)
    @staticmethod
    def display_outdated(packages):
        """Print an aligned, colorized table of the outdated packages."""
        if len(packages) == 0:
            return
        headings = ["package", "current", "wanted", "latest"]
        headings = list(map(lambda heading: Colors.UNDERLINE+heading+Colors.ENDC, headings))
        table = []
        packages = sorted(packages, key=lambda package: package.name)
        for package in packages:
            table.append([
                Colors.OKGREEN+package.name+Colors.ENDC,
                package.version or "n/a",
                Colors.OKGREEN+(package.wanted_version or "n/a")+Colors.ENDC,
                Colors.PURPLE+(package.latest_version or "n/a")+Colors.ENDC
            ])
        print(tabulate(table, headings, tablefmt="plain"))
|
ist(self):
"""
Tests --list output of migrate command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("migrate", list=True, stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
# Giving the explicit app_label tests for selective `show_migration_list` in the command
call_command("migrate", "migrations", list=True, stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_showmigrations_list(self):
        """
        Tests --list output of showmigrations command
        """
        out = six.StringIO()
        # Force color support so the bold app-label escape codes are emitted.
        with mock.patch('django.core.management.color.supports_color', lambda *args: True):
            call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
        self.assertEqual(
            '\x1b[1mmigrations\n\x1b[0m'
            ' [ ] 0001_initial\n'
            ' [ ] 0002_second\n',
            out.getvalue().lower()
        )
        call_command("migrate", "migrations", "0001", verbosity=0)
        out = six.StringIO()
        # Giving the explicit app_label tests for selective `show_list` in the command
        call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_initial\n'
            ' [ ] 0002_second\n',
            out.getvalue().lower()
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
    def test_showmigrations_plan(self):
        """
        Tests --plan output of showmigrations command
        """
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertIn(
            "[ ] migrations.0001_initial\n"
            "[ ] migrations.0003_third\n"
            "[ ] migrations.0002_second",
            out.getvalue().lower()
        )
        # verbosity=2 additionally prints each migration's dependencies.
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertIn(
            "[ ] migrations.0001_initial\n"
            "[ ] migrations.0003_third ... (migrations.0001_initial)\n"
            "[ ] migrations.0002_second ... (migrations.0001_initial)",
            out.getvalue().lower()
        )
        # After applying up to 0003, applied entries switch to [x].
        call_command("migrate", "migrations", "0003", verbosity=0)
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertIn(
            "[x] migrations.0001_initial\n"
            "[x] migrations.0003_third\n"
            "[ ] migrations.0002_second",
            out.getvalue().lower()
        )
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertIn(
            "[x] migrations.0001_initial\n"
            "[x] migrations.0003_third ... (migrations.0001_initial)\n"
            "[ ] migrations.0002_second ... (migrations.0001_initial)",
            out.getvalue().lower()
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"})
    def test_showmigrations_plan_no_migrations(self):
        """
        Tests --plan output of showmigrations command without migrations
        """
        # With an empty migrations module the plan is empty at any verbosity.
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual("", out.getvalue().lower())
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual("", out.getvalue().lower())
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
    def test_showmigrations_plan_squashed(self):
        """
        Tests --plan output of showmigrations command with squashed migrations.
        """
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[ ] migrations.1_auto\n"
            "[ ] migrations.2_auto\n"
            "[ ] migrations.3_squashed_5\n"
            "[ ] migrations.6_auto\n"
            "[ ] migrations.7_auto\n",
            out.getvalue().lower()
        )
        # verbosity=2 additionally prints each migration's dependencies.
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[ ] migrations.1_auto\n"
            "[ ] migrations.2_auto ... (migrations.1_auto)\n"
            "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ] migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower()
        )
        # Applying through the squashed migration marks it and its
        # predecessors as applied.
        call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[x] migrations.1_auto\n"
            "[x] migrations.2_auto\n"
            "[x] migrations.3_squashed_5\n"
            "[ ] migrations.6_auto\n"
            "[ ] migrations.7_auto\n",
            out.getvalue().lower()
        )
        out = six.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[x] migrations.1_auto\n"
            "[x] migrations.2_auto ... (migrations.1_auto)\n"
            "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ] migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower()
        )
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_sqlmigrate(self):
        """
        Makes sure that sqlmigrate does something: output is wrapped in a
        transaction, forward SQL creates the table, backward SQL drops it.
        """
        # Make sure the output is wrapped in a transaction
        out = six.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out)
        output = out.getvalue()
        self.assertIn(connection.ops.start_transaction_sql(), output)
        self.assertIn(connection.ops.end_transaction_sql(), output)
        # Test forwards. All the databases agree on CREATE TABLE, at least.
        out = six.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out)
        self.assertIn("create table", out.getvalue().lower())
        # Cannot generate the reverse SQL unless we've applied the migration.
        call_command("migrate", "migrations", verbosity=0)
        # And backwards is a DROP TABLE
        out = six.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
        self.assertIn("drop table", out.getvalue().lower())
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app"])
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
https://code.djangoproject.com/ticket/22823
Assuming you have 3 apps, `A`, `B`, and |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
class home_grid_id(Variable):
    """The grid_id of a person's residence."""
    def dependencies(self):
        # Need the person's household link plus the household's grid cell.
        deps = [my_attribute_label('household_id')]
        deps.append('psrc.household.grid_id')
        return deps
    def compute(self, dataset_pool):
        # Join each person to its household and take the household's grid_id.
        household_set = dataset_pool.get_dataset('household')
        person_set = self.get_dataset()
        return person_set.get_join_data(household_set, name='grid_id')
from opus_core.tests import opus_unittest
from urbansim.variable_test_toolbox import VariableTestToolbox
from numpy import array
from numpy import ma
from psrc.datasets.person_dataset import PersonDataset
from opus_core.storage_factory import StorageFactory
class Tests(opus_unittest.OpusTestCase):
    """Unit test for the psrc.person.home_grid_id variable."""
    variable_name = 'psrc.person.home_grid_id'
    def test_my_inputs(self):
        storage = StorageFactory().get_storage('dict_storage')
        persons_table_name = 'persons'
        # Five persons in two households (1 and 3).
        storage.write_table(
            table_name=persons_table_name,
            table_data={
                'person_id':array([1, 2, 3, 4, 5]),
                'household_id':array([1, 1, 3, 3, 3]),
                'member_id':array([1,2,1,2,3])
                },
            )
        persons = PersonDataset(in_storage=storage, in_table_name=persons_table_name)
        values = VariableTestToolbox().compute_variable(self.variable_name,
            data_dictionary = {
                'household':{
                    'household_id':array([1,2,3]),
                    'grid_id':array([9, 9, 7])
                    },
                'person':persons
                },
            dataset = 'person'
            )
        # Each person inherits its household's grid cell.
        should_be = array([9, 9, 7, 7, 7])
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
            'Error in ' + self.variable_name)
# Allow running this module's tests directly.
if __name__=='__main__':
    opus_unittest.main()
_ in content_view.repository])
content_view.publish()
cvv = content_view.read().version[0]
self.assertEqual(len(cvv.read().environment), 1)
for i in range(1, randint(3, 6)):
lce = entities.LifecycleEnvironment(organization=self.org).create()
promote(cvv, lce.id)
self.assertEqual(len(cvv.read().environment), i+1)
    def test_promote_docker_repo_composite_content_view(self):
        """@Test: Add Docker-type repository to content view and publish it.
        Then add that content view to composite one. Publish and promote that
        composite content view to the next available lifecycle-environment.
        @Assert: Docker-type repository is promoted to content view found in
        the specific lifecycle-environment.
        @Feature: Docker
        """
        lce = entities.LifecycleEnvironment(organization=self.org).create()
        repo = _create_repository(
            entities.Product(organization=self.org).create())
        content_view = entities.ContentView(
            composite=False,
            organization=self.org,
        ).create()
        content_view.repository = [repo]
        content_view = content_view.update(['repository'])
        self.assertEqual(
            [repo.id], [repo_.id for repo_ in content_view.repository])
        content_view.publish()
        # Read back the published version so it can be used as a component.
        cvv = content_view.read().version[0].read()
        comp_content_view = entities.ContentView(
            composite=True,
            organization=self.org,
        ).create()
        comp_content_view.component = [cvv]
        comp_content_view = comp_content_view.update(['component'])
        self.assertEqual(cvv.id, comp_content_view.component[0].id)
        comp_content_view.publish()
        comp_cvv = comp_content_view.read().version[0]
        # Publishing leaves the version in one environment (Library)...
        self.assertEqual(len(comp_cvv.read().environment), 1)
        promote(comp_cvv, lce.id)
        # ...and promotion adds the target environment.
        self.assertEqual(len(comp_cvv.read().environment), 2)
    def test_promote_multiple_docker_repo_composite_content_view(self):
        """@Test: Add Docker-type repository to content view and publish it.
        Then add that content view to composite one. Publish and promote that
        composite content view to the multiple available lifecycle-environments
        @Assert: Docker-type repository is promoted to content view found in
        the specific lifecycle-environments.
        @Feature: Docker
        """
        repo = _create_repository(
            entities.Product(organization=self.org).create())
        content_view = entities.ContentView(
            composite=False,
            organization=self.org,
        ).create()
        content_view.repository = [repo]
        content_view = content_view.update(['repository'])
        self.assertEqual(
            [repo.id], [repo_.id for repo_ in content_view.repository])
        content_view.publish()
        # Published version, read fully so it can be used as a component.
        cvv = content_view.read().version[0].read()
        comp_content_view = entities.ContentView(
            composite=True,
            organization=self.org,
        ).create()
        comp_content_view.component = [cvv]
        comp_content_view = comp_content_view.update(['component'])
        self.assertEqual(cvv.id, comp_content_view.component[0].id)
        comp_content_view.publish()
        comp_cvv = comp_content_view.read().version[0]
        self.assertEqual(len(comp_cvv.read().environment), 1)
        # Promote through a random number of new environments; each promotion
        # must add exactly one environment to the version.
        for i in range(1, randint(3, 6)):
            lce = entities.LifecycleEnvironment(organization=self.org).create()
            promote(comp_cvv, lce.id)
            self.assertEqual(len(comp_cvv.read().environment), i+1)
@run_only_on('sat')
class DockerActivationKeyTestCase(APITestCase):
"""Tests specific to adding ``Docker`` repositories to Activation Keys."""
    @classmethod
    def setUpClass(cls):
        """Create necessary objects which can be re-used in tests."""
        super(DockerActivationKeyTestCase, cls).setUpClass()
        cls.org = entities.Organization().create()
        cls.lce = entities.LifecycleEnvironment(organization=cls.org).create()
        cls.repo = _create_repository(
            entities.Product(organization=cls.org).create())
        # Non-composite content view holding the Docker repository.
        content_view = entities.ContentView(
            composite=False,
            organization=cls.org,
        ).create()
        content_view.repository = [cls.repo]
        cls.content_view = content_view.update(['repository'])
        cls.content_view.publish()
        # Published version, promoted into the shared lifecycle environment.
        cls.cvv = content_view.read().version[0].read()
        promote(cls.cvv, cls.lce.id)
    def test_add_docker_repo_to_activation_key(self):
        """@Test: Add Docker-type repository to a non-composite content view
        and publish it. Then create an activation key and associate it with the
        Docker content view.
        @Assert: Docker-based content view can be added to activation key
        @Feature: Docker
        """
        ak = entities.ActivationKey(
            content_view=self.content_view,
            environment=self.lce,
            organization=self.org,
        ).create()
        self.assertEqual(ak.content_view.id, self.content_view.id)
        # The key's content view still carries the shared Docker repository.
        self.assertEqual(ak.content_view.read().repository[0].id, self.repo.id)
def test_remove_docker_repo_to_activation_key(self):
    """@Test: Add Docker-type repository to a non-composite content view
    and publish it. Create an activation key and associate it with the
    Docker content view. Then remove this content view from the activation
    key.

    @Assert: Docker-based content view can be added and then removed from
    the activation key.

    @Feature: Docker

    """
    ak = entities.ActivationKey(
        content_view=self.content_view,
        environment=self.lce,
        organization=self.org,
    ).create()
    self.assertEqual(ak.content_view.id, self.content_view.id)
    # Detach the content view by nulling the field and pushing the update;
    # the server should report no content view afterwards.
    ak.content_view = None
    self.assertIsNone(ak.update(['content_view']).content_view)
def test_add_docker_repo_composite_view_to_activation_key(self):
    """@Test:Add Docker-type repository to a non-composite content view and
    publish it. Then add this content view to a composite content view and
    publish it. Create an activation key and associate it with the
    composite Docker content view.

    @Assert: Docker-based content view can be added to activation key

    @Feature: Docker

    """
    # Wrap the pre-built content view version in a composite content view.
    comp_content_view = entities.ContentView(
        composite=True,
        organization=self.org,
    ).create()
    comp_content_view.component = [self.cvv]
    comp_content_view = comp_content_view.update(['component'])
    self.assertEqual(self.cvv.id, comp_content_view.component[0].id)
    # Publish the composite view and promote its version so an activation
    # key in self.lce can use it.
    comp_content_view.publish()
    comp_cvv = comp_content_view.read().version[0].read()
    promote(comp_cvv, self.lce.id)
    ak = entities.ActivationKey(
        content_view=comp_content_view,
        environment=self.lce,
        organization=self.org,
    ).create()
    self.assertEqual(ak.content_view.id, comp_content_view.id)
def test_remove_docker_repo_composite_view_to_activation_key(self):
"""@Test: Add Docker-type repository to a non-composite content view
and publish it. Then add this content view to a composite content view
and publish it. Create an activation key and associate it with the
composite Docker content view. Then, remove the composite content view
from the activation key.
@Assert: Docker-based composite content view can be added and then
removed from the activation key.
@Feature: Docker
"""
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [self.cvv]
comp_content_view = comp_content_view.update(['component'])
self.assertEqual(self.cvv.id, comp_content_view.component[0].id)
comp_content_view.publish()
comp_cvv = comp_content_view.read().version[0].read()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
|
from yali.storage import StorageError
class LibraryError(StorageError):
    """A StorageError raised by the storage library layer."""
    pass
|
"""
Interrogate stick for supported capabilities.
"""
import sys
from codinghyde.ant import driver
from codinghyde.ant import node
from config import *
# Initialize
stick = driv | er.USB1Driver(SERIAL, debug=DEBUG)
antnode = node.Node(stick)
antnode.start()
# Interrogate stick
# Note: This method will return immediately, as the stick's capabilities are
# interrogated on node initialization (node.start()) in order to set proper
# internal Node instance state.
capabilities = antnode.getCapabilities()
print 'Maximum channels:', capabilities['max_channels']
print 'Maximum network keys:', capabilities['max_net_keys']
print 'Standard options: %X' % capa | bilities['std_options']
print 'Advanced options: %X' % capabilities['adv_options']
# Shutdown
antnode.stop()
|
("realm")
.filter(
delivery_email__iexact=email.strip(),
is_active=True,
realm__deactivated=False,
is_bot=False,
)
.order_by("date_joined")
)
accounts: List[Accounts] = []
for profile in profiles:
accounts.append(
dict(
realm_name=profile.realm.name,
realm_id=profile.realm.id,
full_name=profile.full_name,
avatar=avatar_url(profile),
)
)
return accounts
def get_api_key(user_profile: UserProfile) -> str:
    """Return the user's API key."""
    return user_profile.api_key
def get_all_api_keys(user_profile: UserProfile) -> List[str]:
    """Return every API key belonging to the user (currently exactly one)."""
    # Users can only have one API key for now
    return [user_profile.api_key]
def validate_user_custom_profile_field(
    realm_id: int, field: CustomProfileField, value: Union[int, str, List[int]]
) -> Union[int, str, List[int]]:
    """Validate (and possibly normalize) one custom-profile-field value.

    Dispatches on the field's type: generic types use FIELD_VALIDATORS,
    SELECT fields use SELECT_FIELD_VALIDATORS with the field's option
    data, and USER fields use USER_FIELD_VALIDATORS scoped to the realm.
    Raises AssertionError for an unknown field type.
    """
    kind = field.field_type
    label = f"{field.name}"

    generic_validators = CustomProfileField.FIELD_VALIDATORS
    if kind in generic_validators:
        return generic_validators[kind](label, value)

    if kind == CustomProfileField.SELECT:
        select_validator = CustomProfileField.SELECT_FIELD_VALIDATORS[kind]
        options = field.field_data
        # Put an assertion so that mypy doesn't complain.
        assert options is not None
        return select_validator(label, options, value)

    if kind == CustomProfileField.USER:
        user_validator = CustomProfileField.USER_FIELD_VALIDATORS[kind]
        return user_validator(realm_id, value, False)

    raise AssertionError("Invalid field type")
def validate_user_custom_profile_data(
    realm_id: int, profile_data: List[Dict[str, Union[int, str, List[int]]]]
) -> None:
    """Validate every submitted custom-field value against its field type.

    Raises JsonableError when a field id does not exist or when a value
    fails its field-type validator.
    """
    for entry in profile_data:
        entry_id = entry["id"]
        try:
            custom_field = CustomProfileField.objects.get(id=entry_id)
        except CustomProfileField.DoesNotExist:
            raise JsonableError(_("Field id {id} not found.").format(id=entry_id))

        try:
            validate_user_custom_profile_field(realm_id, custom_field, entry["value"])
        except ValidationError as error:
            raise JsonableError(error.message)
def can_access_delivery_email(user_profile: UserProfile) -> bool:
    """Whether this user may see other users' delivery email addresses.

    Follows the realm's email_address_visibility policy: visible to
    admins only, or to moderators and admins; any other policy => False.
    """
    policy = user_profile.realm.email_address_visibility
    if policy == Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS:
        return user_profile.is_realm_admin
    if policy == Realm.EMAIL_ADDRESS_VISIBILITY_MODERATORS:
        return user_profile.is_realm_admin or user_profile.is_moderator
    return False
def format_user_row(
    realm: Realm,
    acting_user: Optional[UserProfile],
    row: Dict[str, Any],
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    custom_profile_field_data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Formats a user row returned by a database fetch using
    .values(*realm_user_dict_fields) into a dictionary representation
    of that user for API delivery to clients.  The acting_user
    argument is used for permissions checks (whether delivery_email is
    included); pass None to always omit delivery_email.
    """
    # Role booleans are derived from the single `role` value for client
    # convenience; `role` itself is also included below.
    is_admin = is_administrator_role(row["role"])
    is_owner = row["role"] == UserProfile.ROLE_REALM_OWNER
    is_guest = row["role"] == UserProfile.ROLE_GUEST
    is_bot = row["is_bot"]
    result = dict(
        email=row["email"],
        user_id=row["id"],
        avatar_version=row["avatar_version"],
        is_admin=is_admin,
        is_owner=is_owner,
        is_guest=is_guest,
        is_billing_admin=row["is_billing_admin"],
        role=row["role"],
        is_bot=is_bot,
        full_name=row["full_name"],
        timezone=canonicalize_timezone(row["timezone"]),
        is_active=row["is_active"],
        date_joined=row["date_joined"].isoformat(),
    )

    # Zulip clients that support using `GET /avatar/{user_id}` as a
    # fallback if we didn't send an avatar URL in the user object pass
    # user_avatar_url_field_optional in client_capabilities.
    #
    # This is a major network performance optimization for
    # organizations with 10,000s of users where we would otherwise
    # send avatar URLs in the payload (either because most users have
    # uploaded avatars or because EMAIL_ADDRESS_VISIBILITY_ADMINS
    # prevents the older client_gravatar optimization from helping).
    # The performance impact is large largely because the hashes in
    # avatar URLs structurally cannot compress well.
    #
    # The user_avatar_url_field_optional gives the server sole
    # discretion in deciding for which users we want to send the
    # avatar URL (Which saves clients an RTT at the cost of some
    # bandwidth).  At present, the server looks at `long_term_idle` to
    # decide which users to include avatars for, piggy-backing on a
    # different optimization for organizations with 10,000s of users.
    include_avatar_url = not user_avatar_url_field_optional or not row["long_term_idle"]
    if include_avatar_url:
        result["avatar_url"] = get_avatar_field(
            user_id=row["id"],
            realm_id=realm.id,
            email=row["delivery_email"],
            avatar_source=row["avatar_source"],
            avatar_version=row["avatar_version"],
            medium=False,
            client_gravatar=client_gravatar,
        )

    # Real (delivery) email is only included for viewers the realm
    # policy permits (see can_access_delivery_email).
    if acting_user is not None and can_access_delivery_email(acting_user):
        result["delivery_email"] = row["delivery_email"]

    if is_bot:
        result["bot_type"] = row["bot_type"]
        if row["email"] in settings.CROSS_REALM_BOT_EMAILS:
            result["is_cross_realm_bot"] = True

        # Note that bot_owner_id can be None with legacy data.
        result["bot_owner_id"] = row["bot_owner_id"]
    elif custom_profile_field_data is not None:
        # Custom profile fields only apply to human users, not bots.
        result["profile_data"] = custom_profile_field_data
    return result
def user_profile_to_user_row(user_profile: UserProfile) -> Dict[str, Any]:
    """Convert a fetched UserProfile object into the dictionary shape that
    a QuerySet fetch via `.values(*realm_user_dict_fields)` would produce.

    model_to_dict follows a different convention than `.values()` for
    foreign keys, naming them e.g. `bot_owner` rather than `bot_owner_id`,
    so we rename that key here.  (This could be simplified in the future
    by renaming the field in realm_user_dict_fields to the less readable
    `bot_owner`.)
    """
    row = model_to_dict(user_profile, fields=[*realm_user_dict_fields, "bot_owner"])
    row["bot_owner_id"] = row.pop("bot_owner")
    return row
def get_cross_realm_dicts() -> List[Dict[str, Any]]:
users = bulk_get_users(
list(settings.CROSS_REALM_BOT_EMAILS),
None,
base_query=UserProfile.objects.filter(realm__string_id=settings.SYSTEM_BOT_REALM),
).values()
result = []
for user in users:
# Important: We filter here, is addition to in
# `base_query`, because of how bulk_get_users shares its
# cache with other UserProfile caches.
if user.realm.string_id != settings.SYSTEM_BOT_REALM: # nocoverage
continue
user_row = user_profile_to_user_row(user)
# Because we want to avoid clients becing exposed to the
# implementation detail that these bots are self-owned, we
# just set bot_owner_id=None.
user_row["bot_owner_id"] = None
result.append(
format_user_row(
user.realm,
acting_user=user,
row=user_row,
client_gravatar=False,
user_avatar_url_field_optional=False,
custom_profile_field_data=No |
# -*- coding: utf-8 -*-
"""Installer script for Pywikibot 2.0 framework."""
#
# (C) Pywikibot team, 2009-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import itertools
import os
import sys
PYTHON_VERSION = sys.version_info[:3]  # (major, minor, micro)
PY2 = (PYTHON_VERSION[0] == 2)
PY26 = (PYTHON_VERSION < (2, 7))  # any Python below 2.7, i.e. the 2.6.x line

# Error text shown when setup runs on an unsupported interpreter.
versions_required_message = """
Pywikibot not available on:
%s
Pywikibot is only supported under Python 2.6.5+, 2.7.2+ or 3.3+
"""
def python_is_supported():
    """Check that Python is supported."""
    # Any change to this must be copied to pwb.py
    if PYTHON_VERSION >= (3, 3, 0):
        return True
    if PY2 and PYTHON_VERSION >= (2, 7, 2):
        return True
    return PY26 and PYTHON_VERSION >= (2, 6, 5)
if not python_is_supported():
    raise RuntimeError(versions_required_message % sys.version)

test_deps = []

dependencies = ['requests']

# the irc module has no Python 2.6 support since 10.0
irc_dep = 'irc==8.9' if sys.version_info < (2, 7) else 'irc'

# Optional feature -> required distributions, consumed by extras_require.
extra_deps = {
    # Core library dependencies
    'isbn': ['python-stdnum'],
    'Graphviz': ['pydot>=1.0.28'],
    'Google': ['google>=1.7'],
    'IRC': [irc_dep],
    'mwparserfromhell': ['mwparserfromhell>=0.3.3'],
    'Tkinter': ['Pillow'],
    # 0.6.1 supports socket.io 1.0, but WMF is using 0.9 (T91393 and T85716)
    'rcstream': ['socketIO-client<0.6.1'],
    'security': ['requests[security]'],
    'mwoauth': ['mwoauth>=0.2.4'],
    'html': ['BeautifulSoup4'],
}

if PY2:
    # Additional core library dependencies which are only available on Python 2
    extra_deps.update({
        'csv': ['unicodecsv'],
        'MySQL': ['oursql'],
        'unicode7': ['unicodedata2>=7.0.0-2'],
    })

# Per-script requirements; merged into extras_require further below.
script_deps = {
    'flickrripper.py': ['Pillow'],
    'states_redirect.py': ['pycountry'],
    'weblinkchecker.py': ['memento_client>=0.5.1'],
}

# flickrapi 1.4.4 installs a root logger in verbose mode; 1.4.5 fixes this.
# The problem doesnt exist in flickrapi 2.x.
# pywikibot accepts flickrapi 1.4.5+ on Python 2, as it has been stable for a
# long time, and only depends on python-requests 1.x, whereas flickrapi 2.x
# depends on python-requests 2.x, which is first packaged in Ubuntu 14.04
# and will be first packaged for Fedora Core 21.
# flickrapi 1.4.x does not run on Python 3, and setuptools can only
# select flickrapi 2.x for Python 3 installs.
script_deps['flickrripper.py'].append(
    'flickrapi>=1.4.5,<2' if PY26 else 'flickrapi')

# lunatic-python is only available for Linux
if sys.platform.startswith('linux'):
    script_deps['script_wui.py'] = [irc_dep, 'lunatic-python', 'crontab']

# The main pywin32 repository contains a Python 2 only setup.py with a small
# wrapper setup3.py for Python 3.
# http://pywin32.hg.sourceforge.net:8000/hgroot/pywin32/pywin32
# The main pywinauto repository doesnt support Python 3.
# The repositories used below have a Python 3 compliant setup.py
dependency_links = [
    'git+https://github.com/AlereDevices/lunatic-python.git#egg=lunatic-python',
    'hg+https://bitbucket.org/TJG/pywin32#egg=pywin32',
    'git+https://github.com/vasily-v-ryabov/pywinauto-64#egg=pywinauto',
    'git+https://github.com/nlhepler/pydot#egg=pydot-1.0.29',
]

if PYTHON_VERSION < (2, 7, 3):
    # work around distutils hardcoded unittest dependency
    # work around T106512
    import unittest  # noqa
    if 'test' in sys.argv:
        import unittest2
        sys.modules['unittest'] = unittest2

if sys.version_info[0] == 2:
    if PY26:
        # requests security extra includes pyOpenSSL. cryptography is the
        # dependency of pyOpenSSL. 0.8.2 is the newest and compatible version
        # for Python 2.6, which won't raise unexpected DeprecationWarning.
        extra_deps['security'].append('cryptography<=0.8.2')
        # argparse is in the stdlib from 2.7 on; 2.6 needs the backport.
        script_deps['replicate_wiki.py'] = ['argparse']
        dependencies.append('future>=0.15.0')  # provides collections backports

    dependencies += extra_deps['unicode7']  # T102461 workaround

    # tools.ip does not have a hard dependency on an IP address module,
    # as it falls back to using regexes if one is not available.
    # The functional backport of py3 ipaddress is acceptable:
    # https://pypi.python.org/pypi/ipaddress
    # However the Debian package python-ipaddr is also supported:
    # https://pypi.python.org/pypi/ipaddr
    # Other backports are likely broken.
    # ipaddr 2.1.10+ is distributed with Debian and Fedora. See T105443.
    dependencies.append('ipaddr>=2.1.10')

    if sys.version_info < (2, 7, 9):
        # Python versions before 2.7.9 will cause urllib3 to trigger
        # InsecurePlatformWarning warnings for all HTTPS requests. By
        # installing with security extras, requests will automatically set
        # them up and the warnings will stop. See
        # <https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning>
        # for more details.
        dependencies += extra_deps['security']

    script_deps['data_ingestion.py'] = extra_deps['csv']

    # mwlib is not available for py3
    script_deps['patrol'] = ['mwlib']

# Some of the ui_tests depend on accessing the console window's menu
# to set the console font and copy and paste, achieved using pywinauto
# which depends on pywin32.
# These tests may be disabled because pywin32 depends on VC++, is time
# comsuming to build, and the console window cant be accessed during appveyor
# builds.
# Microsoft makes available a compiler for Python 2.7
# http://www.microsoft.com/en-au/download/details.aspx?id=44266
# If you set up your own compiler for Python 3, on 3.3 two demo files
# packaged with pywin32 may fail. Remove com/win32com/demos/ie*.py
if os.name == 'nt' and os.environ.get('PYSETUP_TEST_NO_UI', '0') != '1':
    # FIXME: tests/ui_tests.py suggests pywinauto 0.4.2
    # which isnt provided on pypi.
    test_deps += ['pywin32', 'pywinauto>=0.4.0']

# Expose per-script requirements as extras too.
extra_deps.update(script_deps)

# Add all dependencies as test dependencies,
# so all scripts can be compiled for script_tests, etc.
if 'PYSETUP_TEST_EXTRAS' in os.environ:
    test_deps += list(itertools.chain(*(extra_deps.values())))

    # mwlib requires 'pyparsing>=1.4.11,<1.6', which conflicts with
    # pydot's requirement for pyparsing>=2.0.1.
    if 'mwlib' in test_deps:
        test_deps.remove('mwlib')
    if 'oursql' in test_deps and os.name == 'nt':
        test_deps.remove('oursql')  # depends on Cython

    if 'requests[security]' in test_deps:
        # Bug T105767 on Python 2.7 release 9+
        if sys.version_info[:2] == (2, 7) and sys.version_info[2] >= 9:
            test_deps.remove('requests[security]')

# These extra dependencies are needed otherwise unittest fails to load tests.
if sys.version_info[0] == 2:
    test_deps += extra_deps['csv']
else:
    test_deps += ['six']

# Imported late deliberately: the dependency lists above must be final
# before setup() consumes them.
from setuptools import setup, find_packages

name = 'pywikibot'
version = '2.0rc1.post2'
github_url = 'https://github.com/wikimedia/pywikibot-core'

setup(
    name=name,
    version=version,
    description='Python MediaWiki Bot Framework',
    long_description=open('README.rst').read(),
    maintainer='The Pywikibot team',
    maintainer_email='pywikibot@lists.wikimedia.org',
    license='MIT License',
    packages=['pywikibot'] + [package
                              for package in find_packages()
                              if package.startswith('pywikibot.')],
    install_requires=dependencies,
    dependency_links=dependency_links,
    extras_require=extra_deps,
    url='https://www.mediawiki.org/wiki/Pywikibot',
    test_suite="tests.collector",
    tests_require=test_deps,
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Development Status :: 4 - Beta',
        'Operating System :: OS Independent',
        'Intended Audience :: Developers',
        'Environment :: Console',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
    ],
    use_2to3=False
)
|
# (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import time
import errno
try:
import simplejson as json
except ImportError:
import json
from ansible import constants as C
from ansible import utils
from ansible.cache.base import BaseCacheModule
class CacheModule(BaseCacheModule):
"""
A caching module backed by json files.
"""
def __init__(self, *args, **kwargs):
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
self._cache = {}
self._cache_dir = C.CACHE_PLUGIN_CONNECTION # expects a dir path
if not self._cache_dir:
utils.exit("error, fact_caching_connection is not set, cannot use fact cache")
if not os.path.exists(self._cache_dir):
try:
os.makedirs(self._cache_dir)
except (OSError,IOError), e:
utils.warning("error while trying to create cache dir %s : %s" % (self._cache_dir, str(e)))
return None
def get(self, key):
if key in self._cache:
return self._cache.get(key)
if self.has_expired(key):
raise KeyError
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = open( cachefile, 'r')
except (OSError,IOError), e:
utils.warning("error while trying to write to %s : %s" % (cachefile, str(e)))
else:
value = js | on.load(f)
self._cache[key] = value
return value
finally:
f.close()
def set(self, key, value):
self._cache[key] = value |
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = open(cachefile, 'w')
except (OSError,IOError), e:
utils.warning("error while trying to read %s : %s" % (cachefile, str(e)))
else:
f.write(utils.jsonify(value))
finally:
f.close()
def has_expired(self, key):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
st = os.stat(cachefile)
except (OSError,IOError), e:
if e.errno == errno.ENOENT:
return False
else:
utils.warning("error while trying to stat %s : %s" % (cachefile, str(e)))
if time.time() - st.st_mtime <= self._timeout:
return False
if key in self._cache:
del self._cache[key]
return True
def keys(self):
keys = []
for k in os.listdir(self._cache_dir):
if not (k.startswith('.') or self.has_expired(k)):
keys.append(k)
return keys
def contains(self, key):
if key in self._cache:
return True
if self.has_expired(key):
return False
try:
st = os.stat("%s/%s" % (self._cache_dir, key))
return True
except (OSError,IOError), e:
if e.errno == errno.ENOENT:
return False
else:
utils.warning("error while trying to stat %s : %s" % (cachefile, str(e)))
def delete(self, key):
del self._cache[key]
try:
os.remove("%s/%s" % (self._cache_dir, key))
except (OSError,IOError), e:
pass #TODO: only pass on non existing?
def flush(self):
self._cache = {}
for key in self.keys():
self.delete(key)
def copy(self):
ret = dict()
for key in self.keys():
ret[key] = self.get(key)
return ret
|
#!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
    # Management command wrapper around upsert_domains().

    def add_arguments(self, parser):
        """Register -m/--modified-within (days, default 7)."""
        parser.add_argument('-m', '--modified-within',
                            type=int,
                            metavar='n-days',
                            default=7,
                            help='upsert Domains modified within n-days')

    def handle(self, *args, **options):
        """Entry point: upsert Domains modified in the requested window."""
        upsert_domains(options['modified_within'])
def upsert_domains(modified_since=7):
    """Upsert Domains for SF Domain__c modified in last `modified_since` days.
    """
    logger.info(
        'upserting domains modified in last {since} days'.format(
            since=modified_since))
    recently_modified = iss.salesforce.Domain.get_domains_modified_since(
        days_ago=modified_since)
    for sf_domain in recently_modified:
        iss.models.Domain.upsert(sf_domain)
|
from sslyze import ServerNetworkLocation
from sslyze.plugins.elliptic_curves_plugin import (
SupportedEllipticCurvesScanResult,
SupportedEllipticCurvesImplementation,
)
from tests.connectivity_utils import check_connectivity_to_server_and_return_info
from tests.markers import can_only_run_on_linux_64
from tests.openssl_server import ModernOpenSslServer
class TestEllipticCurvesPluginWithOnlineServer:
    """Exercise the curves plugin against a real server (needs network)."""

    def test_supported_curves(self):
        # Given a server to scan that supports ECDH cipher suites
        server_location = ServerNetworkLocation("www.cloudflare.com", 443)
        server_info = check_connectivity_to_server_and_return_info(server_location)

        # When scanning for supported elliptic curves, it succeeds
        result: SupportedEllipticCurvesScanResult = SupportedEllipticCurvesImplementation.scan_server(server_info)

        # And the result confirms that some curves are supported and some are not
        assert result.supports_ecdh_key_exchange
        assert result.supported_curves
        assert result.rejected_curves

        # And a CLI output can be generated
        assert SupportedEllipticCurvesImplementation.cli_connector_cls.result_to_console_output(result)
@can_only_run_on_linux_64
class TestEllipticCurvesPluginWithLocalServer:
    """Exercise the curves plugin against a locally spawned OpenSSL server."""

    def test_supported_curves(self):
        # Given a server to scan that supports ECDH cipher suites with specific curves
        server_curves = ["X25519", "X448", "prime256v1", "secp384r1", "secp521r1"]
        with ModernOpenSslServer(groups=":".join(server_curves)) as server:
            server_location = ServerNetworkLocation(
                hostname=server.hostname, ip_address=server.ip_address, port=server.port
            )
            server_info = check_connectivity_to_server_and_return_info(server_location)

            # When scanning the server for supported curves, it succeeds
            result: SupportedEllipticCurvesScanResult = SupportedEllipticCurvesImplementation.scan_server(server_info)

        # And the supported curves were detected
        assert set(server_curves) == {curve.name for curve in result.supported_curves}
|
# Copyright 2012-2013 Ravello Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, | software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_ | import, print_function
import argparse
import textwrap
from testmill import (console, manifest, keypair, login, error,
application, tasks, util, inflect)
from testmill.state import env
# CLI help text for the `run` subcommand.
usage = textwrap.dedent("""\
    usage: ravtest [OPTION]... run [-i] [-c] [--new] [--vms <vmlist>]
                   [--dry-run] <application> [<command>]
           ravtest run --help
    """)

description = textwrap.dedent("""\
    Run automated tasks in a Ravello application.

    The application defined by <application> is loaded from the manifest
    (.ravello.yml). It is then created if it doesn't exist yet, and the
    runbook defined in the manifest is run.

    If --new is specified, a new application instance is always created,
    even if one exists already.

    The available options are:
        -i, --interactive
            Run in interactive mode. All tasks are run directly
            connected to the console. In case of multiple virtual
            machines, output will be interleaved and may be hard
            to understand.
        -c, --continue
            Continue running even after an error.
        --new
            Never re-use existing applications.
        --vms <vmlist>
            Execute tasks only on these virtual machines, instead of on
            all virtual machines in the application. <vmlist> is a
            comma-separated list of VMs.
        --dry-run
            Do not execute any tasks. Useful for starting up an
            application without doing anything yet.
    """)


def add_args(parser):
    """Attach the `run` subcommand's options and positionals to *parser*."""
    parser.usage = usage
    parser.description = description
    # Optional flags first; `--continue` needs an explicit dest because
    # `continue` is a Python keyword.
    for names, options in (
        (('-i', '--interactive'), {'action': 'store_true'}),
        (('-c', '--continue'), {'action': 'store_true', 'dest': 'continue_'}),
        (('--new',), {'action': 'store_true'}),
        (('--vms',), {}),
        (('--dry-run',), {'action': 'store_true'}),
    ):
        parser.add_argument(*names, **options)
    # Positionals: the application name, then an optional command.
    parser.add_argument('application')
    parser.add_argument('command', nargs='?')
def do_run(args, env):
    """The "ravello run" command."""
    login.default_login()
    keypair.default_keypair()
    manif = manifest.default_manifest()

    # Locate the requested application in the manifest; the for/else
    # only raises when no entry matched.
    appname = args.application
    for appdef in manif.get('applications', []):
        if appdef['name'] == appname:
            break
    else:
        error.raise_error("Unknown application `{0}`.", appname)

    # Optionally restrict execution to a comma-separated subset of VMs,
    # validating that every requested name exists in the application.
    vms = set((vm['name'] for vm in appdef.get('vms', [])))
    if args.vms:
        only = set((name for name in args.vms.split(',')))
        if not only <= vms:
            unknown = [name for name in only if name not in vms]
            what = inflect.plural_noun('virtual machine', len(unknown))
            error.raise_error("Unknown {0}: {1}", ', '.join(unknown), what)
        vms = [name for name in vms if name in only]
    if not vms:
        error.raise_error('No virtual machines in application.')

    app = application.create_or_reuse_application(appdef, args.new)
    app = application.wait_for_application(app, vms)

    # A literal <command> overrides every `execute` task; --dry-run
    # clears all tasks so the application just starts up.
    if args.command:
        for vm in appdef['vms']:
            for task in vm['tasks']:
                if task['name'] == 'execute':
                    task['commands'] = [args.command]
    elif args.dry_run:
        for vm in appdef['vms']:
            vm['tasks'] = []

    ret = tasks.run_all_tasks(app, vms)

    # Report the services exposed by the VMs we actually ran on.
    console.info('\n== The following services will be available for {0} '
                 'minutes:\n', appdef['keepalive'])
    for vm in app['vms']:
        if vm['name'] not in vms:
            continue
        svcs = vm.get('suppliedServices')
        if not svcs:
            continue
        console.info('On virtual machine `{0}`:', vm['name'])
        for svc in svcs:
            svc = svc['baseService']
            addr = util.format_service(vm, svc)
            console.info(' * {0}: {1}', svc['name'], addr)
        console.info('')
    return error.EX_OK if ret == 0 else error.EX_SOFTWARE
|
# Copyright (c) 2012, Tycho Andersen. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice s | hall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A P | ARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class QtileState(object):
    """
    Represents the state of the qtile object. Primarily used for restoring
    state across restarts; any additional state which doesn't fit nicely
    into X atoms can go here.
    """
    def __init__(self, qtile):
        # Note: window state is saved and restored via _NET_WM_STATE, so
        # the only thing we need to restore here is the layout and screen
        # configurations.
        self.groups = {group.name: group.layout.name for group in qtile.groups}
        self.screens = {
            index: screen.group.name
            for index, screen in enumerate(qtile.screens)
        }

    def apply(self, qtile):
        """
        Rearrange the windows in the specified Qtile object according to
        this QtileState.
        """
        for group_name, layout_name in self.groups.items():
            try:
                qtile.groupMap[group_name].layout = layout_name
            except KeyError:
                pass  # group missing
        for index, group_name in self.screens.items():
            try:
                qtile.screens[index].setGroup(qtile.groupMap[group_name])
            except (KeyError, IndexError):
                pass  # group or screen missing
|
__author__ = 'thatcher'
from django.contrib import admin
# from django.contrib.auth.models import User
# from django.contrib.auth.admin import UserAdmin
# from django.contrib.sessions.
from django.contrib.sessions.models import Session
from .models import *
from base.forms import *
def images_thubmnail(self):
    """Render an 80px-high thumbnail <img> tag for the admin list view."""
    # NOTE(review): name typo ("thubmnail") kept as-is — ImageAdmin's
    # list_display below references it by this exact name.
    return '<img style="max-height: 80px; width: auto;" src="{}" alt="{}" >'.format(self.uri(), self.alt)
    # return self.uri()
images_thubmnail.short_description = 'Thumbnail'
images_thubmnail.allow_tags = True  # let the admin render the raw HTML
# Admin list configuration for each content model; every ModelAdmin is
# registered immediately after its definition.
# NOTE(review): `model = ...` is only honored by InlineModelAdmin, not
# ModelAdmin — harmless here, but confirm it is intentional.
class TeamMemberAdmin(admin.ModelAdmin):
    model = TeamMember
    list_display = ['full_name', 'sort_weight', 'show_as_team']
admin.site.register(TeamMember, TeamMemberAdmin)

class NewsItemAdmin(admin.ModelAdmin):
    model = NewsItem
    list_display = ['id', 'title', 'publication_date', 'show', 'author']
admin.site.register(NewsItem, NewsItemAdmin)

class EventAdmin(admin.ModelAdmin):
    model = Event
    list_display = ['title', 'location', 'date_and_time']
admin.site.register(Event, EventAdmin)

class PostAdmin(admin.ModelAdmin):
    model = GenericPost
    list_display = ['title', 'category', 'publication_date']
admin.site.register(GenericPost, PostAdmin)

class CategoryAdmin(admin.ModelAdmin):
    model = PostCategory
    list_display = ['name', 'added_date']
admin.site.register(PostCategory, CategoryAdmin)

class ImageAdmin(admin.ModelAdmin):
    model = Image
    # First column is the rendered thumbnail callable defined above.
    list_display = [images_thubmnail, 'alt', 'image_caption', 'image', ]
admin.site.register(Image, ImageAdmin)

class TagAdmin(admin.ModelAdmin):
    model = Tag
    list_display = ['name', 'added_date']
admin.site.register(Tag, TagAdmin)
|
le uses imp for python up to 3.2 and importlib for python 3.3 on; the
correct implementation is delegated to _compatibility.
This module also supports import autocompletion, which means to complete
statements like ``from datetim`` (curser at the end would return ``datetime``).
"""
from __future__ import with_statement
import os
import pkgutil
import sys
import itertools
from jedi._compatibility import find_module
from jedi import modules
from jedi import common
from jedi import debug
from jedi.parser import representation as pr
from jedi import cache
import builtin
import evaluate
# for debugging purposes only
imports_processed = 0
class ModuleNotFound(Exception):
    """Raised when an import path cannot be resolved to a module."""
    pass
class ImportPath(pr.Base):
"""
An ImportPath is the path of a `pr.Import` object.
"""
    class GlobalNamespace(object):
        """Sentinel scope that stands in for the global module namespace."""
        def __init__(self):
            self.line_offset = 0
    # Replace the class with a singleton instance; callers compare against
    # `ImportPath.GlobalNamespace` by identity.
    GlobalNamespace = GlobalNamespace()
    def __init__(self, import_stmt, is_like_search=False, kill_count=0,
                 direct_resolve=False, is_just_from=False):
        """Build the dotted import path for `import_stmt`.

        kill_count > 0 chops trailing parts off the path (a "partial"
        import); is_like_search additionally drops the part being completed.
        """
        self.import_stmt = import_stmt
        self.is_like_search = is_like_search
        self.direct_resolve = direct_resolve
        self.is_just_from = is_just_from
        # any positive kill_count marks this as a partial import
        self.is_partial_import = bool(max(0, kill_count))
        path = import_stmt.get_parent_until().path
        # directory of the module containing the import, if it has a file
        self.file_path = os.path.dirname(path) if path is not None else None
        # rest is import_path resolution
        self.import_path = []
        if import_stmt.from_ns:
            self.import_path += import_stmt.from_ns.names
        if import_stmt.namespace:
            if self._is_nested_import() and not direct_resolve:
                # only keep the first name; the rest is faked later
                self.import_path.append(import_stmt.namespace.names[0])
            else:
                self.import_path += import_stmt.namespace.names
        # drop trailing parts for kill_count / like-search
        for i in range(kill_count + int(is_like_search)):
            self.import_path.pop()
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self.import_stmt)
def _is_nested_import(self):
"""
This checks for the special case of nested imports, without aliases and
from statement::
import foo.bar
"""
return not self.import_stmt.alias and not self.import_stmt.from_ns \
and len(self.import_stmt.namespace.names) > 1 \
and not self.direct_resolve
    def _get_nested_import(self, parent):
        """
        See documentation of `self._is_nested_import`.
        Generates an Import statement, that can be used to fake nested imports.
        """
        i = self.import_stmt
        # This is not an existing Import statement. Therefore, set position to
        # 0 (0 is not a valid line number).
        zero = (0, 0)
        # drop the first name: `import foo.bar` becomes a fake `import bar`
        names = i.namespace.names[1:]
        n = pr.Name(i._sub_module, names, zero, zero, self.import_stmt)
        new = pr.Import(i._sub_module, zero, zero, n)
        new.parent = parent
        debug.dbg('Generated a nested import: %s' % new)
        return new
    def get_defined_names(self, on_import_stmt=False):
        """Return the names this import path makes available.

        For the global namespace this yields importable module names
        (including relative-import results); for concrete module scopes it
        yields the names defined inside them.
        """
        names = []
        for scope in self.follow():
            if scope is ImportPath.GlobalNamespace:
                if self._is_relative_import() == 0:
                    # absolute import: offer everything importable
                    names += self._get_module_names()
                if self.file_path is not None:
                    path = os.path.abspath(self.file_path)
                    # climb one directory per relative level beyond the first
                    for i in range(self.import_stmt.relative_count - 1):
                        path = os.path.dirname(path)
                    names += self._get_module_names([path])
                    if self._is_relative_import():
                        rel_path = self._get_relative_path() + '/__init__.py'
                        # missing __init__.py just means no extra names
                        with common.ignored(IOError):
                            m = modules.Module(rel_path)
                            names += m.parser.module.get_defined_names()
            else:
                if on_import_stmt and isinstance(scope, pr.Module) \
                        and scope.path.endswith('__init__.py'):
                    # package: also offer its submodules (incl. namespace pkgs)
                    pkg_path = os.path.dirname(scope.path)
                    paths = self._namespace_packages(pkg_path, self.import_path)
                    names += self._get_module_names([pkg_path] + paths)
                if self.is_just_from:
                    # In the case of an import like `from x.` we don't need to
                    # add all the variables.
                    if ['os'] == self.import_path and not self._is_relative_import():
                        # os.path is a hardcoded exception, because it's a
                        # ``sys.modules`` modification.
                        p = (0, 0)
                        names.append(pr.Name(self.GlobalNamespace, [('path', p)],
                                             p, p, self.import_stmt))
                    continue
                for s, scope_names in evaluate.get_names_of_scope(scope,
                                                                  include_builtin=False):
                    for n in scope_names:
                        if self.import_stmt.from_ns is None \
                                or self.is_partial_import:
                            # from_ns must be defined to access module
                            # values plus a partial import means that there
                            # is something after the import, which
                            # automatically implies that there must not be
                            # any non-module scope.
                            continue
                        names.append(n)
        return names
def _get_module_names(self, search_path=None):
"""
Get the names of all modules in the search_path. This means file names
and not names defined in the files.
"""
def generate_name(name):
return pr.Name(self.GlobalNamespace, [(name, inf_pos)],
inf_pos, inf_pos, self.import_stmt)
names = []
inf_pos = float('inf'), float('inf')
# add builtin module names
if search_path is None:
names += [generate_name(name) for name in sys.builtin_module_names]
if search_path is None:
search_path = self._sys_path_with_modifications()
for module_loa | der, name, is_pkg in pkgutil.iter_modules(search_path):
names.append(generate_name(name))
return names
    def _sys_path_with_modifications(self):
        # If you edit e.g. gunicorn, there will be imports like this:
        # `from gunicorn import something`. But gunicorn is not in the
        # sys.path. Therefore look if gunicorn is a parent directory, #56.
        in_path = []
        if self.import_path:
            # NOTE(review): assumes self.file_path is not None whenever
            # import_path is non-empty -- confirm against callers.
            parts = self.file_path.split(os.path.sep)
            for i, p in enumerate(parts):
                if p == self.import_path[0]:
                    # prepend the directory *above* the matching parent dir
                    new = os.path.sep.join(parts[:i])
                    in_path.append(new)
        module = self.import_stmt.get_parent_until()
        return in_path + modules.sys_path_with_modifications(module)
def follow(self, is_goto=False):
"""
Returns the imported modules.
"""
if evaluate.follow_statement.push_stmt(self.import_stmt):
# check recursion
return []
if self.import_path:
try:
scope, rest = self._follow_file_system()
except ModuleNotFound:
debug.warning('Module not found: ' + str(self.import_stmt))
evaluate.follow_statement.pop_stmt()
return []
scopes = [scope]
scopes += remove_star_imports(scope)
# follow the rest of the import (not FS -> classes, functions)
if len(rest) > 1 or rest and self.is_like_search:
scopes = []
if ['os', 'path'] == self.import_path[:2] \
and not self._is_relative_import():
# This is a huge exception, we follow a nested import
# ``os.path``, because it's a very important one in Python
# |
from __future__ import division
from collections import deque
class MovingAverage(object):
    """Moving average over the most recent ``size`` values of a stream."""

    def __init__(self, size):
        """Create an averager whose window holds ``size`` elements.

        :type size: int
        """
        # deque(maxlen=size) silently evicts the oldest value on overflow
        self.queue = deque(maxlen=size)

    def next(self, val):
        """Push ``val`` into the window and return the current average.

        :type val: int
        :rtype: float
        """
        window = self.queue
        window.append(val)
        return sum(window) / len(window)
# Given a stream of integers and a window size,
# calculate the moving average of all integers in the sliding window.
if __name__ == '__main__':
    averager = MovingAverage(3)
    stream = [1, 10, 3, 5]
    expected = [1, (1 + 10) / 2, (1 + 10 + 3) / 3, (10 + 3 + 5) / 3]
    for value, want in zip(stream, expected):
        assert averager.next(value) == want
|
# -*- coding: utf-8 -*-
"""Racndicmd setup.py."""
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import os
import sys
class Tox(TestCommand):
    """setuptools ``test`` command that delegates the test run to tox."""

    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]

    def initialize_options(self):
        """Set default option values."""
        TestCommand.initialize_options(self)
        self.tox_args = None

    def finalize_options(self):
        """Finish option setup; tox discovers the test suite itself."""
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        """Invoke tox (forwarding --tox-args) and exit with its status."""
        import shlex
        import tox
        if self.tox_args:
            exit_code = tox.cmdline(args=shlex.split(self.tox_args))
        else:
            exit_code = tox.cmdline(self.tox_args)
        sys.exit(exit_code)
# Trove classifiers: supported interpreters and package topics.
classifiers = [
    "Development Status :: 3 - Alpha",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "Topic :: System :: Networking",
    "Topic :: System :: Networking :: Monitoring",
    "Topic :: Utilities",
]
here = os.path.abspath(os.path.dirname(__file__))
# Long description comes straight from the README.
with open(os.path.join(here, 'README.rst')) as _file:
    README = _file.read()
requires = []
# NOTE(review): this *writes* an (empty) requirements.txt from `requires`
# rather than reading one -- presumably intentional since the package has
# no runtime dependencies, but confirm.
with open('requirements.txt', 'w') as _file:
    _file.write('\n'.join(requires))
EXCLUDE_FROM_PACKAGES = []
setup(
    name="rancidcmd",
    version="0.1.12",
    description='RANCID Command Wrapper.',
    long_description=README,
    author='Toshikatsu Murakoshi',
    author_email='mtoshi.g@gmail.com',
    url='https://github.com/mtoshi/rancidcmd',
    license='MIT',
    classifiers=classifiers,
    packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
    py_modules=['rancidcmd'],
    install_requires=requires,
    include_package_data=True,
    tests_require=['tox'],
    # `python setup.py test` runs the Tox command defined above
    cmdclass={'test': Tox},
)
|
from .c | ompany import Company
from .contact import Contact
from .deal import Deal
from .note import Note
from .requester import Requester
class AgileCRM:
    """Facade bundling the AgileCRM API resources behind one client."""

    def __init__(self, domain, email, api_key):
        # every resource shares the same authenticated requester
        shared_requester = Requester(domain, email, api_key)
        self.contact = Contact(requester=shared_requester)
        self.company = Company(requester=shared_requester)
        self.deal = Deal(requester=shared_requester)
        self.note = Note(requester=shared_requester)
|
#!/usr/bin/env python3
#!/usr/bin/python
# https://en.wikipedia.org/wiki/Matplotlib
import numpy
import matplotlib.pyplot as plt
from numpy.random import rand
# Scatter-plot 100 random (x, y) points drawn uniformly from [0, 1).
xs = rand(100)
ys = rand(100)
plt.scatter(xs, ys)
plt.show()
'''
A MLP algorithm example using TensorFlow library.
This example is using generate random distribution
(http://cs231n.github.io/neural-networks-case-study/)
Code references:
https://github.com/shouvikmani/Tensorflow-Deep-Learning-Tutorial/blob/master/tutorial.ipynb
https://github.com/aymericdamien/TensorFlow-Examples/
http://cs231n.github.io/neural-networks-case-study/
The source code was modified by S.W. Oh.
'''
from __future__ import print_function
import tensorflow as tf
import numpy as np
from matplotlib import pyplot as plt
# import Dense (fully-connected) layer
from util.layer import Dense
###### Generate 2D spiral random data and Plot ###################################
N = 200 # number of points per class
D = 2 # dimensionality
K = 4 # number of classes
X_train = np.zeros((N*K,D)) # data matrix (each row = single example)
y_train = np.zeros((N*K,K)) # class labels (one-hot rows)
yc = np.zeros(N*K, dtype='uint8')  # integer class ids, used for plot colors
for j in range(K):
    ix = range(N*j,N*(j+1))
    r = np.linspace(0.0,1,N) # radius
    t = np.linspace(j*4.8,(j+1)*4.8,N) + np.random.randn(N)*0.2 # theta
    X_train[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
    y_train[ix,j] = 1  # one-hot label for class j
    yc[ix] = j
# lets visualize the data:
plt.scatter(X_train[:, 0], X_train[:, 1], c=yc, s=40, cmap=plt.cm.Spectral)
plt.show()
# Random shuffle (same permutation keeps X, y and yc aligned)
perm = np.random.permutation(len(y_train))
X_train = X_train[perm,:]
y_train = y_train[perm,:]
yc = yc[perm]
# Parameters
learning_rate = 0.01
training_epochs = 500
batch_size = 10
display_step = 1  # print the loss every `display_step` epochs
###### Build graph ######################################################
# Place holders
x = tf.placeholder(tf.float32, [None, 2]) # 2 dimensional input
y = tf.placeholder(tf.float32, [None, 4]) # 4 classes (one-hot)
# Construct MLP with two hidden layer (2 -> 64 -> 64 -> 4)
h = Dense(x, [2,64], 'ih')
h = tf.nn.relu(h)
h = Dense(h, [64,64], 'hh')
h = tf.nn.relu(h)
logit = Dense(h, [64,4], 'hl')
pred = tf.nn.softmax(logit) # Softmax
# Directly compute loss from logit (to ensure stability and avoid overflow)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logit, labels=y))
# Define optimizer and train_op
train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
###### Start Training ###################################################
# Open a Session
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0.
        total_batch = int(len(y_train)/batch_size)
        # Loop over all batches.
        # Fix: slice from i*batch_size. The original used X_train[i:i+batch_size],
        # which produced overlapping windows and only ever visited the first
        # total_batch + batch_size rows of the shuffled training set.
        for i in range(total_batch):
            start = i * batch_size
            batch_xs = X_train[start:start+batch_size, :]
            batch_ys = y_train[start:start+batch_size, :]
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c = sess.run([train_op, cost], feed_dict={x: batch_xs, y: batch_ys})
            # Compute average loss
            avg_cost += c / total_batch
        # Display logs per epoch step
        if (epoch+1) % display_step == 0:
            print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(avg_cost))
    print("Optimization Finished!")
    # Visualize decision boundary: evaluate the net on a dense grid and
    # color each cell by its argmax class.
    h = 0.02  # grid resolution (reuses/shadows the layer variable h; safe here)
    x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
    y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    Z = sess.run(pred, feed_dict={x: np.c_[xx.ravel(), yy.ravel()]})
    Z = np.argmax(Z, axis=1)
    Z = Z.reshape(xx.shape)
    fig = plt.figure()
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral, alpha=0.8)
    plt.scatter(X_train[:, 0], X_train[:, 1], c=yc, s=40, cmap=plt.cm.Spectral)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()
|
# Copyright 2016 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module implements mock classes for minemeld.traced tests
"""
import gevent
import gevent.event
import logging
from minemeld.traced.storage import TableNotFound
LOG = logging.getLogger(__name__)
CLOCK = -1
def _get_clock():
global CLOCK
CLOCK += 1
return CLOCK
MOCK_TABLES = []


class MockTable(object):
    """In-memory stand-in for a minemeld trace table."""

    def __init__(self, name, create_if_missing=True):
        self.name = name
        self.create_if_missing = create_if_missing
        self.last_used = None    # clock tick of the most recent put()
        self.refs = []           # outstanding reference ids
        self.db_open = True
        self.db = {}             # key + 16-hex-digit counter -> value
        self.max_counter = -1    # last counter appended to a key

    def add_reference(self, refid):
        """Track refid as an open reference to this table."""
        self.refs.append(refid)

    def remove_reference(self, refid):
        """Drop refid; unknown ids are silently ignored."""
        if refid in self.refs:
            self.refs.remove(refid)

    def ref_count(self):
        """Number of outstanding references."""
        return len(self.refs)

    def put(self, key, value):
        """Store value under key plus a monotonically increasing counter."""
        self.last_used = _get_clock()
        self.max_counter += 1
        self.db[key + '%016x' % self.max_counter] = value

    def backwards_iterator(self, timestamp, counter):
        """Entries at or before (timestamp, counter), newest first.

        NOTE: Python 2 only (uses dict.iteritems and the cmp builtin),
        matching the rest of this module.
        """
        starting_key = '%016x%016x' % (timestamp, counter)
        selected = [[k, v] for k, v in self.db.iteritems() if k <= starting_key]
        return sorted(selected, cmp=lambda x, y: cmp(x[0], y[0]), reverse=True)

    def close(self):
        """Mark the table as closed."""
        self.db_open = False

    @staticmethod
    def oldest_table():
        """Name of the alphabetically first registered table, or None."""
        names = [t.name for t in MOCK_TABLES]
        LOG.debug(names)
        if not names:
            return None
        return sorted(names)[0]
def table_factory(name, create_if_missing=True):
    """Return the registered mock table called `name`, creating it on demand.

    Raises TableNotFound when the table is absent and creation is disabled.
    """
    for existing in MOCK_TABLES:
        if existing.name == name:
            return existing
    if not create_if_missing:
        raise TableNotFound()
    new_table = MockTable(name, create_if_missing=create_if_missing)
    MOCK_TABLES.append(new_table)
    return new_table
def table_cleanup():
    """Forget every table registered through table_factory()."""
    global MOCK_TABLES
    MOCK_TABLES = []
class MockStore(object):
    """In-memory stand-in for the minemeld trace store."""

    def __init__(self, config=None):
        self.config = {} if config is None else config
        self.writes = []         # every write() call, in order
        self.db = {}             # '%016x%016x' (timestamp+counter) -> log
        self.counter = 0         # running write counter
        self.release_alls = []   # refs passed to release_all()

    def write(self, timestamp, log):
        """Record `log` under `timestamp` and the running counter."""
        self.writes.append({
            'timestamp': timestamp,
            'log': log
        })
        self.db['%016x%016x' % (timestamp, self.counter)] = log
        self.counter += 1

    def iterate_backwards(self, ref, timestamp, counter):
        """Yield entries at or before (timestamp, counter), newest first.

        Every entry is preceded by a 'test message' marker (`c % 1 == 0`
        is always true; kept to mirror the mocked behaviour).
        NOTE: Python 2 only (dict.iteritems / cmp).
        """
        cutoff = '%016x%016x' % (timestamp, counter)
        selected = [[k, v] for k, v in self.db.iteritems() if k <= cutoff]
        selected = sorted(selected, cmp=lambda x, y: cmp(x[0], y[0]), reverse=True)
        for position, entry in enumerate(selected):
            if position % 1 == 0:
                yield {'msg': 'test message'}
            yield {'timestamp': entry[0], 'log': entry[1]}

    def release_all(self, ref):
        """Remember that `ref` released all of its queries."""
        self.release_alls.append(ref)
def store_factory(config=None):
    """Factory hook mirroring the real storage API; returns a fresh MockStore."""
    return MockStore(config=config)
MOCK_QUERIES = []  # every query created through query_factory()
class MockQuery(gevent.Greenlet):
    """Greenlet mock of a trace query: idles until finish_event is set."""
    def __init__(self, store, query, timestamp, counter,
                 num_lines, uuid, redis_config):
        # keep all constructor arguments so tests can inspect them
        self.store = store
        self.query = query
        self.timestamp = timestamp
        self.counter = counter
        self.num_lines = num_lines
        self.uuid = uuid
        self.redis_config = redis_config
        # set by the test to let _run() terminate
        self.finish_event = gevent.event.Event()
        super(MockQuery, self).__init__()
    def kill(self):
        LOG.debug("%s killed", self.uuid)
        super(MockQuery, self).kill()
    def _run(self):
        # blocks until the test signals completion
        LOG.debug("%s started", self.uuid)
        self.finish_event.wait()
        LOG.debug("%s finished", self.uuid)
class MockEQuery(gevent.Greenlet):
    """Failing variant of MockQuery: raises once finish_event is set."""
    def __init__(self, store, query, timestamp, counter,
                 num_lines, uuid, redis_config):
        # keep all constructor arguments so tests can inspect them
        self.store = store
        self.query = query
        self.timestamp = timestamp
        self.counter = counter
        self.num_lines = num_lines
        self.uuid = uuid
        self.redis_config = redis_config
        # set by the test to let _run() terminate (with an error)
        self.finish_event = gevent.event.Event()
        super(MockEQuery, self).__init__()
    def kill(self):
        LOG.debug("%s killed", self.uuid)
        super(MockEQuery, self).kill()
    def _run(self):
        # simulates a query that crashes after being released
        LOG.debug("%s started", self.uuid)
        self.finish_event.wait()
        raise RuntimeError("BAD BAD QUERY!")
def query_factory(store, query, timestamp, counter,
                  num_lines, uuid, redis_config):
    """Create, register and return a query mock.

    The literal query string "bad" yields a MockEQuery (which fails on
    completion); anything else yields a plain MockQuery.
    """
    query_class = MockEQuery if query == "bad" else MockQuery
    instance = query_class(store, query, timestamp, counter,
                           num_lines, uuid, redis_config)
    MOCK_QUERIES.append(instance)
    return instance
def query_cleanup():
    """Forget every query registered through query_factory()."""
    global MOCK_QUERIES
    MOCK_QUERIES = []
|
from unittest import TestCase
from compile import add_jmp_opcodes, break_to_atoms
from compile.jmp_add import travel, shuffle
from opcode_ import PRT
class TestJMPAdd(TestCase):
    """Tests for JMP opcode insertion and atom traversal."""

    @staticmethod
    def _chain_from(text):
        """Build a node chain by concatenating one-character chains."""
        chain = PRT.build_from_string(text[0], None)
        for char in text[1:]:
            chain = chain + PRT.build_from_string(char, None)
        return chain

    def _assert_travels_to_end(self, node_chain, atoms):
        """Walk `atoms` via travel() and check the chain's last node is reached."""
        atom = atoms[0]
        for _ in range(len(node_chain) - 1):
            next_atom = travel(atoms, atom)
            if next_atom:
                atom = next_atom
            else:
                self.fail("Chain ended too soon")
        self.assertIn(node_chain[-1], atom)

    def test_added_init_jmp(self):
        node_chain = PRT.build_from_string('u', None)
        atoms = add_jmp_opcodes(break_to_atoms(node_chain))
        self.assertEqual(len(atoms), 2)
        self.assertEqual(len(atoms[0]), 2)
        self.assertEqual(len(atoms[1]), 2)

    def test_nothing_happend_on_one_and_no_jmp_init(self):
        atom = PRT.build_from_string('i', None)
        atoms = add_jmp_opcodes(break_to_atoms(atom),
                                first_step_is_jmp=False)
        self.assertEqual(atoms[0][0], atom[0])
        self.assertEqual(atoms[0][1], atom[1])
        self.assertEqual(len(atoms), 1)
        self.assertEqual(len(atoms[0]), 2)

    def test_first_jmp_points_to_first_node(self):
        atom = PRT.build_from_string('o', None)
        first_node = atom[0]
        atoms = add_jmp_opcodes(break_to_atoms(atom))
        self.assertEqual(atoms[0][0].target_uuid,
                         first_node.uuid)

    def test_reach_to_end(self):
        node_chain = self._chain_from('TheAnosmic')
        atoms = add_jmp_opcodes(break_to_atoms(node_chain))
        self._assert_travels_to_end(node_chain, atoms)

    def test_reach_to_end_with_shuffle(self):
        # TODO why some are NC of NC and some NC of NODs?
        node_chain = self._chain_from('TheAnosmic')
        atoms = shuffle(add_jmp_opcodes(break_to_atoms(node_chain)))
        self._assert_travels_to_end(node_chain, atoms)
|
from django.contrib import admin
from .models import Album, Song
# Re | gister your models here.
# Register both music models with the default admin site.
for music_model in (Album, Song):
    admin.site.register(music_model)
|
#!/usr/bin/env python
from traits.api import HasStrictTraits, Float
from mock import Mock
class MyClass(HasStrictTraits):
    """Strict-traits example with a single float trait and a mutator."""

    number = Float(2.0)

    def add_to_number(self, value):
        """ Add the value to `number`. """
        self.number = self.number + value
my_class = MyClass()
# Using my_class.add_to_number = Mock() will fail.
# (HasStrictTraits rejects assigning attributes that are not declared traits.)
# But setting the mock on the instance `__dict__` works.
my_class.__dict__['add_to_number'] = Mock()
# We can now use the mock in our tests.
my_class.add_to_number(42)
# NOTE: Python 2 print statement; this script targets Python 2.
print my_class.add_to_number.call_args_list
|
if thisage > age:
age = thisage
return age
def has_other_reviewers(self, excludeusers):
'''Determine if the patch has been reviewed by any
users that are not in 'excludeusers'''
hasReviewers = False
for approval in self.approvals:
if not approval.is_user_in_list(excludeusers):
hasReviewers = True
return hasReviewers
def has_reviewers(self, includeusers):
'''Determine if the patch has been reviewed by any
users that are in 'includeusers'''
hasReviewers = False
for approval in self.approvals:
if approval.user is None:
continue
if approval.is_user_in_list(includeusers):
hasReviewers = True
return hasReviewers
@staticmethod
def from_json(data):
files = []
for f in data.get("files", []):
files.append(ModelFile.from_json(f))
approvals = []
for f in data.get("approvals", []):
approvals.append(ModelApproval.from_json(f))
user = None
if "uploader" in data:
user = ModelUser.from_json(data["uploader"])
comments = []
for c in data.get("comments", []):
comments.append(ModelComment.from_json(c))
return ModelPatch(int(data.get("number", 0)),
data.get("revision"),
data.get("ref"),
user,
data.get("createdOn"),
approvals,
files,
comments)
class ModelChange(ModelBase):
    """A Gerrit change: metadata plus its ordered list of patch sets."""

    def __init__(self, project, branch, topic, id, number, subject, owner,
                 url, createdOn, lastUpdated, status, patches=None,
                 comments=None):
        """Store change metadata.

        Fix: `patches` and `comments` previously used mutable default
        arguments ([]), so every instance constructed without them shared
        a single list. They now default to fresh empty lists.
        """
        self.project = project
        self.branch = branch
        self.topic = topic
        self.id = id
        self.number = number
        self.subject = subject
        self.owner = owner
        self.url = url
        # Gerrit sends epoch seconds as strings/floats; normalize to int.
        if createdOn is not None:
            self.createdOn = int(createdOn)
        else:
            self.createdOn = None
        if lastUpdated is not None:
            self.lastUpdated = int(lastUpdated)
        else:
            self.lastUpdated = None
        self.status = status
        self.patches = patches if patches is not None else []
        self.comments = comments if comments is not None else []

    def get_current_patch(self):
        '''Latest patch set, or None if there are none.'''
        if len(self.patches) == 0:
            return None
        return self.patches[len(self.patches) - 1]

    def get_first_patch(self):
        '''Oldest patch set, or None if there are none.'''
        if len(self.patches) == 0:
            return None
        return self.patches[0]

    def get_reviewer_not_nacked_patch(self):
        '''Walking backwards from the newest patch, return the oldest
        patch in the trailing run of patches that were not NACKed by a
        reviewer (None if the newest patch is NACKed).'''
        prev = None
        for patch in reversed(self.patches):
            if patch.is_reviewer_nacked():
                break
            prev = patch
        return prev

    def get_current_age(self):
        '''Age in seconds of the latest patch.
        NOTE: raises AttributeError when there are no patches.'''
        patch = self.get_current_patch()
        return patch.get_age(time.time())

    def get_first_age(self):
        '''Age in seconds of the oldest patch.
        NOTE: raises AttributeError when there are no patches.'''
        patch = self.get_first_patch()
        return patch.get_age(time.time())

    def get_reviewer_not_nacked_age(self):
        '''Age in seconds of the oldest not-NACKed trailing patch, or 0.'''
        patch = self.get_reviewer_not_nacked_patch()
        if patch is None:
            return 0
        return patch.get_age(time.time())

    @staticmethod
    def is_user_in_list(users, user):
        '''True if user matches 'users' by username or email.'''
        if user.username is not None and user.username in users:
            return True
        if user.email is not None and user.email in users:
            return True
        return False

    def has_any_other_reviewers(self, excludeusers):
        '''Determine if any patch in the change has been
        reviewed by any user not in the list of 'excludeusers'.'''
        return any(patch.has_other_reviewers(excludeusers)
                   for patch in self.patches)

    def has_any_reviewers(self, includeusers):
        '''Determine if any patch in the change has been
        reviewed by any user in the list of 'includeusers'.'''
        return any(patch.has_reviewers(includeusers)
                   for patch in self.patches)

    def has_current_reviewers(self, includeusers):
        '''Determine if the current patch version has
        been reviewed by any of the users in 'includeusers'. '''
        patch = self.get_current_patch()
        if patch is None:
            return False
        return patch.has_reviewers(includeusers)

    def has_current_other_reviewers(self, excludeusers):
        '''Determine if the current patch version has
        been reviewed by any of the users not in 'excludeusers'. '''
        patch = self.get_current_patch()
        if patch is None:
            return False
        return patch.has_other_reviewers(excludeusers)

    def has_owner(self, includeusers):
        '''Determine if the change is owned by anyone
        in the 'includeusers' list.'''
        return self.is_user_in_list(includeusers, self.owner)

    @staticmethod
    def from_json(data):
        '''Build a ModelChange from a Gerrit change JSON dict.'''
        patches = [ModelPatch.from_json(p) for p in data.get("patchSets", [])]
        user = ModelUser.from_json(data["owner"]) if "owner" in data else None
        number = int(data.get("number")) if "number" in data else None
        comments = [ModelComment.from_json(c) for c in data.get("comments", [])]
        return ModelChange(data.get("project", None),
                           data.get("branch", None),
                           data.get("topic", None),
                           data.get("id", None),
                           number,
                           data.get("subject", None),
                           user,
                           data.get("url", None),
                           data.get("createdOn", None),
                           data.get("lastUpdated", None),
                           data.get("status", None),
                           patches,
                           comments)
class ModelEvent(ModelBase):
    """Base class for Gerrit stream events (change + patch + acting user)."""

    def __init__(self, change, patch, user):
        self.change = change
        self.patch = patch
        self.user = user

    def is_user_in_list(self, users):
        """True if this event's user matches an entry in `users`."""
        if self.user is None:
            return False
        return self.user.is_in_list(users)

    @staticmethod
    def from_json(data):
        """Dispatch to the event subclass matching data["type"]."""
        event_classes = {
            "comment-added": ModelEventCommentAdd,
            "patchset-created": ModelEventPatchCreate,
            "change-merged": ModelEventChangeMerge,
            "change-abandoned": ModelEventChangeAbandon,
            "change-restored": ModelEventChangeRestore,
            "ref-updated": ModelEventRefUpdated,
            "reviewer-added": ModelEventReviewerAdded,
            "topic-changed": ModelEventTopicChanged,
        }
        event_type = data["type"]
        if event_type not in event_classes:
            raise Exception("Unknown event '%s'" % data["type"])
        return event_classes[event_type].from_json(data)
class ModelEventCommentAdd(ModelEvent):
    """Event: a review comment (possibly carrying approvals) was added."""

    def __init__(self, change, patch, user, comment, approvals):
        ModelEvent.__init__(self, change, patch, user)
        self.comment = comment
        self.approvals = approvals

    @staticmethod
    def from_json(data):
        """Build the event from a comment-added JSON payload."""
        return ModelEventCommentAdd(
            ModelChange.from_json(data["change"]),
            ModelPatch.from_json(data["patchSet"]),
            ModelUser.from_json(data["author"]),
            data["comment"],
            [ModelApproval.from_json(a) for a in data.get("approvals", [])],
        )
class ModelEventPatchCreate(ModelEvent):
def __init_ |
#!/usr/bin/python
import Adafruit_GPIO as GPIO
import time, os
#print "GETTING GPIO OBJECT"
# Obtain the platform-specific GPIO object for this board.
gpio = GPIO.get_platform_gpio()
#print "SETUP CSID1"
#gpio.setup("CSID1", GPIO.OUT)
#print os.path.exists('/sys/class/gpio/gpio133')
#print "SETUP XIO-P1"
#gpio.setup("XIO-P1", GPIO.IN)
#GPIO.setup("U14_13", GPIO.IN)
#print "READING XIO-P1"
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#time.sleep(1)
#print "LOW", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.HIGH)
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#print "LOW", gpio.input("XIO-P1")
#this example will test out CHIP XIO-P0 in to XIO-P1
#jumper the pins to test
#
#my test required sudo to work, gpio access requires sudo before changing permissions
#gpio.setup("XIO-P0", GPIO.OUT)
#gpio.setup("XIO-P1", GPIO.IN)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#gpio.output("XIO-P0", GPIO.HIGH)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#time.sleep(4)
#gpio.output("XIO-P0", GPIO.LOW)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#print "CLEANUP"
#gpio.cleanup()
# Live code: configure XIO-P0 as an output and drive it high.
# NOTE(review): no gpio.cleanup() is called for the live code path;
# the commented-out print statements above are Python 2 syntax.
gpio.setup("XIO-P0", GPIO.OUT)
gpio.output("XIO-P0", GPIO.HIGH)
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <contacto@i3visio.com>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Teamtreehouse(Platform):
    """<Platform> implementation for looking up Teamtreehouse profiles."""

    def __init__(self):
        self.platformName = "Teamtreehouse"
        self.tags = ["social", "news"]

        # Modes supported by this platform: only usufy (username lookup).
        self.isValidMode = {
            "phonefy": False,
            "usufy": True,
            "searchfy": False,
        }

        # Search URL per mode; "<usufy>" is replaced by the queried nickname.
        self.url = {
            "usufy": "http://teamtreehouse.com/" + "<usufy>",
        }

        # No credentials are needed to fetch public profile pages.
        self.needsCredentials = {
            "usufy": False,
        }

        # Regular expression a query must match to be launched
        # ('.+' accepts any non-empty username).
        self.validQuery = {
            "usufy": ".+",
        }

        # Markers in the response implying the profile does not exist.
        self.notFoundText = {
            "usufy": ["<title>Sorry, we can't find the page you are looking for</title>"],
        }

        # Regular expressions looked up in each mode's response
        # (none defined for usufy yet).
        self.fieldsRegExp = {
            "usufy": {},
        }

        # Filled at runtime with the fields actually found.
        self.foundFields = {}
|
turn redirect(url)
@rendered_with("opendebates/list_ideas.html")
@allow_http("GET", "POST")
def questions(request):
# If the user is GETting the list of questions, then redirect to the list_ideas
# page for this Debate.
if request.method == 'GET':
return redirect(reverse("list_ideas"))
if not request.debate.allow_voting_and_submitting_questions:
raise Http404
form = QuestionForm(request.POST, request=request)
if not form.is_valid():
# form = QuestionForm(request=request)
messages.error(request, _('You have some errors in the form'))
return {
'form': form,
'categories': Category.objects.filter(debate=request.debate),
'ideas': [],
}
if not request.user.is_authenticated:
request.session['opendebates.stashed_submission'] = {
"category": request.POST['category'],
"headline": request.POST['headline'],
"question": request.POST['question'],
"citation": request.POST.get("citation"),
}
return redirect('registration_register')
category = request.POST.get('category')
form_data = form.cleaned_data
voter, created = Voter.objects.get_or_create(
email=request.user.email,
defaults=dict(
source=request.COOKIES.get('opendebates.source')
)
)
previous_debate_time = request.debate.previous_debate_time
created_at = timezone.now()
idea = Submission.objects.create(
voter=voter,
category_id=category,
headline=form_data['headline'],
followup=form_data['question'],
idea=(u'%s %s' % (form_data['headline'], form_data['question'])).strip(),
citation=form_data['citation'],
created_at=created_at,
ip_address=get_ip_address_from_request(request),
approved=True,
votes=1,
local_votes=1 if voter.state and voter.state == request.debate.debate_state else 0,
current_votes=(1 if previous_debate_time is None or created_at > previous_debate_time
else 0),
source=request.COOKIES.get('opendebates.source'),
)
Vote.objects.create(
submission=idea,
voter=voter,
source=idea.source,
ip_address=get_ip_address_from_request(request),
sessionid=request.session.session_key or '',
request_headers=get_headers_from_request(request),
created_at=created_at,
is_suspicious=False,
is_invalid=False,
)
send_email("submitted_new_idea", {"idea": idea})
send_email("notify_moderators_submitted_new_idea", {"idea": idea})
url = reverse("vote", kwargs={'id': idea.id})
return redirect(url + "#created=%s" % idea.id)
@rendered_with("opendebates/changelog.html")
def changelog(request):
    """Render the moderation changelog: every submission that was removed
    (unapproved) or merged into another question, newest first."""
    removed_or_merged = Q(approved=False) | Q(duplicate_of__isnull=False)
    moderated = (Submission.objects.filter(removed_or_merged)
                 .select_related('duplicate_of')
                 .order_by('-moderated_at', '-id'))
    return {'moderated': moderated}
class OpenDebatesRegistrationView(RegistrationView):
    """Registration view that also creates/updates the matching Voter record
    and rejects signups whose email already belongs to an existing user."""

    form_class = OpenDebatesRegistrationForm
    # Optional post-registration redirect target, captured from ?next= on GET.
    next = None
    prefix = None

    def get(self, request, *args, **kwargs):
        """Remember the ?next= parameter so it can be threaded into the
        template context."""
        self.next = request.GET.get('next', None)
        return super(OpenDebatesRegistrationView, self).get(request)

    def get_context_data(self, **kwargs):
        data = super(OpenDebatesRegistrationView, self).get_context_data(**kwargs)
        if self.next:
            data['next'] = self.next
        return data

    def form_valid(self, form):
        """Redirect to the duplicate-account page instead of registering when
        the email is already taken (case-insensitive match)."""
        User = get_user_model()
        if User.objects.filter(email__iexact=form.cleaned_data['email']).exists():
            return redirect(reverse('registration_duplicate'))
        return super(OpenDebatesRegistrationView, self).form_valid(form)

    def register(self, form):
        """Create the user, then upsert the Voter row keyed by email so a
        pre-existing anonymous voter gets linked to the new account."""
        new_user = super(OpenDebatesRegistrationView, self).register(form)
        voter, created = Voter.objects.update_or_create(
            email=form.cleaned_data['email'],
            defaults=dict(
                source=self.request.COOKIES.get('opendebates.source'),
                state=state_from_zip(form.cleaned_data['zip']),
                zip=form.cleaned_data['zip'],
                display_name=form.cleaned_data.get('display_name'),
                twitter_handle=form.cleaned_data.get('twitter_handle'),
                phone_number=form.cleaned_data.get('phone_number'),
                user=new_user,
            )
        )
        return new_user

    def get_form_kwargs(self):
        # The form needs the request (e.g. for captcha decisions below).
        kwargs = super(OpenDebatesRegistrationView, self).get_form_kwargs()
        kwargs.update({
            'request': self.request,
        })
        return kwargs

    def get_form(self, form_class=None):
        form = super(OpenDebatesRegistrationView, self).get_form(form_class)
        if not registration_needs_captcha(self.request):
            form.ignore_captcha()
        return form

    def get_success_url(self, user=None):
        # NOTE(review): ?next= is used unvalidated — looks like a potential
        # open redirect; confirm whether it should be checked against
        # allowed hosts.
        if self.request.GET.get('next'):
            return self.request.GET.get('next')
        else:
            return reverse('registration_complete')
def registration_complete(request):
    """Record that an account was just created, then send the user to the
    idea list."""
    session = request.session
    session['events.account_created'] = True
    return redirect(reverse('list_ideas'))
@rendered_with("registration/registration_duplicate.html")
def registration_duplicate(request):
    """Shown when a signup reuses an existing account's email; the template
    needs no context."""
    return {}
@rendered_with("opendebates/list_candidates.html")
@allow_http("GET")
def list_candidates(request):
    """List the current debate's candidates, sorted by last then first name."""
    roster = Candidate.objects.filter(debate=request.debate)
    return {
        'candidates': roster.order_by('last_name', 'first_name'),
    }
@rendered_with("opendebates/flag_report.html")
@allow_http("GET", "POST")
@login_required
def report(request, id):
    """Let a logged-in voter flag a question for removal."""
    # Staff may flag even when voting/submission is closed.
    if not (request.debate.allow_voting_and_submitting_questions or request.user.is_staff):
        raise Http404
    idea = get_object_or_404(Submission, pk=id, category__debate=request.debate)
    voter = Voter.objects.get(user=request.user)
    if request.method != 'POST':
        return {
            'idea': idea,
        }
    # get_or_create keeps repeated reports from creating duplicate flags.
    Flag.objects.get_or_create(
        to_remove=idea,
        voter=voter,
        duplicate_of=None,
        defaults={'note': request.POST.get("report_why")},
    )
    messages.info(request, _(u'This question has been flagged for removal.'))
    return redirect(idea)
@rendered_with("opendebates/flag_merge.html")
@allow_http("GET", "POST")
@login_required
def merge(request, id):
    """Let a logged-in voter flag a question as a duplicate to be merged."""
    # Staff may flag even when voting/submission is closed.
    if not (request.debate.allow_voting_and_submitting_questions or request.user.is_staff):
        raise Http404
    idea = get_object_or_404(Submission, pk=id, category__debate=request.debate)
    voter = Voter.objects.get(user=request.user)
    # A voter gets one flag (of any kind) per question.
    already_flagged = Flag.objects.filter(to_remove=idea, voter=voter).exists()
    if already_flagged:
        messages.info(request, _(u'You have already flagged this question.'))
        return redirect(idea)
    form = MergeFlagForm(idea=idea, voter=voter, data=request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()
        messages.info(request, _(u'This question has been flagged for merging.'))
        return redirect(idea)
    return {
        'idea': idea,
        'form': form,
    }
@rendered_with("opendebates/top_archive.html")
@allow_http("GET")
def top_archive(request, slug):
    """Show an archived top-questions list identified by its slug."""
    category = get_object_or_404(TopSubmissionCategory,
                                 debate=request.debate, slug=slug)
    ranked = (category.submissions
              .select_related("submission", "submission__voter",
                              "submission__voter__user", "submission__category")
              .order_by("rank", "created_at")
              .all())
    return {
        'category': category,
        'submissions': ranked,
    }
def od_logout(request, next_page=None,
template_name='registration/logged_out.html',
redirect_field_name=REDIRECT_FIELD_NAME,
current_app=None, extra_context=None):
if next_page is not None:
next_page = reverse(next_page)
return logout(request, next_page, template_name, redirect_field_name, |
# Copyright (C) 2017 Equinor ASA, Norway.
#
# The file 'site_config.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from cwrap import BaseCClass
from ecl.util.util import StringList, Hash
from res import ResPrototype
from res.enkf import ConfigKeys
from res.job_queue import JobQueue, ExtJoblist, Driver
class QueueConfig(BaseCClass):
    """Python wrapper around ERT's C-level ``queue_config`` structure, which
    describes how simulation jobs are submitted: queue system, driver
    options, job script and resubmission limit."""

    TYPE_NAME = "queue_config"

    # C bindings resolved through ResPrototype.  Entries with bind=False are
    # free functions that do not take the queue_config instance implicitly.
    _free = ResPrototype("void queue_config_free( queue_config )")
    _alloc = ResPrototype("void* queue_config_alloc_load(char*)", bind=False)
    _alloc_full = ResPrototype(
        "void* queue_config_alloc_full(char*, bool, int, int, queue_driver_enum)",
        bind=False,
    )
    _alloc_content = ResPrototype(
        "void* queue_config_alloc(config_content)", bind=False
    )
    _alloc_local_copy = ResPrototype(
        "queue_config_obj queue_config_alloc_local_copy( queue_config )"
    )
    _has_job_script = ResPrototype("bool queue_config_has_job_script( queue_config )")
    _get_job_script = ResPrototype("char* queue_config_get_job_script(queue_config)")
    _max_submit = ResPrototype("int queue_config_get_max_submit(queue_config)")
    _queue_system = ResPrototype("char* queue_config_get_queue_system(queue_config)")
    _queue_driver = ResPrototype(
        "driver_ref queue_config_get_queue_driver(queue_config, char*)"
    )
    _get_num_cpu = ResPrototype("int queue_config_get_num_cpu(queue_config)")
    _lsf_queue_opt = ResPrototype("char* queue_config_lsf_queue_name()", bind=False)
    _lsf_server_opt = ResPrototype("char* queue_config_lsf_server()", bind=False)
    _lsf_resource_opt = ResPrototype("char* queue_config_lsf_resource()", bind=False)
    _lsf_driver_opt = ResPrototype("char* queue_config_lsf_driver_name()", bind=False)

    def __init__(self, user_config_file=None, config_content=None, config_dict=None):
        """Build a QueueConfig from exactly one of: a user config file path,
        a parsed config_content object, or a plain config dict.

        Raises ValueError when zero or more than one source is given, or
        when the underlying C allocation fails.
        """
        # Count how many of the three mutually exclusive sources were given.
        configs = sum(
            [
                1
                for x in [user_config_file, config_content, config_dict]
                if x is not None
            ]
        )
        if configs > 1:
            raise ValueError(
                "Attempting to create QueueConfig object with multiple config objects"
            )
        if configs == 0:
            raise ValueError(
                "Attempting to create QueueConfig object with no config objects"
            )
        c_ptr = None
        if user_config_file is not None:
            c_ptr = self._alloc(user_config_file)
        if config_content is not None:
            c_ptr = self._alloc_content(config_content)
        if config_dict is not None:
            c_ptr = self._alloc_full(
                config_dict[ConfigKeys.JOB_SCRIPT],
                config_dict[ConfigKeys.USER_MODE],
                config_dict[ConfigKeys.MAX_SUBMIT],
                config_dict[ConfigKeys.NUM_CPU],
                config_dict[ConfigKeys.QUEUE_SYSTEM],
            )
        if not c_ptr:
            raise ValueError("Unable to create QueueConfig instance")
        super(QueueConfig, self).__init__(c_ptr)
        # Queue options from a config dict are not part of the C allocation
        # above, so they are pushed onto the driver afterwards.
        if config_dict is not None:
            queue_options = config_dict.get(ConfigKeys.QUEUE_OPTION)
            for option in queue_options:
                self.driver.set_option(
                    option[ConfigKeys.NAME], option[ConfigKeys.VALUE]
                )

    def create_job_queue(self):
        """Create a JobQueue using this config's driver and max_submit."""
        queue = JobQueue(self.driver, max_submit=self.max_submit)
        return queue

    def create_local_copy(self):
        """Return a LOCAL-queue copy of this configuration (C-side clone)."""
        return self._alloc_local_copy()

    def has_job_script(self):
        return self._has_job_script()

    def free(self):
        # Release the underlying C structure.
        self._free()

    @property
    def max_submit(self):
        """Maximum number of times a failing job is (re)submitted."""
        return self._max_submit()

    @property
    def queue_name(self):
        return self.driver.get_option(QueueConfig.LSF_QUEUE_NAME_KEY)

    @property
    def queue_system(self):
        """The queue system in use, e.g. LSF or LOCAL"""
        return self._queue_system()

    @property
    def job_script(self):
        return self._get_job_script()

    @property
    def driver(self):
        # setParent ties the driver's lifetime to this config object.
        return self._queue_driver(self.queue_system).setParent(self)

    def _assert_lsf(self, key="driver"):
        # Guard for LSF-only accessors below.
        sys = self.queue_system
        if sys != QueueConfig.LSF_KEY:
            fmt = "Cannot fetch LSF {key}, current queue is {system}"
            raise ValueError(fmt.format(key=key, system=self.queue_system))

    @property
    def _lsf_driver(self):
        self._assert_lsf()
        driver = self._queue_driver(self.LSF_KEY)
        return driver.setParent(self)

    @property
    def lsf_resource(self):
        self._assert_lsf(key=QueueConfig.LSF_RESOURCE_KEY)
        return self._lsf_driver.get_option(self.LSF_RESOURCE_KEY)

    @property
    def lsf_server(self):
        self._assert_lsf(key=QueueConfig.LSF_SERVER_KEY)
        return self._lsf_driver.get_option(self.LSF_SERVER_KEY)

    @property
    def num_cpu(self):
        return self._get_num_cpu()

    def __eq__(self, other):
        # NOTE(review): assumes `other` is a QueueConfig; comparing against an
        # unrelated type would raise rather than return NotImplemented.
        if self.max_submit != other.max_submit:
            return False
        if self.queue_system != other.queue_system:
            return False
        if self.num_cpu != other.num_cpu:
            return False
        if self.job_script != other.job_script:
            return False
        # LSF-specific options only exist for non-LOCAL queues.
        if self.queue_system != "LOCAL":
            if self.queue_name != other.queue_name:
                return False
            if self.lsf_resource != other.lsf_resource:
                return False
            if self.lsf_server != other.lsf_server:
                return False
        return True

    # Driver/option key names queried from the C layer once, at class
    # definition time (these prototypes are bind=False free functions).
    LSF_KEY = _lsf_driver_opt()
    LSF_QUEUE_NAME_KEY = _lsf_queue_opt()
    LSF_RESOURCE_KEY = _lsf_resource_opt()
    LSF_SERVER_KEY = _lsf_server_opt()
|
from setuptools import setup, find_packages
import os
import allensdk
# http://bugs.python.org/issue8876#msg208792
# Deleting os.link forces distutils/setuptools to copy files instead of
# hard-linking them, which fails on some filesystems (e.g. VirtualBox
# shared folders).
if hasattr(os, 'link'):
    del os.link
def prepend_find_packages(*roots):
    ''' Recursively traverse nested packages under the root directories
    '''
    packages = []
    for root in roots:
        # The root itself, followed by each nested package qualified by it.
        packages.append(root)
        packages.extend(root + '.' + sub for sub in find_packages(root))
    return packages
setup(
    version = allensdk.__version__,
    name = 'allensdk',
    author = 'David Feng',
    author_email = 'davidf@alleninstitute.org',
    packages = prepend_find_packages('allensdk'),
    # Ship non-Python resources found in any package directory.
    package_data={'': ['*.conf', '*.cfg', '*.md', '*.json', '*.dat', '*.env', '*.sh', 'bps', 'Makefile', 'COPYING'] },
    description = 'core libraries for the allensdk.',
    install_requires = ['h5py>=2.2.1',
                        'matplotlib>=1.4.2',
                        'pandas>=0.16.2',
                        'numpy>=1.8.2',
                        'six>=1.8.0',
                        'pynrrd <= 0.2.0.dev'],
    # NOTE(review): the egg version pinned here (pynrrd-0.1.999.dev) does not
    # match the install_requires bound above — confirm the intended version.
    dependency_links = [
        'git+https://github.com/mhe/pynrrd.git@9e09b24ff1#egg=pynrrd-0.1.999.dev'
    ],
    tests_require=['nose>=1.2.1',
                   'coverage>=3.7.1',
                   'mock'],
    setup_requires=['setuptools', 'sphinx', 'numpydoc'],
    url='http://alleninstitute.github.io/AllenSDK/',
    scripts=['allensdk/model/biophys_sim/scripts/bps'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Topic :: Scientific/Engineering :: Bio-Informatics'
    ])
|
import shutil
from nose.tools import *
from holland.lib.lvm import LogicalVolume
from holland.lib.lvm.snapshot import *
from tests.constants import *
class TestSnapshot(object):
    """Exercise the Snapshot finite-state machine against a real LVM volume."""

    def setup(self):
        # Fresh scratch directory per test, used as the snapshot mountpoint.
        self.tmpdir = tempfile.mkdtemp()

    def teardown(self):
        shutil.rmtree(self.tmpdir)

    def test_snapshot_fsm(self):
        """The FSM runs start-to-finish with no callbacks registered."""
        volume = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
        snapshot_name = volume.lv_name + '_snapshot'
        extent_count = 1  # extent
        snapshot = Snapshot(snapshot_name, extent_count, self.tmpdir)
        snapshot.start(volume)

    def test_snapshot_fsm_with_callbacks(self):
        """Registered mount callbacks do not disturb a normal run."""
        volume = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
        snapshot_name = volume.lv_name + '_snapshot'
        extent_count = 1  # extent
        snapshot = Snapshot(snapshot_name, extent_count, self.tmpdir)

        def observer(event, *args, **kwargs):
            pass

        snapshot.register('pre-mount', observer)
        snapshot.register('post-mount', observer)
        snapshot.start(volume)

    def test_snapshot_fsm_with_failures(self):
        """A failing callback at each lifecycle event raises
        CallbackFailuresError and leaves no handler registered afterwards."""
        volume = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
        snapshot_name = volume.lv_name + '_snapshot'
        extent_count = 1  # extent
        snapshot = Snapshot(snapshot_name, extent_count, self.tmpdir)

        def bad_callback(event, *args, **kwargs):
            raise Exception("Oooh nooo!")

        lifecycle_events = ('initialize', 'pre-snapshot', 'post-snapshot',
                            'pre-mount', 'post-mount', 'pre-unmount', 'post-unmount',
                            'pre-remove', 'post-remove', 'finish')
        for evt in lifecycle_events:
            snapshot.register(evt, bad_callback)
            assert_raises(CallbackFailuresError, snapshot.start, volume)
            snapshot.unregister(evt, bad_callback)
            if snapshot.sigmgr._handlers:
                raise Exception("WTF. sigmgr handlers still exist when checking event => %r", evt)
|
#!/usr/bin/env python3
import json
import os
import subprocess
def connection_lost(network_id, timeout_seconds):
    """Return True when hamachi cannot (re)join `network_id` within
    `timeout_seconds`.

    Runs ``hamachi go-online <network_id>`` and waits for it to finish.
    If it does not finish in time, the process is killed (and reaped) and
    the connection is considered lost.
    """
    p = subprocess.Popen(["hamachi", "go-online", network_id])
    try:
        p.wait(timeout=timeout_seconds)
    except subprocess.TimeoutExpired:
        p.kill()
        # Reap the killed child so it does not linger as a zombie.
        p.wait()
        return True
    return False
if __name__ == "__main__":
    # The config file is expected to provide 'network_id' and
    # 'timeout_seconds'; a missing file or key aborts with a traceback.
    with open("/etc/hamachi-watchdog/hamachi-watchdog.conf", "r") as f:
        config = json.load(f)
    network_id = config['network_id']
    timeout_seconds = config['timeout_seconds']
    if connection_lost(network_id, timeout_seconds):
        print("Hamachi looks down. Restarting it...")
        os.system("systemctl restart logmein-hamachi.service")
        print("Hamachi was restarted")
|
#!/usr/bin/env python
## \file merge_solution.py
# \brief Python script for merging of the solution files.
# \author F. Palacios
# \version 6.1.0 "Falcon"
#
# The current SU2 release has been coordinated by the
# SU2 International Developers Society <www.su2devsociety.org>
# with selected contributions from the open-source community.
#
# The main research teams contributing to the current release are:
# - Prof. Juan J. Alonso's group at Stanford University.
# - Prof. Piero Colonna's group at Delft University of Technology.
# - Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# - Prof. Alberto Guardone's group at Polytechnic University of Milan.
# - Prof. Rafael Palacios' group at Imperial College London.
# - Prof. Vincent Terrapon's group at the University of Liege.
# - Prof. Edwin van der Weide's group at the University of Twente.
# - Lab. of New Concepts in Aeronautics at Tech. Institute of Aeronautics.
#
# Copyright 2012-2018, Francisco D. Palacios, Thomas D. Economon,
# Tim Albring, and the SU2 contributors.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
from optparse import OptionParser
import SU2
# -------------------------------------------------------------------
# Main
# -------------------------------------------------------------------
def main():
    """Parse -f/--file and -n/--partitions, then merge the solution."""
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="filename",
                      help="read config from FILE", metavar="FILE")
    parser.add_option("-n", "--partitions", dest="partitions", default=-1,
                      help="number of PARTITIONS", metavar="PARTITIONS")
    options, args = parser.parse_args()
    # Command-line values arrive as strings; the default is already an int.
    partition_count = int(options.partitions)
    merge_solution(options.filename, partition_count)
# -------------------------------------------------------------------
# MERGE SOLUTION
# -------------------------------------------------------------------
def merge_solution(filename, partitions=-1):
    """Load the SU2 config from `filename` and run the solution merge.

    A non-negative `partitions` overrides NUMBER_PART from the config;
    the default -1 keeps whatever the config file specifies.
    """
    config = SU2.io.Config(filename)
    if partitions > -1:
        config.NUMBER_PART = partitions
    SU2.run.merge(config)

#: def merge_solution()
if __name__ == '__main__':
main()
|
# - | *- coding: utf-8 -*-
| # Copyright 2014-17 Eficent Business and IT Consulting Services S.L.
# <contact@eficent.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import progress_measurements_entry
|
"""
Texture Replacement
+++++++++++++++++++
Example of how to replace a texture in game with an external image.
``createTexture()`` and ``removeTexture()`` are to be called from a
module Python Controller.
"""
from bge import logic
from bge import texture
def createTexture(cont):
    """Create a new Dynamic Texture"""
    owner = cont.owner

    # Look up the internal texture by its material image name.
    material_id = texture.materialID(owner, 'IMoriginal.png')
    dynamic_texture = texture.Texture(owner, material_id)

    # Load the replacement image from a path relative to the blend file.
    image_url = logic.expandPath("//newtexture.jpg")
    replacement_source = texture.ImageFFmpeg(image_url)

    # Keep a module-level reference so the texture outlives this call.
    logic.texture = dynamic_texture

    # Swap in the new source and refresh the display.
    logic.texture.source = replacement_source
    logic.texture.refresh(False)
def removeTexture(cont):
    """Delete the Dynamic Texture, reversing back the final to its original state."""
    # Only AttributeError (texture never created / already removed) is
    # expected here; a bare except would also hide unrelated errors.
    try:
        del logic.texture
    except AttributeError:
        pass
|
'r' )
self.ls = {}
self.__load_ls()
def __load_ls( self ):
    # Index the archive: member names look like '<hex id>.<extension>', so
    # map the parsed integer id to its ZipInfo entry.  Members that do not
    # match the pattern are skipped with a warning.
    ils = self.zf.infolist()
    for i in ils:
        try:
            ids, e = i.filename.split( '.' )
            id = int( ids, 16 )
            self.ls[id] = i
        except:
            print 'WARNING: %s not loaded from zip' % ( i.filename, )
            pass
def verify( self ):
    """Return True when the archive passes zipfile's integrity test."""
    first_bad_member = self.zf.testzip()
    return first_bad_member is None
def read( self, id, extension ):
    """Open the archive member for ``id``; return None when unknown."""
    try:
        return self.zf.open( self.ls[id], 'r' )
    except KeyError:
        return None
def _debug_write( self, id, extension ):
    # Writing into a zip-backed volume is unsupported.
    assert False
def get_state( self ):
    # Zip-backed volumes never stage changes, so they are always 'clean'.
    return 'clean'
def reset_state( self ):
    # Nothing to reset: zip-backed volumes carry no pending changes.
    pass
class FileVolume:
    """One on-disk volume of stream files with transactional updates.

    State machine: 'clean' -> (load_data/delete) 'dirty' -> (commit)
    'committed' -> (reset_state) 'clean', with rollback() undoing a commit
    or discarding staged changes.  Changes are staged in self.to_commit as
    (src, tgt) move pairs and only applied by commit().
    """

    def __init__( self, data_config, vol_id ):
        self.data_config = data_config
        self.vol_id = vol_id
        # Pending (src, tgt) file moves for the current transaction.
        self.to_commit = []
        self.state = 'clean'
        # Temp directory that receives files staged for deletion.
        self.rm_dir = None

    def __get_path( self, id, priority, extension ):
        # Files are named by zero-padded hex id plus extension.
        path = self.data_config.get_file_vol_path( self.vol_id, priority )
        return os.path.join( path, '%016x.%s' % ( id, extension ) )

    def verify( self ):
        # Plain files have no checksum to verify.
        return True

    def read( self, id, priority, extension ):
        """Open the stored file for reading, or None when it is absent."""
        p = self.__get_path( id, priority, extension )
        if( not os.path.isfile( p ) ):
            return None
        else:
            try:
                return open( p, 'rb' )
            # NOTE(review): open() does not raise IndexError — presumably
            # IOError was intended; confirm before changing.
            except IndexError:
                return None

    def _debug_write( self, id, priority, extension ):
        # Debug helper: open the target path for writing, bypassing staging.
        p = self.__get_path( id, priority, extension )
        try:
            return open( p, 'wb' )
        # NOTE(review): same suspect IndexError as in read() above.
        except IndexError:
            return None

    def get_state( self ):
        return self.state

    def reset_state( self ):
        """Drop staged changes, delete the staging dir, return to 'clean'."""
        self.to_commit = []
        self.state = 'clean'
        rm_dir = self.rm_dir
        self.rm_dir = None
        self.to_commit = []
        # Remove the deletion staging directory last, after bookkeeping is
        # already reset, so a failure here leaves consistent state.
        if( rm_dir is not None ):
            shutil.rmtree( rm_dir )

    def commit( self ):
        """Apply all staged moves; on any failure, undo the ones done."""
        completion = 0
        try:
            for t in self.to_commit:
                shutil.move( t[0], t[1] )
                completion += 1
        except:
            # Something went wrong: move back only the entries that had
            # already been applied.
            for t in self.to_commit[:completion]:
                shutil.move( t[1], t[0] )
            # Sometimes move() seems to leave files behind
            for t in self.to_commit:
                try:
                    if( os.path.isfile( t[1] ) ):
                        os.remove( t[1] )
                except:
                    pass
            raise
        # Comitted
        self.state = 'committed'

    def rollback( self ):
        """Undo the transaction: discard staged moves ('dirty') or reverse
        already-applied moves ('committed')."""
        if( self.state == 'dirty' ):
            self.to_commit = []
            self.state = 'clean'
        elif( self.state == 'committed' ):
            for t in self.to_commit:
                shutil.move( t[1], t[0] )
            # Sometimes move() seems to leave files behind
            for t in self.to_commit:
                try:
                    if( os.path.isfile( t[1] ) ):
                        os.remove( t[1] )
                except:
                    pass
            self.state = 'dirty'

    def load_data( self, path, id, priority, extension ):
        """Stage the file at `path` to become the data for stream `id`."""
        if( self.state == 'committed' ):
            # A new change after a commit starts a fresh transaction.
            self.reset_state()
        self.state = 'dirty'
        new_path = self.data_config.get_file_vol_path( self.vol_id, priority )
        if( not os.path.isdir( new_path ) ):
            os.makedirs( new_path )
        tgt = os.path.join( new_path, '%016x.%s' % ( id, extension ) )
        self.to_commit.append( ( path, tgt, ) )

    def delete( self, id, priority, extension ):
        """Stage removal of stream `id`'s file (moved to a temp dir so a
        rollback can restore it)."""
        if( self.state == 'committed' ):
            self.reset_state()
        self.state = 'dirty'
        if( self.rm_dir is None ):
            self.rm_dir = tempfile.mkdtemp()
        src = self.__get_path( id, priority, extension )
        if( not os.path.isfile( src ) ):
            return
        name = os.path.split( src )[-1]
        tgt = os.path.join( self.rm_dir, name )
        self.to_commit.append( ( src, tgt, ) )
class StreamDatabase:
def __init__( self, data_config ):
self.volumes = {}
self.data_config = data_config
self.state = 'clean'
def __get_volume( self, vol_id ):
    """Return the FileVolume for `vol_id`, creating and caching it on
    first use."""
    # `in` replaces dict.has_key(), which was removed in Python 3; the two
    # are equivalent under Python 2.
    if( vol_id in self.volumes ):
        return self.volumes[vol_id]
    vol = FileVolume( self.data_config, vol_id )
    self.volumes[vol_id] = vol
    return vol
def __get_vol_for_id( self, id ):
    """Map a stream id to its volume: the high bits (id >> 12) select the
    volume, so 4096 consecutive ids share one volume."""
    vol_id = id >> 12
    return self.__get_volume( vol_id )
def get_state( self ):
    # One of 'clean', 'dirty' or 'prepared'.
    return self.state
def reset_state( self ):
for vol in self.volumes.values():
vol.reset_state()
self.state = 'clean'
def prepare_commit( self ):
if( self.state == 'clean' ):
return
assert self.state != 'prepared'
vols = self.volumes.values()
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
for vol in vols:
assert vol.get_state() != 'committed'
try:
# Try to commit all the dirty volumes
for vol in vols:
if( vol.get_state() == 'dirty' ):
vol.commit()
except:
# Something went wrong, rollback
for vol in vols:
if( vol.get_state() == 'committed' ):
vol.rollback()
raise
# Comitted
self.state = 'prepared'
def unprepare_commit( self ):
    """Undo a successful prepare_commit(): roll every committed volume
    back and return the database to 'dirty'."""
    if( self.state == 'clean' ):
        return
    assert self.state == 'prepared'
    vols = self.volumes.values()
    # After a successful prepare no volume may still be dirty.
    for vol in vols:
        assert vol.get_state() != 'dirty'
        if( vol.get_state() == 'committed' ):
            vol.rollback()
    for vol in vols:
        assert vol.get_state() != 'committed'
    self.state = 'dirty'
def complete_commit( self ):
if( self.state == 'clean' ):
return
assert self.state == 'prepared'
vols = self.volumes.values()
for vol in vols:
if( vol.get_state() == 'committed' ):
vol.reset_state()
self.state = 'clean'
def commit( self ):
    # Convenience wrapper: run both phases of the two-phase commit.
    self.prepare_commit()
    self.complete_commit()
def rollback( self ):
    """Abort the current transaction from any state, leaving the database
    and every volume 'clean'."""
    vols = self.volumes.values()
    if( self.state == 'clean' ):
        for vol in vols:
            assert vol.get_state() == 'clean'
        return
    # A prepared commit is first unwound back to 'dirty'...
    if( self.state == 'prepared' ):
        self.unprepare_commit()
    # ...then dirty volumes discard their staged changes.
    if( self.state == 'dirty' ):
        for vol in vols:
            assert vol.get_state() != 'committed'
            if( vol.get_state() == 'dirty' ):
                vol.rollback()
    for vol in vols:
        assert vol.get_state() == 'clean'
    self.state = 'clean'
def load_data( self, path, id, priority, extension ):
    """Stage the file at `path` as the data for stream `id`."""
    if( self.state == 'committed' ):
        # Clean things up before we begin, so the volumes touched by this
        # commit can be determined from a fresh state.
        self.reset_state()
    self.state = 'dirty'
    self.__get_vol_for_id( id ).load_data( path, id, priority, extension )
def delete( self, id, priority, extension ):
    """Stage removal of stream `id`'s file."""
    if( self.state == 'committed' ):
        # Clean things up before we begin, so the volumes touched by this
        # commit can be determined from a fresh state.
        self.reset_state()
    self.state = 'dirty'
    self.__get_vol_for_id( id ).delete( id, priority, extension )
def read( self, id, priority, extension ):
    """Open stream `id`'s file for reading; None when absent."""
    return self.__get_vol_for_id( id ).read( id, priority, extension )
def _debug_write( self, id, priority, extension ):
v = self.__get_vol_for_id( id )
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Python library for serializing any arbitrary object graph into JSON.
jsonpickle can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
The object must be accessible globally via a module and must
inherit from object (AKA new-style classes).
Create an object::
class Thing(object):
def __init__(self, name):
self.name = name
obj = Thing('Awesome')
Use jsonpickle to transform the object into a JSON string::
import jsonpickle
frozen = jsonpickle.encode(obj)
Use jsonpickle to recreate a Python object from a JSON string::
thawed = jsonpickle.decode(frozen)
.. warning::
Loading a JSON string from an untrusted source represents a potential
security vulnerability. jsonpickle makes no attempt to sanitize the input.
The new object has the same type and data, but essentially is now a copy of
the original.
.. code-block:: python
assert obj.name == thawed.name
If you will never need to load (regenerate the Python class from JSON), you can
pass in the keyword unpicklable=False to prevent extra information from being
added to JSON::
oneway = jsonpickle.encode(obj, unpicklable=False)
result = jsonpickle.decode(oneway)
assert obj.name == result['name'] == 'Awesome'
"""
import sys, os
from music21 import common
sys.path.append(common.getSourceFilePath() + os.path.sep + 'ext')
from jsonpickle import pickler
from jsonpickle import unpickler
from jsonpickle.backend import JSONBackend
from jsonpickle.version import VERSION
# ensure built-in handlers are loaded
__import__('jsonpickle.handlers')

# Public API of this module.
__all__ = ('encode', 'decode')
__version__ = VERSION

# Default backend instance shared by encode()/decode() when no explicit
# backend is passed.
json = JSONBackend()

# Export specific JSONPluginMgr methods into the jsonpickle namespace
set_preferred_backend = json.set_preferred_backend
set_encoder_options = json.set_encoder_options
load_backend = json.load_backend
remove_backend = json.remove_backend
enable_fallthrough = json.enable_fallthrough
def encode(value,
           unpicklable=True,
           make_refs=True,
           keys=False,
           max_depth=None,
           backend=None,
           warn=False,
           max_iter=None):
    """Return a JSON formatted representation of value, a Python object.

    :param unpicklable: If set to False then the output will not contain the
        information necessary to turn the JSON data back into Python objects,
        but a simpler JSON stream is produced.
    :param max_depth: If set to a non-negative integer then jsonpickle will
        not recurse deeper than 'max_depth' steps into the object.  Anything
        deeper than 'max_depth' is represented using a Python repr() of the
        object.
    :param make_refs: If set to False jsonpickle's referencing support is
        disabled. Objects that are id()-identical won't be preserved across
        encode()/decode(), but the resulting JSON stream will be conceptually
        simpler.  jsonpickle detects cyclical objects and will break the cycle
        by calling repr() instead of recursing when make_refs is set False.
    :param keys: If set to True then jsonpickle will encode non-string
        dictionary keys instead of coercing them into strings via `repr()`.
    :param warn: If set to True then jsonpickle will warn when it
        returns None for an object which it cannot pickle
        (e.g. file descriptors).
    :param max_iter: If set to a non-negative integer then jsonpickle will
        consume at most `max_iter` items when pickling iterators.

    >>> encode('my string')
    '"my string"'
    >>> encode(36)
    '36'
    >>> encode({'foo': True})
    '{"foo": true}'
    >>> encode({'foo': True}, max_depth=0)
    '"{\\'foo\\': True}"'
    >>> encode({'foo': True}, max_depth=1)
    '{"foo": "True"}'
    """
    if backend is None:
        backend = json
    # BUG FIX: max_iter was accepted and documented but never forwarded,
    # so iterator truncation silently had no effect.
    return pickler.encode(value,
                          backend=backend,
                          unpicklable=unpicklable,
                          make_refs=make_refs,
                          keys=keys,
                          max_depth=max_depth,
                          warn=warn,
                          max_iter=max_iter)
def decode(string, backend=None, keys=False):
    """Convert a JSON string into a Python object.
    The keyword argument 'keys' defaults to False.
    If set to True then jsonpickle will decode non-string dictionary keys
    into python objects via the jsonpickle protocol.

    >>> str(decode('"my string"'))
    'my string'
    >>> decode('36')
    36
    """
    active_backend = json if backend is None else backend
    return unpickler.decode(string, backend=active_backend, keys=keys)
# json.load(),loads(), dump(), dumps() compatibility
# Aliases so this module can be used as a drop-in for the json module's
# string interface.
dumps = encode
loads = decode
|
import random
class intDict(object):
    """A dictionary with integer keys, backed by a fixed number of hash
    buckets (key % numBuckets selects the bucket)."""

    def __init__(self, numBuckets):
        """Create an empty dictionary"""
        self.numBuckets = numBuckets
        self.buckets = [[] for _ in range(numBuckets)]

    def addEntry(self, dictKey, dictVal):
        """Assumes dictKey an int. Adds an entry."""
        bucket = self.buckets[dictKey % self.numBuckets]
        # Replace in place when the key already exists.
        for i, (key, _) in enumerate(bucket):
            if key == dictKey:
                bucket[i] = (dictKey, dictVal)
                return
        bucket.append((dictKey, dictVal))

    def getValue(self, dictKey):
        """Assumes dictKey an int. Returns entry associated
        with the key dictKey"""
        bucket = self.buckets[dictKey % self.numBuckets]
        for key, val in bucket:
            if key == dictKey:
                return val
        return None

    def __str__(self):
        entries = []
        for bucket in self.buckets:
            for key, val in bucket:
                entries.append(str(key) + ':' + str(val))
        return '{' + ','.join(entries) + '}'
# Demo: fill a 29-bucket table with 29 random keys and show how the
# entries distribute over the buckets.
D = intDict(29)
for i in range(29):
    #choose a random int in range(10**5)
    key = random.choice(range(10**5))
    D.addEntry(key, i)
print '\n', 'The buckets are:'
for hashBucket in D.buckets: #violates abstraction barrier
    print ' ', hashBucket
|
#!/usr/bin/env p | ython
from sys import argv
def calcRabbits(n, k):
    """Return the first n terms of the generalized Fibonacci ("rabbit
    pairs") sequence where each mature pair produces k new pairs:
    F(i) = F(i-1) + k * F(i-2), with F(0) = F(1) = 1.

    BUG FIX: the growth factor was hardcoded to 3, silently ignoring k.
    Also handles n < 2, which previously always returned two terms.
    """
    if n < 2:
        return [1] * max(n, 0)
    pairs = [1, 1]
    for i in range(2, n):
        pairs.append(pairs[i - 1] + k * pairs[i - 2])
    return pairs
if __name__ == "__main__":
    try:
        n = int(argv[1])
        k = int(argv[2])
        print(calcRabbits(n,k))
    except (IndexError, ValueError):
        # Missing or non-integer command-line arguments.
        print("Usage: python fib.py <intN> <intK>")
|
from cms.models.pluginmod | el import CMSPlugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import gettext_lazy as _
from djan | go.utils.translation import get_language
from partners.models import Partner
class PartnersPlugin(CMSPluginBase):
    """CMS plugin listing all active partners, ordered by translated name."""
    name = _("Partners")
    model = CMSPlugin
    render_template = "partners/partners_plugin.html"
    text_enabled = False
    allow_children = False

    def render(self, context, instance, placeholder):
        current_language = get_language()
        # Fall back to English when no active language is set.
        if current_language is None:
            current_language = 'en'
        active_partners = (Partner.objects.filter(active=True)
                           .translated(current_language)
                           .order_by('translations__name')
                           .all())
        context.update({'partners': active_partners})
        return context


plugin_pool.register_plugin(PartnersPlugin)
|
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from pinax.apps.account.utils import get_default_redirect, user_display
from pinax.apps.signup_codes.models import SignupCode
from pinax.apps.signup_codes.forms import SignupForm, InviteUserForm
def group_and_bridge(request):
    """
    Given the request we can depend on the GroupMiddleware to provide the
    group and bridge.
    """
    # be group aware
    group = getattr(request, "group", None)
    bridge = request.bridge if group else None
    return group, bridge
def group_context(group, bridge):
    """Base template context for an optional group."""
    # @@@ use bridge
    ctx = {"group": group}
    if group:
        ctx["group_base"] = bridge.group_base_template()
    return ctx
def signup(request, **kwargs):
    """Account signup view with SignupCode (invitation) support.

    GET with a valid ?code= prefills the form; GET without one fails when
    open signup is disabled.  A valid POST creates the user, consumes the
    signup code (if any), logs in, and redirects to success_url.
    """
    form_class = kwargs.pop("form_class", SignupForm)
    template_name = kwargs.pop("template_name", "account/signup.html")
    template_name_failure = kwargs.pop("template_name_failure", "signup_codes/failure.html")
    success_url = kwargs.pop("success_url", None)
    group, bridge = group_and_bridge(request)
    ctx = group_context(group, bridge)
    # Work out the post-signup redirect: explicit kwarg, else the settings
    # fallbacks, finally LOGIN_REDIRECT_URL.
    if success_url is None:
        if hasattr(settings, "SIGNUP_REDIRECT_URLNAME"):
            fallback_url = reverse(settings.SIGNUP_REDIRECT_URLNAME)
        else:
            if hasattr(settings, "LOGIN_REDIRECT_URLNAME"):
                fallback_url = reverse(settings.LOGIN_REDIRECT_URLNAME)
            else:
                fallback_url = settings.LOGIN_REDIRECT_URL
        success_url = get_default_redirect(request, fallback_url)
    code = request.GET.get("code")
    if request.method == "POST":
        form = form_class(request.POST, group=group)
        if form.is_valid():
            user = form.save(request=request)
            signup_code = form.cleaned_data["signup_code"]
            if signup_code:
                # Mark the invitation as consumed by this user.
                signup_code.use(user)
            form.login(request, user)
            messages.add_message(request, messages.SUCCESS,
                ugettext("Successfully logged in as %(username)s.") % {
                    "username": user_display(user),
                }
            )
            return HttpResponseRedirect(success_url)
        # An invalid POST falls through to re-render the bound form below.
    else:
        signup_code = SignupCode.check(code)
        if signup_code:
            # Valid invitation: prefill the code and invited email.
            initial = {
                "signup_code": code,
                "email": signup_code.email,
            }
            form = form_class(initial=initial, group=group)
        else:
            if not settings.ACCOUNT_OPEN_SIGNUP:
                ctx.update({
                    "code": code,
                })
                ctx = RequestContext(request, ctx)
                # if account signup is not open we want to fail when there is
                # no sign up code or what was provided failed.
                return render_to_response(template_name_failure, ctx)
            else:
                form = form_class(group=group)
    ctx.update({
        "code": code,
        "form": form,
    })
    return render_to_response(template_name, RequestContext(request, ctx))
@staff_member_required
def admin_invite_user(request, **kwargs):
    """
    This view, by default, works inside the Django admin.

    Presents a form to invite a user by email; on success a signup code
    email is sent and an empty form is shown again.
    """
    form_class = kwargs.pop("form_class", InviteUserForm)
    template_name = kwargs.pop("template_name", "signup_codes/admin_invite_user.html")

    group, bridge = group_and_bridge(request)

    if request.method != "POST":
        form = form_class(group=group)
    else:
        form = form_class(request.POST, group=group)
        if form.is_valid():
            email = form.cleaned_data["email"]
            form.send_signup_code()
            messages.add_message(request, messages.INFO,
                ugettext("An email has been sent to %(email)s.") % {
                    "email": email
                }
            )
            form = form_class()  # reset
    ctx = group_context(group, bridge)
    ctx["title"] = ugettext("Invite user")
    ctx["form"] = form
    return render_to_response(template_name, RequestContext(request, ctx))
|
#!/usr/bin/env python
# Django management entry point (manage.py style script).
import os
import sys

if __name__ == "__main__":
    # Point Django at the project settings unless the environment
    # already provides DJANGO_SETTINGS_MODULE.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sleeptomusicweb.settings")
    # Imported only after the settings module variable is configured.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
import mimetypes
import unittest
from os import path
from django.conf.urls.static import static
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class StaticTests(SimpleTestCase):
    """Tests django views in django/views/static.py"""

    prefix = 'site_media'

    def test_serve(self):
        "The static view can serve static media"
        for name in ['file.txt', 'file.txt.gz']:
            resp = self.client.get('/%s/%s' % (self.prefix, name))
            payload = b''.join(resp)
            full_path = path.join(media_dir, name)
            with open(full_path, 'rb') as fh:
                self.assertEqual(fh.read(), payload)
            self.assertEqual(len(payload), int(resp['Content-Length']))
            self.assertEqual(mimetypes.guess_type(full_path)[1], resp.get('Content-Encoding', None))

    def test_chunked(self):
        "The static view should stream files in chunks to avoid large memory usage"
        resp = self.client.get('/%s/%s' % (self.prefix, 'long-line.txt'))
        chunk = next(resp.streaming_content)
        self.assertEqual(len(chunk), FileResponse.block_size)
        chunk = next(resp.streaming_content)
        resp.close()
        # strip() to prevent OS line endings from causing differences
        self.assertEqual(len(chunk.strip()), 1449)

    def test_unknown_mime_type(self):
        resp = self.client.get('/%s/file.unknown' % self.prefix)
        self.assertEqual('application/octet-stream', resp['Content-Type'])
        resp.close()

    def test_copes_with_empty_path_component(self):
        name = 'file.txt'
        resp = self.client.get('/%s//%s' % (self.prefix, name))
        payload = b''.join(resp)
        with open(path.join(media_dir, name), 'rb') as fh:
            self.assertEqual(fh.read(), payload)

    def test_is_modified_since(self):
        name = 'file.txt'
        resp = self.client.get(
            '/%s/%s' % (self.prefix, name),
            HTTP_IF_MODIFIED_SINCE='Thu, 1 Jan 1970 00:00:00 GMT'
        )
        payload = b''.join(resp)
        with open(path.join(media_dir, name), 'rb') as fh:
            self.assertEqual(fh.read(), payload)

    def test_not_modified_since(self):
        name = 'file.txt'
        resp = self.client.get(
            '/%s/%s' % (self.prefix, name),
            HTTP_IF_MODIFIED_SINCE='Mon, 18 Jan 2038 05:14:07 GMT'
            # This is 24h before max Unix time. Remember to fix Django and
            # update this test well before 2038 :)
        )
        self.assertIsInstance(resp, HttpResponseNotModified)

    def test_invalid_if_modified_since(self):
        """Handle bogus If-Modified-Since values gracefully

        Assume that a file is modified since an invalid timestamp as per RFC
        2616, section 14.25.
        """
        name = 'file.txt'
        bogus_date = 'Mon, 28 May 999999999999 28:25:26 GMT'
        resp = self.client.get('/%s/%s' % (self.prefix, name),
                               HTTP_IF_MODIFIED_SINCE=bogus_date)
        payload = b''.join(resp)
        with open(path.join(media_dir, name), 'rb') as fh:
            self.assertEqual(fh.read(), payload)
        self.assertEqual(len(payload), int(resp['Content-Length']))

    def test_invalid_if_modified_since2(self):
        """Handle even more bogus If-Modified-Since values gracefully

        Assume that a file is modified since an invalid timestamp as per RFC
        2616, section 14.25.
        """
        name = 'file.txt'
        bogus_date = ': 1291108438, Wed, 20 Oct 2010 14:05:00 GMT'
        resp = self.client.get('/%s/%s' % (self.prefix, name),
                               HTTP_IF_MODIFIED_SINCE=bogus_date)
        payload = b''.join(resp)
        with open(path.join(media_dir, name), 'rb') as fh:
            self.assertEqual(fh.read(), payload)
        self.assertEqual(len(payload), int(resp['Content-Length']))

    def test_404(self):
        resp = self.client.get('/%s/non_existing_resource' % self.prefix)
        self.assertEqual(404, resp.status_code)

    def test_index(self):
        resp = self.client.get('/%s/' % self.prefix)
        self.assertContains(resp, 'Index of /')
class StaticHelperTest(StaticTests):
    """
    Ensure the static() URL-pattern helper serves exactly like the view.
    """
    def setUp(self):
        super(StaticHelperTest, self).setUp()
        # Remember the pristine pattern list so tearDown can restore it.
        self._old_views_urlpatterns = list(urls.urlpatterns)
        urls.urlpatterns += static('/media/', document_root=media_dir)

    def tearDown(self):
        super(StaticHelperTest, self).tearDown()
        urls.urlpatterns = self._old_views_urlpatterns
class StaticUtilsTests(unittest.TestCase):
    def test_was_modified_since_fp(self):
        """
        A floating point mtime does not disturb was_modified_since (#18675).
        """
        mtime = 1343416141.107817
        self.assertFalse(was_modified_since(http_date(mtime), mtime))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Triangle Project Code.
# Triangle analyzes the lengths of the sides of a triangle
# (represented by a, b and c) | and returns the type of triangle.
#
# It returns:
# 'equilateral' if all sides are equal
# 'isosceles' if exactly 2 sides are equal
# 'scalene' if no si | des are equal
#
# The tests for this method can be found in
# about_triangle_project.py
# and
# about_triangle_project_2.py
#
def triangle(a, b, c):
    """Classify a triangle by its side lengths.

    Returns 'equilateral', 'isosceles' or 'scalene'; raises TriangleError
    for non-positive sides or sides violating the triangle inequality.
    """
    smallest, middle, largest = sorted((a, b, c))
    if smallest <= 0:
        raise TriangleError(f"Non-positive value passed for sides:{a},{b},{c}")
    # The full triangle inequality reduces to checking the two shortest
    # sides against the longest one.
    if smallest + middle <= largest:
        raise TriangleError("Sum of any two sides must be greater than third one.")
    distinct = len({a, b, c})
    if distinct == 1:
        return 'equilateral'
    if distinct == 2:
        return 'isosceles'
    return 'scalene'
# Error class used in part 2. No need to change this code.
class TriangleError(Exception):
    """Raised when the given side lengths do not form a valid triangle."""
|
#!/usr/bin/python
#! -*- coding:utf-8 -*-
from sqlalchemy import Column, Integer, String
from database import Base
class Message(Base):
    """ORM mapping for the ``message`` table."""
    __tablename__ = 'message'

    MessageId = Column(Integer, primary_key=True)
    DeviceId = Column(String(50))
    MessageBody = Column(String(1000))
    MessageType = Column(Integer)
    CreatedTime = Column(String(50))

    def __init__(self, json):
        # Populate the row from an incoming payload dict; all four keys
        # are required.
        self.DeviceId = json["DeviceId"]
        self.MessageType = json["MessageType"]
        self.MessageBody = json["MessageBody"]
        self.CreatedTime = json["CreatedTime"]

    def get_json(self):
        """Return the row as a plain dict suitable for JSON serialization."""
        return {
            "MessageId": self.MessageId,
            "DeviceId": self.DeviceId,
            "CreatedTime": self.CreatedTime,
            "MessageType": self.MessageType,
            "MessageBody": self.MessageBody,
        }

    def __repr__(self):
        return repr(self.get_json())
class UserInfo(Base):
    """ORM mapping for the ``userinfo`` table, keyed by device id."""
    __tablename__ = 'userinfo'

    DeviceId = Column(String(50), primary_key=True)
    UseTimes = Column(Integer)
    LastUseTime = Column(String(50))

    def __init__(self, json):
        # Populate the row from an incoming payload dict.
        self.DeviceId = json["DeviceId"]
        self.UseTimes = json["UseTimes"]
        self.LastUseTime = json["LastUseTime"]

    def get_json(self):
        """Return the row as a plain dict suitable for JSON serialization."""
        return {
            "DeviceId": self.DeviceId,
            "UseTimes": self.UseTimes,
            "LastUseTime": self.LastUseTime,
        }

    def __repr__(self):
        return repr(self.get_json())
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/lic | enses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class OneDeviceStrategy implementing DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.distribute.python import values
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import distribute as distribute_lib
# TODO(josh11b): Replace asserts in this file with if ...: raise ...
class OneDeviceStrategy(distribute_lib.DistributionStrategy):
  """A distribution strategy for running on a single device.

  Variables, datasets and computations are all placed on the one device
  given at construction time, so broadcasts and per-tower dispatch are
  trivial (there is exactly one tower and no cross-device traffic).
  """
  # TODO(josh11b): Do we wrap values in types to generate errors if you are
  # doing something that won't work with other DistributionStrategy
  # implementations?

  def __init__(self, device, prefetch_on_device=None):
    """Creates the strategy.

    Args:
      device: device string; everything created under this strategy is
        placed here, and it also becomes the strategy's default device.
      prefetch_on_device: forwarded to `values.PerDeviceDataset` when
        distributing a dataset.
    """
    super(OneDeviceStrategy, self).__init__()
    self._device = device
    self._prefetch_on_device = prefetch_on_device
    self._default_device = device

  def _create_variable(self, next_creator, *args, **kwargs):
    """Runs `next_creator` under the appropriate device/colocation scope."""
    # No need to distinguish tower-local variables when not mirroring,
    # we just enforce that they are not trainable.
    if kwargs.pop("tower_local_reduce_method", None) is not None:
      kwargs["trainable"] = False

    # `colocate_with` may be None, a device string, a singleton list of a
    # device string, or an object acceptable to ops.colocate_with.
    colocate_with = kwargs.pop("colocate_with", None)
    if colocate_with is None:
      # Default: pin the variable to this strategy's device.
      with ops.device(self._device):
        return next_creator(*args, **kwargs)
    if isinstance(colocate_with, six.string_types):
      with ops.device(colocate_with):
        return next_creator(*args, **kwargs)
    if (isinstance(colocate_with, list) and len(colocate_with) == 1 and
        isinstance(colocate_with[0], six.string_types)):
      with ops.device(colocate_with[0]):
        return next_creator(*args, **kwargs)
    with ops.colocate_with(colocate_with):
      return next_creator(*args, **kwargs)

  def distribute_dataset(self, dataset_fn):
    # Wraps the dataset for consumption on the single device.
    return values.PerDeviceDataset(
        self._call_dataset_fn(dataset_fn), [self._device],
        self._prefetch_on_device)

  def _broadcast(self, tensor, destinations):
    # With a single device, broadcast is the identity.
    return tensor

  def _call_for_each_tower(self, fn, *args, **kwargs):
    # We don't run `fn` in multiple threads in OneDeviceStrategy.
    kwargs.pop("run_concurrently", None)
    with ops.device(self._device), _OneDeviceTowerContext(self):
      return fn(*args, **kwargs)

  def map(self, map_over, fn, *args, **kwargs):
    # Applies `fn` to every element of `map_over` on this device and
    # collects the results in a MapOutput.
    with ops.device(self._device):
      return values.MapOutput([fn(m, *args, **kwargs) for m in map_over])

  def _reduce(self, method_string, value, destinations):
    """Reduces a MapOutput with "sum" or "mean"; other values pass through."""
    if not isinstance(value, values.MapOutput):
      return value
    l = value.get()
    assert l
    with ops.device(self._device):
      if method_string == "sum":
        return math_ops.add_n(l)
      elif method_string == "mean":
        return math_ops.add_n(l) / len(l)
      else:
        assert False  # only "sum" and "mean" are supported

  def _update(self, var, fn, *args, **kwargs):
    # Runs the update on this device inside an UpdateContext.
    with ops.device(self._device), distribute_lib.UpdateContext(self._device):
      return fn(var, *args, **kwargs)

  def _update_non_slot(self, colocate_with, fn, *args, **kwargs):
    del colocate_with  # irrelevant: everything lives on the one device
    with ops.device(self._device), distribute_lib.UpdateContext(self._device):
      return fn(*args, **kwargs)

  def _fetch(self, val, destination, fn):
    """Return a copy of `val` or `fn(val)` on `destination`."""
    with ops.device(self._device):
      v = fn(val)
    with ops.device(destination):
      return array_ops.identity(v)

  def _unwrap(self, value):
    # Values are never wrapped per-device here; expose as a 1-element list.
    return [value]

  @property
  def is_single_tower(self):
    return True

  @property
  def num_towers(self):
    return 1

  @property
  def worker_devices(self):
    return [self._device]

  @property
  def parameter_devices(self):
    return [self._device]

  def non_slot_devices(self, var_list):
    del var_list  # unused: only one candidate device
    return [self._device]

  def _worker_device_index(self):
    return 0
class _OneDeviceTowerContext(distribute_lib.TowerContext):
  """Tower context for OneDeviceStrategy: always tower 0."""

  def __init__(self, distribution_strategy):
    super(_OneDeviceTowerContext, self).__init__(
        distribution_strategy, tower_id=0)

  @property
  def device(self):
    # The strategy has exactly one worker device.
    return self._distribution_strategy.worker_devices[0]
|
"""
A HTML5 target.
"""
from targets import _
from html import TYPE
import html
# Target name shown to the user and the default output extension.
NAME = _('HTML5 page')
EXTENSION = 'html'

# Page template with a small default stylesheet embedded inline.
# The %(...)s keys (ENCODING, HEADER1..3, STYLE) are %-format
# placeholders filled in by the caller.
HEADER = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
<style>
body{background-color:#fff;color:#000;}
hr{background-color:#000;border:0;color:#000;}
hr.heavy{height:5px;}
hr.light{height:1px;}
img{border:0;display:block;}
img.right{margin:0 0 0 auto;}
img.center{border:0;margin:0 auto;}
table th,table td{padding:4px;}
.center,header{text-align:center;}
table.center {margin-left:auto; margin-right:auto;}
.right{text-align:right;}
.left{text-align:left;}
.tableborder,.tableborder td,.tableborder th{border:1px solid #000;}
.underline{text-decoration:underline;}
</style>
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""

# Variant used when an external stylesheet does all the styling:
# same skeleton but no inline <style> block.
HEADERCSS = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
# Start from the plain HTML target's tag table, lower-casing every
# replacement string for the HTML5 output.
TAGS = dict((key, value.lower()) for key, value in html.TAGS.items())
# HTML5-specific overrides applied on top of the lower-cased HTML tags.
# NOTE(review): '\a' appears to mark where element content is substituted
# and ~A~/~a~/~b~ look like attribute insertion points handled by the
# txt2tags engine -- confirm against the core before relying on this.
HTML5TAGS = {
    'title1Open'           : '<section~A~>\n<h1>\a</h1>' ,
    'title1Close'          : '</section>' ,
    'title2Open'           : '<section~A~>\n<h2>\a</h2>' ,
    'title2Close'          : '</section>' ,
    'title3Open'           : '<section~A~>\n<h3>\a</h3>' ,
    'title3Close'          : '</section>' ,
    'title4Open'           : '<section~A~>\n<h4>\a</h4>' ,
    'title4Close'          : '</section>' ,
    'title5Open'           : '<section~A~>\n<h5>\a</h5>' ,
    'title5Close'          : '</section>' ,
    'fontBoldOpen'         : '<strong>' ,
    'fontBoldClose'        : '</strong>' ,
    'fontItalicOpen'       : '<em>' ,
    'fontItalicClose'      : '</em>' ,
    'fontUnderlineOpen'    : '<span class="underline">',
    'fontUnderlineClose'   : '</span>' ,
    'fontStrikeOpen'       : '<del>' ,
    'fontStrikeClose'      : '</del>' ,
    'listItemClose'        : '</li>' ,
    'numlistItemClose'     : '</li>' ,
    'deflistItem2Close'    : '</dd>' ,
    'bar1'                 : '<hr class="light">' ,
    'bar2'                 : '<hr class="heavy">' ,
    'img'                  : '<img~a~ src="\a" alt="">' ,
    'imgEmbed'             : '<img~a~ src="\a" alt="">' ,
    '_imgAlignLeft'        : ' class="left"' ,
    '_imgAlignCenter'      : ' class="center"',
    '_imgAlignRight'       : ' class="right"' ,
    'tableOpen'            : '<table~a~~b~>' ,
    '_tableBorder'         : ' class="tableborder"' ,
    '_tableAlignCenter'    : ' style="margin-left: auto; margin-right: auto;"',
    '_tableCellAlignRight' : ' class="right"' ,
    '_tableCellAlignCenter': ' class="center"',
    'cssOpen'              : '<style>' ,
    'tocOpen'              : '<nav>' ,
    'tocClose'             : '</nav>' ,
    'EOD'                  : '</article></body></html>'
}
TAGS.update(HTML5TAGS)

RULES = html.RULES.copy()

# Update the rules to use explicit <section> </section> tags
HTML5RULES = {
    'titleblocks' : 1,
}
RULES.update(HTML5RULES)
|
class Solution(object):
    def distributeCandies(self, candies):
        """
        :type candies: List[int]
        :rtype: int

        The sister may take at most half of the candies; she cannot take
        more distinct kinds than exist, so the answer is the smaller of
        the two quantities.
        """
        # Floor division keeps the result an int under Python 3 as well;
        # the original `/` would return a float there.
        return min(len(set(candies)), len(candies) // 2)
|
import numpy
from chainer import cuda, Function
def _cu_conv_sum(y, x, n):
    # Convolutional sum
    # TODO(beam2d): Use scan computation
    #
    # Writes into ``y`` the running sum of ``x`` over a window of ``n``
    # entries along axis 1 (the channel axis), clipped at the array
    # boundaries; used by the LRN forward/backward GPU paths below.
    # NOTE(review): ``/`` relies on Python 2 integer division; under
    # Python 3 ``rdim`` would be a float -- confirm before porting.
    rdim = x.size / (x.shape[0] * x.shape[1])
    cuda.elementwise(
        'float* y, const float* x, int rdim, int N, int n_',
        '''
          int half_n = n_ / 2;
          int offset = i / rdim * N * rdim + i % rdim;
          float* xi = x + offset;
          float* yi = y + offset;
          float sum_part = 0;
          for (int j = 0; j < N + half_n; ++j) {
            if (j < N) {
              sum_part += xi[j * rdim];
            }
            if (j >= n_) {
              sum_part -= xi[(j - n_) * rdim];
            }
            if (j >= half_n) {
              yi[(j - half_n) * rdim] = sum_part;
            }
          }
        ''', 'lrn_conv_sum')(y, x, rdim, x.shape[1], n,
                             range=slice(0, x.shape[0] * rdim, 1))
class LocalResponseNormalization(Function):
    """Cross-channel normalization function used in AlexNet."""

    def __init__(self, n=5, k=2, alpha=1e-4, beta=.75):
        # n: width of the channel window summed over
        # k: additive smoothing constant
        # alpha: scale applied to the squared-activation sum
        # beta: exponent of the normalizer
        self.n = n
        self.k = k
        self.alpha = alpha
        self.beta = beta

    def forward_cpu(self, x):
        # NOTE(review): Python 2 integer division intended here (n // 2).
        half_n = self.n / 2
        x2 = x[0] * x[0]
        sum_part = x2.copy()
        # Fold in the squared activations of up to half_n neighbors on
        # each side along axis 1 (the channel axis).
        for i in xrange(1, half_n + 1):
            sum_part[:, i: ] += x2[:, :-i]
            sum_part[:, :-i] += x2[:, i: ]
        # unit_scale = k + alpha * sum_j x_j^2 ; scale = unit_scale ** -beta
        self.unit_scale = self.k + self.alpha * sum_part
        self.scale = self.unit_scale ** -self.beta
        self.y = x[0] * self.scale
        return self.y,

    def backward_cpu(self, x, gy):
        half_n = self.n / 2
        # summand = y * gy / unit_scale, then summed over the same channel
        # window as the forward pass.
        summand = self.y * gy[0] / self.unit_scale
        sum_part = summand.copy()
        for i in xrange(1, half_n + 1):
            sum_part[:, i: ] += summand[:, :-i]
            sum_part[:, :-i] += summand[:, i: ]
        gx = gy[0] * self.scale - 2 * self.alpha * self.beta * x[0] * sum_part
        return gx,

    def forward_gpu(self, x):
        self.y = x[0] * x[0]  # temporary
        self.scale = cuda.empty_like(self.y)
        # scale <- windowed sum of squared activations over channels.
        _cu_conv_sum(self.scale, self.y, self.n)
        cuda.elementwise(
            '''float* y, float* scale, const float* x,
               float k, float alpha, float beta''',
            '''scale[i] = k + alpha * scale[i];
               y[i] = x[i] * __powf(scale[i], -beta);''',
            'lrn_fwd')(self.y, self.scale, x[0], self.k, self.alpha, self.beta)
        return self.y,

    def backward_gpu(self, x, gy):
        summand = cuda.empty_like(x[0])
        cuda.elementwise(
            '''float* summand, const float* scale, const float* y,
               const float* gy''',
            'summand[i] = y[i] * gy[i] / scale[i]',
            'lrn_bwd_summand')(summand, self.scale, self.y, gy[0])
        gx = cuda.empty_like(x[0])
        # gx temporarily holds the windowed sum of `summand`.
        _cu_conv_sum(gx, summand, self.n)
        cuda.elementwise(
            '''float* gx, const float* x, const float* gy, const float* scale,
               float beta, float coeff''',
            'gx[i] = __powf(scale[i], -beta) * gy[i] - coeff * x[i] * gx[i]',
            'lrn_bwd')(gx, x[0], gy[0], self.scale, self.beta,
                       2 * self.alpha * self.beta)
        return gx,
def local_response_normalization(x, n=5, k=2, alpha=1e-4, beta=.75):
    """Local response normalization across neighboring channels.

    Normalizes each element of an input with :math:`N` channels by a
    power of the weighted sum of squares over its neighboring channels:

    .. math::
       y_i = {x_i \\over \\left( k + \\
       \\alpha \\sum_{j=\\max{1, i - n/2}}^{\\min{N, i + n/2}} \\
       x_j^2 \\right)^\\beta}.

    Args:
        x (Variable): Input variable.
        n (int): Normalization window width.
        k (float): Smoothing parameter.
        alpha (float): Normalizer scaling parameter.
        beta (float): Normalizer power parameter.

    Returns:
        Variable: Output variable.

    See: Sec. 3.3 of `ImageNet Classification with Deep Convolutional Neural \\
    Networks <http://www.cs.toronto.edu/~fritz/absps/imagenet.pdf>`_

    """
    normalizer = LocalResponseNormalization(n, k, alpha, beta)
    return normalizer(x)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.