prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from model import FreebieItem, Distributor, Contributor
import datetime
import logging
# Shared HTML skeleton for every page: `head` expects the page title via its
# single %s placeholder and pulls in sorttable.js so tables with
# class="sortable" become client-side sortable; `end` closes the document.
head = '''
<html>
<head>
<title>%s</title>
<script src="/static/sorttable.js"></script>
<style>
body {
background-color: #000000;
color: #FFFFFF;
}
input {
background-color: #000000;
color: #FF0000;
outline-color: #000000;
border-color: #FF0000;
}
table.sortable thead {
background-color:#202020;
color:#FFFFFF;
font-weight: bold;
cursor: default;
}
</style>
</head>
<body>
<b><a href="/freebielist/">Freebies</a> | <a href="/freebielist/distributors">Distributors</a> | <a href="/freebielist/contributors">Contributors</a></b><p>
'''
# Common closing tags appended after each page body.
end = '''
</body>
</html>
'''
class Distributors(webapp.RequestHandler):
    """Renders an HTML table of every distinct distributor."""

    def get(self):
        """Handle GET: list each unique (avname, avkey) pair, one row apiece."""
        message = '''<h1>List of Distributors</h1>
<p>This lists all Distributors currently in the distribution system as of %s.</p>
<table class="sortable" border="1">''' % datetime.datetime.utcnow().isoformat(' ')
        message += '<tr><th>Row</th><th>Distributor</th><th>Key</th></tr><br />\n'
        dists = []
        for record in Distributor.gql(""):
            row = '<td>%s</td><td>%s</td>\n' % (record.avname, record.avkey)
            # De-duplicate while preserving first-seen order
            # (was: `if (s in dists) == False`).
            if row not in dists:
                dists.append(row)
        # enumerate() replaces the manual range(0, len(...)) index loop.
        for i, row in enumerate(dists, 1):
            message += '<tr><td>%d</td>%s' % (i, row)
        message += "</table>"
        self.response.out.write((head % 'Distributor List') + message + end)
class Contributors(webapp.RequestHandler):
    """Renders an HTML table of every distinct contributor."""

    def get(self):
        """Handle GET: list each unique (avname, avkey) pair, one row apiece."""
        message = '''<h1>List of Contributors</h1>
<p>This lists all Contributors currently in the distribution system as of %s.</p>
<table class="sortable" border="1">''' % datetime.datetime.utcnow().isoformat(' ')
        message += '<tr><th>Row</th><th>Contributor</th><th>Key</th></tr><br />\n'
        contribs = []
        for record in Contributor.gql(""):
            row = '<td>%s</td><td>%s</td>\n' % (record.avname, record.avkey)
            # De-duplicate while preserving first-seen order
            # (was: `if (s in dists) == False`).
            if row not in contribs:
                contribs.append(row)
        # enumerate() replaces the manual range(0, len(...)) index loop.
        for i, row in enumerate(contribs, 1):
            message += '<tr><td>%d</td>%s' % (i, row)
        message += "</table>"
        self.response.out.write((head % 'Contributor List') + message + end)
class MainPage(webapp.RequestHandler):
    """Renders an HTML table of all freebie items in the system."""

    def get(self):
        """Handle GET: list every freebie item, rows sorted by owner column."""
        message = '''<h1>List of Freebie items</h1>
<p>This lists all item currently in the distribution system as of %s.</p>
<table class="sortable" border="1">''' % datetime.datetime.utcnow().isoformat(' ')
        message += ('<tr><th>Row</th><th>Owner</th><th>Giver ID</th><th>Name</th>'
                    '<th>Version</th><th>Update Date</th><th>Distributor Location</th>'
                    '<th>Texture Key</th><th>Texture Server</th>'
                    '<th>Texture Updatetime</th></tr><br />\n')
        content = []
        for record in FreebieItem.gql(""):
            owner = record.freebie_owner
            if owner is None:
                owner = '***Not assigned***'
            # -1 marks items whose texture was never updated.
            if record.freebie_texture_update is None:
                texture_update = -1
            else:
                texture_update = record.freebie_texture_update
            content.append(
                '<td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td>'
                '<td>%s</td><td>%s</td><td>%s</td><td>%d</td>\n'
                % (owner, record.freebie_giver, record.freebie_name,
                   record.freebie_version, record.freebie_timedate,
                   record.freebie_location, record.freebie_texture_key,
                   record.freebie_texture_serverkey, texture_update))
        # enumerate() over the sorted rows replaces the manual index loop.
        for i, row in enumerate(sorted(content), 1):
            message += '<tr><td>%d</td>%s' % (i, row)
        message += "</table>"
        self.response.out.write((head % 'Freebie Items List') + message + end)
# URL routing: any path whose last segment is /distributors or /contributors
# gets its dedicated handler; every other URL falls through to the freebie
# item list.
application = webapp.WSGIApplication([
    (r'/.*?/distributors', Distributors),
    (r'/.*?/contributors', Contributors),
    ('.*', MainPage)
],
    debug=True)
def real_main():
    """Start the WSGI app; profile_main() wraps this with cProfile."""
    run_wsgi_app(application)
def profile_main():
    """Run real_main() under cProfile and log the hottest 80 entries.

    Uses the Python-2 `StringIO` module, consistent with the App Engine
    runtime this file targets.
    """
    # This is the main function for profiling
    # We've renamed our original main() above to real_main()
    import cProfile, pstats, StringIO
    prof = cProfile.Profile()
    prof = prof.runctx("real_main()", globals(), locals())
    stream = StringIO.StringIO()
    stats = pstats.Stats(prof, stream=stream)
    stats.sort_stats("time")  # Or cumulative
    stats.print_stats(80)  # 80 = how many to print
    # The rest is optional.
    # stats.print_callees()
    # stats.print_callers()
    logging.info("Profile data:\n%s", stream.getvalue())
if __name__ == "__main__":
    # Profiled entry point; switch to real_main() to skip profiling overhead.
    profile_main()
from PyQt5.Q | tDesigner import * | |
# no instance_key attached to it), and another instance
# with the same identity key already exists as persistent.
# convert to an UPDATE if so.
if not has_identity and \
instance_key in uowtransaction.session.identity_map:
instance = \
uowtransaction.session.identity_map[instance_key]
existing = attributes.instance_state(instance)
if not uowtransaction.was_already_deleted(existing):
if not uowtransaction.is_deleted(existing):
raise orm_exc.FlushError(
"New instance %s with identity key %s conflicts "
"with persistent instance %s" %
(state_str(state), instance_key,
| state_str(existing)))
base_mapper._log_debug(
"detected row switch for identity %s. "
"will update %s, remove %s from "
"transaction", instance_key,
state_str(state), state_str(existing))
# remove the "delete" flag from the existing element
| uowtransaction.remove_state_actions(existing)
row_switch = existing
if (has_identity or row_switch) and mapper.version_id_col is not None:
update_version_id = mapper._get_committed_state_attr_by_column(
row_switch if row_switch else state,
row_switch.dict if row_switch else dict_,
mapper.version_id_col)
yield (state, dict_, mapper, connection,
has_identity, row_switch, update_version_id)
def _organize_states_for_post_update(base_mapper, states, uowtransaction):
    """Make an initial pass across a set of states for UPDATE
    corresponding to post_update.

    Gathers, per state, its dictionary, its mapper, and the connection
    to use for the execution of that state.
    """
    return _connections_for_states(base_mapper, uowtransaction, states)
def _organize_states_for_delete(base_mapper, states, uowtransaction):
    """Make an initial pass across a set of states for DELETE.

    Fires the before_delete event for each state and yields, per state,
    its dictionary, mapper, connection, and (when the mapper is
    versioned) the committed version-id value.
    """
    connection_info = _connections_for_states(
        base_mapper, uowtransaction, states)
    for state, dict_, mapper, connection in connection_info:
        mapper.dispatch.before_delete(mapper, connection, state)
        if mapper.version_id_col is None:
            update_version_id = None
        else:
            update_version_id = mapper._get_committed_state_attr_by_column(
                state, dict_, mapper.version_id_col)
        yield (state, dict_, mapper, connection, update_version_id)
def _collect_insert_commands(
        table, states_to_insert,
        bulk=False, return_defaults=False):
    """Identify sets of values to use in INSERT statements for a
    list of states.

    Yields, for each state, the parameter dictionary to insert into
    *table* along with bookkeeping flags used by the persistence layer
    (whether all PKs / server defaults are already present).
    """
    for state, state_dict, mapper, connection in states_to_insert:
        # A mapper in the hierarchy may not map this particular table.
        if table not in mapper._pks_by_table:
            continue
        params = {}        # plain column values keyed by column key
        value_params = {}  # SQL expressions to render inline in the statement
        propkey_to_col = mapper._propkey_to_col[table]
        eval_none = mapper._insert_cols_evaluating_none[table]
        for propkey in set(propkey_to_col).intersection(state_dict):
            value = state_dict[propkey]
            col = propkey_to_col[propkey]
            # Omit None (unless the column evaluates None) so that
            # server-side defaults can take effect.
            if value is None and propkey not in eval_none:
                continue
            elif not bulk and isinstance(value, sql.ClauseElement):
                value_params[col.key] = value
            else:
                params[col.key] = value
        if not bulk:
            # Columns configured to insert an explicit NULL when unset.
            for colkey in mapper._insert_cols_as_none[table].\
                    difference(params).difference(value_params):
                params[colkey] = None
        if not bulk or return_defaults:
            has_all_pks = mapper._pk_keys_by_table[table].issubset(params)
            if mapper.base_mapper.eager_defaults:
                has_all_defaults = mapper._server_default_cols[table].\
                    issubset(params)
            else:
                has_all_defaults = True
        else:
            # Bulk path without return_defaults skips the bookkeeping.
            has_all_defaults = has_all_pks = True
        # Generate a fresh version id when the mapper versions this table.
        if mapper.version_id_generator is not False \
                and mapper.version_id_col is not None and \
                mapper.version_id_col in mapper._cols_by_table[table]:
            params[mapper.version_id_col.key] = \
                mapper.version_id_generator(None)
        yield (
            state, state_dict, params, mapper,
            connection, value_params, has_all_pks,
            has_all_defaults)
def _collect_update_commands(
uowtransaction, table, states_to_update,
bulk=False):
"""Identify sets of values to use in UPDATE statements for a
list of states.
This function works intricately with the history system
to determine exactly what values should be updated
as well as how the row should be matched within an UPDATE
statement. Includes some tricky scenarios where the primary
key of an object might have been changed.
"""
for state, state_dict, mapper, connection, \
update_version_id in states_to_update:
if table not in mapper._pks_by_table:
continue
pks = mapper._pks_by_table[table]
value_params = {}
propkey_to_col = mapper._propkey_to_col[table]
if bulk:
params = dict(
(propkey_to_col[propkey].key, state_dict[propkey])
for propkey in
set(propkey_to_col).intersection(state_dict).difference(
mapper._pk_keys_by_table[table])
)
has_all_defaults = True
else:
params = {}
for propkey in set(propkey_to_col).intersection(
state.committed_state):
value = state_dict[propkey]
col = propkey_to_col[propkey]
if isinstance(value, sql.ClauseElement):
value_params[col] = value
# guard against values that generate non-__nonzero__
# objects for __eq__()
elif state.manager[propkey].impl.is_equal(
value, state.committed_state[propkey]) is not True:
params[col.key] = value
if mapper.base_mapper.eager_defaults:
has_all_defaults = mapper._server_onupdate_default_cols[table].\
issubset(params)
else:
has_all_defaults = True
if update_version_id is not None and \
mapper.version_id_col in mapper._cols_by_table[table]:
if not bulk and not (params or value_params):
# HACK: check for history in other tables, in case the
# history is only in a different table than the one
# where the version_id_col is. This logic was lost
# from 0.9 -> 1.0.0 and restored in 1.0.6.
for prop in mapper._columntoproperty.values():
history = (
state.manager[prop.key].impl.get_history(
state, state_dict,
attributes.PASSIVE_NO_INITIALIZE))
if history.added:
break
else:
# no net change, break
continue
col = mapper.version_id_col
params[col._label] = update_version_id
if (bulk or col.key not in params) and \
mapper.version_id_generator is not False:
val = mapper.version_id_generator(update_version_id)
params[col.key] = val
elif not (params or value_params):
continue
if bulk:
pk_params = dict(
|
#!/usr/bin/env python
import yaml
import pwd
import sys
import subprocess
import json
import os
__author__ = "Anoop P Alias"
__copyright__ = "Copyright Anoop P Alias"
__license__ = "GPL"
__email__ = "anoopalias01@gmail.com"
installation_path = "/opt/nDeploy" # Absolute Installation Path
if __name__ == "__main__":
    # This script is mostly intended to be called from a cronjob.
    # For every per-user HHVM config file, check whether any of the user's
    # domains still uses the HHVM backend; if none does, stop and disable
    # the user's HHVM unit (and on cluster slaves as well).
    conf_list = os.listdir("/opt/nDeploy/hhvm.d")
    for filename in conf_list:
        cpaneluser, extension = filename.split('.')
        # if user is not in /etc/passwd we dont proceed any further
        try:
            pwd.getpwnam(cpaneluser)
        except KeyError:
            # NOTE(review): sys.exit(0) aborts processing of ALL remaining
            # users on the first stale config file — confirm whether
            # `continue` was intended instead.
            sys.exit(0)
        else:
            # Update the userdata cache.
            # BUG FIX: the original used Popen([...], shell=True); with
            # shell=True the list's extra items become shell arguments, so
            # '--force' and the username never reached the script.  Using
            # subprocess.call (no shell) also waits for the refresh to
            # finish before we read the cache file below.
            subprocess.call(['/scripts/updateuserdatacache', '--force', cpaneluser])
            # Try loading the main userdata cache file
            cpuserdatajson = "/var/cpanel/userdata/" + cpaneluser + "/main.cache"
            with open(cpuserdatajson) as cpaneluser_data_stream:
                json_parsed_cpaneluser = json.load(cpaneluser_data_stream)
            main_domain = json_parsed_cpaneluser.get('main_domain')
            # parked_domains = yaml_parsed_cpaneluser.get('parked_domains')  # This data is irrelevant as parked domain list is in ServerAlias
            # addon_domains_dict = json_parsed_cpaneluser.get('addon_domains')  # So we know which addon is mapped to which sub-domain
            sub_domains = json_parsed_cpaneluser.get('sub_domains')
            # Since we have all domains now..check XtendWeb domain-data files for HHVM enabled
            # Turn off HHVM if no domain using HHVM
            hhvm_flag = False
            with open(installation_path + "/domain-data/" + main_domain, 'r') as domain_data_stream:
                yaml_parsed_domain_data = yaml.safe_load(domain_data_stream)
            backend_category = yaml_parsed_domain_data.get('backend_category', None)
            if backend_category == 'HHVM':
                hhvm_flag = True
            for the_sub_domain in sub_domains:
                # Wildcard sub-domains are stored with a '_wildcard_.' prefix.
                if the_sub_domain.startswith("*"):
                    subdom_config_dom = "_wildcard_." + the_sub_domain.replace('*.', '')
                else:
                    subdom_config_dom = the_sub_domain
                with open(installation_path + "/domain-data/" + subdom_config_dom, 'r') as domain_data_stream:
                    yaml_parsed_domain_data = yaml.safe_load(domain_data_stream)
                backend_category = yaml_parsed_domain_data.get('backend_category', None)
                if backend_category == 'HHVM':
                    hhvm_flag = True
            if hhvm_flag is False:
                # This means none of the domains has HHVM enabled and we can
                # shut down HHVM for the user.
                subprocess.call(['systemctl', 'stop', 'ndeploy_hhvm@' + cpaneluser + '.service'])
                subprocess.call(['systemctl', 'disable', 'ndeploy_hhvm@' + cpaneluser + '.service'])
                if os.path.isfile(installation_path + "/conf/ndeploy_cluster.yaml"):
                    subprocess.call('ansible -i /opt/nDeploy/conf/nDeploy-cluster/hosts ndeployslaves -m systemd -a "name=ndeploy_hhvm@' + cpaneluser + '.service state=stopped enabled=no"', shell=True)
|
class Corpus:
    """Interface for corpus implementations.

    Reconstructed from a corrupted source span; the original defined an
    empty base class with a no-op initializer.
    """

    def __init__(self):
        # Nothing to set up; concrete corpora provide their own state.
        pass
|
ptr = self.malloc(S)
old1_ptr.someInt = 900
self.stackroots.append(old1_ptr)
old2_ptr = self.malloc(S)
old2_ptr.someInt = 800
self.stackroots.append(old2_ptr)
pinned_ptr = self.malloc(T)
pinned_ptr.someInt = 100
assert self.gc.pin(llmemory.cast_ptr_to_adr(pinned_ptr))
self.write(old1_ptr, 'next', pinned_ptr)
self.write(old1_ptr, 'data', pinned_ptr)
self.write(old2_ptr, 'next', pinned_ptr)
self.write(old2_ptr, 'data', pinned_ptr)
self.gc.collect()
old1_ptr = self.stackroots[0]
old2_ptr = self.stackroots[1]
assert not self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(old1_ptr))
assert not self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(old2_ptr))
# do multiple rounds to make sure
for _ in range(10):
assert self.gc.old_objects_pointing_to_pinned.length() == 2
self.gc.debug_gc_step()
def pin_shadow_1(self, collect_func):
    """Pin an object that has a shadow (via gc.id), collect, then unpin.

    While pinned the object must stay in the nursery untouched; after
    unpinning, the next collection moves it into its shadow outside the
    nursery.  (Reconstructed: source span contained extraction artifacts.)
    """
    ptr = self.malloc(T)
    adr = llmemory.cast_ptr_to_adr(ptr)
    self.stackroots.append(ptr)
    ptr.someInt = 100
    assert self.gc.pin(adr)
    self.gc.id(ptr)  # allocate shadow
    collect_func()
    assert self.gc.is_in_nursery(adr)
    assert ptr.someInt == 100
    self.gc.unpin(adr)
    collect_func()  # move to shadow
    adr = llmemory.cast_ptr_to_adr(self.stackroots[0])
    assert not self.gc.is_in_nursery(adr)
def test_pin_shadow_1(self):
    # pin_shadow_1 driven by minor collections only.
    self.pin_shadow_1(self.gc.minor_collection)
def test_pin_shadow_1_major_collection(self):
    # pin_shadow_1 driven by full (major) collections.
    self.pin_shadow_1(self.gc.collect)
def test_malloc_different_types(self):
    # scenario: malloc two objects of different type and pin them. Do a
    # minor and major collection in between. This test showed a bug that was
    # present in a previous implementation of pinning.
    obj1 = self.malloc(T)
    self.stackroots.append(obj1)
    assert self.gc.pin(llmemory.cast_ptr_to_adr(obj1))
    #
    # major collection between the two pins
    self.gc.collect()
    #
    obj2 = self.malloc(T)
    self.stackroots.append(obj2)
    assert self.gc.pin(llmemory.cast_ptr_to_adr(obj2))
def test_objects_to_trace_bug(self):
    # scenario: In a previous implementation there was a bug because of a
    # dead pointer inside 'objects_to_trace'. This was caused by the first
    # major collection step that added the pointer to the list and right
    # after the collection step the object is unpinned and freed by the minor
    # collection, leaving a dead pointer in the list.
    pinned_ptr = self.malloc(T)
    pinned_ptr.someInt = 101
    self.stackroots.append(pinned_ptr)
    pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
    assert self.gc.pin(pinned_adr)
    # first incremental step: records the pinned object in objects_to_trace
    self.gc.debug_gc_step()
    self.gc.unpin(pinned_adr)
    # second step: must not crash on the now-dead pointer
    self.gc.debug_gc_step()
def pin_shadow_2(self, collect_func):
    """identityhash() variant of the pin/shadow interaction test.

    A pinned object with an allocated shadow must survive collections in
    place; once unpinned, the following collection relocates it into the
    shadow outside the nursery.
    """
    obj = self.malloc(T)
    obj_adr = llmemory.cast_ptr_to_adr(obj)
    self.stackroots.append(obj)
    obj.someInt = 100
    assert self.gc.pin(obj_adr)
    self.gc.identityhash(obj)  # allocates the shadow
    collect_func()
    # Still pinned: stays in the nursery with its data intact.
    assert self.gc.is_in_nursery(obj_adr)
    assert obj.someInt == 100
    self.gc.unpin(obj_adr)
    collect_func()  # now the object moves into its shadow
    moved_adr = llmemory.cast_ptr_to_adr(self.stackroots[0])
    assert not self.gc.is_in_nursery(moved_adr)
def test_pin_shadow_2_minor_collection(self):
    # pin_shadow_2 driven by minor collections only.
    self.pin_shadow_2(self.gc.minor_collection)
def test_pin_shadow_2_major_collection(self):
    # pin_shadow_2 driven by full (major) collections.
    self.pin_shadow_2(self.gc.collect)
def test_pin_nursery_top_scenario1(self):
    """Three pinned objects, no collection yet."""
    addrs = []
    for value in (101, 102, 103):
        obj = self.malloc(T)
        addr = llmemory.cast_ptr_to_adr(obj)
        obj.someInt = value
        self.stackroots.append(obj)
        assert self.gc.pin(addr)
        addrs.append(addr)
    # scenario: no minor collection happened, only three mallocs
    # and pins
    #
    # +- nursery
    # |
    # v
    # +--------+--------+--------+---------------------...---+
    # | pinned | pinned | pinned | empty                     |
    # +--------+--------+--------+---------------------...---+
    #                            ^                           ^
    #                            |                           |
    #             nursery_free --+                           |
    #                              nursery_top --------------+
    #
    assert addrs[2] < self.gc.nursery_free
    assert self.gc.nursery_free < self.gc.nursery_top
def test_pin_nursery_top_scenario2(self):
    """Three pinned objects, then a collection."""
    addrs = []
    for value in (101, 102, 103):
        obj = self.malloc(T)
        addr = llmemory.cast_ptr_to_adr(obj)
        obj.someInt = value
        self.stackroots.append(obj)
        assert self.gc.pin(addr)
        addrs.append(addr)
    # scenario: after the first GC minor collection the pinned objects
    # block the start of the nursery, so free and top coincide at the
    # nursery start:
    #
    # +- nursery
    # |
    # v
    # +--------+--------+--------+---------------------...---+
    # | pinned | pinned | pinned | empty                     |
    # +--------+--------+--------+---------------------...---+
    # ^
    # |
    # +- nursery_free
    # +- nursery_top
    #
    self.gc.collect()
    assert self.gc.nursery_free == self.gc.nursery_top
    assert self.gc.nursery_top == self.gc.nursery
    assert self.gc.nursery_top < addrs[2]
def test_pin_nursery_top_scenario3(self):
    """Three pinned objects; unpin the first, then collect."""
    addrs = []
    for value in (101, 102, 103):
        obj = self.malloc(T)
        addr = llmemory.cast_ptr_to_adr(obj)
        obj.someInt = value
        self.stackroots.append(obj)
        assert self.gc.pin(addr)
        addrs.append(addr)
    # scenario: after unpinning the first object and a minor collection,
    # the freed slot at the nursery start becomes the allocatable area:
    #
    # +- nursery
    # |
    # v
    # +--------+--------+--------+---------------------...---+
    # | empty  | pinned | pinned | empty                     |
    # +--------+--------+--------+---------------------...---+
    # ^        ^
    # |        |
    # |        +- nursery_top
    # +- nursery_free
    #
    self.gc.unpin(addrs[0])
    self.gc.collect()
    assert self.gc.nursery_free == self.gc.nursery
    assert self.gc.nursery_top > self.gc.nursery_free
    assert self.gc.nursery_top < addrs[1]
def test_pin_nursery_top_scenario4(self):
ptr1 = self.malloc(T)
adr1 = llmemory.cast_ptr_to_adr(ptr1)
ptr1.someInt = 101
self.stackroots.append(ptr1)
assert self.gc.pin(adr1)
ptr2 = self.malloc(T)
adr2 = llmemory.cast_ptr_to_adr(ptr2)
ptr2.someInt = 102
self.stackroots.append(ptr2)
assert self.gc.pin(adr2)
ptr3 = self.malloc(T)
adr3 = llmemory.cast_ptr_to_adr(ptr3)
ptr3.someInt = 103
self.stackroots.append(ptr3)
assert self.gc.pin(adr3)
# scenario: after unpinning first & second object and a minor
# collection
#
# +- nursery
# |
# v
# +-----------------+-------- |
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
log = CPLog(__name__)
class Automation(Plugin):
    """Periodically pulls movie suggestions from automation providers and
    adds any not-yet-added movie to the wanted list.

    Reconstructed: the source span contained extraction artifacts around
    addMovies().
    """

    def __init__(self):
        addEvent('app.load', self.setCrons)
        # In dev mode, skip the expensive startup import.
        if not Env.get('dev'):
            addEvent('app.load', self.addMovies)
        # Re-register the cron when the interval setting changes.
        addEvent('setting.save.automation.hour.after', self.setCrons)

    def setCrons(self):
        fireEvent('schedule.interval', 'automation.add_movies', self.addMovies, hours = self.conf('hour', default = 12))

    def addMovies(self):
        movies = fireEvent('automation.get_movies', merge = True)
        movie_ids = []
        for imdb_id in movies:
            # Remember per-IMDB-id whether we already tried adding it, so
            # providers can keep suggesting it without duplicates.
            prop_name = 'automation.added.%s' % imdb_id
            added = Env.prop(prop_name, default = False)
            if not added:
                added_movie = fireEvent('movie.add', params = {'identifier': imdb_id}, force_readd = False, search_after = False, update_library = True, single = True)
                if added_movie:
                    movie_ids.append(added_movie['id'])
                # NOTE(review): marked as "added" even when movie.add failed
                # (added_movie falsy) — presumably intentional to avoid
                # retry loops; confirm.
                Env.prop(prop_name, True)
        # Kick off a search for each newly added movie.
        for movie_id in movie_ids:
            movie_dict = fireEvent('movie.get', movie_id, single = True)
            fireEvent('searcher.single', movie_dict)
|
#!/usr/bin/env python
# Standard packages
import argparse
import re
import sys

import cyvcf2
import geneimpacts
from cyvcf2 import VCF
def get_effects(variant, annotation_keys):
    """Parse the variant's snpEff ANN entries into geneimpacts.SnpEff objects.

    NOTE(review): assumes the ANN INFO field is present; a variant without
    ANN raises AttributeError here — confirm upstream guarantees this.
    """
    ann_entries = variant.INFO.get("ANN").split(",")
    return [geneimpacts.SnpEff(entry, annotation_keys) for entry in ann_entries]
def get_top_impact(effects):
    """Return the single most severe effect (the first one on ties)."""
    top = geneimpacts.Effect.top_severity(effects)
    return top[0] if isinstance(top, list) else top
def get_genes(effects):
    """Collect the distinct gene names of *effects*, preserving first-seen order."""
    genes = []
    for eff in effects:
        gene = eff.gene
        if gene not in genes:
            genes.append(gene)
    return genes
def get_transcript_effects(effects):
    """Map transcript ID to a "biotype|impact_severity" string.

    Effects without a transcript are skipped; when several effects share a
    transcript, the last one wins.  (Reconstructed: the original's
    continuation line was corrupted by extraction artifacts.)
    """
    transcript_effects = dict()
    for effect in effects:
        if effect.transcript is not None:
            transcript_effects[effect.transcript] = "{biotype}|{effect}".format(
                biotype=effect.biotype, effect=effect.impact_severity)
    return transcript_effects
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--annotated_vcf', help="snpEff annotated VCF file to scan")
parser.add_argument('-o', '--output', help="File for output information")
args = parser.parse_args()
sys.stdout.write("Parsing VCFAnno VCF with CyVCF2\n")
reader = cyvcf2.VCFReader(args.annotated_vcf)
desc = reader["ANN"]["Description"]
annotation_keys = [x.strip("\"'") for x in re.split("\s*\|\s*", desc.split(":", 1)[1].strip('" '))]
sys.stdout.write("Parsing VCFAnno VCF\n")
vcf = VCF(args.annotated_vcf)
for variant in vcf:
effects = get_effects(variant, annotation_keys)
top_impact = get_top_impact(effects)
gene_effects = dict()
for effect in effects:
if effect.gene not in gene_effects.keys():
if effect.transcript is not None:
|
#
# Copyright (C) 2013 Stanislav Bohm
#
# This file is part of Kaira.
#
# Kaira is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any later version.
#
# Kaira is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kaira. If not, see <http://www.gnu.org/licenses/>.
#
import utils
def all_free_variables(edges):
    """Return the union of the free variables of every edge."""
    return utils.unions(edges, lambda e: e.get_free_vars())
def get_variable_sources(inscriptions):
    """Map each variable name to the uid of the inscription providing it.

    Bulk inscriptions provide a variable without a token, recorded as None.
    NOTE(review): the `.get()` truthiness check means a stored None (bulk
    source) does not block a later non-bulk inscription from overwriting
    the entry — presumably intentional; verify against callers.
    """
    sources = {}
    for ins in inscriptions:
        if not ins.is_expr_variable():
            continue
        if sources.get(ins.expr):
            continue
        sources[ins.expr] = None if ins.is_bulk() else ins.uid
    return sources
def is_dependant(inscription1, inscription2):
    """Return True when inscription1 must be processed after inscription2.

    That is the case when both sit on the same edge with inscription2
    earlier, or when inscription1 references the variable inscription2
    provides.
    """
    same_edge = inscription1.edge is inscription2.edge
    if same_edge and inscription2.index < inscription1.index:
        return True
    if not inscription2.is_expr_variable():
        return False
    return inscription2.expr in inscription1.get_foreign_variables()
def analyze_transition(tr):
    """Analyze a transition's edges and precompute its token-flow data.

    Fills in on *tr*: ordered input/output inscriptions, which input
    variables come from which inscriptions, which input tokens can be
    reused on output, which fresh tokens must be created, and which bulk
    inscriptions may overtake their variable.
    """
    variable_sources = {} # string -> uid - which inscriptions carry input variables
    reuse_tokens = {} # uid -> uid - identification number of token for output inscription
    fresh_tokens = [] # (uid, type) - what tokens have to be created for output
    used_tokens = [] # [uid] - tokens from input inscriptions that are reused on output
    variable_sources_out = {} # string -> uid or None
    bulk_overtake = [] # [uid]
    overtaken_variables = set()

    def inscription_out_weight(inscription):
        # Reorder edges, bulk edges first because we want them sent first.
        # Otherwise it can cause problems like in sending results in the
        # "workers" example.
        s = inscription.config.get("seq")
        if s is None:
            seq = 0
        else:
            seq = int(s) * 3
        if inscription.is_bulk():
            return seq
        # Unconditional edges have higher priority
        if inscription.is_conditioned():
            return seq + 2
        else:
            return seq + 1

    def inscription_in_weight(inscription):
        # Unconditioned input inscriptions are processed first.
        if inscription.is_conditioned():
            return 1
        else:
            return 0

    inscriptions_in = sum((edge.inscriptions for edge in tr.edges_in), [])
    inscriptions_in.sort(key=inscription_in_weight)
    inscriptions_out = sum((edge.inscriptions for edge in tr.edges_out), [])
    inscriptions_out.sort(key=inscription_out_weight)
    variable_sources = get_variable_sources(inscriptions_in)

    # Order input inscriptions by variable dependency
    inscriptions_in = utils.topological_ordering(inscriptions_in, is_dependant)
    if inscriptions_in is None:
        raise utils.PtpException("Circle variable dependancy", tr.get_source())

    # Try to reuse input tokens for output inscriptions
    for inscription in inscriptions_out:
        if inscription.is_bulk() or not inscription.is_local():
            continue # Bulk and nonlocal edges cannot use token reusage
        if not inscription.is_expr_variable():
            continue # Current implementation reuses tokens only for variable expressions
        if inscription.is_collective():
            continue # Collective operations cannot use token reusage
        token_uid = variable_sources.get(inscription.expr)
        if token_uid is None or token_uid in used_tokens:
            # Variable is not taken from input as token
            # or token is already reused --> reusage not possible
            continue
        reuse_tokens[inscription.uid] = token_uid
        used_tokens.append(token_uid)

    # Set up fresh variables where no token was reused
    for inscription in inscriptions_out:
        if not inscription.is_expr_variable():
            continue # We are interested only in variables
        variable = inscription.expr
        if variable in variable_sources:
            # Variable taken from input, so we do not have to deal with it here
            continue
        if variable in variable_sources_out:
            # Variable already prepared for output
            continue
        if inscription.is_bulk():
            # No token, just build the variable
            variable_sources_out[variable] = None
            continue
        if inscription.is_local():
            # Local send, we prepare a token
            fresh_tokens.append((inscription.uid, inscription.edge.place.type))
            variable_sources_out[variable] = inscription.uid
            reuse_tokens[inscription.uid] = inscription.uid # Use this fresh new token
        else:
            # Just create the variable
            variable_sources_out[variable] = None

    for inscription in reversed(inscriptions_out):
        # Now we are checking overtake. It has to be in reversed order
        # because overtake has to be the last operation on a variable.
        if not inscription.is_bulk() or not inscription.is_expr_variable():
            continue # We are interested only in variables and bulk inscriptions
        if inscription.expr not in overtaken_variables:
            overtaken_variables.add(inscription.expr)
            bulk_overtake.append(inscription.uid)

    # Any remaining variables referenced by output inscriptions are plain
    # locally-built variables without a token.
    for inscription in inscriptions_out:
        for variable in inscription.get_other_variables():
            if variable not in variable_sources and \
                    variable not in variable_sources_out:
                variable_sources_out[variable] = None

    tr.inscriptions_in = inscriptions_in
    tr.inscriptions_out = inscriptions_out
    tr.variable_sources = variable_sources
    tr.reuse_tokens = reuse_tokens
    tr.variable_sources_out = variable_sources_out
    tr.fresh_tokens = fresh_tokens
    tr.bulk_overtake = bulk_overtake
|
import mock
from olympia.amo.tests import addon_factory, TestCase, user_factory
from olympia.ratings.models import Rating
from olympia.ratings.tasks import addon_rating_aggregates
class TestAddonRatingAggregates(TestCase):
    """End-to-end check of the addon_rating_aggregates task.

    Reconstructed: the source span contained extraction artifacts inside
    the test body; no assertion values were changed.
    """

    # Prevent <Rating>.refresh() from being fired when setting up test data,
    # since it'd call addon_rating_aggregates too early.
    @mock.patch.object(Rating, 'refresh', lambda x, update_denorm=False: None)
    def test_addon_rating_aggregates(self):
        addon = addon_factory()
        addon2 = addon_factory()
        # Add a purely unlisted add-on. It should not be considered when
        # calculating bayesian rating for the other add-ons.
        addon3 = addon_factory(total_ratings=3, average_rating=4)
        self.make_addon_unlisted(addon3)
        # Create a few ratings with various scores.
        user = user_factory()
        # Add an old rating that should not be used to calculate the average,
        # because the same user posts a new one right after that.
        old_rating = Rating.objects.create(
            addon=addon, rating=1, user=user, is_latest=False, body=u'old')
        new_rating = Rating.objects.create(addon=addon, rating=3, user=user,
                                           body=u'new')
        Rating.objects.create(addon=addon, rating=3, user=user_factory(),
                              body=u'foo')
        Rating.objects.create(addon=addon, rating=2, user=user_factory())
        Rating.objects.create(addon=addon, rating=1, user=user_factory())
        # On another addon as well.
        Rating.objects.create(addon=addon2, rating=1, user=user_factory())
        Rating.objects.create(addon=addon2, rating=1, user=user_factory(),
                              body=u'two')
        # addon_rating_aggregates should ignore replies, so let's add one.
        Rating.objects.create(
            addon=addon, rating=5, user=user_factory(), reply_to=new_rating)
        # Make sure old_review is considered old, new_review considered new.
        old_rating.reload()
        new_rating.reload()
        assert old_rating.is_latest is False
        assert new_rating.is_latest is True
        # Make sure total_ratings hasn't been updated yet (because we are
        # mocking Rating.refresh()).
        addon.reload()
        addon2.reload()
        assert addon.total_ratings == 0
        assert addon2.total_ratings == 0
        assert addon.bayesian_rating == 0
        assert addon.average_rating == 0
        assert addon2.bayesian_rating == 0
        assert addon2.average_rating == 0
        assert addon.text_ratings_count == 0
        assert addon2.text_ratings_count == 0
        # Trigger the task and test results.
        addon_rating_aggregates([addon.pk, addon2.pk])
        addon.reload()
        addon2.reload()
        assert addon.total_ratings == 4
        assert addon2.total_ratings == 2
        assert addon.bayesian_rating == 1.9821428571428572
        assert addon.average_rating == 2.25
        assert addon2.bayesian_rating == 1.375
        assert addon2.average_rating == 1.0
        assert addon.text_ratings_count == 2
        assert addon2.text_ratings_count == 1
        # Trigger the task with a single add-on.
        Rating.objects.create(addon=addon2, rating=5, user=user_factory(),
                              body=u'xxx')
        addon2.reload()
        assert addon2.total_ratings == 2
        addon_rating_aggregates(addon2.pk)
        addon2.reload()
        assert addon2.total_ratings == 3
        assert addon2.text_ratings_count == 2
        assert addon.bayesian_rating == 1.9821428571428572
        assert addon.average_rating == 2.25
        assert addon2.bayesian_rating == 1.97915
        assert addon2.average_rating == 2.3333
|
of the layer (IGNORED)
Returns:
int: index of embedding layer
"""
return 0
def _recompile_model(self, emb_layer_idx):
    """Rebuild the model without its embedding layer.

    Pops the embedding layer, rebuilds the layer that followed it so it
    accepts pre-computed embedding vectors directly, and recompiles.

    Args:
        emb_layer_idx (int): index of the embedding layer

    Returns:
        void:

    Note:
        modifies `self._model` in place
    """
    layers = self._model.layers
    # After the first pop, the former post-embedding layer sits at
    # emb_layer_idx, so the second pop removes that one.
    emb_layer = layers.pop(emb_layer_idx)
    first_layer = layers.pop(emb_layer_idx)
    layer_config = first_layer.get_config()
    # New input is raw embedding vectors instead of token indices.
    layer_config["input_shape"] = (None, emb_layer.output_dim)
    new_layer = first_layer.__class__.from_config(
        layer_config
    )
    new_layer.build((emb_layer.input_dim, emb_layer.output_dim))
    # Keep the trained weights of the rebuilt layer.
    new_layer.set_weights(first_layer.get_weights())
    layers.insert(emb_layer_idx, new_layer)
    self._model = self._model.__class__(layers=layers)
    self._model.compile(**self._train_params)
def _init_wemb_funcs(self):
    """Initialize functions for obtaining word embeddings.

    Selects, based on the configured embedding mode (word2vec,
    least-squares projection, or task-specific), which callables to use
    for initializing the embedding matrix and for mapping train/test
    words to embedding indices.
    """
    if self.ndim < 0:
        self.ndim = DFLT_VDIM
    if self._w2v:
        # Pre-trained word2vec embeddings.
        self._embeddings.load()
        self.ndim = self._embeddings.ndim
        self.init_w_emb = self._init_w2v_emb
        self.get_train_w_emb_i = self._get_train_w2v_emb_i
        if self._trained:
            self.get_test_w_emb = self._get_test_w2v_emb
        else:
            self.get_test_w_emb = self._get_train_w2v_emb_i
    elif self._lstsq:
        # word2vec plus a least-squares mapping for unseen test words.
        self._embeddings.load()
        self.ndim = self._embeddings.ndim
        self.init_w_emb = self._init_w2v_emb
        self.get_train_w_emb_i = self._get_train_w2v_emb_i
        if self._trained:
            self.get_test_w_emb = self._get_test_w2v_lstsq_emb
        else:
            self.get_test_w_emb = self._get_train_w2v_emb_i
    else:
        # checked
        # Task-specific embeddings trained from scratch.
        self.init_w_emb = self._init_w_emb
        self.get_train_w_emb_i = self._get_train_w_emb_i
        self.get_test_w_emb = self._get_test_w_emb_i
def _reset_funcs(self):
    """Drop references to the compiled embedding functions.

    Note:
        modifies instance variables in place
    """
    for attr_name in ("get_train_w_emb_i", "get_test_w_emb_i",
                      "init_w_emb"):
        setattr(self, attr_name, None)
def _init_w_emb(self):
    """Create a trainable, task-specific word-embedding layer."""
    vocab_size = len(self._w2i)
    self.W_EMB = Embedding(
        vocab_size,
        self.ndim,
        embeddings_initializer=DFLT_INITIALIZER,
        embeddings_regularizer=l2(L2_COEFF),
    )
def _init_w2v_emb(self):
    """Initialize word2vec embedding matrix.

    Builds `self._embs` from the pre-trained embeddings and wraps it in
    the custom `Word2Vec` keras layer (`self.W_EMB`).
    """
    self._embeddings.load()
    self.ndim = self._embeddings.ndim
    self._embs = np.empty((len(self._w2i), self.ndim))
    # BUG FIX: `np.empty` leaves arbitrary garbage (possibly NaN/inf) in
    # the buffer, and `*= 0` does NOT reliably zero it out (NaN * 0 is
    # still NaN) -- assign zeros explicitly instead.
    self._embs[EMPTY_IDX, :] = 0.
    self._embs[UNK_IDX, :] = 1e-2  # prevent zeros in this row
    for w, i in iteritems(self._w2i):
        if i == EMPTY_IDX or i == UNK_IDX:
            continue
        self._embs[i] = self._embeddings[w]
    # initialize custom keras layer
    self.W_EMB = Word2Vec(self._embs, trainable=self._lstsq)
    # We unload embeddings every time before the training to free more
    # memory. Feel free to comment the line below, if you have plenty of
    # RAM.
    self._embeddings.unload()
def _get_train_w_emb_i(self, a_word):
    """Obtain (or assign) the embedding index of a word at training time.

    Args:
        a_word (str):
            word whose embedding index should be retrieved

    Returns:
        int:
            embedding index of the given word
    """
    a_word = normlex(a_word)
    try:
        return self._w2i[a_word]
    except KeyError:
        pass
    # rare words are randomly mapped to UNK so that the model also
    # learns a meaningful UNK representation
    if self._w_stat[a_word] < 2 and np.random.binomial(1, UNK_PROB):
        return UNK_IDX
    new_idx = self._w2i[a_word] = len(self._w2i)
    return new_idx
def _get_test_w_emb_i(self, a_word):
    """Obtain embedding index for the given word at test time.

    Args:
        a_word (str):
            word whose embedding index should be retrieved

    Returns:
        int:
            embedding index of the given word (UNK for unseen words)
    """
    a_word = normlex(a_word)
    try:
        return self._w2i[a_word]
    except KeyError:
        return UNK_IDX
def _get_train_w2v_emb_i(self, a_word):
    """Obtain embedding index for the given word (word2vec mode).

    Args:
        a_word (str):
            word whose embedding index should be retrieved

    Returns:
        int: embedding index of the given word
    """
    a_word = normlex(a_word)
    known_idx = self._w2i.get(a_word)
    if known_idx is not None:
        return known_idx
    if a_word in self._embeddings:
        # first time we see a word with a pre-trained vector:
        # assign it the next free index
        new_idx = self._w2i[a_word] = len(self._w2i)
        return new_idx
    return UNK_IDX
def _get_test_w2v_emb(self, a_word):
    """Obtain the embedding vector of a word at test time (word2vec mode).

    Args:
        a_word (str):
            word whose embedding should be retrieved

    Returns:
        np.array:
            embedding of the input word
    """
    a_word = normlex(a_word)
    if a_word in self._w2i:
        return self._embs[self._w2i[a_word]]
    # unseen in training: fall back to the raw pre-trained vector,
    # then to the UNK row
    if a_word in self._embeddings:
        return self._embeddings[a_word]
    return self._embs[UNK_IDX]
def _get_test_w2v_lstsq_emb(self, a_word):
    """Obtain the embedding vector of a word at test time (lstsq mode).

    Args:
        a_word (str):
            word whose embedding should be retrieved

    Returns:
        np.array:
            embedding of the input word
    """
    a_word = normlex(a_word)
    if a_word in self._w2i:
        return self._embs[self._w2i[a_word]]
    if a_word in self._embeddings:
        # project the pre-trained vector into the task space with the
        # least-squares mapping
        return np.dot(self._embeddings[a_word],
                      self._lstsq_mtx)
    return self._embs[UNK_IDX]
def _prepare_data(self, train_x, train_y, dev_x, dev_y):
    """Provide train/test split and digitize the data.

    Args:
        train_x (list): training instances
        train_y (list): training labels
        dev_x (list): development instances (a split of the training
            data is taken when empty)
        dev_y (list): development labels

    Returns:
        4-tuple: digitized train_x, one-hot train_y, digitized dev_x,
            one-hot dev_y
    """
    if not dev_x:
        # hold out roughly 1/15th of the training data as a dev set
        n = len(train_x)
        n_dev = int(n / 15)
        idcs = list(range(n))
        np.random.shuffle(idcs)

        def get_split(data, idcs):
            return [data[i] for i in idcs]

        dev_x = get_split(train_x, idcs[:n_dev])
        dev_y = get_split(train_y, idcs[:n_dev])
        train_x = get_split(train_x, idcs[n_dev:])
        train_y = get_split(train_y, idcs[n_dev:])
    # convert tweets to word indices
    train_x, dev_x = self._digitize_data(train_x, dev_x)
    self._n_y = len(set(train_y) | set(dev_y))
    # BUG FIX: pass the total number of classes explicitly; otherwise
    # `to_categorical` infers the width from each array separately and
    # the train/dev one-hot matrices end up with different shapes
    # whenever one of the splits lacks the highest label.
    # (Assumes labels are contiguous 0..n_y-1, as the `_n_y` computation
    # above already does.)
    train_y = to_categorical(np.asarray(train_y), self._n_y)
    dev_y = to_categorical(np.asarray(dev_y), self._n_y)
    return (train_x, train_y, dev_x, dev_y)
def _compute_w_stat(self, train_x):
    """Compute word frequencies on the corpus.

    Args:
        train_x (list[list[str]]): training instances

    Returns:
        void:

    Note:
        modifies instance variables in place
    """
    counts = Counter()
    for tweet in train_x:
        counts.update(tweet)
    self._w_stat = counts
def _digitize_data(self, train_x, dev_x):
    """Convert sequences of words to sequences of word indices.

    Args:
        train_x (list[list[str]]): training set
        dev_x (list[list[str]]): development set

    Returns:
        2-tuple[list, list]: digitized training and development sets
    """
    train_x = [self._tweet2wseq(x) for x in train_x]
    dev_x = [self._tweet2wseq(x) for x in dev_x]
    # word statistics must be available before index assignment: the
    # training lookup consults self._w_stat for rare words
    self._compute_w_stat(train_x)
    # train and dev use different lookup functions, selected earlier by
    # _init_wemb_funcs (dev words never extend the vocabulary)
    self._wseq2emb_ids(train_x, self.get_train_w_emb_i)
    self._wseq2emb_ids(dev_x, self.get_test_w_emb)
    # pad all sequences to a common length
    train_x = self._pad_sequences(train_x)
    dev_x = self._pad_sequences(dev_x)
    return (train_x, dev_x)
def _pad(self, xlen, pad_value=EMPTY_IDX):
"""Add indices o |
tip, which no longer falls back to
# 0-8 for unknown ids.
transport.AMQP_PROTOCOL_HEADER = str_to_bytes("AMQP\x01\x01\x08\x00")
class Connection(amqp.Connection):  # pragma: no cover
    """amqplib connection subclass adding ``basic_return`` dispatch,
    per-read socket timeouts and draining events across all channels."""

    def _dispatch_basic_return(self, channel, args, msg):
        # Decode the Basic.Return frame fields in wire order.
        reply_code = args.read_short()
        reply_text = args.read_shortstr()
        exchange = args.read_shortstr()
        routing_key = args.read_shortstr()
        exc = AMQPChannelException(reply_code, reply_text, (50, 60))
        if channel.events["basic_return"]:
            # Hand the error to registered callbacks instead of raising.
            for callback in channel.events["basic_return"]:
                callback(exc, exchange, routing_key, msg)
        else:
            raise exc

    def __init__(self, *args, **kwargs):
        super(Connection, self).__init__(*args, **kwargs)
        # Route Basic.Return -- method signature (60, 50) -- to our handler.
        self._method_override = {(60, 50): self._dispatch_basic_return}

    def drain_events(self, allowed_methods=None, timeout=None):
        """Wait for an event on any channel."""
        # NOTE(review): allowed_methods is accepted but not forwarded here.
        return self.wait_multi(self.channels.values(), timeout=timeout)

    def wait_multi(self, channels, allowed_methods=None, timeout=None):
        """Wait for an event on a channel."""
        chanmap = dict((chan.channel_id, chan) for chan in channels)
        chanid, method_sig, args, content = self._wait_multiple(
            chanmap.keys(), allowed_methods, timeout=timeout)
        channel = chanmap[chanid]
        if content \
                and channel.auto_decode \
                and hasattr(content, 'content_encoding'):
            # Best-effort decode of the body; on failure the raw body is kept.
            try:
                content.body = content.body.decode(content.content_encoding)
            except Exception:
                pass
        # Prefer our overridden handlers over the channel's method map.
        amqp_method = self._method_override.get(method_sig) or \
            channel._METHOD_MAP.get(method_sig, None)
        if amqp_method is None:
            raise Exception('Unknown AMQP method (%d, %d)' % method_sig)
        if content is None:
            return amqp_method(channel, args)
        else:
            return amqp_method(channel, args, content)

    def read_timeout(self, timeout=None):
        """Read the next method, optionally bounding the socket timeout."""
        if timeout is None:
            return self.method_reader.read_method()
        # Temporarily install the timeout, restoring the previous one after.
        sock = self.transport.sock
        prev = sock.gettimeout()
        sock.settimeout(timeout)
        try:
            try:
                return self.method_reader.read_method()
            except SSLError, exc:
                # http://bugs.python.org/issue10272
                # SSL sockets raise SSLError rather than socket.timeout;
                # normalize so callers only need to catch socket.timeout.
                if "timed out" in str(exc):
                    raise socket.timeout()
                raise
        finally:
            sock.settimeout(prev)

    def _wait_multiple(self, channel_ids, allowed_methods, timeout=None):
        # First serve any method already queued on one of the channels.
        for channel_id in channel_ids:
            method_queue = self.channels[channel_id].method_queue
            for queued_method in method_queue:
                method_sig = queued_method[0]
                # (20, 40) is Channel.Close -- always deliverable.
                if (allowed_methods is None) \
                        or (method_sig in allowed_methods) \
                        or (method_sig == (20, 40)):
                    method_queue.remove(queued_method)
                    method_sig, args, content = queued_method
                    return channel_id, method_sig, args, content
        # Nothing queued, need to wait for a method from the peer
        read_timeout = self.read_timeout
        channels = self.channels
        wait = self.wait
        while 1:
            channel, method_sig, args, content = read_timeout(timeout)
            if (channel in channel_ids) \
                    and ((allowed_methods is None) \
                        or (method_sig in allowed_methods) \
                        or (method_sig == (20, 40))):
                return channel, method_sig, args, content
            # Not the channel and/or method we were looking for. Queue
            # this method for later
            channels[channel].method_queue.append((method_sig, args, content))
            #
            # If we just queued up a method for channel 0 (the Connection
            # itself) it's probably a close method in reaction to some
            # error, so deal with it right away.
            #
            if channel == 0:
                wait()

    def channel(self, channel_id=None):
        """Return the channel with *channel_id*, creating it if needed."""
        try:
            return self.channels[channel_id]
        except KeyError:
            return Channel(self, channel_id)
class Message(base.Message):
    """A message received by the broker.

    .. attribute:: body
        The message body.

    .. attribute:: delivery_tag
        The message delivery tag, uniquely identifying this message.

    .. attribute:: channel
        The channel instance the message was received on.
    """

    def __init__(self, channel, msg, **kwargs):
        # Pull everything kombu cares about out of the amqplib message.
        props = msg.properties
        get = props.get
        super(Message, self).__init__(
            channel,
            body=msg.body,
            delivery_tag=msg.delivery_tag,
            content_type=get("content_type"),
            content_encoding=get("content_encoding"),
            delivery_info=msg.delivery_info,
            properties=props,
            headers=get("application_headers"),
            **kwargs)
class Channel(_Channel, base.StdChannel):
    """amqplib channel adapted to kombu's channel interface."""

    Message = Message
    # NOTE: class-level mutable -- the "basic_return" callback registry is
    # deliberately shared by all Channel instances; Connection's
    # _dispatch_basic_return reads it through any channel.
    events = {"basic_return": []}

    def __init__(self, *args, **kwargs):
        # Track consumer tags operating in no-ack mode.
        self.no_ack_consumers = set()
        super(Channel, self).__init__(*args, **kwargs)

    def prepare_message(self, message_data, priority=None,
            content_type=None, content_encoding=None, headers=None,
            properties=None):
        """Encapsulate data into a AMQP message."""
        # BUG FIX: `properties` defaults to None; splatting None with
        # `**properties` raised TypeError -- fall back to an empty dict.
        return amqp.Message(message_data, priority=priority,
                            content_type=content_type,
                            content_encoding=content_encoding,
                            application_headers=headers,
                            **(properties or {}))

    def message_to_python(self, raw_message):
        """Convert encoded message body back to a Python value."""
        return self.Message(self, raw_message)

    def close(self):
        """Close the channel, always dropping the connection reference."""
        try:
            super(Channel, self).close()
        finally:
            self.connection = None

    def basic_consume(self, *args, **kwargs):
        """Start a consumer, remembering no-ack consumer tags."""
        consumer_tag = super(Channel, self).basic_consume(*args, **kwargs)
        # BUG FIX: use .get() -- "no_ack" is optional for callers; a
        # missing key previously raised KeyError here.
        if kwargs.get("no_ack"):
            self.no_ack_consumers.add(consumer_tag)
        return consumer_tag

    def basic_cancel(self, consumer_tag, **kwargs):
        """Cancel a consumer and forget its no-ack bookkeeping."""
        self.no_ack_consumers.discard(consumer_tag)
        return super(Channel, self).basic_cancel(consumer_tag, **kwargs)
class Transport(base.Transport):
    """kombu transport backed by the amqplib client library."""
    # Connection factory used by establish_connection().
    Connection = Connection

    default_port = DEFAULT_PORT

    # it's very annoying that amqplib sometimes raises AttributeError
    # if the connection is lost, but nothing we can do about that here.
    connection_errors = (AMQPConnectionException,
                         socket.error,
                         IOError,
                         OSError,
                         AttributeError)
    # Errors scoped to a single channel rather than the whole connection.
    channel_errors = (AMQPChannelException, )
def __init__(self, client, **kwargs):
    """Store the client and resolve the broker port to connect to."""
    self.client = client
    # an explicit, truthy override beats the class-level default
    port_override = kwargs.get("default_port")
    self.default_port = port_override or self.default_port
def create_channel(self, connection):
    """Return a newly opened channel on *connection*."""
    channel = connection.channel()
    return channel
def drain_events(self, connection, **kwargs):
    """Delegate event draining to *connection*."""
    result = connection.drain_events(**kwargs)
    return result
def establish_connection(self):
"""Establish connection to the AMQP broker."""
conninfo = self.client
for name, default_value in self.default_connection_params.items():
if not getattr(conninfo, name, None):
setattr(conninfo, name, default_value)
if conninfo.hostname == "localhost":
conninfo.hostname = "127.0.0.1"
conn = self.Connection(host=conninfo.host,
userid=conninfo.userid,
password=conninfo.password,
login_method=conninfo.login_method,
virtual_host=conninfo.virtual_host,
insist=conninfo.insist,
ssl=conninfo.ssl,
|
# -*- coding: utf-8 -*-
# Generated by Djan | go 1.11.3 on 2017-08-14 06:27
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``urls`` ArrayField to ``EdgarDocumentContent``."""

    dependencies = [
        ('edgar', '0007_auto_20170706_2215'),
    ]

    operations = [
        # Nullable array of text fields holding URLs parsed from content.
        migrations.AddField(
            model_name='edgardocumentcontent',
            name='urls',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True), blank=True, help_text='URL we parsed out of the content', null=True, size=None),
        ),
    ]
|
as pickle
import eventlet.queue
import fairywren
import itertools
import logging
import array
def sendBencodedWsgiResponse(env,start_response,responseDict):
    """Emit *responseDict* as a bencoded HTTP 200 WSGI response body."""
    headers = [
        ('Content-Type','text/plain'),
        ('Cache-Control','no-cache'),
    ]
    start_response('200 OK',headers)
    yield bencode.bencode(responseDict)
def getClientAddress(environ):
    """Return the peer's IP, honoring X-Forwarded-For when present.

    The last entry of the X-Forwarded-For list is used (the address
    added by the closest proxy); otherwise REMOTE_ADDR is returned.
    """
    forwarded = environ.get('HTTP_X_FORWARDED_FOR')
    if forwarded is None:
        return environ['REMOTE_ADDR']
    return forwarded.split(',')[-1].strip()
def dottedQuadToInt(dq):
    """Convert a dotted-quad IPv4 string into its integer representation.

    @param dq: IPv4 address as a string, e.g. '127.0.0.1'
    @return: the address as an unsigned integer
    @raise ValueError: if *dq* is not a valid IPv4 address
    """
    #Change the peer IP into an integer
    try:
        peerIp = socket.inet_aton(dq)
    except socket.error:
        # BUG FIX: the original interpolated `peerIp`, which is unbound
        # when inet_aton fails, turning the intended ValueError into a
        # NameError. Use the input `dq` instead.
        raise ValueError('Not a valid IP address:%s' % dq)
    #Convert from network byte order to integer
    try:
        peerIp, = struct.unpack('!I',peerIp)
    except struct.error:
        raise ValueError('Serious wtf, how did this fail')
    return peerIp
class Tracker(object):
def __init__(self,auth,peers,pathDepth):
self.auth = auth
self.peers = peers
self.pathDepth = pathDepth
self.announceLog = logging.getLogger('fairywren.announce')
self.trackerLog = logging.getLogger('fairywren.tracker')
self.afterAnnounce = []
self.trackerLog.info('Created')
def addAfterAnnounce(self,callback):
self.afterAnnounce.append(callback)
def getScrape(self,info_hashes):
"""Return a dictionary object that contains a tracker scrape.
@param info_hashes: list on info_hashes to include in the scrape
"""
retval = {}
retval['files'] = {}
for info_hash in info_hashes:
result = {}
result['downloaded'] = 0
result['complete'] = self.peers.getNumberOfSeeds(info_hash)
result['incomplete'] = self.peers.getNumberOfLeeches(info_hash)
retval['files'][info_hash] = result
return retval
def announce(self,env,start_response):
#Extract and normalize the path
#Posix path may not be the best approach here but
#no alternate has been found
pathInfo = posixpath.normpath(env['PATH_INFO'])
#Split the path into components. Drop the first
#since it should always be the empty string
pathComponents = pathInfo.split('/')[1+self.pathDepth:]
#A SHA512 encoded in base64 is 88 characters
#but the last two are always '==' so
#86 is used here
if len(pathComponents) !=2 or len(pathComponents[0]) != 86 or pathComponents[1] != 'announce':
return vanilla.http_error(404,env,start_response)
#Only GET requests are valid
if env['REQUEST_METHOD'] != 'GET':
return vanilla.http_error(405,env,start_response)
#Add the omitted equals signs back in
secretKey = pathComponents[0] + '=='
#base64 decode the secret key
try:
secretKey = base64.urlsafe_b64decode(secretKey)
except TypeError:
return vanilla.http_error(404,env,start_response)
#Extract the IP of the peer
peerIp = getClientAddress(env)
peerIpAsString = peerIp
try:
peerIp = dottedQuadToInt(peerIp)
except ValueError:
return vanilla.http_error(500,env,start_response)
#Parse the query string. Absence indicates error
if 'QUERY_STRING' not in env:
return vanilla.http_error(400,env,start_response)
query = urlparse.parse_qs(env['QUERY_STRING'])
#List of tuples. Each tuple is
#
#Parameter name
#default value (if any)
#type conversion, side-effect free callable
params = []
def validateInfoHash(info_hash):
#Info hashes are a SHA1 hash, and are always 20 bytes
if len(info_hash) != 20:
raise ValueError("Length " + str(len(info_hash)) + ' not acceptable')
return info_hash
params.append(('info_hash',None,validateInfoHash))
def validatePeerId(peer_id):
#Peer IDs are a string chosen by the peer to identify itself
#and are always 20 bytes
if len(peer_id) != 20:
raise ValueError("Improper Length")
return peer_id
params.append(('peer_id',None,validatePeerId))
def validatePort(port):
port = int(port)
#Ipv4 ports should not be higher than this value
if port > 2 ** 16 - 1 or port <= 0:
raise ValueError("Port outside of range")
return port
def validateByteCount(byteCount):
byteCount = int(byteCount)
if byteCount < 0:
raise ValueError('byte count cannot be negative')
return byteCount
params.append(('port',None,validatePort))
params.append(('uploaded',None,validateByteCount))
params.append(('downloaded',None,validateByteCount))
params.append(('left',None,validateByteCount))
#If the client doesn't specify the compact parameter, it is
#safe to assume that compact responses are understood. So a
#default value of 1 is used. Additionally, any non zero
#value provided assumes the client wants a compact response
params.append(('compact',1,int))
def validateEvent(event):
event = event.lower()
if event not in ['started','stopped','completed']:
raise ValueError("Unknown event")
return event
params.append(('event','update',validateEvent))
maxNumWant = 35
def limitNumWant(numwant):
numwant = int(numwant)
if numwant < 0:
raise ValueError('numwant cannot be negative')
numwant = min(numwant,maxNumWant)
return numwant
params.append(('numwant',maxNumWant,limitNumWant))
#Dictionary holding parameters to query
p = dict()
#Use the params to generate the parameters
for param,defaultValue,typeConversion in params:
#If the parameter is in the query, extract the first
#occurence and type convert if requested
if param in query:
p[param] = query[param][0]
if typeConversion:
try:
p[param] = typeConversion(p[param])
except ValueError as e:
return vanilla.http_error(400,env,start_response,msg='bad value for ' + param)
#If the parameter is not in the query, then
#use a default value is present. Otherwise this is an error
else:
if defaultValue == None:
return vanilla.http_error(400,env,start_response,msg='missing ' + param)
p[param] = defaultValue
#Make sure the secret key is valid
userId = self.auth.authenticateSecretKey(secretKey)
if userId == None:
response = {}
response['failure reason'] = 'failed to authenticate secret key'
return sendBencodedWsgiResponse(env,start_response,response)
#Make sure the info hash is allowed
torrentId = self.auth.authorizeInfoHash(p['info_hash'])
if torrentId == None:
response = {}
response['failure reason'] = 'unauthorized info hash'
return sendBencodedWsgiResponse(env,start_response,response)
#Construct the peers entry
peer = peers.Peer(peerIp,p['port'],p['left'])
#This is the basic response format
response = {}
response['interval'] = 5*60
response['complete'] = 0
response['incomplete'] = 0
response['peers'] = []
#This value is set to True if the number of seeds or leeches
#changes in the course of processing this result
change = False
#This value is set to true if the peer is added, false if removed
addPeer = False
#For all 3 cases here just return peers
if p['event'] in ['started','completed','update']:
response['complete'] = self.peers.getNumberOfLeeches(p['info_hash'])
response['incomplete'] = self.peers.getNumberOfSeeds(p['info_hash'])
change = self.peers.updatePeer(p['info_hash'],peer)
if change:
addPeer = True
peersForResponse = self.peers.getPeers(p['info_hash'])
#Return a compact response or a traditional response
#based on what is requested
if p['compact'] != 0:
peerStruct = struct.Struct('!IH')
maxSize = p['numwant'] * peerStruct.size
peersBuffer = array.array('c')
for peer in itertools.islice(peersForResponse,0,p['numwant']):
peersBuffer.fromstring(peerStruct.pack(peer.ip,peer.port))
response['peers'] = peersB | uffer.tostring()
else:
for peer in itertools.islice(peersForResponse,0,p['numwant']):
#For non-compact responses, use a bogus peerId. Hardly any client
#uses this type of response anyways. There is no real meaning to the
#peer ID except informal agreements.
response['peers'].append({'peer id':'0'*20,'ip':socket.inet_ntoa(struct.pack | ('!I',peer.ip)),'port':peer.port})
#For stop event, just remove the |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021-2022 Daniel Estevez <daniel@destevez.net>
#
# This file is part of gr-satellites
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
from gnuradio import gr, digital
import pmt
from ...hier.sync_to_pdu_packed import sync_to_pdu_packed
from ...hdlc_deframer import hdlc_crc_check
# HDLC 0x7e flag
_syncword = '01111110'
class crop_and_check_crc(gr.basic_block):
    """
    Helper block to crop using the final 0x7e flag and check CRC-16

    Each 0x7e flag in the packet is tried in turn as the end-of-frame
    marker; the first crop that passes the CRC check is published.
    """
    def __init__(self):
        gr.basic_block.__init__(
            self,
            name='crop_and_check_crc',
            in_sig=[],
            out_sig=[])
        self.crc_check = hdlc_crc_check()
        self.message_port_register_in(pmt.intern('in'))
        self.set_msg_handler(pmt.intern('in'), self.handle_msg)
        self.message_port_register_out(pmt.intern('out'))

    def handle_msg(self, msg_pmt):
        """Crop the PDU at each 0x7e flag and publish the first CRC-valid frame."""
        msg = pmt.cdr(msg_pmt)
        if not pmt.is_u8vector(msg):
            print('[ERROR] Received invalid message type. Expected u8vector')
            return
        packet = pmt.u8vector_elements(msg)
        start = 0
        while True:
            try:
                idx = packet[start:].index(0x7e)
            except ValueError:
                # no more flags: nothing valid found
                return
            start += idx + 1
            # BUG FIX: `idx` is relative to `packet[start:]`, but the
            # original used it as an absolute index (`packet[:idx]`),
            # so every candidate after the first flag was cropped at the
            # wrong position. `start - 1` is the absolute offset of the
            # flag just found.
            p = packet[:start - 1]
            if self.crc_check.fcs_ok(p):
                p = p[:-2]  # drop the two CRC bytes
                self.message_port_pub(
                    pmt.intern('out'),
                    pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(p), p)))
                return
class yusat_deframer(gr.hier_block2):
    """
    Hierarchical block to deframe YUSAT ad-hoc AX.25-like protocol

    The input is a float stream of soft symbols. The output are PDUs
    with YUSAT frames.

    Args:
        options: Options from argparse
    """
    def __init__(self, options=None):
        gr.hier_block2.__init__(
            self,
            'yusat_deframer',
            gr.io_signature(1, 1, gr.sizeof_float),
            gr.io_signature(0, 0, 0))
        self.message_port_register_hier_out('out')

        # hard-decision slicing of the soft symbols
        self.slicer = digital.binary_slicer_fb()
        # We hope that 256 bytes is long enough to contain the full packet
        self.deframer = sync_to_pdu_packed(
            packlen=256, sync=_syncword, threshold=0)
        # crops each candidate at its closing 0x7e flag and checks CRC-16
        self.crop = crop_and_check_crc()

        # stream path: soft symbols -> bits -> fixed-size PDUs
        self.connect(self, self.slicer, self.deframer)
        # message path: raw PDUs -> CRC-validated frames -> block output
        self.msg_connect((self.deframer, 'out'), (self.crop, 'in'))
        self.msg_connect((self.crop, 'out'), (self, 'out'))
|
# Generated by Django 3.0.5 on 2020-04-17 14:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import easy_thumbnails.fields
import userena.models
class Migration(migrations.Migration):
    """Initial schema for the accounts app: Organization, UserProfile,
    UserAuthorization and EmailDomain."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Organization',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('primary_contact', models.ForeignKey(help_text='Contact for org.', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'unique_together': {('name', 'primary_contact')},
            },
        ),
        # Userena-based profile, one per user.
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mugshot', easy_thumbnails.fields.ThumbnailerImageField(blank=True, help_text='A personal image displayed in your profile.', upload_to=userena.models.upload_to_mugshot, verbose_name='mugshot')),
                ('privacy', models.CharField(choices=[('open', 'Open'), ('registered', 'Registered'), ('closed', 'Closed')], default='registered', help_text='Designates who can view your profile.', max_length=15, verbose_name='privacy')),
                ('email', models.CharField(blank=True, max_length=250, null=True)),
                ('score', models.IntegerField(default=1)),
                ('last_activity', models.DateTimeField(auto_now_add=True)),
                ('openbadge_id', models.CharField(blank=True, max_length=250, null=True)),
                ('organization', models.ForeignKey(blank=True, help_text="If '------', no Organization records share the email domain.", null=True, on_delete=django.db.models.deletion.PROTECT, to='accounts.Organization')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user')),
            ],
            options={
                'permissions': (('view_profile', 'Can view profile'),),
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete'),
            },
        ),
        # Tracks whether (and when) a member was granted access.
        migrations.CreateModel(
            name='UserAuthorization',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('authorized', models.BooleanField(help_text='Check this to approve member access.')),
                ('permission_granted_on', models.DateTimeField(auto_now_add=True)),
                ('user_accepted_terms_on', models.DateTimeField(blank=True, null=True)),
                ('permissions_granted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='permissions_granted_by', to=settings.AUTH_USER_MODEL)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('user_profile', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='accounts.UserProfile')),
            ],
        ),
        # Maps email domains to organizations for auto-matching.
        migrations.CreateModel(
            name='EmailDomain',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email_domain', models.CharField(max_length=50)),
                ('organization', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='accounts.Organization')),
            ],
        ),
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-13 11:29
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
    """Initial schema for admin_tools_stats: DashboardStats and
    DashboardStatsCriteria."""

    initial = True

    dependencies = [
    ]

    operations = [
        # One graph/box on the admin dashboard.
        migrations.CreateModel(
            name='DashboardStats',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('graph_key', models.CharField(help_text='it needs to be one word unique. ex. auth, mygraph', max_length=90, unique=True, verbose_name='graph key')),
                ('graph_title', models.CharField(db_index=True, help_text='heading title of graph box', max_length=90, verbose_name='graph title')),
                ('model_app_name', models.CharField(help_text='ex. auth / dialer_cdr', max_length=90, verbose_name='app name')),
                ('model_name', models.CharField(help_text='ex. User', max_length=90, verbose_name='model name')),
                ('date_field_name', models.CharField(help_text='ex. date_joined', max_length=90, verbose_name='date field name')),
                ('operation_field_name', models.CharField(blank=True, help_text='The field you want to aggregate, ex. amount', max_length=90, null=True, verbose_name='Operate field name')),
                ('type_operation_field_name', models.CharField(blank=True, choices=[(b'Count', b'Count'), (b'Sum', b'Sum'), (b'Avg', b'Avg'), (b'Max', b'Max'), (b'Min', b'Min'), (b'StdDev', b'StdDev'), (b'Variance', b'Variance')], help_text='choose the type operation what you want to aggregate, ex. Sum', max_length=90, null=True, verbose_name='Choose Type operation')),
                ('is_visible', models.BooleanField(default=True, verbose_name='visible')),
                ('created_date', models.DateTimeField(auto_now_add=True, verbose_name='date')),
                ('updated_date', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'dashboard_stats',
                'verbose_name': 'dashboard stats',
                'verbose_name_plural': 'dashboard stats',
            },
        ),
        # Reusable filter criteria that can be attached to several graphs.
        migrations.CreateModel(
            name='DashboardStatsCriteria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('criteria_name', models.CharField(db_index=True, help_text='it needs to be one word unique. Ex. status, yesno', max_length=90, verbose_name='criteria name')),
                ('criteria_fix_mapping', jsonfield.fields.JSONField(blank=True, help_text='a JSON dictionary of key-value pairs that will be used for the criteria', null=True, verbose_name='fixed criteria / value')),
                ('dynamic_criteria_field_name', models.CharField(blank=True, help_text='ex. for call records - disposition', max_length=90, null=True, verbose_name='dynamic criteria field name')),
                ('criteria_dynamic_mapping', jsonfield.fields.JSONField(blank=True, help_text='a JSON dictionary of key-value pairs that will be used for the criteria', null=True, verbose_name='dynamic criteria / value')),
                ('created_date', models.DateTimeField(auto_now_add=True, verbose_name='date')),
                ('updated_date', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'dash_stats_criteria',
                'verbose_name': 'dashboard stats criteria',
                'verbose_name_plural': 'dashboard stats criteria',
            },
        ),
        migrations.AddField(
            model_name='dashboardstats',
            name='criteria',
            field=models.ManyToManyField(blank=True, to='admin_tools_stats.DashboardStatsCriteria'),
        ),
    ]
|
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from neutron.agent.common import config
from neutron.plugins.common import constants as p_const
from neutron.plugins.openvswitch.common import constants
# Empty-list sentinels: "nothing configured" defaults for the list options
# registered below.
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_VLAN_RANGES = []
DEFAULT_TUNNEL_RANGES = []
DEFAULT_TUNNEL_TYPES = []
# Options registered under the [OVS] section: bridge names, tunnel
# endpoints and network-type configuration for the OVS agent.
ovs_opts = [
    cfg.StrOpt('integration_bridge', default='br-int',
               help=_("Integration bridge to use.")),
    cfg.BoolOpt('enable_tunneling', default=False,
                help=_("Enable tunneling support.")),
    cfg.StrOpt('tunnel_bridge', default='br-tun',
               help=_("Tunnel bridge to use.")),
    cfg.StrOpt('int_peer_patch_port', default='patch-tun',
               help=_("Peer patch port in integration bridge for tunnel "
                      "bridge.")),
    cfg.StrOpt('tun_peer_patch_port', default='patch-int',
               help=_("Peer patch port in tunnel bridge for integration "
                      "bridge.")),
    cfg.StrOpt('local_ip', default='',
               help=_("Local IP address of GRE tunnel endpoints.")),
    cfg.ListOpt('bridge_mappings',
                default=DEFAULT_BRIDGE_MAPPINGS,
                help=_("List of <physical_network>:<bridge>. "
                       "Deprecated for ofagent.")),
    cfg.StrOpt('tenant_network_type', default='local',
               help=_("Network type for tenant networks "
                      "(local, vlan, gre, vxlan, or none).")),
    cfg.ListOpt('network_vlan_ranges',
                default=DEFAULT_VLAN_RANGES,
                help=_("List of <physical_network>:<vlan_min>:<vlan_max> "
                       "or <physical_network>.")),
    cfg.ListOpt('tunnel_id_ranges',
                default=DEFAULT_TUNNEL_RANGES,
                help=_("List of <tun_min>:<tun_max>.")),
    cfg.StrOpt('tunnel_type', default='',
               help=_("The type of tunnels to use when utilizing tunnels, "
                      "either 'gre' or 'vxlan'.")),
    cfg.BoolOpt('use_veth_interconnection', default=False,
                help=_("Use veths instead of patch ports to interconnect the "
                       "integration bridge to physical bridges.")),
    # added by jiahaojie 00209498
    # FIX: help strings below were missing _() translation markers used by
    # every other option, and contained user-facing typos
    # ("Visual Machine" -> "Virtual Machine", "enbale" -> "enable").
    cfg.StrOpt('user_interface_driver',
               default='neutron.agent.linux.interface.OVSInterfaceDriver',
               help=_('Driver used to create user devices.')),
    cfg.StrOpt('vm_interface',
               default='eth0',
               help=_('Virtual Machine Device used to get user port.')),
    cfg.IntOpt('vm_device_mtu', default=1350,
               help=_('MTU setting for device.')),
    cfg.BoolOpt('enable_vtep',
                default=False,
                help=_('use to enable vtep function.')),
]
# Options registered under the [AGENT] section: polling, tunneling and
# l2population behavior of the running agent.
agent_opts = [
    cfg.IntOpt('polling_interval', default=2,
               help=_("The number of seconds the agent will wait between "
                      "polling for local device changes.")),
    cfg.BoolOpt('minimize_polling',
                default=True,
                help=_("Minimize polling by monitoring ovsdb for interface "
                       "changes.")),
    cfg.IntOpt('ovsdb_monitor_respawn_interval',
               default=constants.DEFAULT_OVSDBMON_RESPAWN,
               help=_("The number of seconds to wait before respawning the "
                      "ovsdb monitor after losing communication with it.")),
    cfg.ListOpt('tunnel_types', default=DEFAULT_TUNNEL_TYPES,
                help=_("Network types supported by the agent "
                       "(gre and/or vxlan).")),
    cfg.IntOpt('vxlan_udp_port', default=p_const.VXLAN_UDP_PORT,
               help=_("The UDP port to use for VXLAN tunnels.")),
    cfg.IntOpt('veth_mtu',
               help=_("MTU size of veth interfaces")),
    cfg.BoolOpt('l2_population', default=False,
                help=_("Use ML2 l2population mechanism driver to learn "
                       "remote MAC and IPs and improve tunnel scalability.")),
    cfg.BoolOpt('arp_responder', default=False,
                help=_("Enable local ARP responder if it is supported. "
                       "Requires OVS 2.1 and ML2 l2population driver. "
                       "Allows the switch (when supporting an overlay) "
                       "to respond to an ARP request locally without "
                       "performing a costly ARP broadcast into the overlay.")),
    cfg.BoolOpt('dont_fragment', default=True,
                help=_("Set or un-set the don't fragment (DF) bit on "
                       "outgoing IP packet carrying GRE/VXLAN tunnel.")),
    cfg.BoolOpt('enable_distributed_routing', default=False,
                help=_("Make the l2 agent run in DVR mode.")),
    cfg.ListOpt('l2pop_network_types', default=['flat', 'vlan', 'vxlan'],
                help=_("L2pop network types supported by the agent.")),
    cfg.BoolOpt('enable_port_multi_device', default=False,
                help=_("Port has multiple devices on bridge for XenServer.")),
]
# Options registered under the [qos] section.
qos_opts = [
    cfg.BoolOpt('enable_dscp_vlanpcp_mapping', default=False,
                help=_("Enable dscp map vlan pcp")),
]
# Register all option groups with the global configuration object at
# import time, plus the shared agent-state and root-helper options.
cfg.CONF.register_opts(ovs_opts, "OVS")
cfg.CONF.register_opts(agent_opts, "AGENT")
cfg.CONF.register_opts(qos_opts, "qos")
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
|
#author CongThuc 12/13/2015
import MySQLdb
from database.DBHelper import DBHelper
from database.DBConnectManager import DBConnectManager
from resourcefactories.AnalysisInitDefaultValue import AnalysisInitDefaultValue
db_helper = DBHelper()
class DataUtils:
    """Read-only query helpers for the analysis database.

    Every getter returns a list of rows, or an empty list when the
    connector is missing or the query fails (errors are printed,
    preserving the original best-effort behaviour).
    """

    def __init__(self):
        print("init DataUtils")

    def _fetch_all(self, db_connector, query):
        # Shared best-effort SELECT helper used by the simple getters.
        rows = []
        if db_connector is not None:
            try:
                rows = db_helper.get_Data(db_connector, db_connector.cursor(), query)
            except Exception as e:
                print(e)
        return rows

    def get_ActivitiesFromDB(self, db_connector):
        """Activities recorded in the analysis database."""
        return self._fetch_all(db_connector, "select * from activities")

    def get_ActivitiesFromXML(self, db_connector):
        """Activities declared in the manifest XML."""
        return self._fetch_all(db_connector, "select * from activities_from_xml")

    def get_PermissionFromDB(self, db_connector):
        """Permissions recorded in the analysis database."""
        return self._fetch_all(db_connector, "select * from permissions")

    def get_PermissionFromXML(self, db_connector):
        """Permissions declared in the manifest XML."""
        return self._fetch_all(db_connector, "select * from permissions_from_xml")

    def get_PermissionAnalysis(self, db_connector):
        """Permission usage joined with its source/destination call sites."""
        query = "select permission_name, srcClass, srcMethod, srcMethodDes, dstClass, dstMethod, dstMethodDes " \
                "from permission_analysis P1 INNER JOIN permissions P2 ON P1.permission_id = P2.id;"
        return self._fetch_all(db_connector, query)

    def get_PackageFilter_Activity(self, db_connector, activities):
        """Rows of package_analysis whose srcClass matches any activity.

        `activities` is a sequence of rows whose second column is the
        activity class name (assumption from the original indexing —
        confirm against the activities table schema).
        """
        packages = []
        if activities:
            for activity in activities:
                if db_connector is not None:
                    try:
                        # Parameterized LIKE: the driver escapes the value.
                        select_stmt = "SELECT * FROM package_analysis WHERE srcClass like %(ac_name)s"
                        cursor = db_connector.cursor()
                        cursor.execute(select_stmt, {'ac_name': "%" + activity[1] + "%"})
                        packages.extend(cursor.fetchall())
                    except Exception as e:
                        print(e)
        return packages

    def get_SensitiveAPIs(self, db_connector, table):
        """Call-graph rows from `table` whose dstMethod matches a sensitive API.

        NOTE(review): `table` is interpolated into the SQL text (table
        names cannot be parameterized) — only pass trusted identifiers.
        """
        packages = []
        if db_connector is not None:
            for sen_APIs in AnalysisInitDefaultValue.Sensitive_APIs:
                try:
                    select_stmt = ("SELECT package_id, dstClass, dstMethod, dstMethodDes, "
                                   "srcClass, srcMethod, srcMethodDes FROM " + table +
                                   " WHERE dstMethod like %(sen_APIs)s")
                    cursor = db_connector.cursor()
                    cursor.execute(select_stmt, {'sen_APIs': "%" + sen_APIs + "%"})
                    packages.extend(cursor.fetchall())
                except Exception as e:
                    print(e)
        return packages

    def get_SensitiveAPIsFromDB(self, db_connector):
        """Known sensitive API list."""
        return self._fetch_all(db_connector, "select * from sensitive_apis")
dels = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'package.category': {
'Meta': {'ordering': "['title']", 'object_name': 'Category'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'show_pypi': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': "'50'"}),
'title_plural': ('django.db.models.fields.CharField', [], {'max_length': "'50'", 'blank': 'True'})
},
'package.commit': {
'Meta': {'ordering': "['-commit_date']", 'object_name': 'Commit'},
'commit_date': ('django.db.models.fields.DateTimeField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Package']"})
},
'package.package': {
'Meta': {'ordering': "['title']", 'object_name': 'Package'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Category']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modifier'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'participants': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'pypi_downloads': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pypi_home_page': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'pypi_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'related_packages': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_packages_rel_+'", 'blank': 'True', 'to': "orm['package.Package']"}),
'repo_commits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'repo_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'repo_forks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'repo_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'}),
'repo_watchers': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': "'100'"}),
'usage': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
},
'package.packageexample': {
'Meta': {'ordering': "['title']", 'object_name': 'PackageExample'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'package': | ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Package']"}),
'title' | : ('django.db.models.fields.CharField', [], {'max_length': "'100'"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'package.version': {
'Meta': {'ordering': "['-created']", 'object_name': 'Version'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'downloads': ('django.db.models.fields.IntegerField', |
#!/usr/bin/python
"""Run amsDecode over every .bin file in the current directory.

Decoder output goes to processFile.log; a stale specData.csv is
removed first so the decoder starts fresh.
"""
import os
import subprocess

amsDecode = "/usr/local/bin/amsDecode"
specDataFile = "specData.csv"

if os.path.exists(specDataFile):
    os.remove(specDataFile)

# `with` guarantees the log is flushed and closed: the original called
# `f.close` without parentheses, which never actually closed the file.
with open("processFile.log", "w") as f:
    for fileName in os.listdir('.'):
        if fileName.endswith('.bin'):
            # NOTE(review): "-t -95" is passed as a single argv element;
            # if amsDecode expects separate tokens use "-t", "-95" — confirm.
            cmnd = [amsDecode,
                    fileName,
                    "-t -95",
                    "-b",
                    "68",
                    "468"]
            subprocess.call(cmnd, stdout=f)
|
Crypt2Selected", "B2", True),
("CryptoCaidTandbergSelected", "TB", True),
)
self.ecmdata = GetEcmInfo()
self.feraw = self.fedata = self.updateFEdata = None
def getCryptoInfo(self, info):
if info.getInfo(iServiceInformation.sIsCrypted) == 1:
data = self.ecmdata.getEcmData()
self.current_source = data[0]
| self.current_caid = data[1]
self.current_provid = data[2]
self.current_ecmpid = data[3 | ]
else:
self.current_source = ""
self.current_caid = "0"
self.current_provid = "0"
self.current_ecmpid = "0"
def createCryptoBar(self, info):
res = ""
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
for caid_entry in self.caid_data:
if int(caid_entry[0], 16) <= int(self.current_caid, 16) <= int(caid_entry[1], 16):
color="\c0000??00"
else:
color = "\c007?7?7?"
try:
for caid in available_caids:
if int(caid_entry[0], 16) <= caid <= int(caid_entry[1], 16):
color="\c00????00"
except:
pass
if color != "\c007?7?7?" or caid_entry[4]:
if res: res += " "
res += color + caid_entry[3]
res += "\c00??????"
return res
def createCryptoSpecial(self, info):
caid_name = "FTA"
try:
for caid_entry in self.caid_data:
if int(caid_entry[0], 16) <= int(self.current_caid, 16) <= int(caid_entry[1], 16):
caid_name = caid_entry[2]
break
return caid_name + ":%04x:%04x:%04x:%04x" % (int(self.current_caid,16), int(self.current_provid,16), info.getInfo(iServiceInformation.sSID), int(self.current_ecmpid,16))
except:
pass
return ""
def createResolution(self, info):
xres = info.getInfo(iServiceInformation.sVideoWidth)
if xres == -1:
return ""
yres = info.getInfo(iServiceInformation.sVideoHeight)
mode = ("i", "p", " ")[info.getInfo(iServiceInformation.sProgressive)]
fps = str((info.getInfo(iServiceInformation.sFrameRate) + 500) / 1000)
return str(xres) + "x" + str(yres) + mode + fps
def createVideoCodec(self, info):
return ("MPEG2", "AVC", "MPEG1", "MPEG4-VC", "VC1", "VC1-SM", "HEVC", "")[info.getInfo(iServiceInformation.sVideoType)]
def createPIDInfo(self, info):
vpid = info.getInfo(iServiceInformation.sVideoPID)
apid = info.getInfo(iServiceInformation.sAudioPID)
pcrpid = info.getInfo(iServiceInformation.sPCRPID)
sidpid = info.getInfo(iServiceInformation.sSID)
tsid = info.getInfo(iServiceInformation.sTSID)
onid = info.getInfo(iServiceInformation.sONID)
if vpid < 0 : vpid = 0
if apid < 0 : apid = 0
if pcrpid < 0 : pcrpid = 0
if sidpid < 0 : sidpid = 0
if tsid < 0 : tsid = 0
if onid < 0 : onid = 0
return "%d-%d:%05d:%04d:%04d:%04d" % (onid, tsid, sidpid, vpid, apid, pcrpid)
def createTransponderInfo(self, fedata, feraw):
if not feraw:
return ""
elif "DVB-T" in feraw.get("tuner_type"):
tmp = addspace(self.createChannelNumber(fedata, feraw)) + addspace(self.createFrequency(feraw)) + addspace(self.createPolarization(fedata))
else:
tmp = addspace(self.createFrequency(feraw)) + addspace(self.createPolarization(fedata))
return addspace(self.createTunerSystem(fedata)) + tmp + addspace(self.createSymbolRate(fedata, feraw)) + addspace(self.createFEC(fedata, feraw)) \
+ addspace(self.createModulation(fedata)) + addspace(self.createOrbPos(feraw))
def createFrequency(self, feraw):
frequency = feraw.get("frequency")
if frequency:
if "DVB-T" in feraw.get("tuner_type"):
return str(int(frequency / 1000000. + 0.5))
else:
return str(int(frequency / 1000 + 0.5))
return ""
def createChannelNumber(self, fedata, feraw):
return "DVB-T" in feraw.get("tuner_type") and fedata.get("channel") or ""
def createSymbolRate(self, fedata, feraw):
if "DVB-T" in feraw.get("tuner_type"):
bandwidth = fedata.get("bandwidth")
if bandwidth:
return bandwidth
else:
symbolrate = fedata.get("symbol_rate")
if symbolrate:
return str(symbolrate / 1000)
return ""
def createPolarization(self, fedata):
return fedata.get("polarization_abbreviation") or ""
def createFEC(self, fedata, feraw):
if "DVB-T" in feraw.get("tuner_type"):
code_rate_lp = fedata.get("code_rate_lp")
code_rate_hp = fedata.get("code_rate_hp")
if code_rate_lp and code_rate_hp:
return code_rate_lp + "-" + code_rate_hp
else:
fec = fedata.get("fec_inner")
if fec:
return fec
return ""
def createModulation(self, fedata):
if fedata.get("tuner_type") == _("Terrestrial"):
constellation = fedata.get("constellation")
if constellation:
return constellation
else:
modulation = fedata.get("modulation")
if modulation:
return modulation
return ""
def createTunerType(self, feraw):
return feraw.get("tuner_type") or ""
def createTunerSystem(self, fedata):
return fedata.get("system") or ""
def createOrbPos(self, feraw):
orbpos = feraw.get("orbital_position")
if orbpos > 1800:
return str((float(3600 - orbpos)) / 10.0) + "\xc2\xb0 W"
elif orbpos > 0:
return str((float(orbpos)) / 10.0) + "\xc2\xb0 E"
return ""
def createOrbPosOrTunerSystem(self, fedata,feraw):
orbpos = self.createOrbPos(feraw)
if orbpos is not "":
return orbpos
return self.createTunerSystem(fedata)
	def createProviderName(self, info):
		"""Return the service provider name string from the service info."""
		return info.getInfoString(iServiceInformation.sProvider)
@cached
def getText(self):
service = self.source.service
if service is None:
return ""
info = service and service.info()
if not info:
return ""
if self.type == "CryptoInfo":
self.getCryptoInfo(info)
if config.usage.show_cryptoinfo.value:
return addspace(self.createCryptoBar(info)) + self.createCryptoSpecial(info)
else:
return addspace(self.createCryptoBar(info)) + addspace(self.current_source) + self.createCryptoSpecial(info)
if self.type == "CryptoBar":
self.getCryptoInfo(info)
return self.createCryptoBar(info)
if self.type == "CryptoSpecial":
self.getCryptoInfo(info)
return self.createCryptoSpecial(info)
if self.type == "ResolutionString":
return self.createResolution(info)
if self.type == "VideoCodec":
return self.createVideoCodec(info)
if self.updateFEdata:
feinfo = service.frontendInfo()
if feinfo:
self.feraw = feinfo.getAll(config.usage.infobar_frontend_source.value == "settings")
if self.feraw:
self.fedata = ConvertToHumanReadable(self.feraw)
feraw = self.feraw
if not feraw:
feraw = info.getInfoObject(iServiceInformation.sTransponderData)
fedata = ConvertToHumanReadable(feraw)
else:
fedata = self.fedata
if self.type == "All":
self.getCryptoInfo(info)
if config.usage.show_cryptoinfo.value:
return addspace(self.createProviderName(info)) + self.createTransponderInfo(fedata, feraw) + "\n" \
+ addspace(self.createCryptoBar(info)) + addspace(self.createCryptoSpecial(info)) + "\n" \
+ addspace(self.createPIDInfo(info)) + addspace(self.createVideoCodec(info)) + self.createResolution(info)
else:
return addspace(self.createProviderName(info)) + self.createTransponderInfo(fedata, feraw) + "\n" \
+ addspace(self.createCryptoBar(info)) + self.current_source + "\n" \
+ addspace(self.createCryptoSpecial(info)) + addspace(self.createVideoCodec(info)) + self.createResolution(info)
if self.type == "PIDInfo":
return self.createPIDInfo(info)
if not feraw:
return ""
if self.type == "ServiceInfo":
return addspace(self.createProviderName(info)) + addspace(self.createTunerSystem(fedata)) + addspace(self.createFrequency(feraw)) + addspace(self.createPolarization(fedata)) \
+ addspace(self.createSymbolRate(fedata, feraw)) + addspace(self.createFEC(fedata, feraw)) + addspace(self.createModulation(fedata)) + addspace(self.createOrbPos(feraw)) \
+ addspace(self.createVideoCodec(info)) + self.createResolution(info)
if self.type == "TransponderInfo":
return self.createTransponderInfo(fedata, feraw)
if self.type == "TransponderFrequency":
return self.createFrequency(feraw)
if self.type == "TransponderSymbolRate":
return self.createSymbolRate(fedata, feraw)
if self.type == "TransponderPolarization":
return self.createPolarization(fedata)
if self.type == "TransponderFEC":
return se |
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from u | nittest import TestCase
from unittest.mo | ck import patch, MagicMock
import sqlalchemy as sa
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
from crate.client.cursor import Cursor
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
class SqlAlchemyBulkTest(TestCase):
    """Verify bulk_save_objects issues a single parameterized INSERT."""

    def setUp(self):
        self.engine = sa.create_engine('crate://')
        Base = declarative_base(bind=self.engine)

        class Character(Base):
            __tablename__ = 'characters'
            name = sa.Column(sa.String, primary_key=True)
            age = sa.Column(sa.Integer)

        self.character = Character
        self.session = Session()

    @patch('crate.client.connection.Cursor', FakeCursor)
    def test_bulk_save(self):
        heroes = [
            self.character(name='Arthur', age=35),
            self.character(name='Banshee', age=26),
            self.character(name='Callisto', age=37),
        ]
        # Simulate a successful bulk execution on the fake cursor.
        fake_cursor.description = ()
        fake_cursor.rowcount = len(heroes)
        fake_cursor.executemany.return_value = [
            {'rowcount': 1},
            {'rowcount': 1},
            {'rowcount': 1},
        ]
        self.session.bulk_save_objects(heroes)

        # Inspect the statement and bound parameters the dialect produced.
        (stmt, bulk_args), _kwargs = fake_cursor.executemany.call_args
        self.assertEqual("INSERT INTO characters (name, age) VALUES (?, ?)", stmt)
        self.assertEqual(
            (('Arthur', 35), ('Banshee', 26), ('Callisto', 37)),
            bulk_args)
|
def is_lazy_user(user):
    """Return True if the passed user is a lazy user."""
    # Anonymous users are not lazy.
    if user.is_anonymous:
        return False
    # Check the user backend. If the lazy signup backend authenticated
    # them, then the user is lazy.
    backend = getattr(user, 'backend', None)
    if backend == 'lazysignup.backends.LazySignupBackend':
        return True
    # Otherwise, fall back to the database. exists() lets the DB stop at
    # the first matching row instead of counting them all.
    from lazysignup.models import LazyUser
    return LazyUser.objects.filter(user=user).exists()
|
from djpcms import test
from djpcms.core.exceptions import AlreadyRegistered
import djpcms
class TestSites(test.TestCase):
    """Site registration and URL normalisation checks."""

    def testMake(self):
        # Re-registering the same site must raise; a distinct route is fine.
        self.assertRaises(AlreadyRegistered, djpcms.MakeSite, __file__)
        site = djpcms.MakeSite(__file__, route='/extra/')
        self.assertEqual(site.route, '/extra/')

    def testClenUrl(self):
        # Requests missing the trailing slash (or with extra slashes)
        # must redirect to the canonical page URL.
        page = self.makepage(bit='test')
        self.assertEqual(page.url, '/test/')
        res = self.get('/test', status=302, response=True)
        self.assertEqual(res['location'], 'http://testserver/test/')
        res = self.get('/test////', status=302, response=True)
        self.assertEqual(res['location'], 'http://testserver/test/')
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from decimal import Decimal
from weboob.deprecated.browser import Page
from web | oob.capabilities import NotAvailable
from weboob.capabilities.pricecomparison import Product, Shop, Price
class IndexPage(Page):
    """Search landing page: CSRF token and the list of fuel products."""

    def get_token(self):
        # Hidden CSRF token required to submit the search form.
        token_input = self.parser.select(
            self.document.getroot(),
            'div#localisation input#recherche_recherchertype__token', 1)
        return token_input.attrib['value']

    def iter_products(self):
        """Yield one Product per fuel radio button."""
        for li in self.parser.select(self.document.getroot(), 'div#choix_carbu ul li'):
            # Renamed from `input` to stop shadowing the builtin.
            radio = li.find('input')
            label = li.find('label')
            product = Product(radio.attrib['value'])
            product.name = unicode(label.text.strip())
            if '&' in product.name:
                # "E10 & SP95" produces a non-supported table.
                continue
            yield product
class ComparisonResultsPage(Page):
    """Result table of the price comparison."""

    def get_product_name(self):
        # The product name sits in the 6th header cell of the result table.
        th = self.document.getroot().cssselect('table#tab_resultat tr th')
        if th and len(th) == 9:
            return u'%s' % th[5].find('a').text

    def iter_results(self, product=None):
        """Yield one Price (with its Shop) per 9-column result row.

        BUG FIX: the original dereferenced `product` unconditionally even
        though the default is None, raising AttributeError; now a None
        product simply yields nothing (matching the row filter below).
        """
        price = None
        if product is not None:
            product.name = self.get_product_name()
        for tr in self.document.getroot().cssselect('table#tab_resultat tr'):
            tds = self.parser.select(tr, 'td')
            if tds and len(tds) == 9 and product is not None:
                price = Price('%s.%s' % (product.id, tr.attrib['id']))
                price.product = product
                price.cost = Decimal(tds[5].text.replace(',', '.'))
                price.currency = u'€'
                shop = Shop(price.id)
                shop.name = unicode(tds[3].text.strip())
                shop.location = unicode(tds[2].text.strip())
                price.shop = shop
                price.set_empty_fields(NotAvailable)
                yield price
class ShopInfoPage(Page):
    """Pop-up page with details about a single shop."""

    def get_info(self):
        # Return the raw HTML of the first 'infos' block.
        root = self.document.getroot()
        return self.parser.tostring(self.parser.select(root, 'div.infos', 1))
|
# Copyright 2012 the rootpy developers
# distributed under the terms of the GNU General Public License
"""
This module supports monitoring TObject deletions.
.. warning::
This is not recommended for production
"""
from __future__ import absolute_import
from weakref import ref
import ctypes
from ctypes import CFUNCTYPE, py_object, addressof, c_int
from .. import compiled as C
from .. import QROOT, log
from ..utils.cinterface import callback, objectproxy_realaddress
__all__ = [
'monitor_deletion',
'monitor_object_deletion',
]
def monitor_deletion():
    """
    Function for checking for correct deletion of weakref-able objects.

    Example usage::

        monitor, is_alive = monitor_deletion()
        obj = set()
        monitor(obj, "obj")
        assert is_alive("obj")      # True because a ref to `obj` exists
        del obj
        assert not is_alive("obj")  # True because `obj` is deleted
    """
    live_refs = {}

    def forget(key):
        # Build the weakref callback that drops `key` once its target dies.
        def _on_death(_weakref):
            del live_refs[key]
        return _on_death

    def monitor(item, name):
        live_refs[name] = ref(item, forget(name))

    def is_alive(name):
        return name in live_refs

    return monitor, is_alive
cleanuplog = log["memory.cleanup"]
cleanuplog.show_stack()
# Add python to the include path
C.add_python_includepath()
# Compiled helper: a TObject added to gROOT's list of cleanups whose
# RecursiveRemove() forwards every deleted TObject to a python callback.
# (FIX: restored garbled characters in the embedded C++ — 'typedef' and
# 'called' were corrupted and would not compile.)
C.register_code("""
#ifndef __CINT__
#include <Python.h>
#endif
#include <TObject.h>
#include <TPython.h>
class RootpyObjectCleanup : public TObject {
public:
typedef void (*CleanupCallback)(PyObject*);
CleanupCallback _callback;
RootpyObjectCleanup(CleanupCallback callback) : _callback(callback) {}
virtual void RecursiveRemove(TObject* object) {
// When arriving here, object->ClassName() will _always_ be TObject
// since we're called by ~TObject, and virtual method calls don't
// work as expected from there.
PyObject* o = TPython::ObjectProxy_FromVoidPtr(object, "TObject");
PyGILState_STATE gstate;
gstate = PyGILState_Ensure();
PyObject *ptype, *pvalue, *ptraceback;
PyErr_Fetch(&ptype, &pvalue, &ptraceback);
_callback(o);
PyErr_Restore(ptype, pvalue, ptraceback);
PyGILState_Release(gstate);
}
ClassDef(RootpyObjectCleanup, 0);
};
ClassImp(RootpyObjectCleanup);
""", ["RootpyObjectCleanup"])
# Maps real TObject address -> (callback, extra args) for monitored objects.
MONITORED = {}
# C-callable hook invoked by RootpyObjectCleanup::RecursiveRemove for
# every TObject deletion ROOT reports.
@CFUNCTYPE(None, py_object)
def on_cleanup(tobject):
    # Note, when we arrive here, tobject is in its ~TObject, and hence the
    # subclass part of the object doesn't exist, in some sense. Hence why we
    # store information about the object on the MONITORED dict.
    addr = objectproxy_realaddress(tobject)
    if addr in MONITORED:
        # Entry layout: (user callback, *extra identifying args).
        args = MONITORED[addr]
        fn, args = args[0], args[1:]
        fn(tobject, *args)
        # One-shot: the object is gone, so drop its record.
        del MONITORED[addr]
# Guard so the ROOT-side cleanup hook is only installed once.
initialized = False
def init():
    """Install RootpyObjectCleanup into gROOT's list of cleanups (once)."""
    global initialized
    if initialized: return
    initialized = True
    cleanup = C.RootpyObjectCleanup(callback(on_cleanup))
    cleanups = QROOT.gROOT.GetListOfCleanups()
    cleanups.Add(cleanup)
    import atexit
    @atexit.register
    def exit():
        # Needed to ensure we don't get called after ROOT has gone away
        cleanups.RecursiveRemove(cleanup)
def monitor_object_deletion(o, fn=lambda *args: None):
    """Register *o* so that *fn* is invoked when its TObject is deleted."""
    # Lazily install the ROOT-side cleanup hook on first use.
    init()
    # Required so that GetListOfCleanups().RecursiveRemove() is called.
    o.SetBit(o.kMustCleanup)
    # Capture identifying info now: by deletion time only the TObject
    # part of `o` still exists (see on_cleanup).
    args = fn, type(o).__name__, o.GetName(), o.GetTitle(), repr(o)
    MONITORED[objectproxy_realaddress(o)] = args
|
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, request, abort, make_response
from futu_server_api import *
from db import save_update_token
from db import delete_tokens
from db import list_cards
import logging
import logging.config
import json
app = Flask(__name__)
# Logging is configured from an ini file; the root logger is used app-wide.
logging.config.fileConfig('./conf/log.ini')
no_db_logger = logging.getLogger()
def check_parameters(pjson):
    """Validate the common request fields and build an API client.

    Aborts the request with 400 when app_account/card/appid is missing.
    """
    required = ('app_account', 'card', 'appid')
    if not pjson or any(key not in pjson for key in required):
        no_db_logger.info('No Parameter')
        abort(400)
    return client(pjson['app_account'], pjson['card'], pjson['appid'])
def log_handler(myjson, mytitle):
    """Summarize an API response dict for the log.

    Returns the warning text, 'SUCCESS', or a failure line with the
    upstream error message and the request parameters.
    """
    if 'ClientWarning' in myjson:
        return '%s' % myjson['ClientWarning']
    if myjson['result_code'] == 0:
        return 'SUCCESS'
    return 'FAIL ,REASON OF FAILURE:%s ,PARAMETER:%s' % (myjson['error_msg'], request.json)
@app.route('/')
def hello_world():
    """Liveness endpoint; also marks server start in the log."""
    no_db_logger.info('server start#####')
    return 'hello 22222222 world!'
@app.route('/api/v1/tradetoken', methods=['POST'])
def trade_token():
    """Fetch a trade token and persist it on success."""
    trade_pswd = request.json['trade_pswd']
    account = request.json['app_account']
    card = request.json['card']
    appid = request.json['appid']
    api = check_parameters(request.json)
    message = api.get_trade_token(trade_pswd)
    # Upstream signals a missing access token with this exact error text.
    if message['result_code'] != 0 and message['error_msg'] == 'didn\'t get accesstoken':
        no_db_logger.info('didn\'t get accesstoken')
        return json.dumps({'result_code':2,'error_msg':'didn\'t get accesstoken'}, ensure_ascii=False)
    if message['result_code'] == 0:
        # Persist the fresh trade token for this account/card.
        save_update_token(account, appid, None, message['data']['trade_token'], card, True)
    return jsonify(**message)
@app.route('/api/v1/account', methods=['POST'])
def get_account_detail():
    """Return account details as JSON."""
    api = check_parameters(request.json)
    message = api.get_account_detail()
    no_db_logger.info(log_handler(message, '获取账户信息'))
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/cash', methods=['POST'])
def get_account_cash():
    """Return the account cash position as JSON."""
    api = check_parameters(request.json)
    message = api.get_account_cash()
    no_db_logger.info(log_handler(message, '获取账户现金'))
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/portfolio', methods=['POST'])
def get_account_portfolio():
    """Return the account holdings as JSON."""
    api = check_parameters(request.json)
    message = api.get_account_portfolio()
    no_db_logger.info(log_handler(message, '获取账户持仓'))
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_orders', methods=['POST'])
def get_list_orders():
    """Return the order list as JSON.

    NOTE(review): date_begin/date_end are read (and therefore required
    in the request body) but never forwarded to cc.get_list_orders();
    presumably the result should be filtered by this range — confirm
    against futu_server_api before changing.
    """
    date_begin = request.json['date_begin']
    date_end = request.json['date_end']
    cc = check_parameters(request.json)
    message = cc.get_list_orders()
    # FIX: restored the corrupted log-title literal to match the
    # '获取…列表' pattern used by the sibling handlers.
    logtext = log_handler(message, '获取订单列表')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_trades', methods=['POST'])
def get_list_trades():
    """Return the trade history as JSON."""
    api = check_parameters(request.json)
    message = api.get_list_trades()
    no_db_logger.info(log_handler(message, '获取交易列表'))
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/place_order', methods=['POST'])
def place_order():
    """Place an order; code/quantity/price/side/type are required."""
    code = request.json['code']
    quantity = request.json['quantity']
    price = request.json['price']
    side = request.json['side']
    ltype = request.json['type']
    api = check_parameters(request.json)
    message = api.place_order(code, quantity, price, side, ltype)
    no_db_logger.info(log_handler(message, '下单'))
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/change_order', methods=['POST'])
def change_order():
    """Modify quantity/price of an existing order."""
    order_id = request.json['order_id']
    quantity = request.json['quantity']
    price = request.json['price']
    api = check_parameters(request.json)
    message = api.change_order(order_id, quantity, price)
    no_db_logger.info(log_handler(message, '改单'))
    return json.dumps(message, ensure_ascii=False)
# NOTE(review): 'cancle' is a typo, but the URL and function name are a
# public interface — kept unchanged for backwards compatibility.
@app.route('/api/v1/cancle_order', methods=['POST'])
def cancle_order():
    """Cancel an order by order_id."""
    order_id = request.json['order_id']
    api = check_parameters(request.json)
    message = api.cancel_order(order_id)
    no_db_logger.info(log_handler(message, '撤单'))
    return json.dumps(message, ensure_ascii=False)
# NOTE(review): '/ap1/v1/...' looks like a typo for '/api/v1/...', but the
# path is a public interface — left unchanged.
@app.route('/ap1/v1/save_token', methods=['POST'])
def save_token():
    """Persist (or update) a market token for an account/card."""
    account = request.json['app_account']
    appid = request.json['appid']
    market = request.json['market']
    token = request.json['token']
    card = request.json['card']
    card_desc = request.json['text']
    if save_update_token(account, appid, market, token, card, False, card_desc) == 'success':
        no_db_logger.info('token save success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    no_db_logger.info('token save fail')
    return json.dumps({'result_code':1,'error_msg':'token保存失败'}, ensure_ascii=False)
@app.route('/api/v1/delete_token', methods=['POST'])
def delete_token():
    """Delete all stored tokens for the given account/appid pair."""
    appid = request.json['appid']
    account = request.json['app_account']
    if delete_tokens(account, appid) == 'success':
        no_db_logger.info('token delete success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    no_db_logger.info('token delete fail')
    return json.dumps({'result_code':1,'error_msg':'token删除失败'}, ensure_ascii=False)
@app.route('/api/v1/list_card', methods=['POST'])
def list_card():
    """List the cards bound to an app account."""
    payload = request.json
    appid = payload['appid']
    account = payload['app_account']
    cards = list_cards(account, appid)
    message = dict(cards=cards)
    # list_cards returning anything but a list signals a lookup failure.
    if isinstance(cards, list):
        no_db_logger.info('list cards success')
        return json.dumps({'result_code':0,'error_msg':'','data':message}, ensure_ascii=False)
    no_db_logger.info('list cards fail')
    return json.dumps({'result_code':1,'error_msg':'查询账户卡号失败'}, ensure_ascii=False)
if __name__ == '__main__':
    # Development entry point; run behind a proper WSGI server in production.
    app.run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Modulos
import sys
import pygame
from pygame.locals import *
# Constantes
venx = 640
veny = 4 | 48
# Clases
class Pieza(pygame.sprite.Sprite):  # 64x64 px tamaño
    """A 64x64 px board piece sprite; *tipo* selects the artwork."""

    # tipo -> sprite filename; replaces the previous 10-branch if/elif chain.
    _IMAGES = {
        0: "tablero.png",
        1: "laser.png",
        2: "diana.png",
        3: "diana_espejo.png",
        4: "espejo.png",
        5: "espejotraves.png",
        6: "tunel.png",
        7: "bloqueo.png",
        8: "bloqueo_g.png",
        9: "portal.png",
    }

    def __init__(self, tipo):
        pygame.sprite.Sprite.__init__(self)
        # Unknown piece types degrade gracefully to the empty board tile,
        # exactly as the original else-branch did.
        self.image = load_image(self._IMAGES.get(tipo, "tablero.png"), True)
        # NOTE(review): no self.rect is assigned; pygame sprites normally
        # need one for blitting — confirm callers set it externally.
# Funciones
def load_image(filename, transparent=False):
    """Load *filename* as a converted pygame surface.

    When *transparent* is true, the color of the top-left pixel becomes the
    transparency color key.

    Raises SystemExit if the image cannot be loaded.
    """
    try:
        image = pygame.image.load(filename)
    except pygame.error as message:
        # BUG FIX: the bare `raise SystemExit` discarded the pygame error
        # text; propagate it so the user can see why loading failed.
        raise SystemExit(message)
    image = image.convert()
    if transparent:
        color = image.get_at((0, 0))
        image.set_colorkey(color, RLEACCEL)
    return image
#------------------------------------------
def main():
    """Open the game window and run the event/render loop until quit."""
    screen = pygame.display.set_mode((venx, veny))
    pygame.display.set_caption("Laser Game")
    fondo = load_image('fondo.png')
    # NOTE(review): Bola is not defined in this module — presumably imported
    # elsewhere in the project; confirm.
    bola = Bola()
    while True:
        for evento in pygame.event.get():
            if evento.type == QUIT:
                sys.exit(0)
        screen.blit(fondo, (0, 0))
        screen.blit(bola.image, bola.rect)
        pygame.display.flip()
    return 0
if __name__ == '__main__':
    # pygame must be initialized before any display/image calls in main().
    pygame.init()
    main()
|
from Chip import OpCodeDefinitions
from Test | s.OpCodeTests.OpCodeTestBase import OpCodeTestBase
class TestNopOpCode(OpCodeTestBase):
    """Tests dispatch of the NOP opcode (implied addressing)."""

    def test_nop_implied_command_calls_nop_method(self):
        # Executing the implied NOP opcode must route to the chip's NOP
        # handler and do nothing else.
        self.assert_opcode_execution(OpCodeDefinitions.nop_implied_command, self.target.get_nop_command_executed)
|
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the | Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial po | rtions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on all service endpoints validate.
"""
import unittest
import boto.dynamodb2
class CertVerificationTest(unittest.TestCase):
    """Verify the SSL certificate of every DynamoDB v2 regional endpoint."""

    dynamodb2 = True
    ssl = True

    def test_certs(self):
        # One authenticated call per region; it fails if the endpoint's
        # certificate does not validate.
        for region in boto.dynamodb2.regions():
            connection = region.connect()
            connection.list_tables()
|
sholds((x for x in [200, 200, 900]), [300, 400], 900), [2, 2, -999])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 400]), [300, 400], 400), [2, 2, -999])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 350, 450, 550]), [300, 400], 550), [1, 2, -999])
# 4 detectors
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 900]), [300, 400, 500], 900), [2, 2, 2])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 200, 400]), [300, 400, 500], 400), [2, 2, -999])
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 350, 450, 550]), [300, 400, 500], 550), [1, 2, 3])
# No signal
self.assertEqual(self.proc._first_above_thresholds((x for x in [200, 250, 200, 2000]), [300, 400, 500], 250), [-999, -999, -999])
    def test__first_value_above_threshold(self):
        """_first_value_above_threshold returns (index, value) of the first
        sample above threshold, or (-999, 0) when no sample qualifies.

        The third argument appears to be a start offset that is added to the
        returned index (2 + 4 -> 6 below) — TODO confirm in process_events.
        """
        trace = [200, 200, 300, 200]
        self.assertEqual(self.proc._first_value_above_threshold(trace, 200), (0, 200))
        self.assertEqual(self.proc._first_value_above_threshold(trace, 250), (2, 300))
        self.assertEqual(self.proc._first_value_above_threshold(trace, 250, 4), (6, 300))
        self.assertEqual(self.proc._first_value_above_threshold(trace, 500), (-999, 0))
    def test__reconstruct_trigger(self):
        """Exercise _reconstruct_trigger for several trigger configurations.

        self.proc.trigger looks like (n_low, n_high, use_external, ...) and
        the method returns the sample index at which the trigger condition
        is first satisfied, or -999 — TODO confirm against process_events.
        """
        # No trigger condition configured -> never triggers.
        self.proc.trigger = (0, 0, False, 0)
        low_idx = [-999, -999, -999, -999]
        high_idx = [-999, -999, -999, -999]
        result = -999
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        self.proc.trigger = (0, 0, True, 0)
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        # Standard two detector trigger
        self.proc.trigger = (2, 0, False, 0)
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        high_idx = [-999, -999, 10, -999]
        low_idx = [-999, -999, 3, -999]
        result = -999
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        low_idx = [-999, 0, 3, 2]
        result = 2
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        low_idx = [0, 2, 4, -999]
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        low_idx = [0, 2, 3, -999]
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        low_idx = [0, 2, -999, -999]
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        low_idx = [-999, -999, 3, 6]
        result = 6
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        # Standard four detector trigger
        self.proc.trigger = (3, 2, True, 0)
        low_idx = [-999, -999, -999, -999]
        high_idx = [-999, -999, -999, -999]
        result = -999
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        # Trigger on low
        low_idx = [7, 4, 1, -999]
        high_idx = [-999, -999, -999, -999]
        result = 7
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        high_idx = [8, 5, -999, -999]
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        high_idx = [8, 9, 2, -999]
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        # Trigger on high
        high_idx = [-999, 5, 2, -999]
        result = 5
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        # Other triggers
        self.proc.trigger = (1, 2, False, 0)
        low_idx = [1, 3, 5, 7]
        high_idx = [2, 4, -999, -999]
        result = 5
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        self.proc.trigger = (3, 0, False, 0)
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        self.proc.trigger = (0, 2, False, 0)
        result = 4
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        self.proc.trigger = (0, 4, False, 0)
        result = -999
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
        self.proc.trigger = (1, 3, False, 0)
        self.assertEqual(self.proc._reconstruct_trigger(low_idx, high_idx), result)
class ProcessEventsFromSourceTests(ProcessEventsTests):
    """Re-run the ProcessEventsTests suite with the variant of the
    processor that reads from one file and writes to another."""

    def setUp(self):
        warnings.filterwarnings('ignore')
        # Source data is opened read-only; results go to a separate temp file.
        self.source_path = self.create_tempfile_from_testdata()
        self.source_data = tables.open_file(self.source_path, 'r')
        self.dest_path = self.create_tempfile_path()
        self.dest_data = tables.open_file(self.dest_path, 'a')
        self.proc = process_events.ProcessEventsFromSource(
            self.source_data, self.dest_data, DATA_GROUP, DATA_GROUP)

    def tearDown(self):
        warnings.resetwarnings()
        # Close and remove both temp files.
        self.source_data.close()
        os.remove(self.source_path)
        self.dest_data.close()
        os.remove(self.dest_path)

    def test_process_and_store_results(self):
        # Smoke test: processing must complete without raising.
        self.proc.process_and_store_results()
class ProcessEventsFromSourceWithTriggerOffsetTests(ProcessEventsFromSourceTests,
                                                    ProcessEventsWithTriggerOffsetTests):
    """Source/destination variant combined with the trigger-offset tests."""

    def setUp(self):
        warnings.filterwarnings('ignore')
        self.source_path = self.create_tempfile_from_testdata()
        self.source_data = tables.open_file(self.source_path, 'r')
        self.dest_path = self.create_tempfile_path()
        self.dest_data = tables.open_file(self.dest_path, 'a')
        # Same fixture as the parent class, but with the trigger-offset
        # processor under test.
        self.proc = process_events.ProcessEventsFromSourceWithTriggerOffset(
            self.source_data, self.dest_data, DATA_GROUP, DATA_GROUP)
class ProcessEventsFromSourceWithTriggerOffsetStationTests(ProcessEventsFromSourceTests,
                                                           ProcessEventsWithTriggerOffsetTests):
    """Trigger-offset tests with an explicit station number (501)."""

    def setUp(self):
        warnings.filterwarnings('ignore')
        self.source_path = self.create_tempfile_from_testdata()
        self.source_data = tables.open_file(self.source_path, 'r')
        self.dest_path = self.create_tempfile_path()
        self.dest_data = tables.open_file(self.dest_path, 'a')
        self.proc = process_events.ProcessEventsFromSourceWithTriggerOffset(
            self.source_data, self.dest_data, DATA_GROUP, DATA_GROUP,
            station=501)

    def test__reconstruct_time_from_traces_with_external(self):
        # Stub the station trigger; the final [0, 0, 0, 1] element appears to
        # mark an external trigger — TODO confirm in process_events.
        mock_trigger = Mock()
        mock_trigger.return_value = ([(process_events.ADC_LOW_THRESHOLD,
                                       process_events.ADC_HIGH_THRESHOLD)] * 4,
                                     [0, 0, 0, 1])
        self.proc.station.trigger = mock_trigger
        event = self.proc.source[10]
        times = self.proc._reconstruct_time_from_traces(event)
        self.assertEqual(times[0], 162.5)
        self.assertEqual(times[2], -999)
        self.assertEqual(times[4], -999)
class ProcessSinglesTests(unittest.TestCase):
    def setUp(self):
        warnings.filterwarnings('ignore')
        # Work on a private writable copy of the reference data, because
        # processing appends result tables.
        self.data_path = self.create_tempfile_from_testdata()
        self.data = tables.open_file(self.data_path, 'a')
        self.proc = process_events.ProcessSingles(self.data, DATA_GROUP,
                                                  progress=False)
    def tearDown(self):
        warnings.resetwarnings()
        # Close and delete the temporary data copy.
        self.data.close()
        os.remove(self.data_path)
    def test_process_and_store_results(self):
        """Processing must produce unique, sorted timestamps in 'singles'."""
        self.proc.process_and_store_results()
        # check for unique and sorted timestamps
        singles_table = self.data.get_node(DATA_GROUP, 'singles')
        ts = singles_table.col('timestamp')
        unique_ts = array(sorted(set(ts)))
        assert_array_equal(ts, unique_ts)
def create_tempfile_from_testdata(self):
tmp_path = self.create_tempfile_path()
data_path = self.get_testdata_path()
shutil.copyfile(data_path, tmp_path)
return tmp_path
def create_tempfile_path(self):
fd, path = tempfile.mkstemp('.h5')
|
# Base URL for fetching texts from the Sefaria API.
SEFARIA_API_NODE = "https://www.sefaria.org/api/texts/"
# Cache monitor wake-up period: 86400 s = 24 hours.
CACHE_MONITOR_LOOP_DELAY_IN_SECONDS = 86400
# Cached entries expire after 604800 s = 7 days.
CACHE_LIFETIME_SECONDS = 604800
# Accent color (hex) per Sefaria text category. Several categories
# intentionally share a color.
category_colors = {
    "Commentary": "#4871bf",
    "Tanakh": "#004e5f",
    "Midrash": "#5d956f",
    "Mishnah": "#5a99b7",
    "Talmud": "#ccb479",
    "Halakhah": "#802f3e",
    "Kabbalah": "#594176",
    "Philosophy": "#7f85a9",
    "Liturgy": "#ab4e66",
    "Tanaitic": "#00827f",
    "Parshanut": "#9ab8cb",
    "Chasidut": "#97b386",
    "Musar": "#7c406f",
    "Responsa": "#cb6158",
    "Apocrypha": "#c7a7b4",
    "Other": "#073570",
    "Quoting Commentary": "#cb6158",
    "Sheets": "#7c406f",
    "Community": "#7c406f",
    "Targum": "#7f85a9",
    "Modern Works": "#7c406f",
    "Modern Commentary": "#7c406f",
}
# Per-platform rendering parameters for generated text images.
# Dimensions and margins are in pixels; negative additional line spacing
# tightens the layout for English text.
platform_settings = {
    "twitter": {
        "font_size": 29,
        "additional_line_spacing_he": 5,
        "additional_line_spacing_en": -10,
        "image_width": 506,
        "image_height": 253,
        "margin": 20,
        "category_color_line_width": 7,
        "sefaria_branding": False,
        "branding_height": 0
    },
    "facebook": {
        "font_size": 70,
        "additional_line_spacing_he": 12,
        "additional_line_spacing_en": -20,
        "image_width": 1200,
        "image_height": 630,
        "margin": 40,
        "category_color_line_width": 15,
        "sefaria_branding": False,
        "branding_height": 0
    },
    "instagram": {
        "font_size": 70,
        "additional_line_spacing_he": 12,
        "additional_line_spacing_en": 0,
        "image_width": 1040,
        "image_height": 1040,
        "margin": 40,
        "category_color_line_width": 13,
        # Instagram images carry the Sefaria branding strip at the bottom.
        "sefaria_branding": True,
        "branding_height": 100
    }
}
|
#!/usr/bin/env python
# encoding: utf-8
"""
models.py
Created by Darcy Liu on 2012-03-03.
Copyright (c) 2012 Close To U. All rights reserved.
"""
from django.db import models
from django.contrib.auth.models import User
# class Setting(models.Model):
# sid = models.AutoField(primary_key=True)
# option = models.CharField(unique=True,max_length=128,verbose_name='Option')
# value = models.CharField(max_length=256,verbose_name='Value')
class Minisite(models.Model):
    """A small self-contained site composed of Page objects."""
    key = models.AutoField(primary_key=True)
    name = models.CharField(max_length=256,verbose_name='name')
    # Globally unique URL slug for the minisite.
    slug = models.CharField(unique=True,max_length=128,verbose_name='slug')
    meta = models.TextField(blank=True, verbose_name='meta')
    description = models.TextField(blank=True, verbose_name='description')
    author = models.ForeignKey(User,verbose_name='author')
    created = models.DateTimeField(auto_now_add=True,verbose_name='created')
    updated = models.DateTimeField(auto_now=True,verbose_name='updated')

    def __unicode__(self):
        # Python 2-style display name (legacy Django codebase).
        result = self.name
        return unicode(result)
class Page(models.Model):
    """A single page belonging to a Minisite."""
    key = models.AutoField(primary_key=True)
    name = models.CharField(max_length=256,verbose_name='name')
    slug = models.CharField(max_length=128,verbose_name='slug')
    # Rendering mode: insite pages are embedded, standlone [sic] pages are
    # served on their own.
    Mode_Choices = (
        ('0', 'insite'),
        ('1', 'standlone'),
    )
    # BUG FIX: verbose_name was 'format', copy-pasted from the format field
    # below; this field is the page mode. Display-only change, no DB impact.
    mode = models.CharField(verbose_name='mode',max_length=1,default=0,choices=Mode_Choices)
    #content-type
    mime = models.CharField(max_length=64,default='text/html;charset=utf-8',verbose_name='mime')
    # Source markup format of the raw text below.
    Format_Choices = (
        ('0', 'txt'),
        ('1', 'html'),
        ('2', 'markdown'),
        ('3', 'textile'),
    )
    format = models.CharField(verbose_name='format',max_length=1,default=0,choices=Format_Choices)
    text = models.TextField(blank=True, verbose_name='content')
    script = models.TextField(blank=True, verbose_name='script')
    style = models.TextField(blank=True, verbose_name='style')
    # Cached rendered HTML of `text`.
    text_html = models.TextField(blank=True, verbose_name='html')
    minisite = models.ForeignKey(Minisite,verbose_name='minisite')
    author = models.ForeignKey(User,verbose_name='author')
    created = models.DateTimeField(auto_now_add=True,verbose_name='created')
    updated = models.DateTimeField(auto_now=True,verbose_name='updated')

    def __unicode__(self):
        result = self.name
        return unicode(result)

    class Meta:
        # A slug only needs to be unique within its minisite.
        unique_together = (('slug', 'minisite'),)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from itertools import combinations
__all__ = [
'network_complement | '
]
def network_complement(network, cls=None):
    """Generate the complement network of a network.

    The complement of a graph G is the graph H on the same vertices whose
    edges are exactly those not present in G [1]_.

    Parameters
    ----------
    network : :class:`~compas.datastructures.Network`
        A network.
    cls : type, optional
        Class of the resulting network; defaults to ``type(network)``.

    Returns
    -------
    :class:`~compas.datastructures.Network`
        The complement network.

    References
    ----------
    .. [1] Wolfram MathWorld. *Graph complement*.
           Available at: http://mathworld.wolfram.com/GraphComplement.html.

    Examples
    --------
    >>> import compas
    >>> from compas.datastructures import Network
    >>> from compas.datastructures import network_complement
    >>> network = Network.from_obj(compas.get('lines.obj'))
    >>> complement = network_complement(network)
    >>> any(complement.has_edge(u, v, directed=False) for u, v in network.edges())
    False

    """
    if not cls:
        cls = type(network)
    nodes = [network.node_coordinates(node) for node in network.nodes()]
    edges = [
        pair
        for pair in combinations(network.nodes(), 2)
        if not network.has_edge(pair[0], pair[1], directed=False)
    ]
    return cls.from_nodes_and_edges(nodes, edges)
|
f, partNumber):
self.lastPartNumber = partNumber
if partNumber == 0:
return self.object
key = self.privatePartNumber(partNumber)
if key in self.object:
return self.object[key]
else:
return getattr(self.object, key)
class SequenceInspector(Inspector):
    """Inspector for sequences: parts are element indices plus attributes."""

    def initializePartsList(self):
        Inspector.initializePartsList(self)
        # One part per element index, appended after the inherited parts.
        for each in range(len(self.object)):
            self._partsList.append(each)

    def partNumber(self, partNumber):
        self.lastPartNumber = partNumber
        if partNumber == 0:
            # Part 0 is always the inspected object itself.
            return self.object
        index = self.privatePartNumber(partNumber)
        # Integer parts index into the sequence; others are attribute names.
        # (IntType: this is Python 2 code.)
        if type(index) == IntType:
            return self.object[index]
        else:
            return getattr(self.object, index)
class | SliceInspector(Inspector):
def namedParts(self):
return ['start', 'stop', 'step']
### Initialization
# Build the type -> Inspector-class dispatch map at import time.
initializeInspectorMap()
class InspectorWindow:
    def __init__(self, inspector):
        # Stack of inspectors: dive() pushes, pop() removes; top is current.
        self.inspectors = [inspector]
def topInspector(self):
return self.inspectors[len(self.inspectors) - 1]
    def selectedPart(self):
        # Delegate to the current inspector's selection.
        return self.topInspector().selectedPart()
def inspectedObject(self) | :
return self.topInspector().object
    def open(self):
        """Create the toplevel window, build the views, and show the data."""
        self.top= Toplevel()
        self.top.geometry('650x315')
        self.createViews()
        self.update()
    #Private - view construction
    def createViews(self):
        """Build the menus and the two-pane (parts list | value/command) layout."""
        self.createMenus()
        # Paned widget for dividing two halves
        self.framePane = Pmw.PanedWidget(self.top, orient = HORIZONTAL)
        self.createListWidget()
        self.createTextWidgets()
        self.framePane.pack(expand = 1, fill = BOTH)
    def setTitle(self):
        # Window title tracks the currently inspected object.
        self.top.title('Inspecting: ' + self.topInspector().title())
    def createListWidget(self):
        """Left pane: scrolled list of the object's parts, with mouse and
        cursor-key navigation bindings."""
        listFrame = self.framePane.add('list')
        listWidget = self.listWidget = Pmw.ScrolledListBox(
            listFrame, vscrollmode = 'static')
        listWidget.pack(side=LEFT, fill=BOTH, expand=1)
        # If you click in the list box, take focus so you can navigate
        # with the cursor keys
        listbox = listWidget.component('listbox')
        listbox.bind('<ButtonPress-1>',
                     lambda e, l = listbox: l.focus_set())
        listbox.bind('<ButtonRelease-1>', self.listSelectionChanged)
        listbox.bind('<Double-Button-1>', self.popOrDive)
        # NOTE(review): popupMenu is not defined in the visible code — confirm
        # it exists elsewhere in this class.
        listbox.bind('<ButtonPress-3>', self.popupMenu)
        # Up/Down move the selection; Left pops; Right/Return dive.
        listbox.bind('<KeyRelease-Up>', self.listSelectionChanged)
        listbox.bind('<KeyRelease-Down>', self.listSelectionChanged)
        listbox.bind('<KeyRelease-Left>', lambda e, s = self: s.pop())
        listbox.bind('<KeyRelease-Right>', lambda e, s = self: s.dive())
        listbox.bind('<Return>', self.popOrDive)
    def createTextWidgets(self):
        """Right pane: read-only value view above a python command box."""
        textWidgetsFrame = self.framePane.add('textWidgets')
        self.textPane = Pmw.PanedWidget(textWidgetsFrame, orient = VERTICAL)
        textFrame = self.textPane.add('text', size = 200)
        # Value display is disabled (read-only); it is enabled temporarily
        # when its contents are rewritten.
        self.textWidget = Pmw.ScrolledText(
            textFrame, vscrollmode = 'static', text_state = 'disabled')
        self.textWidget.pack(fill=BOTH, expand=1)
        commandFrame = self.textPane.add('command')
        self.commandWidget = Pmw.ScrolledText(
            commandFrame, vscrollmode = 'static')
        self.commandWidget.insert(1.0, '>>> ')
        self.commandWidget.pack(fill = BOTH, expand = 1)
        # Evaluate the command when Return is released.
        self.commandWidget.component('text').bind(
            '<KeyRelease-Return>', self.evalCommand)
        self.textPane.pack(expand = 1, fill = BOTH)
    def createMenus(self):
        """Menu bar with an Inspect menu (Pop/Dive/Inspect) and a Help menu."""
        self.menuBar = Menu(self.top)
        self.top.config(menu=self.menuBar)
        inspectMenu = Menu(self.menuBar)
        self.menuBar.add_cascade(label='Inspect', menu=inspectMenu)
        inspectMenu.add_command(label='Pop', command=self.pop)
        inspectMenu.add_command(label='Dive', command=self.dive)
        inspectMenu.add_command(label='Inspect', command=self.inspect)
        helpMenu = Menu(self.menuBar)
        self.menuBar.add_cascade(label='Help', menu=helpMenu)
        helpMenu.add_command(label='Instructions', command=self.showHelp)
    def fillList(self):
        """Repopulate the list box with the current inspector's part names."""
        self.listWidget.delete(0, END)
        for each in self.topInspector().partNames():
            self.listWidget.insert(END, each)
        self.listWidget.select_clear(0)
# Event Handling
def listSelectionChanged(self, event):
partNumber = self.selectedIndex()
if partNumber == None:
partNumber = 0
string = self.topInspector().stringForPartNumber(partNumber)
self.textWidget.component('text').configure(state = 'normal')
self.textWidget.delete('1.0', END)
self.textWidget.insert(END, string)
self.textWidget.component('text').configure(state = 'disabled')
    def popOrDive(self, event):
        """The list has been double-clicked. If the selection is 'self' then pop,
        otherwise dive into the selected part"""
        # Part 0 is always the object itself ('self').
        if self.selectedIndex() == 0:
            self.pop()
        else:
            self.dive()
    def evalCommand(self, event):
        """Eval text in commandWidget

        Evaluates the text after the last '>>> ' prompt, with 'this' bound to
        the selected part and 'object' to the inspected object, then inserts
        the repr of the result followed by a fresh prompt.
        """
        insertPt = self.commandWidget.index(INSERT)
        # Find the prompt that starts the current command line.
        commandLineStart = self.commandWidget.search(
            '>>> ', INSERT, backwards = 1)
        if commandLineStart:
            commandStart = self.commandWidget.index(
                commandLineStart + ' + 4 chars')
            command = self.commandWidget.get(commandStart,
                                             commandStart + ' lineend')
            if command:
                partDict = { 'this': self.selectedPart(),
                             'object': self.topInspector().object }
                # SECURITY: eval of arbitrary user-typed input. Acceptable
                # only because this is a local developer debugging tool.
                result = eval(command, partDict)
                self.commandWidget.insert(INSERT, repr(result) + '\n>>> ')
                self.commandWidget.see(INSERT)
# Menu Events
def inspect(self):
inspector = self.inspectorForSelectedPart()
if inspector == None:
return
InspectorWindow(inspector).open()
    def pop(self):
        """Discard the top inspector and return to its parent (never pops
        the last one)."""
        if len(self.inspectors) > 1:
            self.inspectors = self.inspectors[:-1]
            self.update()
def dive(self):
inspector = self.inspectorForSelectedPart()
if inspector == None:
return
self.inspectors.append(inspector)
self.update()
    def update(self):
        """Refresh title, parts list, and selection after any stack change."""
        self.setTitle()
        self.fillList()
        # What is active part in this inspector
        partNumber = self.topInspector().getLastPartNumber()
        self.listWidget.select_clear(0)
        self.listWidget.activate(partNumber)
        self.listWidget.select_set(partNumber)
        self.listSelectionChanged(None)
        # Make sure selected item is visible
        self.listWidget.see(partNumber)
        # Make sure left side of listbox visible
        self.listWidget.xview_moveto(0.0)
        # Grab focus in listbox
        self.listWidget.component('listbox').focus_set()
    def showHelp(self):
        """Open a small window describing the inspector's mouse/keyboard UI."""
        # NOTE(review): 'help' shadows the builtin; harmless in this scope.
        help = Toplevel(tkroot)
        help.title("Inspector Help")
        frame = Frame(help)
        frame.pack()
        text = Label(
            frame, justify = LEFT,
            text = "ListBox shows selected object's attributes\nDouble click or use right arrow on an instance variable to dive down.\nDouble click self or use left arrow to pop back up.\nUse up and down arrow keys to move from item to item in the current level.\n\nValue box (upper right) shows current value of selected item\n\nCommand box (lower right) is used to evaluate python commands\nLocal variables 'object' and 'this' are defined as the current object being inspected\nand the current attribute selected."
        )
        text.pack()
#Private
def selectedIndex(self):
indicies = map(int, self.listWidget.curselection())
if len(indicies) == 0:
return None
partNumber = indicies[0]
return partNumber
def inspectorForSelectedPart(self):
part |
#example
# Minimal demo: build the UI from example.kv and start the Kivy event loop.
from kivy.base import runTouchApp
from kivy.lang import Builder
# NOTE(review): Light_indicator and Button look unused here, but importing
# them registers the widget classes that example.kv references.
from kivy.garden.light_indicator import Light_indicator
from kivy.uix.button import Button
# LOAD KV UIX
runTouchApp(Builder.load_file('example.kv'))
|
e License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Event segmentation using a Hidden Markov Model
Given an ROI timeseries, this class uses an annealed fitting procedure to
segment the timeseries into events with stable activity patterns. After
learning the signature activity pattern of each event, the model can then be
applied to other datasets to identify a corresponding sequence of events.
Full details are available in:
Christopher Baldassano, Janice Chen, Asieh Zadbood,
Jonathan W Pillow, Uri Hasson, Kenneth A Norman
Discovering event structure in continuous narrative perception and memory
Neuron, Volume 95, Issue 3, 709 - 721.e5
https://doi.org/10.1016/j.neuron.2017.06.041
This class also extends the model described in the Neuron paper:
1) It allows transition matrices that are composed of multiple separate
chains of events rather than a single linear path. This allows a model to
contain patterns for multiple event sequences (e.g. narratives), and
fit probabilities along each of these chains on a new, unlabeled timeseries.
To use this option, pass in an event_chain vector labeling which events
belong to each chain, define event patterns using set_event_patterns(),
then fit to a new dataset with find_events.
2) To obtain better fits when the underlying event structure contains
events that vary substantially in length, the split_merge option allows
the fit() function to re-distribute events during fitting. The number of
merge/split proposals is controlled by split_merge_proposals, which
controls how thorough versus fast the fitting process is.
"""
# Authors: Chris Baldassano and Cătălin Iordan (Princeton University)
import numpy as np
from scipy import stats
import logging
import copy
from sklearn.base import BaseEstimator
from sklearn.utils.validation import check_is_fitted, check_array
from sklearn.exceptions import NotFittedError
import itertools
from . import _utils as utils # type: ignore
logger = logging.getLogger(__name__)
__all__ = [
"EventSegment",
]
class EventSegment(BaseEstimator):
"""Class for event segmentation of continuous fMRI data
Parameters
----------
n_events: int
Number of segments to learn
step_var: Callable[[int], float] : default 4 * (0.98 ** (step - 1))
The Gaussian variance to use during fitting, as a function of the
number of steps. Should decrease slowly over time.
n_iter: int, default: 500
Maximum number of steps to run during fitting
event_chains: ndarray with length = n_events
Array with unique value for each separate chain of events, each linked
in the order they appear in the array
split_merge: bool, default: False
Determines whether merge/split proposals are used during fitting with
fit(). This can improve fitting performance when events are highly
uneven in size, but requires additional time
split_merge_proposals: int, default: 1
Number of merges and splits to consider at each step. Computation time
scales as O(proposals^2) so this should usually be a small value
Attributes
----------
p_start, p_end: length n_events+1 ndarray
initial and final prior distributions over events
P: n_events+1 by n_events+1 ndarray
HMM transition matrix
ll_ : ndarray with length = number of training datasets
Log-likelihood for training datasets over the course of training
segments_: list of (time by event) ndarrays
Learned (soft) segmentation for training datasets
event_var_ : float
Gaussian variance at the end of learning
event_pat_ : voxel by event ndarray
Learned mean patterns for each event
"""
    def _default_var_schedule(step):
        # Annealing schedule: variance starts at 4 and decays geometrically
        # with the step number. (Plain function used as a default argument
        # below — deliberately not a method, so no `self`.)
        return 4 * (0.98 ** (step - 1))
    def __init__(self, n_events=2,
                 step_var=_default_var_schedule,
                 n_iter=500, event_chains=None,
                 split_merge=False, split_merge_proposals=1):
        self.n_events = n_events
        self.step_var = step_var
        self.n_iter = n_iter
        self.split_merge = split_merge
        self.split_merge_proposals = split_merge_proposals
        if event_chains is None:
            # Default: every event belongs to a single chain (chain id 0).
            self.event_chains = np.zeros(n_events)
        else:
            self.event_chains = event_chains
def _fit_validate(self, X):
"""Validate input to fit()
Validate data passed to fit(). Includes a transpose operation to
change the row/column order of X and z-scoring in time.
Parameters
----------
X: time by voxel ndarray, or a list of such ndarrays
fMRI data to be segmented
Returns
-------
X: list of voxel by time ndarrays
"""
if len(np | .unique(self.event_chains)) > 1:
raise RuntimeError("Cannot fit chains, use set_event_patterns")
# Copy X into a list and transpose
X = copy.deepcopy(X)
if type(X) is not list:
X = [X]
for i in range(len(X)):
X[i] = check_array(X[i])
X[i] = X[i].T
# Check that number of voxels is consistent across datasets
n_dim = X[0].shape[0]
for i in range(len(X)):
assert (X[i].shape[0] == n_dim)
# Double-check that data is z-scored in time
for i in range(len(X)):
X[i] = stats.zscore(X[i], axis=1, ddof=1)
return X
def fit(self, X, y=None):
"""Learn a segmentation on training data
Fits event patterns and a segmentation to training data. After
running this function, the learned event patterns can be used to
segment other datasets using find_events
Parameters
----------
X: time by voxel ndarray, or a list of such ndarrays
fMRI data to be segmented. If a list is given, then all datasets
are segmented simultaneously with the same event patterns
y: not used (added to comply with BaseEstimator definition)
Returns
-------
self: the EventSegment object
"""
X = self._fit_validate(X)
n_train = len(X)
n_dim = X[0].shape[0]
self.classes_ = np.arange(self.n_events)
# Initialize variables for fitting
log_gamma = []
for i in range(n_train):
log_gamma.append(np.zeros((X[i].shape[1], self.n_events)))
step = 1
best_ll = float("-inf")
self.ll_ = np.empty((0, n_train))
while step <= self.n_iter:
iteration_var = self.step_var(step)
# Based on the current segmentation, compute the mean pattern
# for each event
seg_prob = [np.exp(lg) / np.sum(np.exp(lg), axis=0)
for lg in log_gamma]
mean_pat = np.empty((n_train, n_dim, self.n_events))
for i in range(n_train):
mean_pat[i, :, :] = X[i].dot(seg_prob[i])
mean_pat = np.mean(mean_pat, axis=0)
# Based on the current mean patterns, compute the event
# segmentation
self.ll_ = np.append(self.ll_, np.empty((1, n_train)), axis=0)
for i in range(n_train):
logprob = self._logprob_obs(X[i], mean_pat, iteration_var)
log_gamma[i], self.ll_[-1, i] = self._forward_backward(logprob)
if step > 1 and self.split_merge:
curr_ll = np.mean(self.ll_[-1, :])
self.ll_[-1, :], log_gamma, mean_pat = \
self._split_merge(X, log_gamma, iteration_var, curr_ll)
# If log-likelihood has started decreasing, undo last step and stop
if |
"""bubble - re-emit a log record with superdomain
| bubble [field=host] [parts=3]
adds 'superhost' field
"""
import sys,splunk.Intersplunk
import re
# Dotted-quad IPv4 matcher; used below to decide whether to truncate a host
# from the front (IP) or the back (hostname).
ipregex = r"(?P<ip>((25[0-5]|2[0-4]\d|[01]\d\d|\d?\d)\.){3}(25[0-5]|2[0-4]\d|[01]\d\d|\d?\d))"
ip_rex = re.compile(ipregex)
def super_domain(host, output_parts):
    """Collapse *host* to a 'super' domain of *output_parts* labels.

    Hostnames keep their trailing labels (the domain part); IPv4 addresses
    instead drop their trailing octets (keeping the network part). Hosts
    with fewer labels than requested are returned unchanged.
    """
    labels = host.split(".")
    if output_parts > len(labels):
        return host
    if ip_rex.match(host):
        # IP address: keep the leading octets.
        return '.'.join(labels[:-output_parts])
    # Hostname: keep the trailing labels.
    return '.'.join(labels[-output_parts:])
def add_superhost(results, field, num_parts):
    """Yield results annotated with a 'superhost' field.

    Note: records that lack *field* are dropped entirely (not yielded),
    matching the original behavior.
    """
    for record in results:
        if field in record:
            record['superhost'] = super_domain(record[field], num_parts)
            yield record
# Splunk custom search command entry point: read the pipeline results,
# annotate them, and write them back.
try:
    keywords, options = splunk.Intersplunk.getKeywordsAndOptions()
    field = options.get('field', 'hostname')
    num_parts = int(options.get('parts', 2))
    results,dummyresults,settings = splunk.Intersplunk.getOrganizedResults()
    results = list(add_superhost(results, field, num_parts))
except:
    # Deliberate catch-all boundary: report any failure (including option
    # parse errors) to the search UI instead of killing the command.
    import traceback
    stack = traceback.format_exc()
    results = splunk.Intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
splunk.Intersplunk.outputResults( results )
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
import cPickle as pickle
except ImportError:
import pickle
import os.path
class FileCache(dict):
def __init__(self, filename):
self.filename = os.path.abspath(filename)
try:
self.update(pickle.load(open(self.filename)))
except:
pass
def __setitem__(self, key, value):
super(FileCache, self).__setitem__(key, value)
pickle.dump(self, open(self.filename, 'w'))
def set(self, key, value):
self.__setitem__( | key, value)
def get_stats(self):
pass
try:
import pylibmc as memcache
except ImportError:
import memcache
class Cache(object):
    """memcached-backed cache that degrades to a pickle-file cache when no
    memcached server responds."""

    def __init__(self, servers=None, default='.cache', **kwargs):
        client_args = () if servers is None else (servers,)
        self.cache = memcache.Client(*client_args, **kwargs)
        # No stats means no live memcached: fall back to the file cache.
        if not self.cache.get_stats():
            self.cache = FileCache(default)

    def __getitem__(self, key):
        return self.cache.get(key)

    def __setitem__(self, key, value):
        self.cache.set(key, value)

    def get(self, key):
        """Explicit accessor mirroring __getitem__."""
        return self.cache.get(key)

    def set(self, key, value):
        """Explicit mutator mirroring __setitem__."""
        self.cache.set(key, value)
|
_good_raw_ytilt = 0.0
# Supercall: stop current drag
return super(InkingMode, self).button_release_cb(tdw, event)
def motion_notify_cb(self, tdw, event):
current_layer = tdw.doc._layers.current
if not (tdw.is_sensitive and current_layer.get_paintable()):
return False
self._update_zone_and_target(tdw, event.x, event.y)
return super(InkingMode, self).motion_notify_cb(tdw, event)
    def _update_zone_and_target(self, tdw, x, y):
        """Update the zone and target node under a cursor position.

        Hit-tests the overlay buttons first, then the editable nodes
        (in reverse draw order), queues redraws for anything whose
        highlight state changed, and updates the override cursor.
        """
        new_zone = _EditZone.EMPTY_CANVAS
        if self.phase == _Phase.ADJUST and not self.in_drag:
            new_target_node_index = None
            # Test buttons for hits
            overlay = self._ensure_overlay_for_tdw(tdw)
            hit_dist = gui.style.FLOATING_BUTTON_RADIUS
            button_info = [
                (_EditZone.ACCEPT_BUTTON, overlay.accept_button_pos),
                (_EditZone.REJECT_BUTTON, overlay.reject_button_pos),
            ]
            for btn_zone, btn_pos in button_info:
                if btn_pos is None:
                    continue
                btn_x, btn_y = btn_pos
                d = math.hypot(btn_x - x, btn_y - y)
                if d <= hit_dist:
                    new_target_node_index = None
                    new_zone = btn_zone
                    break
            # Test nodes for a hit, in reverse draw order
            if new_zone == _EditZone.EMPTY_CANVAS:
                hit_dist = gui.style.DRAGGABLE_POINT_HANDLE_SIZE + 12
                new_target_node_index = None
                for i, node in reversed(list(enumerate(self.nodes))):
                    node_x, node_y = tdw.model_to_display(node.x, node.y)
                    d = math.hypot(node_x - x, node_y - y)
                    if d > hit_dist:
                        continue
                    new_target_node_index = i
                    new_zone = _EditZone.CONTROL_NODE
                    break
            # Draw changes to the prelit node: queue a redraw of both the
            # previously highlighted node and the newly highlighted one.
            if new_target_node_index != self.target_node_index:
                if self.target_node_index is not None:
                    self._queue_draw_node(tdw, self.target_node_index)
                self.target_node_index = new_target_node_index
                if self.target_node_index is not None:
                    self._queue_draw_node(tdw, self.target_node_index)
        # Update the zone, and assume any change implies a button state
        # change as well (for now...)
        if self.zone != new_zone:
            self.zone = new_zone
            self._queue_draw_buttons(tdw)
        # Update the "real" inactive cursor too:
        if not self.in_drag:
            cursor = None
            if self.phase == _Phase.ADJUST:
                if self.zone == _EditZone.CONTROL_NODE:
                    cursor = self._crosshair_cursor
                elif self.zone != _EditZone.EMPTY_CANVAS: # assume button
                    cursor = self._arrow_cursor
            if cursor is not self._current_override_cursor:
                tdw.set_override_cursor(cursor)
                self._current_override_cursor = cursor
## Redraws
def _queue_draw_buttons(self, tdw):
overlay = self._ensure_overlay_for_tdw(tdw)
overlay.update_button_positions()
positions = (
overlay.reject_button_pos,
overlay.accept_button_pos,
)
for pos in positions:
| if pos is None:
continue
r = gui.style.FLOATING_BUTTON_ICON_SIZE
r += ma | x(
gui.style.DROP_SHADOW_X_OFFSET,
gui.style.DROP_SHADOW_Y_OFFSET,
)
r += gui.style.DROP_SHADOW_BLUR
x, y = pos
tdw.queue_draw_area(x-r, y-r, 2*r+1, 2*r+1)
def _queue_draw_node(self, tdw, i):
node = self.nodes[i]
x, y = tdw.model_to_display(node.x, node.y)
x = math.floor(x)
y = math.floor(y)
size = math.ceil(gui.style.DRAGGABLE_POINT_HANDLE_SIZE * 2)
tdw.queue_draw_area(x-size, y-size, size*2+1, size*2+1)
def _queue_redraw_all_nodes(self, tdw):
for i in xrange(len(self.nodes)):
self._queue_draw_node(tdw, i)
    def _queue_redraw_curve(self, tdw):
        """Rebuild and redraw the whole brushwork curve asynchronously.

        Cancels any queued rendering work, then re-queues a rollback,
        a brushwork begin, and one draw task per spline segment.
        """
        model = tdw.doc
        # Discard pending tasks without executing them.
        self._stop_task_queue_runner(complete=False)
        if len(self.nodes) < 2:
            return
        self._queue_task(self.brushwork_rollback, model)
        self._queue_task(
            self.brushwork_begin, model,
            description=_("Inking"),
            abrupt=True,
        )
        # Shared interpolation state: absolute time of the last drawn point,
        # threaded through every segment task below.
        interp_state = {"t_abs": self.nodes[0].time}
        for p_1, p0, p1, p2 in gui.drawutils.spline_iter(self.nodes):
            self._queue_task(
                self._draw_curve_segment,
                model,
                p_1, p0, p1, p2,
                state=interp_state
            )
        self._start_task_queue_runner()
    def _draw_curve_segment(self, model, p_1, p0, p1, p2, state):
        """Draw the curve segment between the middle two points.

        Each point is an (x, y, pressure, xtilt, ytilt, time) tuple.
        *state* carries "t_abs", the absolute time of the last point
        drawn, shared across all segments of the same curve.
        """
        last_t_abs = state["t_abs"]
        # Slice count is driven by both the time span and the spatial
        # distance of the segment, capped at INTERPOLATION_MAX_SLICES.
        dtime_p0_p1_real = p1[-1] - p0[-1]
        steps_t = dtime_p0_p1_real / self.INTERPOLATION_MAX_SLICE_TIME
        dist_p1_p2 = math.hypot(p1[0]-p2[0], p1[1]-p2[1])
        steps_d = dist_p1_p2 / self.INTERPOLATION_MAX_SLICE_DISTANCE
        steps_max = float(self.INTERPOLATION_MAX_SLICES)
        steps = math.ceil(min(steps_max, max(steps_t, steps_d)))
        for i in xrange(int(steps) + 1):
            t = i / steps
            point = gui.drawutils.spline_4p(t, p_1, p0, p1, p2)
            x, y, pressure, xtilt, ytilt, t_abs = point
            # Clamp interpolated dynamics into their valid ranges.
            pressure = lib.helpers.clamp(pressure, 0.0, 1.0)
            xtilt = lib.helpers.clamp(xtilt, -1.0, 1.0)
            ytilt = lib.helpers.clamp(ytilt, -1.0, 1.0)
            # Keep time monotonic even if the spline overshoots.
            t_abs = max(last_t_abs, t_abs)
            dtime = t_abs - last_t_abs
            self.stroke_to(
                model, dtime, x, y, pressure, xtilt, ytilt,
                auto_split=False,
            )
            last_t_abs = t_abs
        state["t_abs"] = last_t_abs
def _queue_task(self, callback, *args, **kwargs):
"""Append a task to be done later in an idle cycle"""
self._task_queue.append((callback, args, kwargs))
def _start_task_queue_runner(self):
"""Begin processing the task queue, if not already going"""
if self._task_queue_runner_id is not None:
return
idler_id = GLib.idle_add(self._task_queue_runner_cb)
self._task_queue_runner_id = idler_id
def _stop_task_queue_runner(self, complete=True):
"""Halts processing of the task queue, and clears it"""
if self._task_queue_runner_id is None:
return
if complete:
for (callback, args, kwargs) in self._task_queue:
callback(*args, **kwargs)
self._task_queue.clear()
GLib.source_remove(self._task_queue_runner_id)
self._task_queue_runner_id = None
def _task_queue_runner_cb(self):
"""Idle runner callback for the task queue"""
try:
callback, args, kwargs = self._task_queue.popleft()
except IndexError: # queue empty
self._task_queue_runner_id = None
return False
else:
callback(*args, **kwargs)
return True
## Drag handling (both capture and adjust phases)
    def drag_start_cb(self, tdw, event):
        """Begin a drag: start a fresh stroke (capture) or a node drag (adjust).

        NOTE(review): the visible excerpt ends right after the ADJUST
        branch; any further handling lives beyond this chunk.
        """
        self._ensure_overlay_for_tdw(tdw)
        if self.phase == _Phase.CAPTURE:
            # Fresh capture: clear node and transient state, then seed the
            # node list with the point under the initial event.
            self._reset_nodes()
            self._reset_capture_data()
            self._reset_adjust_data()
            node = self._get_event_data(tdw, event)
            self.nodes.append(node)
            self._queue_draw_node(tdw, 0)
            self._last_node_evdata = (event.x, event.y, event.time)
            self._last_event_node = node
        elif self.phase == _Phase.ADJUST:
            if self.target_node_index is not None:
                # Remember where the dragged node started, for offsetting.
                node = self.nodes[self.target_node_index]
                self._dragged_node_start_pos = (node.x, node.y)
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import weakref
import IECore
import Gaffer
import GafferUI
def appendDefinitions( menuDefinition, prefix ) :
	"""Append the standard application menu items under *prefix*."""
	menuDefinition.append( prefix + "/About Gaffer...", { "command" : about } )
	menuDefinition.append( prefix + "/Preferences...", { "command" : preferences } )
	menuDefinition.append( prefix + "/Documentation...", { "command" : IECore.curry( GafferUI.showURL, os.path.expandvars( "$GAFFER_ROOT/doc/gaffer/html/index.html" ) ) } )
	menuDefinition.append( prefix + "/Quit", { "command" : quit, "shortCut" : "Ctrl+Q" } )
def quit( menu ) :
	"""Quit the application, confirming first if any scripts have unsaved changes."""
	scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
	application = scriptWindow.scriptNode().ancestor( Gaffer.ApplicationRoot )
	unsavedNames = []
	for script in application["scripts"].children() :
		if script["unsavedChanges"].getValue() :
			f = script["fileName"].getValue()
			f = f.rpartition( "/" )[2] if f else "untitled"
			unsavedNames.append( f )
	if unsavedNames :
		dialogue = GafferUI.ConfirmationDialogue(
			"Discard Unsaved Changes?",
			"The following files have unsaved changes : \n\n" +
			"\n".join( [ " - " + n for n in unsavedNames ] ) +
			"\n\nDo you want to discard the changes and quit?",
			confirmLabel = "Discard and Quit"
		)
		if not dialogue.waitForConfirmation( parentWindow=scriptWindow ) :
			return
	# Defer the actual removal of scripts till an idle event - removing all
	# the scripts will result in the removal of the window our menu item is
	# parented to, which would cause a crash as it's deleted away from over us.
	GafferUI.EventLoop.addIdleCallback( IECore.curry( __removeAllScripts, application ) )
def __removeAllScripts( application ) :
	"""Idle callback: close every open script, then deregister itself."""
	scripts = application["scripts"]
	for script in scripts.children() :
		scripts.removeChild( script )
	return False # remove idle callback
__aboutWindow = None

def about( menu ) :
	"""Show the About window, reusing a still-live instance when one exists."""
	global __aboutWindow
	window = __aboutWindow() if __aboutWindow is not None else None
	if not window :
		window = GafferUI.AboutWindow( Gaffer.About )
		# Weak reference only, so closing the window lets it die.
		__aboutWindow = weakref.ref( window )
	scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
	scriptWindow.addChildWindow( window )
	window.setVisible( True )
__preferencesWindows = weakref.WeakKeyDictionary()

def preferences( menu ) :
	"""Show the Preferences dialogue for this application, building it on demand.

	One dialogue is cached per application root, weakly in both
	directions so neither side keeps the other alive.
	"""
	scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
	application = scriptWindow.scriptNode().ancestor( Gaffer.ApplicationRoot )
	global __preferencesWindows
	windowRef = __preferencesWindows.get( application, None )
	window = windowRef() if windowRef is not None else None
	if not window :
		window = GafferUI.Dialogue( "Preferences" )
		closeButton = window._addButton( "Close" )
		window.__closeButtonConnection = closeButton.clickedSignal().connect( __closePreferences )
		saveButton = window._addButton( "Save" )
		window.__saveButtonConnection = saveButton.clickedSignal().connect( __savePreferences )
		nodeUI = GafferUI.NodeUI.create( application["preferences"] )
		window._setWidget( nodeUI )
		__preferencesWindows[application] = weakref.ref( window )
	scriptWindow.addChildWindow( window )
	window.setVisible( True )
def __closePreferences( button ) :
	"""Hide the Preferences dialogue without saving."""
	window = button.ancestor( type=GafferUI.Window )
	window.setVisible( False )
def __savePreferences( button ) :
	"""Persist the application preferences, then hide the dialogue."""
	scriptWindow = button.ancestor( GafferUI.ScriptWindow )
	application = scriptWindow.scriptNode().ancestor( Gaffer.ApplicationRoot )
	application.savePreferences()
	window = button.ancestor( type=GafferUI.Window )
	window.setVisible( False )
|
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
db = SQLAlchemy()
class BaseTable(db.Model):
    """Abstract base: adds created/updated audit timestamps to every table."""
    __abstract__ = True
    # Set on insert, refreshed on every UPDATE.
    updated = db.Column(db.DateTime, default=func.now(), onupdate=func.current_timestamp())
    # Set once on insert.
    created = db.Column(db.DateTime, default=func.now())
# Server -> Namespace -> Repository -> Branch -> Commit -> Deploy -> Log
class Server(BaseTable):
    """A hosting server; the root of the Server -> Namespace -> ... hierarchy."""
    __tablename__ = 'server'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    # One-to-many: namespaces hosted on this server.
    namespace = relationship("Namespace", order_by="Namespace.id", backref="server")
class Namespace(BaseTable):
    """A namespace on a server, owned by a user; groups repositories."""
    __tablename__ = 'namespace'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    server_id = db.Column(db.Integer, db.ForeignKey('server.id'))
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    # One-to-many: repositories belonging to this namespace.
    repository = relationship("Repository", order_by="Repository.id", backref="namespace")
class Repository(BaseTable):
    """A source repository within a namespace; owns branches."""
    __tablename__ = 'repository'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    namespace_id = db.Column(db.Integer, db.ForeignKey('namespace.id'))
    # Branches ordered most-recently-updated first.
    branch = relationship("Branch", order_by="Branch.updated.desc()", backref="repository")
class Branch(BaseTable):
    """A branch of a repository; owns its commits."""
    __tablename__ = 'branch'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    repository_id = db.Column(db.Integer, db.ForeignKey('repository.id'))
    # Commits ordered newest first.
    commit = relationship("Commit", order_by="Commit.created.desc()", backref="branch")
class Commit(BaseTable):
| __tablename__ = 'commit'
id = db.Column(db.Integer, primary_key=True)
sha = db.Column(db.String(40))
name = db.Column(db.String(255))
description = db.Column(db.String(1024))
status = db.Column(db.Enum('ERROR', 'WARNING', 'OK', 'UNKNOWN', 'RUNNING', name='commit_status_type'))
runtime = db.Column(db.Integer)
branch_id = db.Column(db.Integer, db.ForeignKey('branch.id'))
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
deploy = relationship("Deploy", order_by="Deploy.id", backref="commit")
class Deploy(BaseTable):
    """A deployment of a commit to some target URI."""
    __tablename__ = 'deploy'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    uri = db.Column(db.String(1024))
    status = db.Column(db.Enum('ERROR', 'WARNING', 'OK', 'UNKNOWN', 'RUNNING', name='deploy_status_type'))
    # NOTE(review): units not shown here (presumably seconds) — confirm.
    runtime = db.Column(db.Integer)
    commit_id = db.Column(db.Integer, db.ForeignKey('commit.id'))
    # Log lines emitted by this deploy, in insertion order.
    log = relationship("Log", order_by="Log.id", backref="deploy")
class Log(BaseTable):
    """A log entry produced by a deploy."""
    __tablename__ = 'log'
    id = db.Column(db.Integer, primary_key=True)
    data = db.Column(db.String(1024))
    status = db.Column(db.Enum('ERROR', 'WARNING', 'OK', 'UNKNOWN', name='log_status_type'))
    deploy_id = db.Column(db.Integer, db.ForeignKey('deploy.id'))
class User(BaseTable):
    """An account that owns namespaces and authors commits."""
    __tablename__ = 'user'
    id = db.Column(db.Integer, primary_key=True)
    first_name = db.Column(db.String(255))
    last_name = db.Column(db.String(255))
    email = db.Column(db.String(255))
    # NOTE(review): plain String column — any hashing happens outside this
    # model; confirm before assuming how it is stored.
    password = db.Column(db.String(255))
    commit = relationship("Commit", order_by="Commit.id", backref="user")
    namespace = relationship("Namespace", order_by="Namespace.id", backref="user")
|
"""
The MIT License (MIT)
Copyright (c) [2015-2018] [Andrew Annex]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from .spiceypy import *
from .utils import support_types
__author__ = 'AndrewAnnex'
# Default setting for error reporting so that programs don't just exit out!
# "return" mode makes errors set a flag instead of aborting the process,
# and routing the error device to "null" silences the default output.
erract("set", 10, "return")
errdev("set", 10, "null")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Julien Chaumont"
__copyright__ = "Copyright 2014, Julien Chaumont"
__licence__ = "MIT"
__version__ = "1.0.2"
__contact__ = "julienc91 [at] outlook.fr"
import flickrapi
import os, sys
import re
from config import *
class ShFlickr:
##
# Connexion to Flickr.
#
def __init__(self):
self.flickr = flickrapi.FlickrAPI(API_KEY, API_SECRET)
(token, frob) = self.flickr.get_token_part_one(perms='delete')
if not token:
raw_input("Press ENTER after you authorized this program")
self.flickr.get_token_part_two((token, frob))
##
# Get the list of files to synchronize with Flickr.
# @param folder Path to the main folder
# @return A tuple (photos_to_sync, photosets_to_create) where photos_to_sync
# is the list of files to synchronize for each subfolder, and
# photoset_ids is the list of albums with their respective id on Flickr,
# or None if the album does not exist yet.
#
def synclist(self, folder=PICTURE_FOLDER_PATH):
print "Getting the list of pictures to synchronize..."
subfolders = [lfile for lfile in os.listdir(unicode(folder))
if os.path.isdir(os.path.join(folder, lfile))
and re.match(SUBFOLDERS_REGEXP, lfile)]
photosets = self.flickr.photosets_getList(user_id=USER_ID)
photos_to_sync = {}
photoset_ids = {}
for subfolder in subfolders:
subfolder = subfolder.encode("UTF-8")
# Check if the album already exists on Flickr
photoset_id = None
for photoset in photosets.find('photosets').findall('photoset'):
photoset_title = photoset.find('title').text
if type(photoset_title) == unicode:
photoset_title = photoset_title.encode("UTF-8")
if photoset_title == subfolder:
photoset_id = str(photoset.attrib['id'])
break
photoset_ids[subfolder] = photoset_id
# Get the list of pictures to synchronize within this album
photos_to_sync[subfolder] = self.synclist_subfolder(os.path.join(folder, subfolder), photoset_id)
return photos_to_sync, photoset_ids
##
# Get the list of pictures to synchronize within an album.
# @param subfolder Complete path to the subfolder to synchronize
# @param photoset_id Id of the album on Flickr, or None of the album does not exist yet
# @return The list of the pictures to synchronize.
#
def synclist_subfolder(self, subfolder, photoset_id=None):
files = [lfile for lfile in os.listdir(unicode(subfolder))
if lfile.endswith(PICTURE_EXTENSIONS)]
files_to_sync = []
if photoset_id is not None:
# Find which file were not uploaded
photoset = list(self.flickr.walk_set(photoset_id))
for lfile in files:
lfile = lfile.encode("UTF-8")
found = False
for photo in photoset:
photo = photo.get('title')
if type(photo) == unicode:
photo = photo.encode("UTF-8")
if photo == lfile:
found = True
break
if not found:
files_to_sync.append(lfile)
else:
for lfile in files:
files_to_sync.append(lfile)
| return files_to_sync
##
# Performs the upl | oad.
# @param photos_to_sync A dictionary containing the list of
# pictures to upload for each subfolder.
# @param photoset_ids Dict of albums and their Flickr ids.
# @param folder Path to the main folder.
#
def upload(self, photos_to_sync, photosets={}, folder=PICTURE_FOLDER_PATH):
for subfolder in sorted(photos_to_sync):
count = 1
total = len(photos_to_sync[subfolder])
len_count = len(str(total))
consecutive_errors = 0
print "Album %s: %s photos to synchronize" % (subfolder, total)
for photo in sorted(photos_to_sync[subfolder]):
print "%-*s/%s\t %s" % (len_count, count, total, photo)
nb_errors = 0
done = False
while nb_errors < MAX_RETRIES and not done:
try:
path = os.path.join(folder, subfolder, photo).encode("UTF-8")
photo = photo.encode("UTF-8")
response = self.flickr.upload(filename=path,
title=photo,
is_public=VISIBLE_PUBLIC,
is_family=VISIBLE_FAMILY,
is_friend=VISIBLE_FRIEND)
except KeyboardInterrupt:
print "Exit by user request"
return
except:
nb_errors += 1
consecutive_errors += 1
if consecutive_errors >= MAX_CONSECUTIVE_ERRORS:
print "5 failed uploads in a row, aborting."
return
else:
print "Error, retrying upload (%s/%s)" % (nb_errors, MAX_RETRIES)
else:
photo_id = response.find('photoid').text
done = True
count += 1
consecutive_errors = 0
if photoset_ids[subfolder] is None:
print "Creating the remote album %s" % subfolder
response = self.flickr.photosets_create(title=subfolder,
primary_photo_id=photo_id)
photoset_ids[subfolder] = response.find('photoset').attrib['id']
else:
self.flickr.photosets_addPhoto(photoset_id=photoset_ids[subfolder],
photo_id=photo_id)
if nb_errors == 3:
print "%s failed to upload" % photo
if __name__ == "__main__":
    # Build the sync plan (local files missing remotely), then upload them.
    shflickr = ShFlickr()
    photos_to_sync, photoset_ids = shflickr.synclist()
    shflickr.upload(photos_to_sync, photoset_ids)
|
URL, 'SOCKET_URL': socket_url, 'login': 'true',
'project_name': project_name, 'latitude': latitude, 'longitude': longitude, 'zoom': zoom,
'default_hash': default_hash, 'min_zoom': min_zoom, 'max_zoom': max_zoom,
'project_logo': project_logo, 'project_icon': project_icon, 'project_home_page': project_home_page,
'enable_drawing': enable_drawing,
'bitly_registered_domain': bitly_registered_domain, 'bitly_username': bitly_username, 'bitly_api_key': bitly_api_key
}
if request.user.is_authenticated:
context['session'] = request.session._session_key
if request.user.is_authenticated() and request.user.social_auth.all().count() > 0:
context['picture'] = request.user.social_auth.all()[0].extra_data.get('picture')
if settings.SOCIAL_AUTH_GOOGLE_PLUS_KEY:
context['plus_scope'] = ' '.join(GooglePlusAuth.DEFAULT_SCOPE)
context['plus_id'] = settings.SOCIAL_AUTH_GOOGLE_PLUS_KEY
if settings.UNDER_MAINTENANCE_TEMPLATE:
return render_to_response('under_maintenance.html',
RequestContext(request, context))
return render_to_response(template, RequestContext(request, context))
def show_embedded_map(request, project=None, template='map.html'):
    """Render the embeddable map page for *project* (or the active project).

    Any failure while loading settings degrades to an unbranded page
    instead of erroring out.
    """
    try:
        if project:
            mp_settings = MarinePlannerSettings.objects.get(slug_name=project)
        else:
            mp_settings = MarinePlannerSettings.objects.get(active=True)
        project_name = mp_settings.project_name
        project_logo = mp_settings.project_logo
        try:
            # If the logo is not a valid absolute URL, fall back to
            # treating it as a MEDIA_URL-relative path.
            if project_logo:
                url_validator = URLValidator(verify_exists=False)
                url_validator(project_logo)
        except ValidationError, e:
            project_logo = os.path.join(settings.MEDIA_URL, project_logo)
        project_home_page = mp_settings.project_home_page
    except:
        # Best effort: render without project branding.
        project_name = project_logo = project_home_page = None
    context = {
        'MEDIA_URL': settings.MEDIA_URL,
        'project_name': project_name,
        'project_logo': project_logo,
        'project_home_page': project_home_page
    }
    #context = {'MEDIA_URL': settings.MEDIA_URL}
    return render_to_response(template, RequestContext(request, context))
def show_mobile_map(request, project=None, template='mobile-map.html'):
    """Render the mobile map page with the project's viewport settings.

    Mirrors show_embedded_map: any failure while loading settings
    degrades to a page with empty settings instead of erroring out.
    """
    try:
        if project:
            mp_settings = MarinePlannerSettings.objects.get(slug_name=project)
        else:
            mp_settings = MarinePlannerSettings.objects.get(active=True)
        project_name = mp_settings.project_name
        project_logo = mp_settings.project_logo
        project_home_page = mp_settings.project_home_page
        latitude = mp_settings.latitude
        longitude = mp_settings.longitude
        zoom = mp_settings.zoom
        min_zoom = mp_settings.min_zoom
        max_zoom = mp_settings.max_zoom
    except:
        # BUGFIX: latitude/longitude/zoom were left undefined on this path,
        # so building the context below raised NameError. Default them all.
        project_name = project_logo = project_home_page = None
        latitude = longitude = zoom = min_zoom = max_zoom = None
    context = {
        'MEDIA_URL': settings.MEDIA_URL,
        # 'project_name': project_name,
        # 'project_logo': project_logo,
        # 'project_home_page': project_home_page
        'latitude': latitude,
        'longitude': longitude,
        'zoom': zoom
    }
    return render_to_response(template, RequestContext(request, context))
def get_sharing_groups(request):
    """Return the requesting user's sharing groups as a JSON response.

    Member display names are "First Last" when both are non-blank,
    otherwise the username, sorted with the en_US collation.
    """
    from madrona.features import user_sharing_groups
    from functools import cmp_to_key
    import locale
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    json = []
    for group in user_sharing_groups(request.user):
        members = []
        for user in group.user_set.all():
            has_real_name = (user.first_name.replace(' ', '') != ''
                             and user.last_name.replace(' ', '') != '')
            if has_real_name:
                members.append(user.first_name + ' ' + user.last_name)
            else:
                members.append(user.username)
        json.append({
            'group_name': group.name,
            'group_slug': slugify(group.name)+'-sharing',
            'members': sorted(members, key=cmp_to_key(locale.strcoll))
        })
    return HttpResponse(dumps(json))
'''
'''
def share_bookmark(request):
    """Share a bookmark with the POSTed groups, replacing previous shares."""
    group_names = request.POST.getlist('groups[]')
    bookmark = get_feature_by_uid(request.POST['bookmark'])
    viewable, response = bookmark.is_viewable(request.user)
    if not viewable:
        return response
    # Remove previously shared-with groups before applying the new list.
    bookmark.share_with(None)
    groups = [Group.objects.get(name=name) for name in group_names]
    bookmark.share_with(groups, append=False)
    return HttpResponse("", status=200)
'''
'''
def get_bookmarks(request):
    """Sync client-side bookmarks with the server and return the merged list.

    New client bookmarks are persisted, then the user's own bookmarks
    plus those shared with them are serialized as JSON. Any unexpected
    failure yields a bare 304 response.
    """
    #sync the client-side bookmarks with the server side bookmarks
    #update the server-side bookmarks and return the new list
    try:
        bookmark_dict = parser.parse(request.POST.urlencode())['bookmarks']
    except:
        # No (or malformed) bookmark payload: nothing to import.
        bookmark_dict = {}
    try:
        #loop through the list from the client
        #if user, bm_name, and bm_state match then skip
        #otherwise, add to the db
        for key,bookmark in bookmark_dict.items():
            try:
                Bookmark.objects.get(user=request.user, name=bookmark['name'], url_hash=bookmark['hash'])
            except Bookmark.DoesNotExist:
                new_bookmark = Bookmark(user=request.user, name=bookmark['name'], url_hash=bookmark['hash'])
                new_bookmark.save()
            except:
                # Skip malformed client entries rather than failing the sync.
                continue
        #grab all bookmarks belonging to this user
        #serialize bookmarks into 'name', 'hash' objects and return simplejson dump
        content = []
        bookmark_list = Bookmark.objects.filter(user=request.user)
        for bookmark in bookmark_list:
            sharing_groups = [group.name for group in bookmark.sharing_groups.all()]
            content.append({
                'uid': bookmark.uid,
                'name': bookmark.name,
                'hash': bookmark.url_hash,
                'sharing_groups': sharing_groups
            })
        # Append bookmarks other users have shared with this one.
        shared_bookmarks = Bookmark.objects.shared_with_user(request.user)
        for bookmark in shared_bookmarks:
            if bookmark not in bookmark_list:
                username = bookmark.user.username
                actual_name = bookmark.user.first_name + ' ' + bookmark.user.last_name
                content.append({
                    'uid': bookmark.uid,
                    'name': bookmark.name,
                    'hash': bookmark.url_hash,
                    'shared': True,
                    'shared_by_username': username,
                    'shared_by_name': actual_name
                })
        return HttpResponse(simplejson.dumps(content), content_type="application/json", status=200)
    except:
        return HttpResponse(status=304)
def remove_bookmark(request):
    """Delete the bookmark named by POST['uid'] if the requester may view it."""
    try:
        bookmark = get_feature_by_uid(request.POST['uid'])
        viewable, response = bookmark.is_viewable(request.user)
        if not viewable:
            return response
        bookmark.delete()
        return HttpResponse(status=200)
    except:
        # Mirrors the sibling views: any failure reports 304.
        return HttpResponse(status=304)
def add_bookmark(request):
try:
bookmark = Bookmark(user=request.user, |
# -*- coding: utf-8 -*-
# Author: Mikhail Polyanskiy
# Last modified: 2017-04-02
# Original data: Rakić et al. 1998, https://doi.org/10.1364/AO.37.005271
import numpy as np
import matplotlib.pyplot as plt
# Lorentz-Drude (LD) model parameters
ωp = 9.03 #eV
f0 = 0.760
Γ0 = 0.053 #eV
f1 = 0.024
Γ1 = 0.241 #eV
ω1 = 0.415 #eV
f2 = 0.010
Γ2 = 0.345 #eV
ω2 = 0.830 #eV
f3 = 0.071
Γ3 = 0.870 #eV
ω3 = 2.969 #eV
f4 = 0.601
Γ4 = 2.494 #eV
ω4 = 4.304 #eV
f5 = 4.384
Γ5 = 2.214 #eV
ω5 = 13.32 #eV
Ωp = f0**.5 * ωp #eV

def LD(ω): #ω: eV
    """Lorentz-Drude dielectric function: one Drude term plus five
    Lorentz oscillators, summed in the same order as the tabulated
    parameters above."""
    ε = 1 - Ωp**2/(ω*(ω + 1j*Γ0))
    oscillators = ((f1, Γ1, ω1), (f2, Γ2, ω2), (f3, Γ3, ω3),
                   (f4, Γ4, ω4), (f5, Γ5, ω5))
    for fj, Γj, ωj in oscillators:
        ε += fj*ωp**2 / ((ωj**2 - ω**2) - 1j*ω*Γj)
    return ε
ev_min=0.2
ev_max=5
npoints=200
# Log-spaced photon-energy grid, and wavelength in μm via λ = h*c/E
# (h in eV·fs and c in 1e8 m/s mantissas combine to 1.2398 eV·μm).
eV = np.logspace(np.log10(ev_min), np.log10(ev_max), npoints)
μm = 4.13566733e-1*2.99792458/eV
ε = LD(eV)
n = (ε**.5).real
k = (ε**.5).imag
#============================ DATA OUTPUT ==================================
# Write (wavelength, n, k) triplets in order of increasing wavelength
# (i.e. decreasing photon energy). `with` replaces the original unmanaged
# handle, which also shadowed the builtin name `file`.
with open('out.txt', 'w') as out:
    for i in range(npoints-1, -1, -1):
        out.write('\n {:.4e} {:.4e} {:.4e}'.format(μm[i],n[i],k[i]))
#=============================== PLOT =====================================
plt.rc('font', family='Arial', size='14')
# Figure 1: dielectric function vs photon energy (ε1 plotted negated, log-log).
plt.figure(1)
plt.plot(eV, -ε.real, label="-ε1")
plt.plot(eV, ε.imag, label="ε2")
plt.xlabel('Photon energy (eV)')
plt.ylabel('ε')
plt.xscale('log')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot n,k vs eV
plt.figure(2)
plt.plot(eV, n, label="n")
plt.plot(eV, k, label="k")
plt.xlabel('Photon energy (eV)')
plt.ylabel('n, k')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot n,k vs μm
plt.figure(3)
plt.plot(μm, n, label="n")
plt.plot(μm, k, label="k")
plt.xlabel('Wavelength (μm)')
plt.ylabel('n, k')
plt.xscale('log')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
r option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_overlay_global
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Configures anycast gateway MAC of the switch.
description:
- Configures anycast gateway MAC of the switch.
author: Gabriele Gerbino (@GGabriele)
notes:
- Default restores params default value
- Supported MAC address format are "E.E.E", "EE-EE-EE-EE-EE-EE",
"EE:EE:EE:EE:EE:EE" and "EEEE.EEEE.EEEE"
options:
anycast_gateway_mac:
description:
- Anycast gateway mac of the switch.
required: true
default: null
'''
EXAMPLES = '''
- nxos_overlay_global:
anycast_gateway_mac: "b.b.b"
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"asn": "65535", "router_id": "1.1.1.1", "vrf": "test"}
existing:
description: k/v pairs of existing BGP configuration
returned: verbose mode
type: dict
sample: {"asn": "65535", "bestpath_always_compare_med": false,
"bestpath_aspath_multipath_relax": false,
"bestpath_compare_neighborid": false,
"bestpath_compare_routerid": false,
"bestpath_cost_community_ignore": false,
"bestpath_med_confed": false,
"bestpath_med_missing_as_worst": false,
"bestpath_med_non_deterministic": false, "cluster_id": "",
"confederation_id": "", "confederation_peers": "",
"graceful_restart": true, "graceful_restart_helper": false,
"graceful_restart_timers_restart": "120",
"graceful_restart_timers_stalepath_time": "300", "local_as": "",
"log_neighbor_changes": false, "maxas_limit": "",
"neighbor_down_fib_accelerate": false, "reconnect_interval": "60",
"router_id": "11.11.11.11", "suppress_fib_pending": false,
"timer_bestpath_limit": "", "timer_bgp_hold": "180",
"timer_bgp_keepalive": "60", "vrf": "test"}
end_state:
description: k/v pairs of BGP configuration after module execution
returned: verbose mode
type: dict
sample: {"asn": "65535", "bestpath_always_compare_med": false,
"bestpath_aspath_multipath_relax": false,
"bestpath_compare_neighborid": false,
"bestpath_compare_routerid": false,
"bestpath_cost_community_ignore": false,
"bestpath_med_confed": false,
"bestpath_med_missing_as_worst": false,
"bestpath_med_non_deterministic": false, "cluster_id": "",
"confederation_id": "", "confederation_peers": "",
"graceful_restart": true, "graceful_restart_helper": false,
"graceful_restart_timers_restart": "120",
"graceful_restart_timers_stalepath_time": "300", "local_as": "",
"log_neighbor_changes": false, "maxas_limit": "",
"neighbor_down_fib_accelerate": false, "reconnect_interval": "60",
"router_id": "1.1.1.1", "suppress_fib_pending": false,
"timer_bestpath_limit": "", "timer_bgp_hold": "180",
"timer_bgp_keepalive": "60", "vrf": "test"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["router bgp 65535", "vrf test", "router-id 1.1.1.1"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import get_config, load_config
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
# Maps this module's argument names to the NX-OS CLI command prefixes that
# configure them; used both for parsing the running config and building commands.
PARAM_TO_COMMAND_KEYMAP = {
    'anycast_gateway_mac': 'fabric forwarding anycast-gateway-mac',
}
def invoke(name, *args, **kwargs):
    """Call the module-level function called *name* with the given arguments.

    Returns the function's result, or None when no such global exists.
    """
    target = globals().get(name)
    if target is None:
        return None
    return target(*args, **kwargs)
def get_value(arg, config, module):
    """Extract the configured value for *arg* from the running-config text.

    Returns '' when the corresponding command is absent from *config*.
    """
    prefix = PARAM_TO_COMMAND_KEYMAP[arg]
    if prefix not in config:
        return ''
    pattern = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(prefix), re.M)
    return pattern.search(config).group('value')
def get_existing(module, args):
    """Return {arg: current device value} for every name in *args*."""
    config = str(get_config(module))
    return dict((arg, get_value(arg, config, module)) for arg in args)
def apply_key_map(key_map, table):
    """Return a copy of *table* with its keys renamed through *key_map*.

    Entries whose key has no (truthy) mapping are dropped; values are kept
    as-is (the original's if/else stored the value in both branches).
    """
    return dict((key_map[old_key], old_value)
                for old_key, old_value in table.items()
                if key_map.get(old_key))
def get_commands(module, existing, proposed, candidate):
    """Diff proposed against existing config and queue CLI lines on *candidate*."""
    commands = []
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
    for key, value in proposed_commands.items():
        if value == 'default':
            # 'default' requests removal of whatever is currently configured.
            existing_value = existing_commands.get(key)
            if existing_value:
                commands.append('no {0} {1}'.format(key, existing_value))
        else:
            if 'anycast-gateway-mac' in key:
                value = normalize_mac(value, module)
            commands.append('{0} {1}'.format(key, value))
    if commands:
        candidate.add(commands, parents=[])
def normalize_mac(proposed_mac, module):
    """Validate *proposed_mac* and return it in NX-OS dotted form (XXXX.XXXX.XXXX).

    Accepts 'aa-bb-cc-dd-ee-ff', 'aa:bb:cc:dd:ee:ff' or 'aabb.ccdd.eeff'
    style input. On malformed input, calls module.fail_json (which raises).
    """
    try:
        if '-' in proposed_mac:
            splitted_mac = proposed_mac.split('-')
            if len(splitted_mac) != 6:
                raise ValueError
            for octect in splitted_mac:
                if len(octect) != 2:
                    raise ValueError
        elif '.' in proposed_mac:
            splitted_mac = []
            splitted_dot_mac = proposed_mac.split('.')
            if len(splitted_dot_mac) != 3:
                raise ValueError
            for octect in splitted_dot_mac:
                if len(octect) > 4:
                    raise ValueError
                # BUG FIX: pad every dotted group to exactly 4 hex digits.
                # The old code used octect.zfill(padding + 1) where
                # padding = 4 - len(octect), which left 2- and 3-digit groups
                # under-padded and produced misaligned results.
                splitted_mac.append(octect.zfill(4))
        elif ':' in proposed_mac:
            splitted_mac = proposed_mac.split(':')
            if len(splitted_mac) != 6:
                raise ValueError
            for octect in splitted_mac:
                if len(octect) != 2:
                    raise ValueError
        else:
            raise ValueError
    except ValueError:
        module.fail_json(msg='Invalid MAC address format',
                         proposed_mac=proposed_mac)
    joined_mac = ''.join(splitted_mac)
    mac = [joined_mac[i:i + 4] for i in range(0, len(joined_mac), 4)]
    return '.'.join(mac).upper()
def main():
argument_spec = dict(
anycast_gateway_mac=dict(required=True, type='str'),
m_facts=dict(required=False, default=False, type='bool'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule( | argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
args = [
'anycast_gateway_mac'
]
existing = invoke('get_existing', module, args)
end_state = existing
proposed = dict((k, v) for k, v in module.params.items()
if v is not None and k in args)
result = {}
candidate = CustomNetworkConfig(indent=3)
inv |
#!/usr/bin/env python3
#
import re
import evdev
import subprocess
import time
import argparse
def process_test_line(line, controls):
    """Parse one '//*' test directive into (operation, ev_type, code, value).

    'receive syn' yields ('receive', 0, 0, 0); 'receive axis/button' reads a
    raw type/code/value; 'send <name> <value>' resolves <name> via *controls*.
    Returns None (with a warning) for an unknown control name.
    """
    fields = line.strip().split()
    operation = fields[1].lower()
    if operation == 'receive':
        target = fields[2].lower()
        if target == 'syn':
            return (operation, 0, 0, 0)
        if target == 'axis':
            ctrl_type = evdev.ecodes.EV_ABS
        else:
            ctrl_type = evdev.ecodes.EV_KEY
        return (operation, ctrl_type, int(fields[3]), int(fields[4]))
    control_str = fields[2]
    if control_str not in controls:
        print('Warning: Control {0} unknown.'.format(control_str))
        print(line)
        return None
    (ctrl_type, control) = controls[control_str]
    return (operation, ctrl_type, control, int(fields[3]))
def read_config(fname):
    """Parse an evdevshift config file.

    Recognizes the 'device "<name>"' line, 'button/axis NAME = CODE'
    control definitions and embedded '//*' test directives. Returns a dict
    with keys 'seq' (test sequence), 'devname' and 'controls'.
    """
    sequence = []
    devname = ''
    controls = {}
    test_re = re.compile('//\*(.*)$')
    dev_re = re.compile('^\s*(grab\s+)?device\s+"([^"]+)"')
    def_re = re.compile('^\s*(button|axis)\s+(\S+)\s*=\s*(\S+)')
    with open(fname) as cfg:
        for line in cfg:
            if test_re.match(line):
                directive = process_test_line(line, controls)
                if directive:
                    sequence.append(directive)
                continue
            dev_match = dev_re.match(line)
            if dev_match:
                devname = dev_match.group(2)
                continue
            def_match = def_re.match(line)
            if def_match:
                kind = evdev.ecodes.EV_ABS if def_match.group(1) == 'axis' else evdev.ecodes.EV_KEY
                controls[def_match.group(2)] = (kind, int(def_match.group(3)))
    return {'seq': sequence, 'devname': devname, 'controls': controls}
def make_cap(config):
    """Build an evdev capabilities dict from the parsed control definitions.

    Axes get a fixed AbsInfo (range 0-255, fuzz 15); lists are sorted so the
    created device's capabilities are deterministic.
    """
    axes = []
    buttons = []
    for ctrl_type, ctrl_id in config['controls'].values():
        if ctrl_type == evdev.ecodes.EV_KEY:
            buttons.append(ctrl_id)
        else:
            axes.append((ctrl_id, evdev.AbsInfo(0, 255, 0, 15, 0, 0)))
    axes.sort()
    buttons.sort()
    cap = {}
    if axes:
        cap[evdev.ecodes.EV_ABS] = axes
    if buttons:
        cap[evdev.ecodes.EV_KEY] = buttons
    return cap
def find_device(name):
    """Return the first input device whose name matches the regex *name*.

    Returns None when no device matches.
    """
    pattern = re.compile(name)
    for device_node in evdev.list_devices():
        candidate = evdev.InputDevice(device_node)
        if pattern.match(candidate.name):
            return candidate
# ---- driver: create a virtual device, run evdevshift against it, and
# ---- replay/check the test sequence embedded in the config file.
parser = argparse.ArgumentParser(description = 'Test evdevshift using specially prepared config.')
parser.add_argument('--config', type=str, dest='arg')
args = parser.parse_args()
arg = args.arg
# read the config and prepare the caps of the source device
config = read_config(arg)
cap = make_cap(config)
# create the source device
ui = evdev.UInput(cap, name=config['devname'], vendor = 0xf30, product = 0x110, version=0x110)
eds_templ = subprocess.Popen(['./evdevshift_dbg', '--device={0}'.format(ui.device.fn), '--template=regrestest.conf'])
# start the evdevshift and point it to the config
eds = subprocess.Popen(['./evdevshift_dbg', '--config={0}'.format(arg)])
# temporary, to make sure the evdevshift started and created the device...
time.sleep(1)
# find the newly created device
dev = find_device('evdevshift')
print(dev)
#send the test sequence and check the outputs
problems = 0
# BUG FIX: 'sent' was read before assignment when the first sequence entry
# is a 'receive' directive; initialize it here. The unused 'buffered' flag
# has been dropped.
sent = False
for ev in config['seq']:
    if ev[0] == 'send':
        print('=====================================')
        print('Sending (type {0} code {1} val {2})'.format(ev[1], ev[2], ev[3]))
        sent = True
        ui.write(ev[1], ev[2], ev[3])
    else:
        if sent:
            ui.syn()
            sent = False
        # give the stuff some time to pass the events
        # not nice, will need to rework to avoid races
        time.sleep(0.1)
        in_ev = dev.read_one()
        if in_ev:
            if (in_ev.type == ev[1]) and (in_ev.code == ev[2]) and (in_ev.value == ev[3]):
                print('Response OK (type {0} code {1} val {2})'.format(ev[1], ev[2], ev[3]))
            else:
                problems += 1
                print('Error: Expected (type {0} code {1} val {2})'.format(ev[1], ev[2], ev[3]))
                print('          Received (type {0} code {1} val {2})'.format(in_ev.type, in_ev.code, in_ev.value))
print('=====================================')
print('Expected error (Read wrong number of bytes (-1)!)')
ui.close()
time.sleep(1)
if problems == 0:
    print('\n\nNo problems encountered!')
else:
    print('\n\n{0} problems found.'.format(problems))
|
#!/usr/bin/env python
import os
import re
import sys
import socket
import httplib
import urlparse
from urllib import urlencode
from urllib2 import urlopen
from argparse import ArgumentParser
from collections import OrderedDict
def _get_discover_url(given_discover_url, update_type):
if update_type == '4':
return given_discover_url
elif update_type == '6':
parsed_url = urlparse.urlsplit(given_discover_url)
for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(parsed_url.netloc, parsed_url.port, socket.AF_INET6):
address, port, flow_info, scope_id = sockaddr
return urlparse.urlunsplit((parsed_url.scheme, '[' + address + ']', parsed_url.path, parsed_url.query, parsed_url.fragment))
raise ValueError('Cannot find an IPv6 address with the discovery URL {}'.format(given_discover_url))
else:
raise ValueError('Unknown update type {!r}'.format(update_type))
def _discover_own_address(discover_url):
    """Ask the discovery service for our public IP and return it as a string.

    Exits with status 1 on a non-200 response and status 2 when the body
    cannot be parsed. (Python 2 module: print statements, urllib2.urlopen.)
    """
    response = urlopen(discover_url)
    code, content = response.code, response.read()
    if code != httplib.OK:
        print >>sys.stderr, 'error: could not discover own address.'
        print >>sys.stderr, 'server returned {}, {}'.format(code, content)
        raise SystemExit(1)
    # Accept either dotted-quad IPv4 or hex/colon IPv6 in the service's
    # "Your IP address is : x.x.x.x" banner.
    parsed_response = re.search(r'Your IP address is\s*:\s*(?P<ip_address>(\d+\.\d+\.\d+\.\d+)|([0-9a-fA-F:]+))', content)
    if parsed_response is None:
        print >>sys.stderr, 'error: could not parse own IP properly'
        print >>sys.stderr, 'server returned:', content
        raise SystemExit(2)
    return parsed_response.groupdict()['ip_address']
def _send_update(hostname, password, update_url, ip_address):
    """POST the dynamic-DNS update and interpret the service's reply.

    Known reply keys: 'badauth' (exit 5), 'nochg', 'noipv6', 'good';
    anything else exits 4, and a non-200 HTTP response exits 3.
    """
    data = urlencode(OrderedDict(hostname=hostname, password=password, myip=ip_address))
    response = urlopen(update_url, data)
    content = response.read().strip()
    if response.code != httplib.OK:
        print >>sys.stderr, 'error: update failed. error is {}'.format(response.code)
        print >>sys.stderr, content
        raise SystemExit(3)
    # Reply format: "<key>[ <value>]", e.g. "good 1.2.3.4" or "nochg 1.2.3.4".
    parsed_content = re.match(r'^(?P<key>badauth|nochg|good|noipv6)(\s(?P<value>.*))?$', content)
    if parsed_content is None:
        print >>sys.stderr, 'error: unknown returned response: {}'.format(content)
        raise SystemExit(4)
    key, value = parsed_content.groupdict()['key'], parsed_content.groupdict()['value']
    if key == 'badauth':
        print >>sys.stderr, 'error: the domain name and password do not match'
        print >>sys.stderr, 'Make sure you are using a domain name that has been marked for dynamic updates,'
        print >>sys.stderr, 'and that the password used is the update key (not your account password).'
        raise SystemExit(5)
    elif key == 'nochg':
        print >>sys.stderr, 'no update required (IP is {})'.format(value)
    elif key == 'noipv6':
        print >>sys.stderr, 'cannot update ipv6 for this hostname'
    elif key == 'good':
        print >>sys.stderr, 'update complete: {}'.format(value)
def main():
    """Parse CLI arguments, auto-discover the IP when needed, push the update."""
    parser = ArgumentParser()
    parser.add_argument('hostname', help='The hostname (domain name) to be updated. Make sure this domain has been marked for dynamic DNS updating')
    parser.add_argument('password', help='Update key for this domain (as generated from the zone management interface)')
    parser.add_argument('-u', '--update-url', default='https://dyn.dns.he.net/nic/update',
                        help='URL to post the update to')
    parser.add_argument('-d', '--discover-url', default='http://checkip.dns.he.net',
                        help='Service for discovery of own address')
    parser.add_argument('-t', '--type', default='4',
                        help='Type of update: either "4" for IPv4 or "6" for IPv6')
    parser.add_argument('-i', '--ip-address', default=None,
                        help='The IP address to be updated for this domain. Leave blank to auto-discover')
    args = parser.parse_args()
    if args.ip_address is None:
        # No explicit address given: ask the discovery service for it.
        discover_url = _get_discover_url(args.discover_url, args.type)
        args.ip_address = _discover_own_address(discover_url)
    _send_update(args.hostname, args.password, args.update_url, args.ip_address)


if __name__ == '__main__':
    main()
|
# URLconf for Misago's private-threads feature (legacy Django `patterns` API).
# Each view is registered several times so page numbers and sort/show filters
# can all appear as clean URL path segments.
from django.conf.urls import patterns, include, url
from misago.threads.views.privatethreads import PrivateThreadsView

# thread list: plain, paginated, and every sort/show filter combination
urlpatterns = patterns('',
    url(r'^private-threads/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/sort-(?P<sort>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/sort-(?P<sort>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/show-(?P<show>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/show-(?P<show>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/sort-(?P<sort>[\w-]+)/show-(?P<show>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
    url(r'^private-threads/sort-(?P<sort>[\w-]+)/show-(?P<show>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
)
# thread view
from misago.threads.views.privatethreads import ThreadView
urlpatterns += patterns('',
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/$', ThreadView.as_view(), name='private_thread'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/(?P<page>\d+)/$', ThreadView.as_view(), name='private_thread'),
)
# goto views (jump to last/new/specific post within a thread)
from misago.threads.views.privatethreads import (GotoLastView, GotoNewView,
                                                 GotoPostView)
urlpatterns += patterns('',
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/last/$', GotoLastView.as_view(), name='private_thread_last'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/new/$', GotoNewView.as_view(), name='private_thread_new'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/post-(?P<post_id>\d+)/$', GotoPostView.as_view(), name='private_thread_post'),
)
# reported posts views
from misago.threads.views.privatethreads import ReportedPostsListView
urlpatterns += patterns('',
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/reported-posts/$', ReportedPostsListView.as_view(), name='private_thread_reported'),
)
# participants views
from misago.threads.views.privatethreads import (ThreadParticipantsView,
                                                 EditThreadParticipantsView,
                                                 AddThreadParticipantsView,
                                                 RemoveThreadParticipantView,
                                                 LeaveThreadView)
urlpatterns += patterns('',
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/participants/$', ThreadParticipantsView.as_view(), name='private_thread_participants'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/edit-participants/$', EditThreadParticipantsView.as_view(), name='private_thread_edit_participants'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/remove-participant/(?P<user_id>\d+)/$', RemoveThreadParticipantView.as_view(), name='private_thread_remove_participant'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/add-participants/$', AddThreadParticipantsView.as_view(), name='private_thread_add_participants'),
    url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/leave/$', LeaveThreadView.as_view(), name='private_thread_leave'),
)
# post views
from misago.threads.views.privatethreads import (QuotePostView, HidePostView,
                                                 UnhidePostView,
                                                 DeletePostView,
                                                 ReportPostView)
urlpatterns += patterns('',
    url(r'^private-post/(?P<post_id>\d+)/quote/$', QuotePostView.as_view(), name='quote_private_post'),
    url(r'^private-post/(?P<post_id>\d+)/unhide/$', UnhidePostView.as_view(), name='unhide_private_post'),
    url(r'^private-post/(?P<post_id>\d+)/hide/$', HidePostView.as_view(), name='hide_private_post'),
    url(r'^private-post/(?P<post_id>\d+)/delete/$', DeletePostView.as_view(), name='delete_private_post'),
    url(r'^private-post/(?P<post_id>\d+)/report/$', ReportPostView.as_view(), name='report_private_post'),
)
# events view
from misago.threads.views.privatethreads import EventsView
urlpatterns += patterns('',
    url(r'^edit-private-event/(?P<event_id>\d+)/$', EventsView.as_view(), name='edit_private_event'),
)
# posting views
from misago.threads.views.privatethreads import PostingView
urlpatterns += patterns('',
    url(r'^start-private-thread/$', PostingView.as_view(), name='start_private_thread'),
    url(r'^reply-private-thread/(?P<thread_id>\d+)/$', PostingView.as_view(), name='reply_private_thread'),
    # NOTE(review): 'edit-private_post' mixes dash and underscore, unlike the
    # other routes — confirm the URL path is intended before changing it.
    url(r'^edit-private_post/(?P<thread_id>\d+)/(?P<post_id>\d+)/edit/$', PostingView.as_view(), name='edit_private_post'),
)
|
# -*- coding: utf-8 -*-
import ast
import os
import requests
import models
from config import config, sqla
from gevent.pool import Pool
from helpers import random_str, down
# All celebrity media is stored under <photo.path>/celebrity/<subdir>.
base_path = config.get('photo', 'path')
base_path = os.path.join(base_path, 'celebrity')
# Shared cookie jar; 'bid' is regenerated with a random value before each
# request (presumably to look like distinct visitors — TODO confirm).
cookies = {
    'bid': ''
}
def create_down(str_urls, douban | _id, category):
urls = ast.literal_eval(str_urls or "[]")
path = os.path.join(base_path, category)
for url in urls:
filename = str(douban_id) + '_' + url.split('/')[-1].strip('?')
cookies['bid'] = random_str(11)
down(url, cookies, path, filename)
def create_requests_and_save_datas(douban_id):
    """Download the cover, thumbnail cover and photo sets for one celebrity."""
    session = sqla['session']
    cookies['bid'] = random_str(11)
    celebrity = session.query(models.Celebrity).filter_by(
        douban_id=douban_id
    ).one()
    cover_url = celebrity.cover
    thumbnail_cover_url = celebrity.thumbnail_cover
    photos_url = celebrity.photos
    thumbnail_photos_url = celebrity.thumbnail_photos
    down(
        cover_url,
        cookies,
        os.path.join(base_path, 'cover'),
        str(douban_id)+'_'+cover_url.split('/')[-1].strip('?')
    )
    # BUG FIX: the thumbnail's filename was previously built from cover_url,
    # so the thumbnail was saved under the full-size cover's basename.
    down(
        thumbnail_cover_url,
        cookies,
        os.path.join(base_path, 'thumbnail_cover'),
        str(douban_id)+'_'+thumbnail_cover_url.split('/')[-1].strip('?')
    )
    create_down(photos_url, douban_id, 'photos')
    create_down(thumbnail_photos_url, douban_id, 'thumbnail_photos')
def task(douban_ids, pool_number):
    """Fan the per-celebrity downloads out over a gevent pool and wait."""
    workers = Pool(pool_number)
    for douban_id in douban_ids:
        workers.spawn(create_requests_and_save_datas, douban_id=douban_id)
    workers.join()
|
self.sid is not None and self.sid[-33] == 'S'
def is_accessed(self):
    """Returns True if any value of this session has been accessed."""
    # _accessed is set to True by ensure_data_loaded().
    return self._accessed
def ensure_data_loaded(self):
    """Fetch the session data if it hasn't been retrieved yet."""
    self._accessed = True  # any load attempt counts as an access
    if self.data is None and self.sid:
        self.__retrieve_data()
def get_expiration(self):
    """Returns the timestamp at which this session will expire (0 if unknown).

    The sid is formatted as '<10-digit expiry><sep><32-char md5 hex>' (see
    __make_sid), so the expiry is everything before the last 33 characters.
    """
    try:
        return int(self.sid[:-33])
    except (TypeError, ValueError):
        # sid is None or malformed; the bare `except:` this replaces also
        # swallowed unrelated errors like KeyboardInterrupt.
        return 0
def __make_sid(self, expire_ts=None, ssl_only=False):
    """Build a fresh session ID: '<10-digit expiry><sep><random md5 hex>'.

    The separator is 'S' for SSL-only sessions and '_' otherwise. When no
    expiry is given, self.lifetime from now is used.
    """
    if expire_ts is None:
        expire_dt = datetime.datetime.now() + self.lifetime
        expire_ts = int(time.mktime(expire_dt.timetuple()))
    else:
        expire_ts = int(expire_ts)
    sep = 'S' if ssl_only else '_'
    # make a random ID (random.randrange() is 10x faster but less secure?)
    return ('%010d' % expire_ts) + sep + hashlib.md5(os.urandom(16)).hexdigest()
@staticmethod
def __encode_data(d):
    """Returns a "pickled+" encoding of d. d values of type db.Model are
    protobuf encoded before pickling to minimize CPU usage & data size."""
    # keep protobufs in their own dict so decoding knows which values to
    # re-inflate (they are just strings once encoded)
    protos = {}
    plain = {}
    for key, value in d.iteritems():
        if isinstance(value, db.Model):
            protos[key] = db.model_to_protobuf(value)
        else:
            plain[key] = value
    return pickle.dumps((protos, plain), 2)
@staticmethod
def __decode_data(pdump):
"""Returns a data dictionary after decoding it from "pickled+" form."""
try:
eP, eO = pickle.loads(pdump)
for k, v in eP.iteritems():
eO[k] = db.model_from_protobuf(v)
except Exception, e:
logging.warn("failed to decode session data: %s" % e)
eO = {}
return eO
def regenerate_id(self, expiration_ts=None):
    """Assigns the session a new session ID (data carries over). This
    should be called whenever a user authenticates to prevent session
    fixation attacks.
    ``expiration_ts`` - The UNIX timestamp the session will expire at. If
    omitted, the session expiration time will not be changed.
    """
    if self.sid or expiration_ts is not None:
        self.ensure_data_loaded()  # ensure we have the data before we delete it
        if expiration_ts is None:
            expiration_ts = self.get_expiration()  # keep the current expiry
        self.__set_sid(self.__make_sid(expiration_ts, self.is_ssl_only()))
        self.dirty = True  # ensure the data is written to the new session
def start(self, expiration_ts=None, ssl_only=False):
    """Starts a new session. expiration specifies when it will expire. If
    expiration is not specified, then self.lifetime will used to
    determine the expiration date.
    Normally this method does not need to be called directly - a session is
    automatically started when the first value is added to the session.
    ``expiration_ts`` - The UNIX timestamp the session will expire at. If
    omitted, the session will expire after the default ``lifetime`` has past
    (as specified in ``SessionMiddleware``).
    ``ssl_only`` - Whether to specify the "Secure" attribute on the cookie
    so that the client will ONLY transfer the cookie over a secure channel.
    """
    self.dirty = True  # force the (empty) data to be persisted
    self.data = {}
    self.__set_sid(self.__make_sid(expiration_ts, ssl_only), True)
def terminate(self, clear_data=True):
    """Deletes the session and its data, and expires the user's cookie."""
    if clear_data:
        self.__clear_data()
    self.sid = None
    self.data = {}
    self.dirty = False
    # '' forces Set-Cookie headers that expire existing cookies; None means
    # no cookie headers need to be sent at all.
    self.cookie_data = '' if self.cookie_keys else None
def __set_sid(self, sid, make_cookie=True):
    """Switch to *sid*, deleting any previously persisted session record.

    Only the session ID changes; self.data is left intact.
    """
    if self.sid:
        self.__clear_data()
    self.sid = sid
    self.db_key = db.Key.from_path(SessionModel.kind(), sid, namespace='')
    if make_cookie:
        # an empty string marks the cookie as needing to be (re)sent
        self.cookie_data = ''
def __clear_data(self):
"""Deletes this session from memcache and the datastore."""
| if self.sid:
memcache.delete(self.sid, namespace='') # not really needed; it'll go away on its own
try:
db.delete(self.db_key)
except:
pass # either it wasn't in the db (maybe cookie/memcache-only) or db is down => cron will expire it
def __retrieve_data(self):
    """Sets the data associated with this session after retrieving it from
    memcache or the datastore. Assumes self.sid is set. Checks for session
    expiration after getting the data."""
    pdump = memcache.get(self.sid, namespace='')
    if pdump is None:
        # memcache lost it, go to the datastore
        if self.no_datastore:
            # memcache-only mode: there is no fallback, the session is gone
            logging.info("can't find session data in memcache for sid=%s (using memcache only sessions)" % self.sid)
            self.terminate(False)  # we lost it; just kill the session
            return
        session_model_instance = db.get(self.db_key)
        if session_model_instance:
            pdump = session_model_instance.pdump
        else:
            logging.error("can't find session data in the datastore for sid=%s" % self.sid)
            self.terminate(False)  # we lost it; just kill the session
            return
    self.data = self.__decode_data(pdump)
def save(self, persist_even_if_using_cookie=False):
    """Saves the data associated with this session IF any changes have been
    made (specifically, if any mutator methods like __setitem__ or the like
    is called).
    If the data is small enough it will be sent back to the user in a cookie
    instead of using memcache and the datastore. If `persist_even_if_using_cookie`
    evaluates to True, memcache and the datastore will also be used. If the
    no_datastore option is set, then the datastore will never be used.
    Normally this method does not need to be called directly - a session is
    automatically saved at the end of the request if any changes were made.
    """
    if not self.sid:
        return  # no session is active
    if not self.dirty:
        return  # nothing has changed
    # remember the original dirty value: it may carry the sentinel
    # DIRTY_BUT_DONT_PERSIST_TO_DB, checked below
    dirty = self.dirty
    self.dirty = False  # saving, so it won't be dirty anymore
    # do the pickling ourselves b/c we need it for the datastore anyway
    pdump = self.__encode_data(self.data)
    # persist via cookies if it is reasonably small
    if len(pdump) * 4 / 3 <= self.cookie_only_thresh:  # 4/3 b/c base64 is ~33% bigger
        self.cookie_data = pdump
        if not persist_even_if_using_cookie:
            return
    elif self.cookie_keys:
        # latest data will only be in the backend, so expire data cookies we set
        self.cookie_data = ''
    memcache.set(self.sid, pdump, namespace='', time=self.get_expiration())  # may fail if memcache is down
    # persist the session to the datastore
    if dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB or self.no_datastore:
        return
    try:
        SessionModel(key_name=self.sid, pdump=pdump).put()
    except Exception, e:
        logging.warning("unable to persist session to datastore for sid=%s (%s)" % (self.sid, e))
# Users may interact with the session through a dictionary-like interface.
def clear |
import itchat, time, re
from itchat.content import *
import urllib2, urllib
import json
from watson_developer_cloud import ConversationV1
# Watson Conversation state; ['context'] threads the dialog between turns.
response={'context':{}}
@itchat.msg_register([TEXT])
def text_reply(msg):
global response
request_text = msg['Text'].encode('UTF-8')
conversation = | ConversationV1(
username='9c359fba-0692-4afa-afb1-bd5bf4d7e367',
password='5Id2zfapBV6e',
version='2017-04-21')
# replace with your own workspace_id
workspace_id = 'd3e50587-f36a-4bdf-bf3e-38c382e8d63a'
print "request ==>", request_text
try:
type(eval(response))
except:
print "first call"
response = conversation.message(workspace_id=workspace_id, message_ | input={
'text': request_text}, context=response['context'])
else:
print "continue call"
response = conversation.message(workspace_id=workspace_id, message_input={
'text': request_text}, context=response['context'])
if len( response['output']['text']) >0:
response_text = response['output']['text'][0]
else:
response_text = "No message"
itchat.send( response_text, msg['FromUserName'])
# Log in via QR code and start the (blocking) message loop.
itchat.auto_login()
itchat.run(debug=True)
|
# Copyright 2008-2013 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.errors import DataError
from robot.utils import plural_or_not
from robot.variables import is_list_var
class ArgumentValidator(object):
    """Validates positional/named arguments against a keyword's argument spec."""

    def __init__(self, argspec):
        self._argspec = argspec

    def validate(self, positional, named, dryrun=False):
        """Raise DataError unless the call matches the spec.

        In dryrun mode, calls whose positional arguments contain list
        variables are accepted as-is (they may expand to anything later).
        """
        if dryrun and any(is_list_var(arg) for arg in positional):
            return
        spec = self._argspec
        self._validate_no_multiple_values(positional, named, spec)
        self._validate_limits(positional, named, spec)
        self._validate_no_mandatory_missing(positional, named, spec)

    def _validate_limits(self, positional, named, spec):
        # named args that target declared positionals count toward the total
        total = len(positional) + self._named_positionals(named, spec)
        if not spec.minargs <= total <= spec.maxargs:
            self._raise_wrong_count(total, spec)

    def _named_positionals(self, named, spec):
        if not spec.supports_named:
            return 0
        return sum(1 for n in named if n in spec.positional)

    def _raise_wrong_count(self, count, spec):
        minend = plural_or_not(spec.minargs)
        if spec.minargs == spec.maxargs:
            expected = '%d argument%s' % (spec.minargs, minend)
        elif not spec.varargs:
            expected = '%d to %d arguments' % (spec.minargs, spec.maxargs)
        else:
            # varargs make the upper bound open-ended
            expected = 'at least %d argument%s' % (spec.minargs, minend)
        if spec.kwargs:
            expected = expected.replace('argument', 'non-keyword argument')
        raise DataError("%s '%s' expected %s, got %d."
                        % (spec.type, spec.name, expected, count))

    def _validate_no_multiple_values(self, positional, named, spec):
        # a positional slot filled twice (once positionally, once by name)
        for name in spec.positional[:len(positional)]:
            if name in named and spec.supports_named:
                raise DataError("%s '%s' got multiple values for argument '%s'."
                                % (spec.type, spec.name, name))

    def _validate_no_mandatory_missing(self, positional, named, spec):
        # every required slot not covered positionally must be given by name
        for name in spec.positional[len(positional):spec.minargs]:
            if name not in named:
                raise DataError("%s '%s' missing value for argument '%s'."
                                % (spec.type, spec.name, name))
|
# -*- coding: utf-8 -*-
import nltk
import csv
import random
import codecs
import re
from nltk.corpus import stopwords
# Spanish stopword list used to filter tokens during cleanup.
stopset = list(set(stopwords.words('spanish')))
# Per-candidate buckets of positively-annotated training tweets.
hil_tweets = []
trump_tweets = []
bernie_tweets = []
cruz_tweets = []
# entity name -> list of aliases; populated by buildHash()
classes = {}
def transform(temp):
    """Collapse a handful of word variants/abbreviations onto one canonical token."""
    replacements = {
        "imo": "opinion",
        "inches": "inch",
        "including": "include",
        "included": "include",
        "includes": "include",
        "issued": "issue",
        "issues": "issue",
        "ppl": "people",
        "prices": "price",
        "say": "says",
        "shocked": "shock",
        "shocker": "shock",
        "shocking": "shock",
    }
    # (collapsing "sooo..."-style elongations was deliberately disabled
    # in the original and stays disabled here)
    return replacements.get(temp, temp)
def getPureWord(word):
    """Lowercase *word*, drop URLs/punctuation/stopwords; '' if nothing remains."""
    lowered = word.lower()
    if lowered.startswith("http"):
        return ""
    letters_only = ''.join(ch for ch in lowered if ch.isalpha())
    if letters_only and letters_only not in stopset:
        return transform(letters_only)
    return ""
def purifyText(input):
    """Strip newlines and URLs, then keep only cleaned non-stopword tokens."""
    flattened = input.replace('\r', '').replace('\n', '')
    no_urls = re.sub(r'\w+:\/{2}[\d\w-]+(\.[\d\w-]+)*(?:(?:\/[^\s/]*))*', '', flattened)
    return " ".join(getPureWord(w) for w in no_urls.split()).strip()
def buildHash():
    """Populate the global entity -> alias table.

    Entities: Hillary, Bernie, Trump, Cruz, GOP, DEM. Aliases are matched
    case-insensitively by getEntities().
    """
    classes["trump"] = ["donald","trump","donaldtrump"]
    classes["cruz"] = ["tedcruz","cruz","ted"]
    classes["hillary"] = ["hillaryclinton","hillary","clinton"]
    classes["bernie"] = ["berniesanders","bernie","sanders","bern"]
    classes["gop"] = ["gop","gopdebate","republicans"]
    classes["dem"] = ["dem","demdebate","democrats","Democratic","democrata","democrat"]
def getEntities(line):
    """Return the list of known entities whose aliases occur in *line*."""
    lowered = line.lower()
    found = set()
    for entity, aliases in classes.items():
        for alias in aliases:
            if alias.lower() in lowered:
                found.add(entity)
                break  # one alias hit is enough for this entity
    return list(found)
def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
    """Wrap csv.reader, decoding every cell from UTF-8 (Python 2 helper)."""
    for row in csv.reader(utf8_data, dialect=dialect, **kwargs):
        yield [unicode(cell, 'utf-8') for cell in row]
# Process Tweet
def processTweet(tweet):
    """Normalize a raw tweet: lowercase, URLs -> 'URL', @user -> 'AT_USER',
    collapse whitespace, strip '#' off hashtags and trim surrounding quotes."""
    cleaned = tweet.lower()
    # Convert www.* or https?://* to URL
    cleaned = re.sub('((www\.[^\s]+)|(https?://[^\s]+))', 'URL', cleaned)
    # Convert @username to AT_USER
    cleaned = re.sub('@[^\s]+', 'AT_USER', cleaned)
    # Remove additional white spaces
    cleaned = re.sub('[\s]+', ' ', cleaned)
    # Replace #word with word
    cleaned = re.sub(r'#([^\s]+)', r'\1', cleaned)
    return cleaned.strip('\'"')
def tweet_word(words):
    """NLTK feature dict: {token: True} for each non-stopword token (py2 bytes)."""
    return dict((token.decode('utf-8'), True) for token in words.split()
                if token.decode('utf-8') not in stopset)
buildHash()
test_set = []
# Training files are split-command chunks: annotated.csvaa .. annotated.csvae.
for x in ['a', 'b', 'c', 'd', 'e']:
#for x in ['annotatedTrump2.csv']:
    with codecs.open('../python/Annotated4/annotated.csva' + x, 'rb') as csvfile:
        tweets = csv.reader(csvfile, delimiter=',', quotechar='\'')
        # column 12 holds the annotation label, column 13 the tweet text
        for tweet in tweets:
            if tweet[12] == 'berniePositive':
                bernie_tweets.append(purifyText(tweet[13]))
            elif tweet[12] == 'hillaryPositive':
                hil_tweets.append(purifyText(tweet[13]))
            elif tweet[12] == 'cruzPositive':
                cruz_tweets.append(purifyText(tweet[13]))
            elif tweet[12] == 'trumpPositive':
                trump_tweets.append(purifyText(tweet[13]))
            elif tweet[12] == 'nuetral':
                # note: label is spelled 'nuetral' in the data files
                test_set.append(tweet)
labeled_words = ([(word, 'hillary') for word in hil_tweets] + [(word, 'trump') for word in trump_tweets] + [(word, 'cruz') for word in cruz_tweets] + [(word, 'bernie') for word in bernie_tweets])
random.shuffle(labeled_words)
featuresets = [(tweet_word(n), classify) for (n, classify) in labeled_words]
train_set = featuresets
# Generating Test Set...
'''
for x in ['testTrump.csv']:
with codecs.open('../python/annotated2/' + x, 'rb') as csvfile:
tweets = csv.reader(csvfile, delimiter=',', quotechar='\'')
for tweet in tweets:
if tweet[7] == '0':
test_set.append(tweet)
'''
# Ref - http://www.nltk.org/api/nltk.classify.html
# ALGORITHMS = ['GIS', 'IIS', 'MEGAM', 'TADM']
algorithm = nltk.classify.MaxentClassifier.ALGORITHMS[1]
classifier = nltk.MaxentClassifier.train(train_set, algorithm, max_iter=3)
classifier.show_most_informative_features(10)
#print(nltk.classify.accuracy(classifier, test_set))
i = 1
# NOTE(review): 'f' is opened but never written to — the classification
# results only go to stdout; confirm whether canoutput.csv should be written.
with open("canoutput.csv", 'wb') as f:
    # classify at most 100 neutral tweets that mention a known candidate
    for tweet in test_set:
        op1 = purifyText(tweet[13])
        op = getEntities(op1)
        if "trump" in op or "bernie" in op or "hillary" in op or "cruz" in op:
            result = classifier.classify(tweet_word(op1))
            print tweet[13]
            print result
            #else:
            # print result + "Positive"
            i += 1
        if i > 100:
            break
|
# ===============================================================================
# Copyright 2016 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import os
import socket
from ftplib import FTP
import paramiko
from pychron.media_storage.storage import RemoteStorage
class FTPStorage(RemoteStorage):
    """Uploads media files to a remote host over plain FTP."""
    url_name = 'FTP'

    def put(self, src, dest):
        """Copy local file *src* to remote path *dest* on a fresh connection."""
        connection = self._get_client()
        self._put(connection, src, dest)
        self._close_client(connection)

    def _close_client(self, client):
        # End the FTP session politely (sends QUIT).
        client.quit()

    def _get_client(self):
        session = FTP(self.host)
        session.login(self.username, self.password)
        return session

    def _put(self, client, src, dest):
        _, extension = os.path.splitext(src)
        if extension in ('.jpg', '.png'):
            # Images are binary data; upload with a 1 KB block size.
            with open(src, 'rb') as rfile:
                client.storbinary('STOR {}'.format(dest), rfile, 1024)
        else:
            # Everything else is uploaded line-wise in text mode.
            with open(src, 'r') as rfile:
                client.storlines('STOR {}'.format(dest), rfile)
class SFTPStorage(FTPStorage):
    """FTPStorage variant that transfers files over SFTP via paramiko."""
    url_name = 'SFTP'

    def _get_client(self):
        ssh_client = paramiko.SSHClient()
        # Accept unknown host keys automatically.
        ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh_client.connect(self.host, username=self.username,
                               password=self.password, timeout=2)
        except (socket.timeout, paramiko.AuthenticationException):
            self.warning_dialog('Could not connect to server')
            return None
        return ssh_client.open_sftp()

    def _close_client(self, client):
        client.close()

    def _put(self, client, src, dest):
        # paramiko's SFTP client handles the transfer mode itself.
        client.put(src, dest)
# ============= EOF =============================================
|
ctypes.wintypes import BOOL
from ctypes.wintypes import LPCWSTR
# Load kernel32 once and declare ctypes prototypes for the Win32 calls
# used by the serial-port backend.
_stdcall_libraries = {}
_stdcall_libraries['kernel32'] = WinDLL('kernel32')
from ctypes.wintypes import DWORD
from ctypes.wintypes import WORD
from ctypes.wintypes import BYTE
# Sentinel handle returned by CreateFile on failure.
INVALID_HANDLE_VALUE = HANDLE(-1).value
class _SECURITY_ATTRIBUTES(Structure):
    pass
LPSECURITY_ATTRIBUTES = POINTER(_SECURITY_ATTRIBUTES)
# restype/argtypes below mirror the Win32 API signatures.
CreateEventW = _stdcall_libraries['kernel32'].CreateEventW
CreateEventW.restype = HANDLE
CreateEventW.argtypes = [LPSECURITY_ATTRIBUTES, BOOL, BOOL, LPCWSTR]
CreateEvent = CreateEventW # alias
CreateFileW = _stdcall_libraries['kernel32'].CreateFileW
CreateFileW.restype = HANDLE
CreateFileW.argtypes = [LPCWSTR, DWORD, DWORD, LPSECURITY_ATTRIBUTES, DWORD, DWORD, HANDLE]
CreateFile = CreateFileW # alias
# Forward declarations; their _fields_ are filled in further below.
class _OVERLAPPED(Structure):
    pass
OVERLAPPED = _OVERLAPPED
class _COMSTAT(Structure):
    pass
COMSTAT = _COMSTAT
class _DCB(Structure):
    pass
DCB = _DCB
class _COMMTIMEOUTS(Structure):
    pass
COMMTIMEOUTS = _COMMTIMEOUTS
GetLastError = _stdcall_libraries['kernel32'].GetLastError
GetLastError.restype = DWORD
GetLastError.argtypes = []
LPOVERLAPPED = POINTER(_OVERLAPPED)
LPDWORD = POINTER(DWORD)
GetOverlappedResult = _stdcall_libraries['kernel32'].GetOverlappedResult
GetOverlappedResult.restype = BOOL
GetOverlappedResult.argtypes = [HANDLE, LPOVERLAPPED, LPDWORD, BOOL]
ResetEvent = _stdcall_libraries['kernel32'].ResetEvent
ResetEvent.restype = BOOL
ResetEvent.argtypes = [HANDLE]
LPCVOID = c_void_p
WriteFile = _stdcall_libraries['kernel32'].WriteFile
WriteFile.restype = BOOL
WriteFile.argtypes = [HANDLE, LPCVOID, DWORD, LPDWORD, LPOVERLAPPED]
LPVOID = c_void_p
ReadFile = _stdcall_libraries['kernel32'].ReadFile
ReadFile.restype = BOOL
ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPOVERLAPPED]
CloseHandle = _stdcall_libraries['kernel32'].CloseHandle
CloseHandle.restype = BOOL
CloseHandle.argtypes = [HANDLE]
# Serial-comm specific APIs.
ClearCommBreak = _stdcall_libraries['kernel32'].ClearCommBreak
ClearCommBreak.restype = BOOL
ClearCommBreak.argtypes = [HANDLE]
LPCOMSTAT = POINTER(_COMSTAT)
ClearCommError = _stdcall_libraries['kernel32'].ClearCommError
ClearCommError.restype = BOOL
ClearCommError.argtypes = [HANDLE, LPDWORD, LPCOMSTAT]
SetupComm = _stdcall_libraries['kernel32'].SetupComm
SetupComm.restype = BOOL
SetupComm.argtypes = [HANDLE, DWORD, DWORD]
EscapeCommFunction = _stdcall_libraries['kernel32'].EscapeCommFunction
EscapeCommFunction.restype = BOOL
EscapeCommFunction.argtypes = [HANDLE, DWORD]
GetCommModemStatus = _stdcall_libraries['kernel32'].GetCommModemStatus
GetCommModemStatus.restype = BOOL
GetCommModemStatus.argtypes = [HANDLE, LPDWORD]
LPDCB = POINTER(_DCB)
GetCommState = _stdcall_libraries['kernel32'].GetCommState
GetCommState.restype = BOOL
GetCommState.argtypes = [HANDLE, LPDCB]
LPCOMMTIMEOUTS = POINTER(_COMMTIMEOUTS)
GetCommTimeouts = _stdcall_libraries['kernel32'].GetCommTimeouts
GetCommTimeouts.restype = BOOL
GetCommTimeouts.argtypes = [HANDLE, LPCOMMTIMEOUTS]
PurgeComm = _stdcall_libraries['kernel32'].PurgeComm
PurgeComm.restype = BOOL
PurgeComm.argtypes = [HANDLE, DWORD]
SetCommBreak = _stdcall_libraries['kernel32'].SetCommBreak
SetCommBreak.restype = BOOL
SetCommBreak.argtypes = [HANDLE]
SetCommMask = _stdcall_libraries['kernel32'].SetCommMask
SetCommMask.restype = BOOL
SetCommMask.argtypes = [HANDLE, DWORD]
SetCommState = _stdcall_libraries['kernel32'].SetCommState
SetCommState.restype = BOOL
SetCommState.argtypes = [HANDLE, LPDCB]
SetCommTimeouts = _stdcall_libraries['kernel32'].SetCommTimeouts
SetCommTimeouts.restype = BOOL
SetCommTimeouts.argtypes = [HANDLE, LPCOMMTIMEOUTS]
WaitForSingleObject = _stdcall_libraries['kernel32'].WaitForSingleObject
WaitForSingleObject.restype = DWORD
WaitForSingleObject.argtypes = [HANDLE, DWORD]
# Win32 serial-communication constants; values mirror the Windows SDK
# headers. Garbled identifiers restored (GENERIC_WRITE, EV_RX80FULL) and
# long-literal 'L' suffixes dropped: plain int literals auto-promote on
# Python 2 and are the only valid spelling on Python 3.
ONESTOPBIT = 0  # Variable c_int
TWOSTOPBITS = 2  # Variable c_int
ONE5STOPBITS = 1
NOPARITY = 0  # Variable c_int
ODDPARITY = 1  # Variable c_int
EVENPARITY = 2  # Variable c_int
MARKPARITY = 3
SPACEPARITY = 4
RTS_CONTROL_HANDSHAKE = 2  # Variable c_int
RTS_CONTROL_DISABLE = 0  # Variable c_int
RTS_CONTROL_ENABLE = 1  # Variable c_int
SETRTS = 3
CLRRTS = 4
DTR_CONTROL_HANDSHAKE = 2  # Variable c_int
DTR_CONTROL_DISABLE = 0  # Variable c_int
DTR_CONTROL_ENABLE = 1  # Variable c_int
SETDTR = 5
CLRDTR = 6
MS_DSR_ON = 32  # Variable c_ulong
EV_RING = 256  # Variable c_int
EV_PERR = 512  # Variable c_int
EV_ERR = 128  # Variable c_int
SETXOFF = 1  # Variable c_int
EV_RXCHAR = 1  # Variable c_int
GENERIC_WRITE = 1073741824  # Variable c_long
PURGE_TXCLEAR = 4  # Variable c_int
FILE_FLAG_OVERLAPPED = 1073741824  # Variable c_int
EV_DSR = 16  # Variable c_int
MAXDWORD = 4294967295  # Variable c_uint
EV_RLSD = 32  # Variable c_int
ERROR_IO_PENDING = 997  # Variable c_long
MS_CTS_ON = 16  # Variable c_ulong
EV_EVENT1 = 2048  # Variable c_int
EV_RX80FULL = 1024  # Variable c_int
PURGE_RXABORT = 2  # Variable c_int
FILE_ATTRIBUTE_NORMAL = 128  # Variable c_int
PURGE_TXABORT = 1  # Variable c_int
SETXON = 2  # Variable c_int
OPEN_EXISTING = 3  # Variable c_int
MS_RING_ON = 64  # Variable c_ulong
EV_TXEMPTY = 4  # Variable c_int
EV_RXFLAG = 2  # Variable c_int
MS_RLSD_ON = 128  # Variable c_ulong
GENERIC_READ = 2147483648  # Variable c_ulong
EV_EVENT2 = 4096  # Variable c_int
EV_CTS = 8  # Variable c_int
EV_BREAK = 64  # Variable c_int
PURGE_RXCLEAR = 8  # Variable c_int
ULONG_PTR = c_ulong
INFINITE = 0xFFFFFFFF
# Anonymous union/struct pair making up OVERLAPPED's offset fields.
# The DOLLAR-style names appear to come from an automatic header
# translation -- do not rename; other generated code may reference them.
class N11_OVERLAPPED4DOLLAR_48E(Union):
    pass
class N11_OVERLAPPED4DOLLAR_484DOLLAR_49E(Structure):
    pass
N11_OVERLAPPED4DOLLAR_484DOLLAR_49E._fields_ = [
    ('Offset', DWORD),
    ('OffsetHigh', DWORD),
]
PVOID = c_void_p
N11_OVERLAPPED4DOLLAR_48E._anonymous_ = ['_0']
N11_OVERLAPPED4DOLLAR_48E._fields_ = [
    ('_0', N11_OVERLAPPED4DOLLAR_484DOLLAR_49E),
    ('Pointer', PVOID),
]
# OVERLAPPED: asynchronous-I/O bookkeeping passed to ReadFile/WriteFile.
_OVERLAPPED._anonymous_ = ['_0']
_OVERLAPPED._fields_ = [
    ('Internal', ULONG_PTR),
    ('InternalHigh', ULONG_PTR),
    ('_0', N11_OVERLAPPED4DOLLAR_48E),
    ('hEvent', HANDLE),
]
_SECURITY_ATTRIBUTES._fields_ = [
    ('nLength', DWORD),
    ('lpSecurityDescriptor', LPVOID),
    ('bInheritHandle', BOOL),
]
# COMSTAT: status bits plus in/out queue counts (third tuple element is
# the bit width of the field).
_COMSTAT._fields_ = [
    ('fCtsHold', DWORD, 1),
    ('fDsrHold', DWORD, 1),
    ('fRlsdHold', DWORD, 1),
    ('fXoffHold', DWORD, 1),
    ('fXoffSent', DWORD, 1),
    ('fEof', DWORD, 1),
    ('fTxim', DWORD, 1),
    ('fReserved', DWORD, 25),
    ('cbInQue', DWORD),
    ('cbOutQue', DWORD),
]
# DCB: the serial device control block (baud rate, framing, flow control).
_DCB._fields_ = [
    ('DCBlength', DWORD),
    ('BaudRate', DWORD),
    ('fBinary', DWORD, 1),
    ('fParity', DWORD, 1),
    ('fOutxCtsFlow', DWORD, 1),
    ('fOutxDsrFlow', DWORD, 1),
    ('fDtrControl', DWORD, 2),
    ('fDsrSensitivity', DWORD, 1),
    ('fTXContinueOnXoff', DWORD, 1),
    ('fOutX', DWORD, 1),
    ('fInX', DWORD, 1),
    ('fErrorChar', DWORD, 1),
    ('fNull', DWORD, 1),
    ('fRtsControl', DWORD, 2),
    ('fAbortOnError', DWORD, 1),
    ('fDummy2', DWORD, 17),
    ('wReserved', WORD),
    ('XonLim', WORD),
    ('XoffLim', WORD),
    ('ByteSize', BYTE),
    ('Parity', BYTE),
    ('StopBits', BYTE),
    ('XonChar', c_char),
    ('XoffChar', c_char),
    ('ErrorChar', c_char),
    ('EofChar', c_char),
    ('EvtChar', c_char),
    ('wReserved1', WORD),
]
# COMMTIMEOUTS: read/write timeout configuration, all in milliseconds.
_COMMTIMEOUTS._fields_ = [
    ('ReadIntervalTimeout', DWORD),
    ('ReadTotalTimeoutMultiplier', DWORD),
    ('ReadTotalTimeoutConstant', DWORD),
    ('WriteTotalTimeoutMultiplier', DWORD),
    ('WriteTotalTimeoutConstant', DWORD),
]
__all__ = ['GetLastError', 'MS_CTS_ON', 'FILE_ATTRIBUTE_NORMAL',
'DTR_CONTROL_ENABLE', '_COMSTAT', 'MS_RLSD_ON',
'GetOverlappedResult', 'SETXON', 'PURGE_TXABORT',
'PurgeComm', 'N11_OVERLAPPED4DOLLAR_48E', 'EV_RING',
'ONESTOPBIT', 'SETXOFF', 'PURGE_RXABORT', 'GetCommState',
'RTS_CONTROL_ENABLE', '_DCB', 'CreateEvent',
'_COMMTIMEOUTS', '_SECURITY_ATTRIBUTES', 'EV_DSR',
'EV_PERR', 'EV_RXFLAG', 'OPEN_EXISTING', 'DCB',
'FILE_FLAG_OVERLAPPED', 'EV_CTS', 'SetupComm',
'LPOVERLAPPED', 'EV_TXEMPTY', 'ClearCommBreak',
'LPSECURITY_ATTRIBUTES', 'SetCommBreak', 'SetCommTimeouts',
'COMMTIMEOUTS', 'ODDPARITY', 'EV_RLSD',
'GetCo |
"""
Tests for the Gauges template tags and filters.
"""
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from analytical.templatetags.gauges import GaugesNode
from analytical.tests.utils import TagTestCase
from analytical.utils import AnalyticalException
@override_settings(GAUGES_SITE_ID='1234567890abcdef0123456789')
class GaugesTagTestCase(TagTestCase):
    """
    Tests for the ``gauges`` template tag.
    """

    def test_tag(self):
        # Rendering via the template tag produces the tracker snippet.
        rendered = self.render_tag('gauges', 'gauges')
        self.assertEqual("""
    <script type="text/javascript">
    var _gauges = _gauges || [];
    (function() {
        var t = document.createElement('script');
        t.type = 'text/javascript';
        t.async = true;
        t.id = 'gauges-tracker';
        t.setAttribute('data-site-id', '1234567890abcdef0123456789');
        t.src = '//secure.gaug.es/track.js';
        var s = document.getElementsByTagName('script')[0];
        s.parentNode.insertBefore(t, s);
    })();
    </script>
    """, rendered)

    def test_node(self):
        # Rendering the node directly yields the same snippet.
        rendered = GaugesNode().render(Context())
        self.assertEqual(
            """
    <script type="text/javascript">
    var _gauges = _gauges || [];
    (function() {
        var t = document.createElement('script');
        t.type = 'text/javascript';
        t.async = true;
        t.id = 'gauges-tracker';
        t.setAttribute('data-site-id', '1234567890abcdef0123456789');
        t.src = '//secure.gaug.es/track.js';
        var s = document.getElementsByTagName('script')[0];
        s.parentNode.insertBefore(t, s);
    })();
    </script>
    """, rendered)

    @override_settings(GAUGES_SITE_ID=None)
    def test_no_account_number(self):
        # A missing site id must be rejected at node-construction time.
        self.assertRaises(AnalyticalException, GaugesNode)

    @override_settings(GAUGES_SITE_ID='123abQ')
    def test_wrong_account_number(self):
        # A malformed site id must be rejected at node-construction time.
        self.assertRaises(AnalyticalException, GaugesNode)

    @override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
    def test_render_internal_ip(self):
        # Requests from internal IPs get the snippet commented out.
        http_request = HttpRequest()
        http_request.META['REMOTE_ADDR'] = '1.1.1.1'
        rendered = GaugesNode().render(Context({'request': http_request}))
        self.assertTrue(rendered.startswith(
            '<!-- Gauges disabled on internal IP address'), rendered)
        self.assertTrue(rendered.endswith('-->'), rendered)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
def get_ki2_list(parser):
    """Return the sorted paths of every KI2 file under the 2chkifu tree."""
    parser.add_argument('-p', '--path_2chkifu',
                        default='~/data/shogi/2chkifu/',
                        help='2chkifu.zipを展開したディレクトリ')
    options = parser.parse_args()
    base_path = options.path_2chkifu
    # The archive is split into five fixed sub-directories.
    collected = []
    for subdir in ('00001', '10000', '20000', '30000', '40000'):
        directory = os.path.expanduser(os.path.join(base_path, subdir))
        collected.extend(os.path.join(directory, entry)
                         for entry in os.listdir(directory))
    return sorted(collected)
|
from flask import Blueprint, jsonify, request
routes_api = Blueprint('routes_api', __name__)
@routes_api.route('/v1/routes', methods=['GET'])
def routes_get():
    '''
    Get a list of routes.

    Handler for GET /v1/routes; currently returns an empty JSON body.
    '''
    return jsonify()
|
TFORM, CONF_NAME, CONF_CODE,
CONF_DELAY_TIME, CONF_PENDING_TIME, CONF_TRIGGER_TIME,
CONF_DISARM_AFTER_TRIGGER)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_time
# Extra configuration key: a template used to validate the alarm code.
CONF_CODE_TEMPLATE = 'code_template'
# Defaults applied when the user omits the corresponding option.
DEFAULT_ALARM_NAME = 'HA Alarm'
DEFAULT_DELAY_TIME = datetime.timedelta(seconds=0)
DEFAULT_PENDING_TIME = datetime.timedelta(seconds=60)
DEFAULT_TRIGGER_TIME = datetime.timedelta(seconds=120)
DEFAULT_DISARM_AFTER_TRIGGER = False
SUPPORTED_STATES = [STATE_ALARM_DISARMED, STATE_ALARM_ARMED_AWAY,
                    STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT,
                    STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_TRIGGERED]
# States the alarm can be in before it fires (everything but TRIGGERED).
SUPPORTED_PRETRIGGER_STATES = [state for state in SUPPORTED_STATES
                               if state != STATE_ALARM_TRIGGERED]
# States reached via a pending countdown (everything but DISARMED).
SUPPORTED_PENDING_STATES = [state for state in SUPPORTED_STATES
                            if state != STATE_ALARM_DISARMED]
ATTR_PRE_PENDING_STATE = 'pre_pending_state'
ATTR_POST_PENDING_STATE = 'post_pending_state'
def _state_validator(config):
    """Fill each state's timing options from the top-level defaults."""
    config = copy.deepcopy(config)
    for state in SUPPORTED_PRETRIGGER_STATES:
        state_conf = config[state]
        state_conf.setdefault(CONF_DELAY_TIME, config[CONF_DELAY_TIME])
        state_conf.setdefault(CONF_TRIGGER_TIME, config[CONF_TRIGGER_TIME])
    for state in SUPPORTED_PENDING_STATES:
        config[state].setdefault(CONF_PENDING_TIME, config[CONF_PENDING_TIME])
    return config
def _state_schema(state):
    """Return the voluptuous schema of options allowed on *state*."""
    timedelta_validator = vol.All(cv.time_period, cv.positive_timedelta)
    options = {}
    if state in SUPPORTED_PRETRIGGER_STATES:
        options[vol.Optional(CONF_DELAY_TIME)] = timedelta_validator
        options[vol.Optional(CONF_TRIGGER_TIME)] = timedelta_validator
    if state in SUPPORTED_PENDING_STATES:
        options[vol.Optional(CONF_PENDING_TIME)] = timedelta_validator
    return vol.Schema(options)
# Platform configuration: code and code_template are mutually exclusive;
# each alarm state may override the global delay/pending/trigger times,
# with missing values filled in by _state_validator.
PLATFORM_SCHEMA = vol.Schema(vol.All({
    vol.Required(CONF_PLATFORM): 'manual',
    vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string,
    vol.Exclusive(CONF_CODE, 'code validation'): cv.string,
    vol.Exclusive(CONF_CODE_TEMPLATE, 'code validation'): cv.template,
    vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME):
        vol.All(cv.time_period, cv.positive_timedelta),
    vol.Optional(CONF_PENDING_TIME, default=DEFAULT_PENDING_TIME):
        vol.All(cv.time_period, cv.positive_timedelta),
    vol.Optional(CONF_TRIGGER_TIME, default=DEFAULT_TRIGGER_TIME):
        vol.All(cv.time_period, cv.positive_timedelta),
    vol.Optional(CONF_DISARM_AFTER_TRIGGER,
                 default=DEFAULT_DISARM_AFTER_TRIGGER): cv.boolean,
    vol.Optional(STATE_ALARM_ARMED_AWAY, default={}):
        _state_schema(STATE_ALARM_ARMED_AWAY),
    vol.Optional(STATE_ALARM_ARMED_HOME, default={}):
        _state_schema(STATE_ALARM_ARMED_HOME),
    vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}):
        _state_schema(STATE_ALARM_ARMED_NIGHT),
    vol.Optional(STATE_ALARM_ARMED_CUSTOM_BYPASS, default={}):
        _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS),
    vol.Optional(STATE_ALARM_DISARMED, default={}):
        _state_schema(STATE_ALARM_DISARMED),
    vol.Optional(STATE_ALARM_TRIGGERED, default={}):
        _state_schema(STATE_ALARM_TRIGGERED),
}, _state_validator))
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the manual alarm platform."""
    # The full (validated) config is passed through so the panel can read
    # the per-state timing overrides itself.
    add_devices([ManualAlarm(
        hass,
        config[CONF_NAME],
        config.get(CONF_CODE),
        config.get(CONF_CODE_TEMPLATE),
        config.get(CONF_DISARM_AFTER_TRIGGER, DEFAULT_DISARM_AFTER_TRIGGER),
        config
    )])
class ManualAlarm(alarm.AlarmControlPanel):
"""
Representation of an alarm status.
When armed, will be pending for 'pending_time', after that armed.
When triggered, will be pending for the triggering state's 'delay_time'
plus the triggered state's 'pending_time'.
After that will be triggered for 'trigger_time', after that we return to
the previous state or disarm if `disarm_after_trigger` is true.
A trigger_time of zero disables the alarm_trigger service.
"""
    def __init__(self, hass, name, code, code_template,
                 disarm_after_trigger, config):
        """Init the manual alarm panel."""
        self._state = STATE_ALARM_DISARMED
        self._hass = hass
        self._name = name
        if code_template:
            # A code template takes precedence over a plain code string.
            self._code = code_template
            self._code.hass = hass
        else:
            self._code = code or None
        self._disarm_after_trigger = disarm_after_trigger
        self._previous_state = self._state
        # Timestamp of the last state change; None until first transition.
        self._state_ts = None
        # Per-state timings, pre-resolved from the validated config.
        self._delay_time_by_state = {
            state: config[state][CONF_DELAY_TIME]
            for state in SUPPORTED_PRETRIGGER_STATES}
        self._trigger_time_by_state = {
            state: config[state][CONF_TRIGGER_TIME]
            for state in SUPPORTED_PRETRIGGER_STATES}
        self._pending_time_by_state = {
            state: config[state][CONF_PENDING_TIME]
            for state in SUPPORTED_PENDING_STATES}
    @property
    def should_poll(self):
        """Return the polling state."""
        return False
    @property
    def name(self):
        """Return the name of the device."""
        # Name comes straight from the platform configuration.
        return self._name
    @property
    def state(self):
        """Return the state of the device."""
        if self._state == STATE_ALARM_TRIGGERED:
            # Still counting down (previous state's delay + trigger
            # pending time): report PENDING instead of TRIGGERED.
            if self._within_pending_time(self._state):
                return STATE_ALARM_PENDING
            trigger_time = self._trigger_time_by_state[self._previous_state]
            if (self._state_ts + self._pending_time(self._state) +
                    trigger_time) < dt_util.utcnow():
                # Trigger window elapsed: either disarm, or fall back to
                # the state that was active before the alarm fired.
                if self._disarm_after_trigger:
                    return STATE_ALARM_DISARMED
                else:
                    self._state = self._previous_state
                    return self._state
        if self._state in SUPPORTED_PENDING_STATES and \
                self._within_pending_time(self._state):
            return STATE_ALARM_PENDING
        return self._state
@property
def _active_state(self):
if self.state == STATE_ALARM_PENDING:
return self._previous_state
else:
return self._state
    def _pending_time(self, state):
        # Base pending time for the target state; a transition into
        # TRIGGERED additionally waits out the previous state's delay.
        pending_time = self._pending_time_by_state[state]
        if state == STATE_ALARM_TRIGGERED:
            pending_time += self._delay_time_by_state[self._previous_state]
        return pending_time
    def _within_pending_time(self, state):
        # True while the pending window for *state* has not yet elapsed.
        return self._state_ts + self._pending_time(state) > dt_util.utcnow()
@property
def code_format(self):
"""One or more characters."""
return None if self._code is None else '.+'
    def alarm_disarm(self, code=None):
        """Send disarm command."""
        # Silently ignore the command if the code does not validate.
        if not self._validate_code(code, STATE_ALARM_DISARMED):
            return
        self._state = STATE_ALARM_DISARMED
        self._state_ts = dt_util.utcnow()
        self.schedule_update_ha_state()
    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        # Silently ignore the command if the code does not validate.
        if not self._validate_code(code, STATE_ALARM_ARMED_HOME):
            return
        self._update_state(STATE_ALARM_ARMED_HOME)
    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        # Silently ignore the command if the code does not validate.
        if not self._validate_code(code, STATE_ALARM_ARMED_AWAY):
            return
        self._update_state(STATE_ALARM_ARMED_AWAY)
    def alarm_arm_night(self, code=None):
        """Send arm night command."""
        # Silently ignore the command if the code does not validate.
        if not self._validate_code(code, STATE_ALARM_ARMED_NIGHT):
            return
        self._update_state(STATE_ALARM_ARMED_NIGHT)
def alarm_arm_custom_bypass(self, code=None):
"""Send arm custom bypass command."""
if not self._validate_code(code, STATE_ALARM_ARMED_CUSTOM_BYPASS):
retur |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistrib | ute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# | This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP addon manifest for the Lunch Orders module.
{
    'name': 'Lunch Orders',
    'author': 'OpenERP SA',
    'version': '0.2',
    'depends': ['base', 'report'],
    'category': 'Tools',
    'summary': 'Lunch Order, Meal, Food',
    'description': """
The base module to manage lunch.
================================
Many companies order sandwiches, pizzas and other, from usual suppliers, for their employees to offer them more facilities.
However lunches management within the company requires proper administration especially when the number of employees or suppliers is important.
The “Lunch Order” module has been developed to make this management easier but also to offer employees more tools and usability.
In addition to a full meal and supplier management, this module offers the possibility to display warning and provides quick order selection based on employee’s preferences.
If you want to save your employees' time and avoid them to always have coins in their pockets, this module is essential.
""",
    'data': [
        'security/lunch_security.xml',
        'lunch_view.xml',
        'wizard/lunch_order_view.xml',
        'wizard/lunch_validation_view.xml',
        'wizard/lunch_cancel_view.xml',
        'lunch_report.xml',
        'report/report_lunch_order_view.xml',
        'security/ir.model.access.csv',
        'views/report_lunchorder.xml',
        'views/lunch.xml',
    ],
    # NOTE(review): 'images' was declared twice; the later empty list
    # silently overrode the jpeg list, so the effective (empty) value is
    # kept as the single entry. Restore the screenshots deliberately if
    # they are wanted:
    # ['images/new_order.jpeg', 'images/lunch_account.jpeg',
    #  'images/order_by_supplier_analysis.jpeg', 'images/alert.jpeg']
    'images': [],
    'demo': ['lunch_demo.xml', ],
    'installable': True,
    'application': True,
    'certificate': '001292377792581874189',
}
|
from django import forms, template
from django.core.cache import cache
from repertoire_telephonique.models import Phone
regi | ster = template.Library()
@register.simple_tag
def simple_add(a, b):
    """Template tag returning the sum of *a* and *b*."""
    return a + b
@register.inclusion_tag('vcard/tags/form_phone.html')
def get_form_phone(contact_id):
    """Render the phone-selection form for *contact_id*.

    Choices are cached per contact so Phone rows are not re-queried on
    every render.
    """
    # get from cache
    cache_key = 'phone_choices_%s' % contact_id
    choices = cache.get(cache_key)
    # cache.get returns None on a miss; testing for None (not falsiness)
    # means a contact with no phones also gets cached instead of hitting
    # the database on every render.
    if choices is None:
        choices = [(_p.id, '%s %s' % (_p.prefix, _p.value))
                   for _p in Phone.objects.filter(contact_id=contact_id)]
        # cache update
        cache.set(cache_key, choices)
    # dynamic form to manage dynamic choices
    class PhoneForm(forms.Form):
        phone = forms.MultipleChoiceField(choices=choices)
    return {
        'contact_id': contact_id,
        'form': PhoneForm()
    }
|
"""
set up an example custom response problem using a check function
"""
test_csv = self.CUSTOM_RESPONSE_SCRIPT
expect = self.CUSTOM_RESPONSE_CORRECT
cfn_problem_xml = CustomResponseXMLFactory().build_xml(script=test_csv, cfn='test_csv', expect=expect)
ItemFactory.create(
parent_location=self.section.location,
category='problem',
boilerplate='customgrader.yaml',
data=cfn_problem_xml,
display_name=name
)
# define the correct and incorrect responses to this problem
self.correct_responses[name] = expect
self.incorrect_responses[name] = self.CUSTOM_RESPONSE_INCORRECT
# re-fetch the course from the database so the object is up to date
self.refresh_course()
    def computed_answer_setup(self, name):
        """
        Set up an example custom-response problem using an answer script.
        """
        script = self.COMPUTED_ANSWER_SCRIPT
        computed_xml = CustomResponseXMLFactory().build_xml(answer=script)
        ItemFactory.create(
            parent_location=self.section.location,
            category='problem',
            boilerplate='customgrader.yaml',
            data=computed_xml,
            display_name=name
        )
        # define the correct and incorrect responses to this problem
        self.correct_responses[name] = self.COMPUTED_ANSWER_CORRECT
        self.incorrect_responses[name] = self.COMPUTED_ANSWER_INCORRECT
        # re-fetch the course from the database so the object is up to date
        self.refresh_course()
    def _check_correct(self, name):
        """
        Check that submitting the known-good answer to problem *name*
        is graded as correct.
        """
        resp = self.submit_question_answer(name, {'2_1': self.correct_responses[name]})
        respdata = json.loads(resp.content)
        self.assertEqual(respdata['success'], 'correct')
    def _check_incorrect(self, name):
        """
        Check that submitting the known-bad answer to problem *name*
        is graded as incorrect.
        """
        resp = self.submit_question_answer(name, {'2_1': self.incorrect_responses[name]})
        respdata = json.loads(resp.content)
        self.assertEqual(respdata['success'], 'incorrect')
def _check_ireset(self, name):
"""
Check that the problem can be reset
"""
# first, get the question wrong
resp = self.submit_question_answer(name, {'2_1': self.incorrect_responses[name]})
# reset the question
self.reset_question_answer(name)
# then get it right
resp = self.submit_question_answer(name, {'2_1': self.correct_responses[name]})
respdata = json.loads(resp.content)
self.assertEqual(respdata['success'], 'correct')
    def test_schematic_correct(self):
        # A correct submission to a schematic problem is graded correct.
        name = "schematic_problem"
        self.schematic_setup(name)
        self._check_correct(name)
    def test_schematic_incorrect(self):
        # An incorrect submission to a schematic problem is graded incorrect.
        name = "schematic_problem"
        self.schematic_setup(name)
        self._check_incorrect(name)
    def test_schematic_reset(self):
        # A schematic problem can be reset after a wrong answer.
        name = "schematic_problem"
        self.schematic_setup(name)
        self._check_ireset(name)
    def test_check_function_correct(self):
        # Check-function (cfn) grading accepts the correct response.
        name = 'cfn_problem'
        self.custom_response_setup(name)
        self._check_correct(name)
    def test_check_function_incorrect(self):
        # Check-function (cfn) grading rejects the incorrect response.
        name = 'cfn_problem'
        self.custom_response_setup(name)
        self._check_incorrect(name)
    def test_check_function_reset(self):
        # A check-function problem can be reset after a wrong answer.
        name = 'cfn_problem'
        self.custom_response_setup(name)
        self._check_ireset(name)
    def test_computed_correct(self):
        # Answer-script grading accepts the correct response.
        name = 'computed_answer'
        self.computed_answer_setup(name)
        self._check_correct(name)
    def test_computed_incorrect(self):
        # Answer-script grading rejects the incorrect response.
        name = 'computed_answer'
        self.computed_answer_setup(name)
        self._check_incorrect(name)
    def test_computed_reset(self):
        # An answer-script problem can be reset after a wrong answer.
        name = 'computed_answer'
        self.computed_answer_setup(name)
        self._check_ireset(name)
@attr('shard_1')
class TestAnswerDistributions(TestSubmittingProblems):
"""Check that we can pull answer distributions for problems."""
    def setUp(self):
        """Set up a simple course with three dropdown problems."""
        super(TestAnswerDistributions, self).setUp()
        self.homework = self.add_graded_section_to_course('homework')
        self.p1_html_id = self.add_dropdown_to_section(self.homework.location, 'p1', 1).location.html_id()
        self.p2_html_id = self.add_dropdown_to_section(self.homework.location, 'p2', 1).location.html_id()
        self.p3_html_id = self.add_dropdown_to_section(self.homework.location, 'p3', 1).location.html_id()
        self.refresh_course()
    def test_empty(self):
        # Just make sure we can process this without errors.
        empty_distribution = grades.answer_distributions(self.course.id)
        self.assertFalse(empty_distribution)  # should be empty
    def test_one_student(self):
        # Basic test to make sure we have simple behavior right for a student
        # Throw in a non-ASCII answer
        self.submit_question_answer('p1', {'2_1': u'ⓤⓝⓘⓒⓞⓓⓔ'})
        self.submit_question_answer('p2', {'2_1': 'Correct'})
        # p3 is never answered, so it must not appear in the distribution.
        distributions = grades.answer_distributions(self.course.id)
        self.assertEqual(
            distributions,
            {
                ('p1', 'p1', '{}_2_1'.format(self.p1_html_id)): {
                    u'ⓤⓝⓘⓒⓞⓓⓔ': 1
                },
                ('p2', 'p2', '{}_2_1'.format(self.p2_html_id)): {
                    'Correct': 1
                }
            }
        )
    def test_multiple_students(self):
        # Our test class is based around making requests for a particular user,
        # so we're going to cheat by creating another user and copying and
        # modifying StudentModule entries to make them from other users. It's
        # a little hacky, but it seemed the simpler way to do this.
        self.submit_question_answer('p1', {'2_1': u'Correct'})
        self.submit_question_answer('p2', {'2_1': u'Incorrect'})
        self.submit_question_answer('p3', {'2_1': u'Correct'})
        # Make the above submissions owned by user2
        user2 = UserFactory.create()
        problems = StudentModule.objects.filter(
            course_id=self.course.id,
            student=self.student_user
        )
        for problem in problems:
            problem.student_id = user2.id
            problem.save()
        # Now make more submissions by our original user
        self.submit_question_answer('p1', {'2_1': u'Correct'})
        self.submit_question_answer('p2', {'2_1': u'Correct'})
        # Distributions must aggregate counts across both users.
        self.assertEqual(
            grades.answer_distributions(self.course.id),
            {
                ('p1', 'p1', '{}_2_1'.format(self.p1_html_id)): {
                    'Correct': 2
                },
                ('p2', 'p2', '{}_2_1'.format(self.p2_html_id)): {
                    'Correct': 1,
                    'Incorrect': 1
                },
                ('p3', 'p3', '{}_2_1'.format(self.p3_html_id)): {
                    'Correct': 1
                }
            }
        )
def test_other_data_types(self):
# We'll submit | one problem, and then muck with the student_answers
# dict inside its state to try different data types (str, int, float,
# none)
self.submit_question_answer('p1', {'2_1': u'Correct'})
# Now fetch the state entry f | or that problem.
student_module = StudentModule.objects.get(
course_id=self.course.id,
student=self.student_user
)
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
state = json.loads(student_module.state)
state["student_answers"]['{}_2_1'.format(self.p1_html_id)] = val
student_module.state = json.dumps(state)
student_module.save()
self.assertEqual(
grades.answer_distributions(self.course.id),
{
('p1', 'p1', '{}_2_1'.format(self.p1_htm |
# -*- coding: utf-8 -*-
"""
logbook.testsuite
~~~~~~~~~~~~~~~~~
The logbook testsuite.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
import logbook
# Optional modules that failed to import; reported after the suite run.
_skipped_modules = []
# Sentinel distinguishing "no entry in sys.modules" from a None entry.
_missing = object()
# Decorator helpers: keep the decorated function, or replace it with None.
_func_ident = lambda f: f
_func_none = lambda f: None
class LogbookTestSuite(unittest.TestSuite):
    """TestSuite that, once the run finishes, prints a note on stderr for
    every optional module that could not be imported."""

    def run(self, result):
        try:
            return unittest.TestSuite.run(self, result)
        finally:
            sys.stderr.write('\n')
            for module_name in _skipped_modules:
                sys.stderr.write('*** Failed to import %s, tests skipped.\n'
                                 % module_name)
class LogbookTestCase(unittest.TestCase):
    """Base test case providing a fresh 'testlogger' Logger per test."""
    def setUp(self):
        self.log = logbook.Logger('testlogger')
# silence deprecation warning displayed on Py 3.2
LogbookTestCase.assert_ = LogbookTestCase.assertTrue
def make_fake_mail_handler(**kwargs):
    """Return a MailHandler that records mails in a class-level list
    instead of sending them; default level is ERROR."""
    class FakeMailHandler(logbook.MailHandler):
        mails = []
        def get_connection(self):
            return self
        def close_connection(self, con):
            pass
        def sendmail(self, fromaddr, recipients, mail):
            self.mails.append((fromaddr, recipients, mail))
    kwargs.setdefault('level', logbook.ERROR)
    return FakeMailHandler('foo@example.com', ['bar@example.com'], **kwargs)
def skip_if(condition):
    """Keep the decorated test when *condition* is true; drop it (replace
    with None) otherwise."""
    return _func_ident if condition else _func_none
def require(name):
    """Import-guard decorator factory: drop the test when module *name*
    cannot be imported; failed names are remembered in _skipped_modules."""
    if name in _skipped_modules:
        return _func_none
    try:
        __import__(name)
    except ImportError:
        _skipped_modules.append(name)
        return _func_none
    return _func_ident
def missing(name):
    """Decorator factory: run the test as if module *name* were not
    importable (sys.modules[name] is forced to None for the call)."""
    def decorate(f):
        def wrapper(*args, **kwargs):
            previous = sys.modules.get(name, _missing)
            sys.modules[name] = None
            try:
                f(*args, **kwargs)
            finally:
                # Restore the original entry, or remove ours entirely.
                if previous is _missing:
                    del sys.modules[name]
                else:
                    sys.modules[name] = previous
        return wrapper
    return decorate
def suite():
    """Assemble the logbook suite; context-manager tests need Python 2.5+."""
    loader = unittest.TestLoader()
    tests = LogbookTestSuite()
    tests.addTests(
        loader.loadTestsFromName('logbook.testsuite.test_regular'))
    if sys.version_info >= (2, 5):
        tests.addTests(
            loader.loadTestsFromName('logbook.testsuite.test_contextmanager'))
    return tests
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import unittest
import pandas as pd
from pandas.util.testing import assert_series_equal
import numpy as np
from easyframes.easyframes import hhkit
class TestStataMerge(unittest.TestCase):
    def setUp(self):
        """
        Build the master/using fixtures used by every test.

        df_original = pd.read_csv('sample_hh_dataset.csv')
        df = df_original.copy()
        print(df.to_dict())
        """
        # Master: one row per household member ('hh' + 'id' identify a row).
        self.df_master = pd.DataFrame(
            {'educ': {0: 'secondary', 1: 'bachelor', 2: 'primary', 3: 'higher', 4: 'bachelor', 5: 'secondary',
                6: 'higher', 7: 'higher', 8: 'primary', 9: 'primary'},
            'hh': {0: 1, 1: 1, 2: 1, 3: 2, 4: 3, 5: 3, 6: 4, 7: 4, 8: 4, 9: 4},
            'id': {0: 1, 1: 2, 2: 3, 3: 1, 4: 1, 5: 2, 6: 1, 7: 2, 8: 3, 9: 4},
            'has_car': {0: 1, 1: 1, 2: 1, 3: 1, 4: 0, 5: 0, 6: 1, 7: 1, 8: 1, 9: 1},
            'weighthh': {0: 2, 1: 2, 2: 2, 3: 3, 4: 2, 5: 2, 6: 3, 7: 3, 8: 3, 9: 3},
            'house_rooms': {0: 3, 1: 3, 2: 3, 3: 2, 4: 1, 5: 1, 6: 3, 7: 3, 8: 3, 9: 3},
            'prov': {0: 'BC', 1: 'BC', 2: 'BC', 3: 'Alberta', 4: 'BC', 5: 'BC', 6: 'Alberta',
                7: 'Alberta', 8: 'Alberta', 9: 'Alberta'},
            'age': {0: 44, 1: 43, 2: 13, 3: 70, 4: 23, 5: 20, 6: 37, 7: 35, 8: 8, 9: 15},
            'fridge': {0: 'yes', 1: 'yes', 2: 'yes', 3: 'no', 4: 'yes', 5: 'yes', 6: 'no',
                7: 'no', 8: 'no', 9: 'no'},
            'male': {0: 1, 1: 0, 2: 1, 3: 1, 4: 1, 5: 0, 6: 1, 7: 0, 8: 0, 9: 0}})
        # Using file keyed on household only; hh 5-7 exist only here.
        self.df_using_hh = pd.DataFrame(
            {'hh': {0: 2, 1: 4, 2: 5, 3: 6, 4: 7},
             'has_fence': {0: 1, 1: 0, 2: 1, 3: 1, 4: 0}
            })
        # Using file keyed on household + individual; partially overlaps master.
        self.df_using_ind = pd.DataFrame(
            {'empl': {0: 'not employed', 1: 'full-time', 2: 'part-time', 3: 'part-time', 4: 'full-time', 5: 'part-time',
                6: 'self-employed', 7: 'full-time', 8: 'self-employed'},
            'hh': {0: 1, 1: 1, 2: 1, 3: 2, 4: 5, 5: 5, 6: 4, 7: 4, 8: 4},
            'id': {0: 1, 1: 2, 2: 4, 3: 1, 4: 1, 5: 2, 6: 1, 7: 2, 8: 5}
            })
# @unittest.skip("demonstrating skipping")
def test_new_columns_added_merging_hh_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_hh = hhkit(self.df_using_hh)
# myhhkit_using_hh.from_dict(self.df_using_hh)
myhhkit.statamerge(myhhkit_using_hh, on=['hh'])
list_of_columns = myhhkit.df.columns.values.tolist()
self.assertIn('has_fence',list_of_columns)
# also check that the values are correct
correct_values = pd.Series([np.nan, np.nan, np.nan, 1, np.nan, np.nan, 0, 0, 0, 0, 1, 1, 0])
assert_series_equal(correct_values, myhhkit.df['has_fence'])
# @unittest.skip("demonstrating skipping")
def test_new_columns_added_merging_ind_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_ind = hhkit(self.df_using_ind)
# myhhkit_using_ind.from_dict(self.df_using_ind)
myhhkit.statamerge(myhhkit_using_ind, on=['hh','id'])
list_of_columns = myhhkit.df.columns.values.tolist()
self.assertIn('empl',list_of_columns)
# also check that the values are correct
correct_values = pd.Series(['not employed', 'full-time', np.nan, 'part-time', np.nan, np.nan,
'self-employed', 'full-time', np.nan, np.nan, 'part-time', 'full-time', 'part-time', 'self-employed'])
assert_series_equal(correct_values, myhhkit.df['empl'])
# @unittest.skip("demonstrating skipping")
def test_check_proper_merged_variable_created_and_is_correct_hh_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_hh = hhkit(self.df_using_hh)
# myhhkit_using_hh. | from_dict(self.df_using_hh)
correct_values = pd.Series([1, 1, 1, 3, 1, 1, 3, 3, 3, 3, 2, 2, 2])
myhhkit.statamerge(myhhkit_using | _hh, on=['hh'], mergevarname='_merge_hh')
assert_series_equal(correct_values, myhhkit.df['_merge_hh'])
def test_check_proper_merged_variable_created_and_is_correct_ind_level(self):
myhhkit = hhkit(self.df_master)
# myhhkit.from_dict(self.df_master)
myhhkit_using_ind = hhkit(self.df_using_ind)
# myhhkit_using_ind.from_dict(self.df_using_ind)
correct_values = pd.Series([3, 3, 1, 3, 1, 1, 3, 3, 1, 1, 2, 2, 2, 2])
myhhkit.statamerge(myhhkit_using_ind, on=['hh','id'], mergevarname='_merge_hh')
assert_series_equal(correct_values, myhhkit.df['_merge_hh'])
if __name__ == '__main__':
unittest.main()
|
# Copyright 2021 Red Hat, Inc. Jake Hunsaker <jhunsake@redhat.com>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.cleaner.archives import SoSObfuscationArchive
import tarfile
class InsightsArchive(SoSObfuscationArchive):
    """Obfuscation support for archives produced by the insights-client
    utility on RHEL systems.
    """

    type_name = 'insights'
    description = 'insights-client archive'

    # Files inspected during the prep phase for identifying information.
    prep_files = {
        'hostname': 'data/insights_commands/hostname_-f',
        'ip': 'data/insights_commands/ip_addr',
        'mac': 'data/insights_commands/ip_addr'
    }

    @classmethod
    def check_is_type(cls, arc_path):
        """Return True when arc_path looks like an insights-client tarball."""
        try:
            # Cheap name check first; only probe the file when it matches.
            if 'insights-' not in arc_path:
                return False
            return tarfile.is_tarfile(arc_path)
        except Exception:
            # Unreadable/missing file: simply not our archive type.
            return False

    def get_archive_root(self):
        """Derive the archive's top-level directory from its file name."""
        basename = self.archive_path.split('/')[-1]
        root = basename.split('.tar')[0]
        # Archives rooted at '.' store members with a './' prefix.
        if self.tarobj.firstmember.name == '.':
            root = './' + root
        return root
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
tools for imr datasets
@author: Chris Mantas
@contact: the1pro@gmail.com
@since: Created on 2016-02-12
@todo: custom formats, break up big lines
@license: http://www.apache.org/licenses/LICENSE-2.0 Apache License
"""
from ast import literal_eval
from collections import defaultdict
def create_label_encoder(labels):
    """
    Build a sklearn LabelEncoder fitted on the given labels.
    :param labels: a list of integers
    :return: a fitted LabelEncoder object
    """
    from sklearn.preprocessing import LabelEncoder
    # fit() returns the encoder itself, so construct and fit in one step.
    encoder = LabelEncoder().fit(labels)
    return encoder
def get_features_from_line(line):
    """
    Extract the feature list from a text line.

    The line is parsed with ast.literal_eval and:
    a) if it evaluates to a tuple, only the last element is returned
       (assumed to be the list of features);
    b) otherwise the parsed object itself is returned.
    :param line: a string holding a Python literal
    :return: the feature list, or the whole parsed object
    """
    # literal_eval is already imported at module level; the redundant
    # per-call `from ast import literal_eval` has been removed.
    entry = literal_eval(line)
    return entry[-1] if isinstance(entry, tuple) else entry
def parse_line(line):
    """
    Parse a string line into a tuple.
    :param line: a string holding a Python tuple literal
    :return: the parsed tuple
    :raises Exception: if the line cannot be parsed, or if it parses
        to something other than a tuple
    """
    try:
        entry = literal_eval(line)
    except Exception:
        raise Exception("Could not evaluate (parse) input into an object")
    # Checked outside the try block: the original code raised this inside
    # the try and its own bare except immediately swallowed it, replacing
    # the message with the misleading "could not evaluate" one.
    if not isinstance(entry, tuple):
        raise Exception("Input parsed, but is not a tuple")
    return entry
def tuple_to_labeled_point(entry, category, l_encoder=None):
    """
    Build a LabeledPoint from a tuple entry.
    :param entry: a tuple like (3, 2, 1, [3, 4, 4, ...]) whose leading
        items are labels and whose last item is the feature list
    :param category: index of the label to keep for the labeled point
        (0 to 2 for the imr dataset)
    :param l_encoder: the label encoder to encode the label (if any)
    :return: a LabeledPoint
    """
    from pyspark.mllib.classification import LabeledPoint
    features = entry[-1]
    label = entry[category]
    if l_encoder:
        # Encode the raw label before handing it to Spark.
        label = l_encoder.transform(label)
    return LabeledPoint(label, features)
def classify_line(features, model, l_encoder=None):
    """
    Classify a feature vector with the given model.

    When a label encoder is supplied, the encoded prediction is mapped
    back to its original label before returning.
    :param features: a vector of features
    :param model: a Classification Model
    :param l_encoder: a LabelEncoder (optional)
    :return: a tuple of: label, [feat1, feat2 ... featN]
    """
    encoded = model.predict(features)
    if l_encoder:
        return l_encoder.inverse_transform(encoded), features
    return encoded, features
def label_encoders_from_json_file(labels_json_file, category=None):
    """
    Load a mapping of categories->available_labels from a json file.

    If category is specified it returns the LabelEncoder for this category.
    If not, it returns a dict of category->LabelEncoder.
    :param labels_json_file: path to the json file
    :param category: optional category key (int)
    :return: a LabelEncoder, or a dict of category->LabelEncoder
    """
    from json import load
    from sklearn.preprocessing import LabelEncoder
    with open(labels_json_file) as infile:
        all_labels = load(infile)
    # Dict comprehension instead of map(lambda (k, v): ...): tuple-parameter
    # lambdas and dict.iteritems() are Python-2-only; this form behaves
    # identically on Python 2.7 and Python 3.
    label_dict = {int(k): LabelEncoder().fit(v)
                  for k, v in all_labels.items()}
    return label_dict[category] if category else label_dict
def labels_from_csv_file(csv_file, label_range):
    """
    Parse a ';'-separated csv dataset and keep, for every column index in
    'label_range' (inclusive on both ends), all labels in first-seen order
    without duplicates.
    :param csv_file: path to the ';'-separated dataset
    :param label_range: (first, last) pair of column indices holding labels
    :return: dict mapping column index -> ordered list of unique labels
    """
    labels = defaultdict(list)
    # Parallel sets give O(1) duplicate checks vs O(n) list scans.
    label_sets = defaultdict(set)
    with open(csv_file) as infile:
        for line in infile:
            line_tokens = line.split(';')
            for i in range(label_range[0], label_range[1]+1):
                label = int(line_tokens[i])
                if label not in label_sets[i]:
                    label_sets[i].add(label)
                    labels[i].append(label)
    # dict(labels) instead of dict(labels.iteritems()): iteritems() does not
    # exist on Python 3, and dict(mapping) copies to a plain dict either way.
    return dict(labels)
# ======================= MAIN ========================= #
if __name__ == "__main__":
    from argparse import ArgumentParser
    from json import dump

    # Command-line interface: operation, input csv, output path, label range.
    parser = ArgumentParser(description='tools for imr datasets')
    parser.add_argument("operation",
                        help="the operation to run: 'train' or 'classify'")
    parser.add_argument("input",
                        help="the input dataset (formatted as a csv file"
                             "separated with ';' character")
    parser.add_argument("output", help="the output file")
    parser.add_argument("-rs", '--range-start', type=int, default=1,
                        help="the start of the range of labels")
    parser.add_argument("-re", '--range-end', type=int, default=3,
                        help="the end of the range of labels (inclusive)")
    args = parser.parse_args()

    if args.operation != "storelabels":
        print("I do not know operation:", args.operation)
    else:
        from collections import defaultdict
        # Build the column -> unique-labels mapping from the csv dataset.
        labels_dict = labels_from_csv_file(args.input,
                                           (args.range_start, args.range_end))
        # Persist the mapping as json.
        with open(args.output, 'w+') as outfile:
            dump(labels_dict, outfile)
|
#!/usr/bin/python2.7
# Copyright 2012 Google Inc. All Rights Reserved.
"""Support for simple JSON | templates.
A JSON template is a dictionary of JSON data in which string values
may be simple templates in string.Template format (i.e.,
$dollarSignEscaping). By default, the template is expanded against
its own data, optionally updated with additional | context.
"""
import json
from string import Template
import sys
__author__ = 'smulloni@google.com (Jacob Smullyan)'
def ExpandJsonTemplate(json_data, extra_context=None, use_self=True):
  """Recursively template-expand a json dict against itself or other context.

  The context for string expansion is the json dict itself by default, updated
  by extra_context, if supplied.

  Args:
    json_data: (dict) A JSON object where string values may be templates.
    extra_context: (dict) Additional context for template expansion.
    use_self: (bool) Whether to expand the template against itself, or only use
      extra_context.

  Returns:
    A dict where string template values have been expanded against
    the context.
  """
  if use_self:
    context = dict(json_data)
  else:
    context = {}
  if extra_context:
    context.update(extra_context)
  # Python 2/3 compatibility: `unicode` and dict.iteritems() do not exist on
  # Python 3; the original code raised NameError/AttributeError there.
  try:
    string_types = (str, unicode)
  except NameError:  # Python 3
    string_types = (str,)
  def RecursiveExpand(obj):
    if isinstance(obj, list):
      return [RecursiveExpand(x) for x in obj]
    elif isinstance(obj, dict):
      # items() works on both 2 and 3; on 2 it only costs one temp list.
      return dict((k, RecursiveExpand(v)) for k, v in obj.items())
    elif isinstance(obj, string_types):
      return Template(obj).safe_substitute(context)
    else:
      return obj
  return RecursiveExpand(json_data)
if __name__ == '__main__':
  # Expand a JSON template read from a named file, or stdin by default,
  # and pretty-print the result to stdout.
  json_in = open(sys.argv[1]) if len(sys.argv) > 1 else sys.stdin
  expanded = ExpandJsonTemplate(json.load(json_in))
  json.dump(expanded, sys.stdout, indent=2)
|
"""
raven.transport.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more deta | ils.
:license: BSD, see LICENSE for more details.
"""
class InvalidScheme(ValueError):
    """
    Raised when a transport is constructed using a URI which is not
    handled by the transport
    """
# NOTE(review): StandardError is Python-2-only; a Python 3 port would need
# to derive from Exception instead -- confirm before changing, since callers
# may catch StandardError explicitly.
class DuplicateScheme(StandardError):
    """
    Raised when registering a handler for a particular scheme which
    is already registered
    """
|
# -*- cod | ing: utf-8 -*-
"""
Created on Tue Jul 07 13:58:49 2015
@author: Wasit
"""
import serial
import re
import datetime
#ser = serial.Serial('/dev/tty.usbserial', 9600)
#ser = serial.Serial('COM7', 9600)
#ser = serial.Serial(0) # open first serial port
# Probe the first ten serial ports and keep the first one that opens.
# NOTE(review): if none opens, ser stays None and ser.name below raises
# AttributeError -- confirm this crash-on-no-port behaviour is intended.
ser=None
for i in xrange(10):
    try:
        ser = serial.Serial(i)
        break
    except:
        # Bare except: also hides KeyboardInterrupt -- Python 2 script.
        print "port COM%d is disabled"%(i+1)
print "Connecting to port: "+ser.name
# Read comma-separated integer records from the port for five seconds.
endTime = datetime.datetime.now() + datetime.timedelta(seconds=5)
while True:
    if datetime.datetime.now() >= endTime:
        break
    record=re.split(',',ser.readline())
    record = map(int, record)
    print record
ser.close()
or
class ExecutionSchedulesTestCase(BaseServerTestCase):
DEPLOYMENT_ID = 'deployment'
fmt = '%Y-%m-%dT%H:%M:%S.%fZ'
an_hour_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(hours=1)
two_hours_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(hours=2)
three_hours_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(hours=3)
three_weeks_from_now = \
datetime.utcnow().replace(microsecond=0) + timedelta(weeks=3)
deployment_id = None
def setUp(self):
super(ExecutionSchedulesTestCase, self).setUp()
_, self.deployment_id, _, _ = self.put_deployment(self.DEPLOYMENT_ID)
def test_schedule_create(self):
schedule_id = 'sched-1'
workflow_id = 'install'
schedule = self.client.execution_schedules.create(
schedule_id, self.deployment_id, workflow_id,
since=self.an_hour_from_now, recurrence='1 minutes', count=5)
self.assertEqual(schedule.id, schedule_id)
self.assertEqual(schedule.deployment_id, self.deployment_id)
self.assertEqual(schedule.workflow_id, workflow_id)
self.assertEqual(datetime.strptime(schedule.since, self.fmt),
self.an_hour_from_now)
self.assertEqual(len(schedule['all_next_occurrences']), 5)
self.assertEqual(
datetime.strptime(schedule['next_occurrence'], self.fmt),
self.an_hour_from_now)
self.assertEqual(schedule['slip'], 0)
self.assertEqual(schedule['stop_on_fail'], False)
def test_schedule_create_weekdays(self):
schedule = self.client.execution_schedules.create(
'sched-weekdays', self.deployment_id, 'install',
since=self.an_hour_from_now, until=self.three_weeks_from_now,
recurrence='1 days', weekdays=['mo', 'tu', 'we', 'th'])
self.assertEqual(len(schedule['all_next_occurrences']), 12) # 3w * 4d
def test_schedules_list(self):
schedule_ids = ['sched-1', 'sched-2']
for schedule_id in schedule_ids:
self.client. | execution_schedules.create(
| schedule_id, self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='1 minutes', count=5)
schedules = self.client.execution_schedules.list()
self.assertEqual(len(schedules), 2)
self.assertSetEqual({s.id for s in schedules}, set(schedule_ids))
def test_schedule_delete(self):
self.client.execution_schedules.create(
'delete-me', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='1 minutes', count=5)
self.assertEqual(len(self.client.execution_schedules.list()), 1)
self.client.execution_schedules.delete('delete-me', self.deployment_id)
self.assertEqual(len(self.client.execution_schedules.list()), 0)
def test_schedule_update(self):
schedule = self.client.execution_schedules.create(
'update-me', self.deployment_id, 'install',
since=self.an_hour_from_now, until=self.two_hours_from_now,
recurrence='1 minutes')
# `until` is inclusive
self.assertEqual(len(schedule['all_next_occurrences']), 61)
self.assertEqual(schedule['rule']['recurrence'], '1 minutes')
self.assertEqual(schedule['slip'], 0)
self.client.execution_schedules.update(
'update-me', self.deployment_id, recurrence='5 minutes', slip=30)
# get the schedule from the DB and not directly from .update endpoint
schedule = self.client.execution_schedules.get('update-me',
self.deployment_id)
self.assertEqual(len(schedule['all_next_occurrences']), 13) # 60/5+1
self.assertEqual(schedule['rule']['recurrence'], '5 minutes')
self.assertEqual(schedule['slip'], 30)
self.client.execution_schedules.update(
'update-me', self.deployment_id, until=self.three_hours_from_now)
schedule = self.client.execution_schedules.get('update-me',
self.deployment_id)
self.assertEqual(len(schedule['all_next_occurrences']), 25) # 2*60/5+1
def test_schedule_get_invalid_id(self):
self.assertRaisesRegex(
CloudifyClientError,
'404: Requested `ExecutionSchedule` .* was not found',
self.client.execution_schedules.get,
'nonsuch',
self.deployment_id
)
def test_schedule_create_no_since(self):
self.assertRaises(
AssertionError,
self.client.execution_schedules.create,
'some_id', self.deployment_id, 'some_workflow',
recurrence='1 minutes', count=5
)
def test_schedule_create_invalid_time_format(self):
self.assertRaisesRegex(
AttributeError,
"'str' object has no attribute 'isoformat'",
self.client.execution_schedules.create,
'some_id', self.deployment_id, 'install',
since='long ago', recurrence='1 minutes', count=5
)
def test_schedule_create_invalid_workflow(self):
self.assertRaisesRegex(
CloudifyClientError,
'400: Workflow some_workflow does not exist',
self.client.execution_schedules.create,
'some_id', self.deployment_id, 'some_workflow',
since=self.an_hour_from_now, recurrence='1 minutes', count=5,
)
def test_schedule_invalid_weekdays(self):
self.assertRaisesRegex(
CloudifyClientError,
'400:.* invalid weekday',
self.client.execution_schedules.create,
'bad-weekdays', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours',
weekdays=['oneday', 'someday']
)
self.client.execution_schedules.create(
'good-weekdays', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours', count=6,
weekdays=['mo', 'tu']
)
self.assertRaisesRegex(
CloudifyClientError,
'400:.* invalid weekday',
self.client.execution_schedules.update,
'good-weekdays', self.deployment_id, weekdays=['oneday', 'someday']
)
def test_schedule_create_invalid_complex_weekdays(self):
self.assertRaisesRegex(
CloudifyClientError,
'400:.* invalid weekday',
self.client.execution_schedules.create,
'bad-complex-wd', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours',
weekdays=['5tu']
)
def test_schedule_create_invalid_recurrence_with_complex_weekdays(self):
self.assertRaisesRegex(
CloudifyClientError,
'400:.* complex weekday expression',
self.client.execution_schedules.create,
'bad-complex-wd', self.deployment_id, 'install',
since=self.an_hour_from_now, recurrence='4 hours',
weekdays=['2mo', 'l-tu']
)
def test_schedule_invalid_repetition_without_recurrence(self):
recurrence_error = \
'400: recurrence must be specified for execution count ' \
'larger than 1'
self.assertRaisesRegex(
CloudifyClientError,
recurrence_error,
self.client.execution_schedules.create,
'no-recurrence-no-count', self.deployment_id, 'uninstall',
since=self.an_hour_from_now, weekdays=['su', 'mo', 'tu'],
)
self.client.execution_schedules.create(
'no-recurrence-count-1', self.deployment_id, 'install',
since=self.an_hour_from_now, count=1,
)
self.assertRaisesRegex(
CloudifyClientError,
recurrence_error,
self.client.execution_schedules.update,
'no-recurrence-count-1', self.deployment_id, count=2
)
def test_schedule_create_invalid_recurrence(self):
self.assertRaisesRe |
#!/usr/bin/env python
from sys import argv, stderr

# Generates a C array of half-period sample counts for an equal-tempered
# scale, given a sample rate and an A4 reference frequency.
usage = \
"""
Usage: {program} <sample rate> <A4 freq.> [octaves=8]
 e.g.: {program} 64000 442.0 5
""".format(program=argv[0])

if len(argv) < 3 or len(argv) > 4 :
    print(usage, file = stderr)
    exit(1)

A4 = 0
sample_rate = 0
octaves = 8

# Catch only ValueError: the original bare `except:` also swallowed
# KeyboardInterrupt/SystemExit and misreported them as bad arguments.
try:
    A4 = float(argv[2])
except ValueError:
    print("Error, invalid argument: Freq. must be a number!", file = stderr)
    print(usage, file = stderr)
    exit(1)

try:
    sample_rate = int(argv[1])
except ValueError:
    print("Error, invalid argument: Sample rate must be an integer!", \
            file = stderr)
    print(usage, file = stderr)
    exit(1)

if len(argv) == 4 :
    try:
        octaves = int(argv[3])
    except ValueError:
        print("Error, invalid argument: Octaves must be an integer!", \
                file = stderr)
        print(usage, file = stderr)
        exit(1)

# A4 sits 57 semitones above C0; each period is a half wavelength in samples.
freq_ratio = 2**(1/12)
base_freq = A4/(freq_ratio**57)
periods = [round(sample_rate/(2*base_freq*freq_ratio**t)) \
        for t in range(0, 12*octaves)]

print("uint16_t tone_periods[{ntones}] = {{".format(ntones=12*octaves))
for o in range(0, octaves):
    print('\t', end='')
    for i in range(0, 12):
        print("{period}, ".format(period=periods[12*o+i]), end='')
    print('')
print("};")
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-07-27 15:04
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds three password-policy fields to press.

    dependencies = [
        ('press', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='press',
            name='password_length',
            # Minimum allowed configured length is 9; default is 12.
            field=models.PositiveIntegerField(default=12, validators=[django.core.validators.MinValueValidator(9)]),
        ),
        migrations.AddField(
            model_name='press',
            name='password_number',
            field=models.BooleanField(default=False, help_text='If set, passwords must include one number.'),
        ),
        migrations.AddField(
            model_name='press',
            name='password_upper',
            field=models.BooleanField(default=False, help_text='If set, passwords must include one upper case.'),
        ),
    ]
|
# -*- coding: utf8 -*-
"CountColumn filter"
from .abstract import AbstractFilter
class CountColumn(AbstractFilter):
    "Count a flux's column and put the result in a variable"

    # UI metadata (displayed in French, kept verbatim).
    name = 'Compter colonnes'
    description = "Compte le nombre de colonnes d'un flux et met le résultat dans une variable"
    node_in = ['cible']
    parameters = [
        {
            'name': 'Variable',
            'key': 'target',
            'type': 'integer'
        }
    ]

    def run(self):
        "Execute the filter"
        # Store the header count of the 'cible' input flux into the
        # configured target variable.
        column_count = len(self._flux_in['cible']['headers'])
        self._registery.set(self._model.config('target'), column_count)
|
import bitcoin
import struct
import serialize
class BlockHeader:
    """An 80-byte bitcoin block header."""

    def __init__(self):
        # Height is not part of the serialized header; callers set it.
        self.height = None

    @classmethod
    def deserialize(cls, raw):
        """Parse an 80-byte serialized header into a BlockHeader."""
        assert len(raw) == 80
        header = cls()
        header.version = struct.unpack('<I', raw[:4])[0]
        # Hashes are serialized little-endian; keep them reversed in memory.
        header.previous_block_hash = raw[4:36][::-1]
        assert len(header.previous_block_hash) == 32
        header.merkle = raw[36:68][::-1]
        assert len(header.merkle) == 32
        header.timestamp, header.bits, header.nonce = \
            struct.unpack('<III', raw[68:])
        return header

    @property
    def hash(self):
        """Hash of the re-serialized header, byte-reversed for display."""
        blob = struct.pack('<I', self.version)
        blob += self.previous_block_hash[::-1]
        blob += self.merkle[::-1]
        blob += struct.pack('<III', self.timestamp, self.bits, self.nonce)
        return bitcoin.Hash(blob)[::-1]

    def __repr__(self):
        return '<BlockHeader %s>' % (self.hash.encode("hex"),)
class OutPoint(object):
    """Reference to a previous transaction output (hash + index)."""

    def __init__(self):
        self.hash = None
        self.index = None

    def is_null(self):
        """True for the null outpoint: empty hash and index 0xffffffff."""
        return len(self.hash) == 0 and self.index == 0xffffffff

    def __repr__(self):
        return "OutPoint(hash=%s, index=%i)" % (self.hash.encode("hex"), self.index)

    def serialize(self):
        """Serialize via the project's serialize module."""
        return serialize.ser_output_point(self)

    @staticmethod
    def deserialize(bytes):
        """Deserialize via the project's serialize module."""
        return serialize.deser_output_point(bytes)
class TxOut(object):
    """A transaction output: a value plus its locking script.

    The repr formats value with 8 decimal places (value // 1e8 units).
    """

    def __init__(self):
        self.value = None
        self.script = ""

    def __repr__(self):
        return "TxOut(value=%i.%08i script=%s)" % (self.value // 100000000, self.value % 100000000, self.script.encode("hex"))

    def serialize(self):
        """Serialize via the project's serialize module."""
        return serialize.ser_txout(self)

    @staticmethod
    def deserialize(bytes):
        """Deserialize via the project's serialize module."""
        return serialize.deser_txout(bytes)
class TxIn(object):
    """A transaction input: outpoint, unlocking script, sequence number."""

    def __init__(self):
        self.previous_output = OutPoint()
        self.script = ""
        self.sequence = 0xffffffff

    def is_final(self):
        # The all-ones sequence marks the input as final.
        return self.sequence == 0xffffffff

    def __repr__(self):
        return "TxIn(previous_output=%s script=%s sequence=%i)" % (repr(self.previous_output), self.script.encode("hex"), self.sequence)

    def serialize(self):
        """Serialize via the project's serialize module."""
        return serialize.ser_txin(self)

    @staticmethod
    def deserialize(bytes):
        """Deserialize via the project's serialize module."""
        return serialize.deser_txin(bytes)
class Transaction:
    """A transaction: version, inputs, outputs and locktime."""

    def __init__(self):
        self.version = 1
        self.locktime = 0
        self.inputs = []
        self.outputs = []

    def is_final(self):
        """Return True when every input is final.

        Bug fix: the original iterated ``self.vin``, an attribute that is
        never set (``__init__`` creates ``self.inputs``), so any call
        raised AttributeError.
        """
        for txin in self.inputs:
            if not txin.is_final():
                return False
        return True

    def is_coinbase(self):
        """Return True for a coinbase transaction: one input spending the
        null outpoint.

        Bug fix: the original referenced ``self.vin`` and ``.prevout``;
        the attributes actually defined are ``self.inputs`` and
        ``.previous_output``.
        """
        return len(self.inputs) == 1 and \
            self.inputs[0].previous_output.is_null()

    def __repr__(self):
        return "Transaction(version=%i inputs=%s outputs=%s locktime=%i)" % (self.version, repr(self.inputs), repr(self.outputs), self.locktime)

    def serialize(self):
        """Serialize via the project's serialize module."""
        return serialize.ser_tx(self)

    @staticmethod
    def deserialize(bytes):
        """Deserialize via the project's serialize module."""
        return serialize.deser_tx(bytes)
|
__author__ = 'sondredyvik'
class ConstraintNet:
    """Maps each key to the list of constraints registered for it."""

    def __init__(self):
        # key -> list of constraints, in registration order
        self.constraints = {}

    def add_constraint(self, key, constraint):
        """Append a constraint to key's list, creating the list on first use."""
        self.constraints.setdefault(key, []).append(constraint)
|
#
# Graphene schema for exposing ProcessClassification model
#
import graphene
from valuenetwork.valueaccounting.models import ProcessType
from valuenetwork.api.types.Process import ProcessClassification
from valuenetwork.api.types.EconomicEvent import Action
from django.db.models import Q
class Query(object): #graphene.AbstractType):
    """GraphQL queries exposing ProcessType as ProcessClassification."""

    process_classification = graphene.Field(ProcessClassification,
                                            id=graphene.Int())
    all_process_classifications = graphene.List(ProcessClassification)

    def resolve_process_classification(self, args, *rargs):
        """Look up one ProcessType by primary key; None when no id given."""
        pk = args.get('id')
        if pk is None:
            return None
        # NOTE(review): objects.get raises DoesNotExist when the pk is
        # missing rather than returning None -- confirm intended.
        record = ProcessType.objects.get(pk=pk)
        return record if record else None

    def resolve_all_process_classifications(self, args, context, info):
        """Return every ProcessType."""
        return ProcessType.objects.all()
|
"""
This module implement a filesystem storage adapter.
"""
from __future__ import unicode_literals
import errno
import logging
import os
from flask import current_app
from .interface import ImagineAdapterInterface
from PIL import Image
LOGGER = logging.getLogger(__name__)
class ImagineFilesystemAdapter(ImagineAdapterInterface):
    """
    Filesystem storage adapter
    """
    # Folder under the Flask static folder holding source images ('' = root).
    source_folder = None
    # Folder under the Flask static folder holding cached images.
    cache_folder = None
    def __init__(self, **kwargs):
        """
        Init _adapter
        :param kwargs: parameters (source_folder, cache_folder)
        :return:
        """
        # Slashes are stripped so the '%s/%s' joins below stay clean.
        self.source_folder = kwargs.get('source_folder', '').strip('/')
        self.cache_folder = kwargs.get('cache_folder', 'cache').strip('/')
    def get_item(self, path):
        """
        Get resource item
        :param path: string
        :return: PIL.Image, or False when the file is missing or unreadable
        """
        if self.source_folder:
            item_path = '%s/%s/%s' % (
                current_app.static_folder,
                self.source_folder,
                path.strip('/')
            )
        else:
            item_path = '%s/%s' % (
                current_app.static_folder,
                path.strip('/')
            )
        if os.path.isfile(item_path):
            try:
                return Image.open(item_path)
            except IOError as err:
                # Unreadable/corrupt image: log and signal failure with False.
                LOGGER.warning('File not found on path "%s" with error: %s' % (item_path, str(err)))
                return False
        else:
            return False
    def create_cached_item(self, path, content):
        """
        Create cached resource item
        :param path: str
        :param content: Image
        :return: public URL of the cached file, or False on failure
        """
        if isinstance(content, Image.Image):
            item_path = '%s/%s/%s' % (
                current_app.static_folder,
                self.cache_folder,
                path.strip('/')
            )
            # Ensure the destination directory tree exists before saving.
            self.make_dirs(item_path)
            content.save(item_path)
            if os.path.isfile(item_path):
                return '%s/%s/%s' % (current_app.static_url_path, self.cache_folder, path.strip('/'))
            else:  # pragma: no cover
                LOGGER.warning('File is not created on path: %s' % item_path)
                return False
        else:
            return False
    def get_cached_item(self, path):
        """
        Get cached resource item
        :param path: str
        :return: PIL.Image, or False when the cached file is missing
        """
        item_path = '%s/%s/%s' % (
            current_app.static_folder,
            self.cache_folder,
            path.strip('/')
        )
        if os.path.isfile(item_path):
            try:
                return Image.open(item_path)
            except IOError as err:  # pragma: no cover
                LOGGER.warning('Cached file not found on path "%s" with error: %s' % (item_path, str(err)))
                return False
        else:
            return False
    def check_cached_item(self, path):
        """
        Check for cached resource item exists
        :param path: str
        :return: public URL of the cached file, or False when absent
        """
        item_path = '%s/%s/%s' % (
            current_app.static_folder,
            self.cache_folder,
            path.strip('/')
        )
        if os.path.isfile(item_path):
            return '%s/%s/%s' % (current_app.static_url_path, self.cache_folder, path.strip('/'))
        else:
            return False
    def remove_cached_item(self, path):
        """
        Remove cached resource item
        :param path: str
        :return: True when a file was removed; None when nothing existed
        """
        item_path = '%s/%s/%s' % (
            current_app.static_folder,
            self.cache_folder,
            path.strip('/')
        )
        if os.path.isfile(item_path):
            os.remove(item_path)
        return True
    @staticmethod
    def make_dirs(path):
        """
        Create directories if not exist
        :param path: string (file path; only its dirname is created)
        :return:
        """
        try:
            os.makedirs(os.path.dirname(path))
        except OSError as err:
            # An already-existing directory is fine; re-raise anything else.
            if err.errno != errno.EEXIST:
                LOGGER.error('Failed to create directory %s with error: %s' % (path, str(err)))
                raise
|
"""
Flask routing
"""
from flask import Flask, request, session, send_from_directory, render_template
from werkzeug.contrib.fixers import ProxyFix
# NOTE(review): the static_path keyword is deprecated/removed in newer Flask
# releases -- confirm the pinned Flask version before upgrading.
app = Flask(__name__, static_path="/")
# Trust X-Forwarded-* headers set by the fronting proxy.
app.wsgi_app = ProxyFix(app.wsgi_app)
import api
import json
import mimetypes
import os.path
from datetime import datetime
from api.common import WebSuccess, WebError
from api.annotations import api_wrapper, require_login, require_teacher, require_admin, check_csrf
from api.annotations import block_before_competition, block_after_competition
from api.annotations import log_action
import api.routes.autogen
import api.routes.user
import api.routes.team
import api.routes.stats
import api.routes.admin
import api.routes.group
import api.routes.problem
import api.routes.achievements
log = api.logger.use(__name__)
# Session-cookie settings; applied to the app inside config_app().
session_cookie_domain = "127.0.0.1"
session_cookie_path = "/"
session_cookie_name = "flask"
# Empty here; expected to be set by the deployment before config_app() runs.
secret_key = ""
def config_app(*args, **kwargs):
    """
    Return the app object configured correctly.
    This needed to be done for gunicorn.
    """
    # Apply the module-level session/cookie settings.
    app.secret_key = secret_key
    app.config["SESSION_COOKIE_DOMAIN"] = session_cookie_domain
    app.config["SESSION_COOKIE_PATH"] = session_cookie_path
    app.config["SESSION_COOKIE_NAME"] = session_cookie_name

    # Mount each API blueprint under its URL prefix.
    blueprints = [
        (api.routes.autogen.blueprint, "/api/autogen"),
        (api.routes.user.blueprint, "/api/user"),
        (api.routes.team.blueprint, "/api/team"),
        (api.routes.stats.blueprint, "/api/stats"),
        (api.routes.admin.blueprint, "/api/admin"),
        (api.routes.group.blueprint, "/api/group"),
        (api.routes.problem.blueprint, "/api/problems"),
        (api.routes.achievements.blueprint, "/api/achievements"),
    ]
    for blueprint, prefix in blueprints:
        app.register_blueprint(blueprint, url_prefix=prefix)

    api.logger.setup_logs({"verbose": 2})
    return app
@app.after_request
def after_request(response):
    """Attach CORS/cache headers, refresh the CSRF token cookie, and mark
    API responses as JSON."""
    response.headers.add('Access-Control-Allow-Methods', 'GET, POST')
    response.headers.add('Access-Control-Allow-Credentials', 'true')
    response.headers.add('Access-Control-Allow-Headers', 'Content-Type, *')
    response.headers.add('Cache-Control', 'no-cache')
    response.headers.add('Cache-Control', 'no-store')
    if api.auth.is_logged_in():
        # Reuse the session's CSRF token, minting one on first response.
        if 'token' in session:
            response.set_cookie('token', session['token'])
        else:
            csrf_token = api.common.token()
            session['token'] = csrf_token
            response.set_cookie('token', csrf_token)

    # JB: This is a hack. We need a better solution
    if request.path[0:19] != "/api/autogen/serve/":
        # Bug fix: the mimetype was misspelled 'appication/json', which
        # clients would treat as an unknown content type instead of JSON.
        response.mimetype = 'application/json'
    return response
@app.route('/api/time', methods=['GET'])
@api_wrapper
def get_time():
    """Return the current server time as an integer Unix timestamp."""
    now = datetime.utcnow()
    return WebSuccess(data=int(now.timestamp()))
|
# Django settings for sensible_data_platform project.
import os
import LOCAL_SETTINGS
from utils import SECURE_platform_config
# NOTE(review): DEBUG is enabled here; deployments should override it, since
# ALLOWED_HOSTS below is empty and is only enforced when DEBUG is False.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
MAINTENANCE_MODE = False
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Environment-specific paths, URLs and credentials all come from
# LOCAL_SETTINGS / SECURE_platform_config so this file stays environment-agnostic.
BASE_DIR = LOCAL_SETTINGS.BASE_DIR
ROOT_DIR = LOCAL_SETTINGS.ROOT_DIR
ROOT_URL = LOCAL_SETTINGS.ROOT_URL
DATABASES = LOCAL_SETTINGS.DATABASES
BASE_URL = LOCAL_SETTINGS.BASE_URL
TRUST_ROOTS = LOCAL_SETTINGS.TRUST_ROOTS
PLATFORM_NAME = LOCAL_SETTINGS.PLATFORM_NAME
SUPPORT_EMAIL = LOCAL_SETTINGS.SUPPORT_EMAIL
EMAIL_HOST = LOCAL_SETTINGS.EMAIL_HOST
EMAIL_PORT = LOCAL_SETTINGS.EMAIL_PORT
EMAIL_HOST_USER = SECURE_platform_config.EMAIL_HOST_USER
EMAIL_HOST_PASSWORD = SECURE_platform_config.EMAIL_HOST_PASSWORD
DEFAULT_FROM_EMAIL = LOCAL_SETTINGS.DEFAULT_FROM_EMAIL
EMAIL_USE_TLS = LOCAL_SETTINGS.EMAIL_USE_TLS
# URLs that remain reachable while MAINTENANCE_MODE is on (admin only).
MAINTENANCE_IGNORE_URLS = (
    r'^.*/admin/$',
)
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
LOGIN_URL = ROOT_URL + 'accounts/login/'
LOGIN_REDIRECT_URL = ROOT_URL
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'da'
LANGUAGES = (
    ('da', 'Danish'),
    ('en', 'English'),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ROOT_DIR+'static_root/'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = ROOT_URL+'static/'
# Additional locations of static files
STATICFILES_DIRS = (
    ROOT_DIR+'static/',
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = LOCAL_SETTINGS.SECRET_KEY
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'maintenancemode.middleware.MaintenanceModeMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.static',
    'django.core.context_processors.i18n',
    'django.contrib.auth.context_processors.auth',
    'sensible_data_platform.context_processors.platform',
)
ROOT_URLCONF = 'sensible_data_platform.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'sensible_data_platform.wsgi.application'
TEMPLATE_DIRS = (
    ROOT_DIR+'templates',
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'openid_provider',
    'accounts',
    'render',
    'identity_providers',
    'utils',
    'oauth2app',
    'oauth2_authorization_server',
    'uni_form',
    'service_manager',
    'south',
    'sensible_platform_documents',
    'password_reset',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
import hashlib
# Derive a per-deployment cookie name from the secret key.
# NOTE(review): hashlib.sha1 requires bytes in Python 3 -- this expression
# works on Python 2 only; confirm before any interpreter upgrade.
SESSION_COOKIE_NAME = str(hashlib.sha1(SECRET_KEY).hexdigest())
LOCALE_PATHS = (
    '/home/arks/sensibledtu_DEVEL/SensibleData-Platform/sensible_data_platform/locale',
)
|
c_value, traceback):
try:
if self._local.con is None:
raise RuntimeError("Exit connection pool with no connection?")
if exc_type is not None:
self.rollback()
else:
self.commit()
if len(self._free) < self.max_idle:
self._free.append(self._local.con)
self._local.con = None
finally:
self._sem.release()
#print('released')
    def commit(self):
        """Commit the current transaction on this thread's connection."""
        self._local.con.commit()
    def rollback(self):
        """Roll back the current transaction on this thread's connection."""
        self._local.con.rollback()
def connect():
    """Open a new PostgreSQL connection using the module-level PG_DSN.

    Returns a (connection, cursor) pair; the cursor yields dict-like rows
    (DictCursor) so results can be indexed by column name.
    """
    #raise Exception('from where')
    pg = psycopg2.connect(PG_DSN)
    pgc = pg.cursor(cursor_factory=psycopg2.extras.DictCursor)
    pg.set_client_encoding('utf-8')
    return pg,pgc
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code.

    Supports datetime/date (ISO-8601 string), Decimal (float) and set (list).
    Raises TypeError for anything else, which is the contract expected by
    ``json.dumps(default=...)``.  (The previous version raised a bare
    Exception and left an unreachable ``raise TypeError`` after ``return``.)
    """
    # datetime must be tested before date: datetime is a date subclass.
    if isinstance(obj, datetime):
        return obj.isoformat()
    if isinstance(obj, decimal.Decimal):
        return float(obj)
    if isinstance(obj, set):
        return list(obj)
    if isinstance(obj, date):
        return obj.isoformat()
    raise TypeError("Type not serializable")
def get_journals(P,C,assignee=None,metastate_group='merge',archive=False):
    """Fetch task rows, optionally filtered by assignee and done/not-done status.

    P: unused here (kept for call compatibility).  C: DB cursor.
    assignee='all' or None disables the assignee filter.
    archive=False excludes tasks whose status is in DONESTATES;
    archive=True (for non-'production' groups) returns only those.
    Returns the raw fetched rows as a list.
    """
    qry = "select * from tasks where 1=1" #journal_entries where 1=1"
    args=[]
    if assignee=='all': assignee=None
    if assignee:
        # Tasks are stored as JSON in `contents`; filter on its fields.
        qry+=" and contents->>'assignee'=%s"
        args.append(assignee)
    if metastate_group:
        if metastate_group!='production':
            # NOTE(review): DONESTATES is a module-level global not visible in
            # this chunk; presumably the list of terminal statuses.
            if not archive: #and t['status'] in cfg.DONESTATES: continue
                qry+=" and contents->>'status' not in %s"
                args.append(tuple(DONESTATES))
            elif archive: #and t['status'] not in cfg.DONESTATES: continue
                qry+=" and contents->>'status' in %s"
                args.append(tuple(DONESTATES))
            else:
                raise Exception('wtf')
    args = tuple(args) ;
    C.execute(qry,args)
    rt=[]
    for r in C.fetchall():
        rt.append(r)
    return rt
def journal_digest(j):
    """Prepare a digest of the journal's most recent state.

    j: iterable of journal entries (dicts with at least 'created_at' and
    'attrs').  Returns {attr_key: {'created_at': <iso string>, 'value': v}}
    keeping, for every attribute key, the value from the newest entry.
    """
    rt = {}
    for entry in j:
        cat = entry['created_at']
        if type(cat) != str:
            # Normalize datetimes to strings so the <= comparisons below are
            # always string-vs-string.  The format previously used '%H:%I:%S'
            # (hour then 12-hour-clock hour, no minutes); fixed to '%M' to
            # match the format used elsewhere in this module.
            cat = cat.strftime('%Y-%m-%dT%H:%M:%S')
        for k, v in list(entry['attrs'].items()):
            if k not in rt:
                rt[k] = {'created_at': cat}
            if rt[k]['created_at'] <= cat:
                rt[k]['created_at'] = cat
                rt[k]['value'] = v
    return rt
def validate_save(C,tid,fetch_stamp,exc=True):
    """Optimistic-concurrency check before saving task *tid*.

    Compares the row's changed_at against the caller's *fetch_stamp*.
    Changes made by 'notify-trigger' are exempt.  With exc=True a mismatch
    raises AssertionError; with exc=False returns (equal, changed_at,
    changed_by).  If no row / no stamp, returns (True, changed_at, changed_by).
    """
    C.execute("select changed_at,changed_by from tasks where id=%s",(tid,))
    res = C.fetchone()
    if res and fetch_stamp and res['changed_at'] and res.get('changed_by')!='notify-trigger':
        eq = res['changed_at']==fetch_stamp
        if exc:
            # Assert the row has not been modified since the caller fetched it.
            assert eq,"task %s: fetch stamp!=changed_at by %s (%s , %s)"%(tid,res.get('changed_by'),fetch_stamp,res and res['changed_at']) or None
        else:
            return eq,res['changed_at'],res['changed_by']
    return True,res and res.get('changed_at') or None,res and res.get('changed_by') or None
def migrate_one(t,pgc,fetch_stamp=None,user=None):
    """Upsert one couchdb-style Task object *t* into the tasks table.

    Serializes t's non-internal attributes to JSON (adding a journal_digest
    when a journal is present), then inserts a new row or updates the
    existing one.  changed_at/changed_by are only bumped when the content
    actually changed (or when user is 'notify-trigger').
    """
    td={}
    tid = t._id
    # Parent id is the task id minus its last '/'-separated segment.
    parid = "/".join(tid.split("/")[0:-1])
    if not parid: parid=None
    for k in t.__dict__:
        if k not in ['_dynamic_properties','_doc']:
            if t.__dict__[k] is not None:
                assert k not in td,"%s already exists with value %s (!= %s) for %s"%(k,td[k],t.__dict__[k],t._id)
                td[k]=t.__dict__[k]
    if 'journal' in td and len(td['journal']):
        td['journal_digest']=journal_digest(td['journal'])
    tdj = json.dumps(td,default=json_serial)
    pgc.execute("select * from tasks where id=%s",(tid,))
    res = pgc.fetchone()
    if not res:
        op='ins'
        qry = "insert into tasks (contents,parent_id,changed_at,changed_by,id) values(%s,%s,%s,%s,%s)"
        chat=datetime.now() ; chatf='now'
        suser = user
    else:
        excont = res['contents']
        # Round-trip through JSON so the comparison is dict-vs-dict.
        nwcont = json.loads(tdj)
        # exf = open('/tmp/ex.json','w') ; exf.write(json.dumps(excont)) ; exf.close()
        # nwf = open('/tmp/nw.json','w') ; nwf.write(json.dumps(nwcont)) ; nwf.close()
        if nwcont==excont and user not in ['notify-trigger']:
            # Unchanged: keep the existing stamp/author.
            chat = res['changed_at'] ; chatf='existing'
            suser = res['changed_by']
        else:
            chatf='now'
            chat = datetime.now() ;
            suser = user
        #raise Exception(type(nwcont),type(excont),len(nwcont),len(excont),nwcont==excont)
        op='upd'
        qry = "update tasks set contents=%s,parent_id=%s,changed_at=%s,changed_by=%s where id=%s"
    # Parameter order matches both the insert and the update statement.
    data = (tdj,parid,chat,suser,t._id)
    #print qry,data
    print((op,t._id,parid))
    pgc.execute(qry,data)
# -- create table tasks (id varchar primary key, parent_id varchar references tasks(id) , contents json);
def get_repos(C):
    """Return all repository names from the repos table."""
    C.execute("select name from repos")
    return [row['name'] for row in C.fetchall()]
def get_usernames(C):
    """Return the usernames of all active participants, sorted."""
    C.execute("select username from participants where active=true order by username")
    return [row['username'] for row in C.fetchall()]
def hasperm_db(C,perm,user):
    """Return True if the active *user* has *perm* in the participants table.

    C: DB cursor; perm: permission string; user: username.
    """
    qry = "select count(*) cnt from participants where username=%s and %s=any(perms) and active=true"
    # Parameters must follow placeholder order: username first, then the
    # permission.  They were previously passed as (perm, user) -- swapped --
    # so the check could never match a real user/permission pair.
    C.execute(qry,(user,perm))
    o = C.fetchone()
    return bool(o['cnt'])
def hasperm(perms, perm):
    """Return True when *perm* is contained in the *perms* collection."""
    return perm in perms
def get_participants(C, sort=True, disabled=False):
    """Return participants keyed by username, each mapped to a dict of its row.

    disabled=True includes inactive participants; sort=True orders by username.
    """
    query = "select * from participants "
    if not disabled: query += " where active=true "
    if sort: query += " order by username"
    C.execute(query)
    participants = {}
    for row in C.fetchall():
        participants[row['username']] = {column: row[column] for column in row.keys()}
    return participants
def get_all_journals(C, day=None, creator=None):
    """Fetch journal entries, optionally filtered by (start, end) date pair and creator.

    Returns a list of dicts with creator, content, attrs, created_at and tid.
    """
    query = "select * from journal_entries where 1=1 "
    params = []
    if day:
        query += " and created_at::date between %s and %s"
        params.extend([day[0], day[1]])
    if creator:
        query += " and creator=%s"
        params.append(creator)
    C.execute(query, params)
    return [{'creator': entry['creator'],
             'content': entry['cnt'],
             'attrs': entry['attrs'],
             'created_at': entry['created_at'],
             'tid': entry['tid']} for entry in C.fetchall()]
# parents retrieval:
# with recursive allparents as (select id,parent_id from tasks t where id='832/408/8/1' union all select t.id,t.parent_id from tasks t join allparents on allparents.parent_id=t.id) select * from allparents order by id
# children retrieval:
def get_children(C,tid):
    """Return all descendant tasks of *tid* as Task objects.

    Uses the task_hierarchy path_info array to find every task whose path
    contains *tid* (excluding *tid* itself), then rebuilds Task instances
    from the stored JSON contents.
    """
    from couchdb import Task
    qry="select t.* from task_hierarchy th,tasks t where %s=any(th.path_info) and th.id<>%s and t.id=th.id"
    opts=(tid,tid)
    C.execute(qry,opts)
    rows = [t['contents'] for t in C.fetchall()]
    rt=[]
    for r in rows:
        # Strip fractional seconds / trailing 'Z' before parsing.
        r['created_at']=datetime.strptime( r['created_at'].split('.')[0].split('Z')[0], "%Y-%m-%dT%H:%M:%S" )
        t = Task(**r)
        rt.append(t)
    return rt
def get_cross_links(C, tid):
    """Return ids cross-linked with *tid* (either direction), excluding *tid* itself."""
    C.execute("select * from cross_links where id=%s or clid=%s", (tid, tid))
    linked = []
    for row in C.fetchall():
        linked.extend((row['clid'], row['id']))
    return [other for other in linked if other != tid]
def get_new_idx(C,parent=None):
if parent==None:
qry = "select max((regexp_split_to_array(id,'/'))[1]::integer)+1 new_idx from tasks"
conds=()
else:
pars = str(parent).split("/")
parlen=len(pars)+1
like=str(parent)+'/%'
qry = "select max((regexp_split_to_array(id,'/'))["+str(parlen)+"]::integer)+1 new_idx from tasks |
new and not all the same, so add it to the dictionary
kMerDict[sequences[i]] = count
count = count + 1
motifListFile.close()
return kMerDict
def getFastaList(fastaFileList):
    # Get the next fasta from each file in a list
    # Reads one (name line, sequence line) record from each open file handle
    # and returns the records as Biopython SeqRecords.
    fastaList = []
    for fastaFile in fastaFileList:
        # Iterate through the fasta files and begin parsing each
        fastaName = fastaFile.readline().strip()
        if fastaName != "":
            # Not at the end of the fasta file
            DNASequenceRecord = SeqRecord(Seq(fastaFile.readline().strip(), generic_dna), name = fastaName)
            fastaList.append(DNASequenceRecord)
        else:
            # At the end of the fasta file, so stop
            # NOTE(review): this break skips the remaining files entirely once
            # one file is exhausted -- intended only if all files have the same
            # number of records; confirm.
            break
    return fastaList
def makeValueFileEntries(valueFile, peakHeightLineElements, peakHeightColA, peakHeightColB):
    """Write fold-change values: 8 copies of (A - B) followed by 8 copies of (B - A).

    Eight lines per direction correspond to the eight feature examples
    generated for each sequence pair.
    """
    heightA = int(peakHeightLineElements[peakHeightColA])
    heightB = int(peakHeightLineElements[peakHeightColB])
    for delta in (heightA - heightB, heightB - heightA):
        valueFile.write((str(delta) + "\n") * 8)
def makeReverseComplements(seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB):
    """Return the upper-cased reverse complements of the four sequence records, in input order."""
    records = (seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB)
    return [record.seq.reverse_complement().upper() for record in records]
def recordKMerCounts(kMerCounts, outputFile):
    """Write the k-mer counts tab-separated, terminated by a newline."""
    outputFile.write("".join(str(count) + "\t" for count in kMerCounts))
    outputFile.write("\n")
def getKMerFeatures(sequenceOneA, sequenceTwoA, sequenceOneB, sequenceTwoB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE):
    # Convert the fasta sequence pair to the (4 x 4) x (4 x 4) alphabet and record its k-mer counts
    # ASSUMES THAT sequenceOneA sequenceTwoA, sequenceOneB, and sequenceTwoB ARE THE SAME LENGTH (no indels)
    # THE END OF THE SEQUENCE WILL GET CUT OFF IF THE WINDOWS DO NOT EXACTLY ENCOMPASS THE SEQUENCE
    numWindows = math.trunc(float(len(sequenceOneA) - WINDOWSIZE)/float(WINDOWSTRIDE)) + 1
    # numpy.zeros takes the length as one argument; the previous call
    # numpy.zeros(n, 1) passed 1 as a (bogus) dtype and raised at runtime.
    kMerCounts = numpy.zeros(numWindows*len(kMerDict))
    for l in range(numWindows):
        # Iterate through the windows and get the k-mer counts in each
        windowStart = l * WINDOWSTRIDE
        windowEnd = windowStart + WINDOWSIZE
        for i in range(windowStart, windowEnd - K + 1):
            # Build the combined-alphabet k-mer starting at position i.
            sequenceToLookUp = ""
            for j in range(K):
                # Accumulate the 4-way base combination of each of the K
                # positions.  This was '=' before, which overwrote the string
                # each step and kept only the final position, defeating the
                # empty-string initialisation above.
                sequenceToLookUp += sequenceOneA[i + j] + sequenceTwoA[i + j] + sequenceOneB[i + j] + sequenceTwoB[i + j]
            if sequenceToLookUp not in kMerDict:
                # The sequence has a wild card or is not in a motif, so skip it
                continue
            # Increment this window's slot; previously the code read the
            # window-0 slot while writing the window-l slot.
            kMerCounts[(l * len(kMerDict)) + kMerDict[sequenceToLookUp]] += 1
    recordKMerCounts(kMerCounts, outputFile)
def makeFourFeatureCounts(seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE):
    """Record k-mer features for the four haplotype orderings of two sequence pairs.

    Sequence set 1: (MaternalA, PaternalA), (MaternalB, PaternalB)
    Sequence set 2: (MaternalA, PaternalA), (PaternalB, MaternalB)
    Sequence set 3: (PaternalA, MaternalA), (MaternalB, PaternalB)
    Sequence set 4: (PaternalA, MaternalA), (PaternalB, MaternalB)
    """
    orderings = (
        (seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB),
        (seqRecordMaternalA, seqRecordPaternalA, seqRecordPaternalB, seqRecordMaternalB),
        (seqRecordPaternalA, seqRecordMaternalA, seqRecordMaternalB, seqRecordPaternalB),
        (seqRecordPaternalA, seqRecordMaternalA, seqRecordPaternalB, seqRecordMaternalB),
    )
    for seqOneA, seqTwoA, seqOneB, seqTwoB in orderings:
        getKMerFeatures(seqOneA, seqTwoA, seqOneB, seqTwoB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
# NOTE(review): the tuple-unpacking parameter below is Python 2-only syntax
# (removed in Python 3); this module also uses `print` statements elsewhere.
def makeSingleSequenceInputsKMerCounts(((seqRecordMaternalA, seqRecordPaternalA), (seqRecordMaternalB, seqRecordPaternalB)), valueFile, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE, peakHeightLineElements, peakHeightColA, peakHeightColB):
    # Make all of the sequence features for a fasta file, and make the corresponding values
    # Writes the fold-change targets, then feature counts for: forward strands
    # (A vs B), reverse complements (A vs B), forward strands (B vs A), and
    # reverse complements (B vs A).
    makeValueFileEntries(valueFile, peakHeightLineElements, peakHeightColA, peakHeightColB)
    [seqReverseMaternalA, seqReversePaternalA, seqReverseMaternalB, seqReversePaternalB] = makeReverseComplements(seqRecordMaternalA, seqRecordPaternalA, seqRecordMaternalB, seqRecordPaternalB)
    makeFourFeatureCounts(seqRecordMaternalA.seq.upper(), seqRecordPaternalA.seq.upper(), seqRecordMaternalB.seq.upper(), seqRecordPaternalB.seq.upper(), outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
    makeFourFeatureCounts(seqReverseMaternalA, seqReversePaternalA, seqReverseMaternalB, seqReversePaternalB, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
    makeFourFeatureCounts(seqRecordMaternalB.seq.upper(), seqRecordPaternalB.seq.upper(), seqRecordMaternalA.seq.upper(), seqRecordPaternalA.seq.upper(), outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
    makeFourFeatureCounts(seqReverseMaternalB, seqReversePaternalB, seqReverseMaternalA, seqReversePaternalA, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE)
def makeSequenceInputsKMerCountsAllSeqIter(kMerDict):
# Make sequence inputs for all of the pairs of individuals
# ASSUMES THAT THE INDIVIDUALS LISTED IN maternalFastaFileNameListFile and paternalFastaFileNameListFile ARE IN THE SAME ORDER
maternalFastaFileNameListFile = open(MATERNALFASTAFILENAMELISTFILENAME)
paternalFastaFileNameListFile = open(PATERNALFASTAFILENAMELISTFILENAME)
maternalFastaFileList = []
paternalFastaFileList = []
for line in maternalFastaFileNameListFile:
# Iterate through the fasta files and make a list of the maternal and paternal ones
maternalFastaFileList.append(open(line.strip()))
paternalFastaFileList.append(open(paternalFastaFileNameListFile.readline().strip()))
maternalFastaFileNameListFile.close()
paternalFastaFileNameListFile.close()
valueFile = open(VALUEFILENAME, 'w+')
outputFile = open(OUTPUTFILENAME, 'w+')
peakHeightFile = open(PEAKHEIGHTFILENAME)
print "Starting iterations!"
maternalFastas = getFastaList(maternalFastaFileList)
paternalFastas = getFastaList(paternalFastaFileList)
while len(maternalFastas) > 0:
# Iterate through all of the sequences and make all of the images for each combination
fastaPairs = it.izip(maternalFastas, paternalFastas)
fastaCombinations = it.combinations(fastaPairs, 2)
peakHeightLineElements = peakHeightFile.readline().strip().split("\t")
peakHeightColA = 0
peakHeightColB = 1
for comb in fastaCombinations:
# Iterate through the combinations of sequences and make the images for each
makeSingleSequenceInputsKMerCounts(((seqRecordMaternalA, seqRecordPaternalA), (seqRecordMaternalB, seqRecordPaternalB)), valueFile, outputFile, kMerDict, WINDOWSIZE, WINDOWSTRIDE, peakHeightLineElements, peakHeightColA, peakHeightColB)
peakHeightColB = peakHeightColB + 1
if peakHeightColB >= len(maternalFastas):
# The sequence inputs for all of the pairs for the first individual in the current pair have been made
peakHeightColA = peakHeightColA + 1
peakHeigh |
# Copyright (c) 2020, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/spherical_functions/blob/master/LICENSE>
### NOTE: The functions in this file are intended purely for inclusion in the Grid class. In
### particular, they assume that the first argument, `self` is an instance of Grid. They should
### probably not be used outside of that class.
def modes(self, ell_max=None, **kwargs):
    """Return mode weights of function decomposed into SWSHs
    This method uses `spinsfast` to convert values on an equiangular grid to mode weights.
    The output array has one less dimension than this object; rather than the last two axes giving
    the values on the two-dimensional grid, the last axis gives the mode weights.
    Parameters
    ==========
    ell_max: None or int [defaults to None]
        Maximum ell value in the output. If None, the result will have enough ell values to express
        the data on the grid without aliasing: (max(n_phi, n_theta) - 1) // 2.
    **kwargs: any types
        Additional keyword arguments are passed through to the Modes constructor on output
    """
    import copy
    import numpy as np
    import spinsfast
    from .. import Modes
    # n_phi/n_theta are attributes of this Grid; the bare names raised NameError.
    ell_max = ell_max or (max(self.n_phi, self.n_theta) - 1) // 2
    # copy.copy was previously never *called* (the function object itself was
    # assigned), so **metadata below could not work.  Merge in kwargs so they
    # actually reach the Modes constructor as documented.
    # NOTE(review): assumes the Grid stores its constructor metadata in
    # `self._metadata` -- confirm against the Grid class definition.
    metadata = copy.copy(self._metadata)
    metadata.update(**kwargs)
    return Modes(spinsfast.map2salm(self.view(np.ndarray), self.s, ell_max),
                 spin_weight=self.s, ell_min=0, ell_max=ell_max, **metadata)
def _check_broadcasting(self, array, reverse=False):
"""Test whether | or not the given array can broadcast against this object"""
import numpy as np
if isinstance(array, type(self)):
try:
if reverse:
np.broadcast(array, self)
else:
np.broadcast(self, array)
except ValueError:
return False
else:
return True
| else:
if np.ndim(array) > np.ndim(self)-2:
raise ValueError(f"Cannot broadcast array of {np.ndim(array)} dimensions against {type(self).__name__} "
f"object of fewer ({np.ndim(self)-2}) non-grid dimensions.\n"
"This is to ensure that scalars do not operate on individual "
"grid values; they must operate on all simultaneously.\n"
"If that is the case and you still want to broadcast, add more "
"dimensions before this object's first dimension.")
try:
if reverse:
np.broadcast(array, self[..., 0, 0])
else:
np.broadcast(self[..., 0, 0], array)
except ValueError:
return False
else:
return True
|
#
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in | the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from django.app | s import AppConfig
class MetricsConfig(AppConfig):
    """Django application configuration for the weblate.metrics app."""

    name = "weblate.metrics"
    label = "metrics"
    verbose_name = "Metrics"
|
# -*- coding: utf-8 -*-
import sys
#Éú³ÉºòÑ¡¼¯C1
#return£º×Öµäkey=item;value=item³öÏֵĴÎÊý
def getC1(srcdata):
    """Build the candidate 1-itemsets C1.

    Returns a dict mapping frozenset({item}) -> number of transactions in
    which the item occurs (counting duplicates within a transaction).
    """
    c1 = {}
    for transaction in srcdata:
        for item in transaction:
            # A frozenset is hashable, so it can serve as a dict key.
            key = frozenset([item])
            c1[key] = c1.get(key, 0) + 1
    return c1
#return£º Âú×ã×îС֧³Ö¶ÈµÄºòÑ¡¼¯
def getL(c, supct):
    """Prune candidates whose count is below the minimum support *supct*.

    Mutates *c* in place (deleting under-supported itemsets) and returns it.
    """
    undersupported = [itemset for itemset in c if c[itemset] < supct]
    for itemset in undersupported:
        del c[itemset]
    return c
#¸ù¾ÝÉÏÒ»¸öL²úÉúºòÑ¡¼¯C
#ɨÃèÔ´Êý¾ | Ý£¬¼ÆÊýitem
def getnextcandi(preL, srcdata):
    """Generate the next candidate set from the previous L and count support.

    Candidates are unions of distinct pairs of itemsets in *preL*
    (a Cartesian-product join); each candidate's count is the number of
    transactions in *srcdata* that contain it.
    """
    candidates = {}
    for first in preL:
        for second in preL:
            if first != second:
                candidates[first.union(second)] = 0
    # Count occurrences of each candidate across all transactions.
    for transaction in srcdata:
        for itemset in candidates:
            if itemset.issubset(transaction):
                candidates[itemset] += 1
    return candidates
# Apriori Ëã·¨
def Apriori(filename, supct):
    """Run one Apriori round over the transactions in *filename*.

    File format: one transaction per line, items separated by spaces.
    Returns the counted candidate itemsets after one join step.
    Uses open() in a context manager: the previous version used the
    Python 2-only `file()` builtin and never closed the handle.
    """
    with open(filename) as datafile:
        srcdata = [line.strip("\n").split(" ") for line in datafile]
    c = getC1(srcdata)
    L = getL(c, supct)
    c = getnextcandi(L, srcdata)
    return c
if __name__ == "__main__":
if len(sys.argv) == 3:
#Usage: apri.py filename surpport
items = Apriori(sys.argv[1], int(sys.argv[2]))
for key in [item for item in items if items[item] < int(sys.argv[2])]:
del items[key]
ap = {}
for itor in items:
#print items[itor]
#print itor
strword = ''
for word in itor:
strword += word + " "
ap[strword.strip(' ')] = items[itor]
linelst = sorted(ap.items(), lambda x, y: cmp(x[1], y[1]), reverse=True)
for i in range(len(linelst)):
print "#" + str(linelst[i][1]) + " " + linelst[i][0]
#for (k, v) in ap.items():
#print "#" + str(v) + " " + k
else:
#for example
print "err args"
|
#!/usr/bin/env python
# encoding: utf-8
import urllib
from con | fig import USERNAME, EXTENSION, PASSWORD, APP_KEY, APP_SECRET, SERVER, MOBILE
from ringcentral import SDK
def main():
sdk = SDK(APP_KEY, APP_SECRET, SERVER)
platform = sdk.platform()
platform.login(USERNAME, EXTENSION, PASSWORD)
to_numbers = "1234567890"
params = {'from': {'phoneNumber': USERNAME},'to': [{'phoneNumber': to_number}],'text': "SMS message"}
response = platform.post('/restapi/v1.0/account/~/extension/~/sms', params | )
print 'Sent SMS: ' + response.json().uri
if __name__ == '__main__':
main()
|
# encoding: utf-8
'''
@author: Jose Emilio Romero Lopez
@copyright: Copyright 2013-2014, Jose Emilio Romero Lopez | .
@license: GPL
@contact: jemromerol@gmail.com
This file is part of APASVO.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the imp | lied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PySide import QtGui
from apasvo.gui.views.generated import ui_loaddialog
from apasvo.utils.formats import rawfile
# UI format label -> rawfile format constant (None means autodetect).
FORMATS = {'Autodetect': None,
           'Binary': rawfile.format_binary,
           'Text': rawfile.format_text,
           }
DEFAULT_FORMAT = 'Autodetect'
# DTYPES and DTYPES_LABELS must stay index-aligned: the combobox index is
# used to look up the corresponding rawfile datatype.
DTYPES = (rawfile.datatype_int16,
          rawfile.datatype_int32,
          rawfile.datatype_int64,
          rawfile.datatype_float16,
          rawfile.datatype_float32,
          rawfile.datatype_float64, )
DTYPES_LABELS = ('16 bits, PCM',
                 '32 bits, PCM',
                 '64 bits, PCM',
                 '16 bits, float',
                 '32 bits, float',
                 '64 bits, float', )
# Index-aligned with the byte-order combobox as well.
BYTEORDERS = (rawfile.byteorder_little_endian,
              rawfile.byteorder_big_endian)
class LoadDialog(QtGui.QDialog, ui_loaddialog.Ui_LoadDialog):
    """A dialog window to load seismic data stored in a binary or text file.
    Allows the user to choose several settings in order to load a seismic
    signal, i.e.:
        Format: Binary or text format.
        Data-type: Float16, Float32 or Float64,
        Endianness: Little-endian or big-endian.
        Sample rate.
    The class also infers the right parameters for the chosen file and shows
    a preview of the loaded data for the selected parameters.
    Attributes:
        filename: Name of the opened file.
    """

    def __init__(self, parent, filename):
        super(LoadDialog, self).__init__(parent)
        self.setupUi(self)
        # Re-render the preview whenever any loading parameter changes.
        self.FileFormatComboBox.currentIndexChanged.connect(self.on_format_change)
        self.FileFormatComboBox.currentIndexChanged.connect(self.load_preview)
        self.DataTypeComboBox.currentIndexChanged.connect(self.load_preview)
        self.ByteOrderComboBox.currentIndexChanged.connect(self.load_preview)
        # init file format combobox
        # NOTE(review): dict.keys() indexing/addItems relies on Python 2
        # list-returning keys(); would need list(...) under Python 3.
        self.FileFormatComboBox.addItems(FORMATS.keys())
        self.FileFormatComboBox.setCurrentIndex(FORMATS.keys().index(DEFAULT_FORMAT))
        # init datatype combobox
        self.DataTypeComboBox.addItems(DTYPES_LABELS)
        self.DataTypeComboBox.setCurrentIndex(DTYPES.index(rawfile.datatype_float64))
        self.filename = filename
        self.load_preview()

    def on_format_change(self, idx):
        """Updates UI after toggling the format value."""
        fmt = FORMATS[self.FileFormatComboBox.currentText()]
        if fmt == rawfile.format_binary:
            # Binary files need datatype, byte order and sample rate.
            self.DataTypeComboBox.setVisible(True)
            self.DataTypeLabel.setVisible(True)
            self.ByteOrderComboBox.setVisible(True)
            self.ByteOrderLabel.setVisible(True)
            self.groupBox_2.setVisible(True)
            self.SampleFrequencySpinBox.setVisible(True)
            self.SampleFrequencyLabel.setVisible(True)
        elif fmt == rawfile.format_text:
            # Text files only need the sample rate.
            self.DataTypeComboBox.setVisible(False)
            self.DataTypeLabel.setVisible(False)
            self.ByteOrderComboBox.setVisible(False)
            self.ByteOrderLabel.setVisible(False)
            self.groupBox_2.setVisible(True)
            self.SampleFrequencySpinBox.setVisible(True)
            self.SampleFrequencyLabel.setVisible(True)
        else:
            # Autodetect: hide all manual parameters.
            self.DataTypeComboBox.setVisible(False)
            self.DataTypeLabel.setVisible(False)
            self.ByteOrderComboBox.setVisible(False)
            self.ByteOrderLabel.setVisible(False)
            self.groupBox_2.setVisible(False)
            self.SampleFrequencySpinBox.setVisible(False)
            self.SampleFrequencyLabel.setVisible(False)
        self.groupBox.adjustSize()
        self.adjustSize()

    def load_preview(self):
        """Shows a preview of loaded data using the selected parameters."""
        # Load parameters
        values = self.get_values()
        try:
            # Set up a file handler according to the type of raw data (binary or text)
            fhandler = rawfile.get_file_handler(self.filename, **values)
            # Print data preview
            # NOTE(review): .next() is Python 2 only (Python 3 uses next(...)).
            array = fhandler.read_in_blocks().next()
            data = ''
            for x in array:
                data += ("%g\n" % x)
        except:
            # Deliberately broad: any parse/read failure just disables OK and
            # shows an error message instead of crashing the dialog.
            data = '*** There was a problem reading the file content ***'
            self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setEnabled(False)
        else:
            self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setEnabled(True)
        self.PreviewTextEdit.clear()
        self.PreviewTextEdit.setText(data)

    def get_values(self):
        """Gets selected parameters."""
        # Keyword names match rawfile.get_file_handler's signature.
        return {'fmt': FORMATS[self.FileFormatComboBox.currentText()],
                'dtype': DTYPES[self.DataTypeComboBox.currentIndex()],
                'byteorder': BYTEORDERS[self.ByteOrderComboBox.currentIndex()],
                'fs': float(self.SampleFrequencySpinBox.value())}
|
from fibonacci import Fibonacci
def ans():
    """Index of the first Fibonacci number with at least 1000 digits."""
    largest_999_digit_number = int('9' * 999)
    return Fibonacci.index(Fibonacci.after(largest_999_digit_number))
if __name__ == '__main__':
    print(ans())
|
# -*- coding: utf-8 -*-
import json
import re
import unicodedata
import string
from urllib import urlencode
from requests import get
languages = {'de', 'en', 'es', 'fr', 'hu', 'it', 'nl', 'jp'}
url_template = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&{query}&props=labels%7Cdatatype%7Cclaims%7Caliases&languages=' + '|'.join(languages)
url_wmflabs_template = 'http://wdq.wmflabs.org/api?q='
url_wikidata_search_template='http://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
wmflabs_queries = [
'CLAIM[31:8142]', # all devise
]
db = {
'iso4217' : {
},
'names' : {
}
}
def remove_accents(data):
    """Return *data* decomposed with NFKD normalization and lower-cased."""
    decomposed = unicodedata.normalize('NFKD', data)
    return decomposed.lower()
def normalize_name(name):
    """Lower-case *name*, strip accents (NFKD), map '-' to space, collapse space runs."""
    # Inlined accent removal: NFKD-decompose the lower-cased name, lower again.
    cleaned = unicodedata.normalize('NFKD', name.lower()).lower().replace('-', ' ')
    return re.sub(' +', ' ', cleaned)
def add_currency_name(name, iso4217):
global db
db_names = db['names']
if not isinstance(iso4217, basestring):
print "problem", name, iso4217
return
name = normalize_name(name)
if name == '':
print "name empty", iso4217
return
iso4217_set = db_names.get(name, None)
if iso4217_set is not None and iso4217 not in iso4217_set:
db_names[name].append(iso4217)
else:
db_names[name] = [ iso4217 ]
def add_currency_label(label, iso4217, language):
    """Record the display label of `iso4217` for the given language."""
    global db

    per_language = db['iso4217'].setdefault(iso4217, {})
    per_language[language] = label
def get_property_value(data, name):
    """Return the first claim value for property `name`, or None if absent."""
    claims = data.get('claims', {}).get(name, {})
    if len(claims) == 0:
        return None
    first = claims[0]
    value = first.get('mainsnak', {}).get('datavalue', {}).get('value', '')
    if value == '':
        return None
    return value
def parse_currency(data):
    """Harvest names, labels and aliases for one wikidata currency entity.

    Only entities carrying property P498 (the ISO 4217 code) are used.
    Fixed: two lines of this function were garbled by stray "|" separators.
    """
    iso4217 = get_property_value(data, 'P498')
    if iso4217 is not None:
        # P558: presumably the unit symbol — confirm against wikidata.
        unit = get_property_value(data, 'P558')
        if unit is not None:
            add_currency_name(unit, iso4217)

        labels = data.get('labels', {})
        for language in languages:
            name = labels.get(language, {}).get('value', None)
            if name is not None:
                add_currency_name(name, iso4217)
                add_currency_label(name, iso4217, language)

        aliases = data.get('aliases', {})
        for language in aliases:
            for i in range(0, len(aliases[language])):
                alias = aliases[language][i].get('value', None)
                add_currency_name(alias, iso4217)
def fetch_data(wikidata_ids):
    """Fetch the given wikidata entities and feed each to parse_currency()."""
    query = urlencode({'ids' : '|'.join(wikidata_ids)})
    response = get(url_template.format(query=query))
    payload = json.loads(response.content)
    for entity_id, entity in payload.get('entities', {}).items():
        parse_currency(entity)
def add_q(i):
    """Turn a numeric wikidata item id into its Q-identifier string."""
    return "Q{0}".format(i)
def fetch_data_batch(wikidata_ids):
    """Fetch entities in batches of at most 50 ids (wbgetentities limit).

    BUG FIX: the original sliced [0:49] but then dropped [0:50], silently
    skipping every 50th id (index 49 of each batch).
    """
    while len(wikidata_ids) > 0:
        fetch_data(wikidata_ids[0:50])
        wikidata_ids = wikidata_ids[50:]
def wdq_query(query):
url = url_wmflabs_template + query
htmlresponse = get(url)
jsonresponse = json.loads(htmlresponse.content)
qlist = map(add_q, jsonresponse.get('items', {}))
error = jsonresponse.get('status', {}).get('error', None)
if error != None and error != 'OK':
print "error for query '" + query + "' :" + error
fetch_data_batch(qlist)
def wd_query(query, offset=0):
    """Full-text search on wikidata (one 50-result page) and fetch matches."""
    params = urlencode({'srsearch': query, 'srlimit': 50, 'sroffset': offset})
    response = get(url_wikidata_search_template.format(query=params))
    parsed = json.loads(response.content)
    qlist = [r.get('title', '') for r in parsed.get('query', {}).get('search', {})]
    fetch_data_batch(qlist)
## fetch ##
# Harvest every entity matched by the wmflabs queries.
for q in wmflabs_queries:
    wdq_query(q)

# static: common spellings the harvest may miss
add_currency_name(u"euro", 'EUR')
add_currency_name(u"euros", 'EUR')
add_currency_name(u"dollar", 'USD')
add_currency_name(u"dollars", 'USD')
add_currency_name(u"peso", 'MXN')
add_currency_name(u"pesos", 'MXN')

# write the collected database to disk
with open("currencies.json", "wb") as f:
    json.dump(db, f, indent=4, encoding="utf-8")
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test the ListVariable canned Variable type.
"""
import os
import TestSCons
# Test harness and the SConstruct path used throughout this test.
test = TestSCons.TestSCons()

SConstruct_path = test.workpath('SConstruct')


def check(expect):
    """Assert that stdout lines 1..len(expect) of the last run equal expect."""
    result = test.stdout().split('\n')
    # Line 0 is the scons banner; compare only the slice we expect.
    r = result[1:len(expect)+1]
    assert r == expect, (r, expect)
test.write(SConstruct_path, """\
from SCons.Variables.ListVariable import ListVariable
LV = ListVariable
from SCons.Variables import ListVariable
list_of_libs = Split('x11 gl qt ical')
optsfile = 'scons.variables'
opts = Variables(optsfile, args=ARGUMENTS)
opts.AddVariables(
ListVariable('shared',
'libraries to build as shared libraries',
'all',
names = list_of_libs,
map = {'GL':'gl', 'QT':'qt'}),
LV('listvariable', 'listvariable help', 'all', names=['l1', 'l2', 'l3'])
)
env = Environment(variables=opts)
opts.Save(optsfile, env)
Help(opts.GenerateHelpText(env))
print(env['shared'])
if 'ical' in env['shared']:
print('1')
else:
print('0')
print(" ".join(env['shared']))
print(env.subst('$shared'))
# Test subst_path() because it's used in $CPPDEFINES expansions.
print(env.subst_path('$shared'))
Default(env.Alias('dummy', None))
""")
# Default run: 'all' expands to the full sorted library list.
test.run()
check(['all', '1', 'gl ical qt x11', 'gl ical qt x11',
       "['gl ical qt x11']"])

# Both variables should have been persisted to the variables file.
expect = "shared = 'all'"+os.linesep+"listvariable = 'all'"+os.linesep
test.must_match(test.workpath('scons.variables'), expect)

check(['all', '1', 'gl ical qt x11', 'gl ical qt x11',
       "['gl ical qt x11']"])

test.run(arguments='shared=none')
check(['none', '0', '', '', "['']"])

# An empty value is equivalent to 'none'.
test.run(arguments='shared=')
check(['none', '0', '', '', "['']"])

test.run(arguments='shared=x11,ical')
check(['ical,x11', '1', 'ical x11', 'ical x11',
       "['ical x11']"])

# Empty list items are ignored.
test.run(arguments='shared=x11,,ical,,')
check(['ical,x11', '1', 'ical x11', 'ical x11',
       "['ical x11']"])

# map= translates the GL/QT aliases before validation.
test.run(arguments='shared=GL')
check(['gl', '0', 'gl', 'gl'])

test.run(arguments='shared=QT,GL')
check(['gl,qt', '0', 'gl qt', 'gl qt', "['gl qt']"])
# An unknown library name must abort with a conversion error.
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)

test.run(arguments='shared=foo', stderr=expect_stderr, status=2)

# be paranoid in testing some more combinations

expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)

test.run(arguments='shared=foo,ical', stderr=expect_stderr, status=2)

expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)

test.run(arguments='shared=ical,foo', stderr=expect_stderr, status=2)
# Invalid name sandwiched between valid ones is still reported.
# Fixed: the expected-stderr literal was garbled by a stray "|" separator.
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo
""" + test.python_file_line(SConstruct_path, 19)

test.run(arguments='shared=ical,foo,x11', stderr=expect_stderr, status=2)
# Several invalid names are reported together, comma separated.
expect_stderr = """
scons: *** Error converting option: shared
Invalid value(s) for option: foo,bar
""" + test.python_file_line(SConstruct_path, 19)

test.run(arguments='shared=foo,x11,,,bar', stderr=expect_stderr, status=2)
# Second SConstruct: the default value given as an actual Python list.
# Fixed: the test.write(...) call line was garbled by a stray "|" separator.
test.write('SConstruct', """
from SCons.Variables import ListVariable

opts = Variables(args=ARGUMENTS)
opts.AddVariables(
    ListVariable('gpib',
                 'comment',
                 ['ENET', 'GPIB'],
                 names = ['ENET', 'GPIB', 'LINUX_GPIB', 'NO_GPIB']),
    )

env = Environment(variables=opts)
Help(opts.GenerateHelpText(env))

print(env['gpib'])

Default(env.Alias('dummy', None))
""")
# The list default prints as the comma-joined selected names.
test.run(stdout=test.wrap_stdout(read_str="ENET,GPIB\n", build_str="""\
scons: Nothing to be done for `dummy'.
"""))

test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from cabot.cabotapp.views import (
run_status_check, graphite_api_data, checks_run_recently,
duplicate_icmp_check, duplicate_graphite_check, duplicate_http_check, duplicate_jenkins_check,
duplicate_instance, acknowledge_alert, remove_acknowledgement,
GraphiteCheckCreateView, GraphiteCheckUpdateView,
HttpCheckCreateView, HttpCheckUpdateView,
ICMPCheckCreateView, ICMPCheckUpdateView,
JenkinsCheckCreateView, JenkinsCheckUpdateView,
StatusCheckDeleteView, StatusCheckListView, StatusCheckDetailView,
StatusCheckResultDetailView, StatusCheckReportView, UserProfileUpdateAlert)
from cabot.cabotapp.views import (InstanceListView, InstanceDetailView,
InstanceUpdateView, InstanceCreateView, InstanceDeleteView,
ServiceListView, ServiceDetailView,
ServiceUpdateView, ServiceCreateView, ServiceDeleteView,
UserProfileUpdateView, ShiftListView, subscriptions)
from cabot import rest_urls
from django.contrib import admin
from django.views.generic.base import RedirectView
from django.contrib.auth.views import login, logout, password_reset, password_reset_done, password_reset_confirm
# Register all ModelAdmin classes before the admin urls are included below.
admin.autodiscover()

from importlib import import_module
import logging

logger = logging.getLogger(__name__)
# Main URL table for the cabot app (legacy Django patterns() style).
# Fixed: two entries were garbled by stray "|" separators.
urlpatterns = patterns('',
    url(r'^$', view=RedirectView.as_view(url='services/', permanent=False),
        name='dashboard'),
    url(r'^subscriptions/', view=subscriptions,
        name='subscriptions'),
    url(r'^accounts/login/', view=login, name='login'),
    url(r'^accounts/logout/', view=logout, name='logout'),
    url(r'^accounts/password-reset/',
        view=password_reset, name='password-reset'),
    url(r'^accounts/password-reset-done/',
        view=password_reset_done, name='password-reset-done'),
    url(r'^accounts/password-reset-confirm/',
        view=password_reset_confirm, name='password-reset-confirm'),
    url(r'^status/', view=checks_run_recently,
        name='system-status'),
    url(r'^services/', view=ServiceListView.as_view(),
        name='services'),
    url(r'^service/create/', view=ServiceCreateView.as_view(),
        name='create-service'),
    url(r'^service/update/(?P<pk>\d+)/',
        view=ServiceUpdateView.as_view(
        ), name='update-service'),
    url(r'^service/delete/(?P<pk>\d+)/',
        view=ServiceDeleteView.as_view(
        ), name='delete-service'),
    url(r'^service/(?P<pk>\d+)/',
        view=ServiceDetailView.as_view(), name='service'),
    url(r'^service/acknowledge_alert/(?P<pk>\d+)/',
        view=acknowledge_alert, name='acknowledge-alert'),
    url(r'^service/remove_acknowledgement/(?P<pk>\d+)/',
        view=remove_acknowledgement, name='remove-acknowledgement'),
    url(r'^instances/', view=InstanceListView.as_view(),
        name='instances'),
    url(r'^instance/create/', view=InstanceCreateView.as_view(),
        name='create-instance'),
    url(r'^instance/update/(?P<pk>\d+)/',
        view=InstanceUpdateView.as_view(
        ), name='update-instance'),
    url(r'^instance/duplicate/(?P<pk>\d+)/',
        view=duplicate_instance, name='duplicate-instance'),
    url(r'^instance/delete/(?P<pk>\d+)/',
        view=InstanceDeleteView.as_view(
        ), name='delete-instance'),
    url(r'^instance/(?P<pk>\d+)/',
        view=InstanceDetailView.as_view(), name='instance'),
    url(r'^checks/$', view=StatusCheckListView.as_view(),
        name='checks'),
    url(r'^check/run/(?P<pk>\d+)/',
        view=run_status_check, name='run-check'),
    url(r'^check/delete/(?P<pk>\d+)/',
        view=StatusCheckDeleteView.as_view(
        ), name='delete-check'),
    url(r'^check/(?P<pk>\d+)/',
        view=StatusCheckDetailView.as_view(), name='check'),
    url(r'^checks/report/$',
        view=StatusCheckReportView.as_view(), name='checks-report'),
    url(r'^icmpcheck/create/', view=ICMPCheckCreateView.as_view(),
        name='create-icmp-check'),
    url(r'^icmpcheck/update/(?P<pk>\d+)/',
        view=ICMPCheckUpdateView.as_view(
        ), name='update-icmp-check'),
    url(r'^icmpcheck/duplicate/(?P<pk>\d+)/',
        view=duplicate_icmp_check, name='duplicate-icmp-check'),
    url(r'^graphitecheck/create/',
        view=GraphiteCheckCreateView.as_view(
        ), name='create-graphite-check'),
    url(r'^graphitecheck/update/(?P<pk>\d+)/',
        view=GraphiteCheckUpdateView.as_view(
        ), name='update-graphite-check'),
    url(r'^graphitecheck/duplicate/(?P<pk>\d+)/',
        view=duplicate_graphite_check, name='duplicate-graphite-check'),
    url(r'^httpcheck/create/', view=HttpCheckCreateView.as_view(),
        name='create-http-check'),
    url(r'^httpcheck/update/(?P<pk>\d+)/',
        view=HttpCheckUpdateView.as_view(
        ), name='update-http-check'),
    url(r'^httpcheck/duplicate/(?P<pk>\d+)/',
        view=duplicate_http_check, name='duplicate-http-check'),
    url(r'^jenkins_check/create/', view=JenkinsCheckCreateView.as_view(),
        name='create-jenkins-check'),
    url(r'^jenkins_check/update/(?P<pk>\d+)/',
        view=JenkinsCheckUpdateView.as_view(
        ), name='update-jenkins-check'),
    url(r'^jenkins_check/duplicate/(?P<pk>\d+)/',
        view=duplicate_jenkins_check, name='duplicate-jenkins-check'),
    url(r'^result/(?P<pk>\d+)/',
        view=StatusCheckResultDetailView.as_view(
        ), name='result'),
    url(r'^shifts/', view=ShiftListView.as_view(),
        name='shifts'),
    url(r'^graphite/', view=graphite_api_data,
        name='graphite-data'),
    url(r'^user/(?P<pk>\d+)/profile/$',
        view=UserProfileUpdateView.as_view(), name='user-profile'),
    url(r'^user/(?P<pk>\d+)/profile/(?P<alerttype>.+)',
        view=UserProfileUpdateAlert.as_view(
        ), name='update-alert-user-data'),
    url(r'^admin/', include(admin.site.urls)),
    # Comment below line to disable browsable rest api
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    url(r'^api/', include(rest_urls.router.urls)),
)
def append_plugin_urls():
    """
    Appends plugin specific URLs to the urlpatterns variable.

    Plugins whose urls module fails to import are skipped; the failure is
    now logged instead of being silently swallowed.
    """
    global urlpatterns
    for plugin in settings.CABOT_PLUGINS_ENABLED_PARSED:
        try:
            _module = import_module('%s.urls' % plugin)
        except Exception as e:
            # Previously `pass`: a broken plugin disappeared without trace.
            logger.exception('Could not load urls for plugin %s: %s', plugin, e)
        else:
            urlpatterns += patterns('',
                url(r'^plugins/%s/' % plugin, include('%s.urls' % plugin))
            )

append_plugin_urls()
"""Testing for ORM"""
from unittest import TestCase
import nose
from nose.tools import eq_
from sets import Set
from mdcorpus.orm import *
class ORMTestCase(TestCase):
def setUp(self):
self.store = Store(create_database("sqlite:"))
self.store.execute(MovieTitlesMetadata.CREATE_SQL)
self.store.execute(MovieCharactersMetadata.CREATE_SQL)
self.store.execute(RawScriptUrl.CREATE_SQL)
self.store.execute(MovieConversation.CREATE_SQL)
self.store.execute(MovieLine.CREATE_SQL)
movie = self.store.add(MovieTitlesMetadata(0,
u"10 things i hate about you",
1999,
6.90,
62847))
bianca = self.store.add(MovieCharactersMetadata(0,
"BIANCA",
"f",
4))
bruce = self.store.add(MovieCharactersMetadata(1,
"BRUCE",
"?",
"?"))
cameron = self.store.add(MovieCharactersMetadata(2,
"CAMERON",
| "m",
"3"))
url = self.store.add(RawScriptUrl("http://www.dailyscript.com/scripts/10Things.html"))
conversation = self.store.add(MovieConversation(0, 2, 0))
line194 = self.stor | e.add(MovieLine(
194, "Can we make this quick? Roxanne Korrine and Andrew Barrett are having an incredibly horrendous public break- up on the quad. Again."))
line195 = self.store.add(MovieLine(
195, "Well, I thought we'd start with pronunciation, if that's okay with you."))
line196 = self.store.add(MovieLine(
196, "Not the hacking and gagging and spitting part. Please."))
line197 = self.store.add(MovieLine(
197, "Okay... then how 'bout we try out some French cuisine. Saturday? Night?"))
self.store.flush()
movie.characters.add(bianca)
movie.characters.add(bruce)
movie.characters.add(cameron)
url.movie = movie
line_id_list = [194, 195, 196, 197]
for (i, line_id) in enumerate(line_id_list):
line = self.store.find(MovieLine, MovieLine.id == line_id).one()
line.number = i + 1
conversation.lines.add(line)
self.store.commit()
def tearDown(self):
print "done"
class MovieTitlesMetadataTestCase(ORMTestCase):
    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_url(self):
        """The movie's url() resolves to the raw script URL added in setUp."""
        record = self.store.find(MovieTitlesMetadata,
                                 MovieTitlesMetadata.id == 0).one()
        eq_(record.url(), "http://www.dailyscript.com/scripts/10Things.html")
class MovieCharactersMetadataTestCase(ORMTestCase):
    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_gender(self):
        """gender() returns the value each character was created with."""
        expected_by_id = [(0, "f"), (1, "?"), (2, "m")]
        for char_id, expected in expected_by_id:
            character = self.store.find(
                MovieCharactersMetadata,
                MovieCharactersMetadata.id == char_id).one()
            eq_(character.gender(), expected)
class MovieConversationTestCase(ORMTestCase):
    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_consistency(self):
        """Both participants belong to the conversation's own movie."""
        convo = self.store.find(MovieConversation,
                                MovieConversation.id == 1).one()
        for participant in (convo.first_character, convo.second_character):
            eq_(participant.movie.title, convo.movie.title)

    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_line_list(self):
        """line_list() yields the lines in conversation order."""
        convo = self.store.find(MovieConversation,
                                MovieConversation.id == 1).one()
        eq_([line.id for line in convo.line_list()], [194, 195, 196, 197])
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Core rules for Pants to operate correctly.
These are always activated and cannot be disabled.
"""
from pants.core.goals import check, fmt, lint, package, publish, repl, run, tailor, test
from pants.core.target_types import (
ArchiveTarget,
FilesGeneratorTarget,
FileTarget,
GenericTarget,
RelocatedFiles,
ResourcesGeneratorTarget,
ResourceTarget,
)
from pants.core.target_types import rules as target_type_rules
from pants.core.util_rules import (
archive,
config_files,
distdir,
external_tool,
filter_empty_sources,
pants_bin,
source_files,
stripped_source_files,
subprocess_environment,
)
from pants.goal import anonymous_telemetry, stats_aggregator
from pants.source import source_root
def rules():
    """Return every core rule; these are always activated.

    Fixed: one entry was garbled by a stray "|" separator.
    """
    return [
        # goals
        *check.rules(),
        *fmt.rules(),
        *lint.rules(),
        *package.rules(),
        *publish.rules(),
        *repl.rules(),
        *run.rules(),
        *tailor.rules(),
        *test.rules(),
        # util_rules
        *anonymous_telemetry.rules(),
        *archive.rules(),
        *config_files.rules(),
        *distdir.rules(),
        *external_tool.rules(),
        *filter_empty_sources.rules(),
        *pants_bin.rules(),
        *source_files.rules(),
        *source_root.rules(),
        *stats_aggregator.rules(),
        *stripped_source_files.rules(),
        *subprocess_environment.rules(),
        *target_type_rules(),
    ]
def target_types():
    """Return the always-registered core target types.

    Fixed: one entry was garbled by a stray "|" separator.
    """
    return [
        ArchiveTarget,
        FileTarget,
        FilesGeneratorTarget,
        GenericTarget,
        ResourceTarget,
        ResourcesGeneratorTarget,
        RelocatedFiles,
    ]
|
from django.db import models
class MaternalArvPostModManager(models.Manager):
    """Manager that resolves rows from their serialized natural key.

    Fixed: two lines were garbled by stray "|" separators.
    """

    def get_by_natural_key(
            self, arv_code, report_datetime, visit_instance, appt_status,
            visit_definition_code, subject_identifier_as_pk):
        # NOTE(review): models.get_model was removed in Django 1.9 —
        # confirm this project pins an older Django.
        MaternalVisit = models.get_model('mb_maternal', 'MaternalVisit')
        MaternalArvPost = models.get_model('mb_maternal', 'MaternalArvPost')
        # Resolve the visit first, then the MaternalArvPost attached to it.
        maternal_visit = MaternalVisit.objects.get_by_natural_key(
            report_datetime, visit_instance, appt_status, visit_definition_code,
            subject_identifier_as_pk)
        maternal_arv_post = MaternalArvPost.objects.get(maternal_visit=maternal_visit)
        return self.get(arv_code=arv_code, maternal_arv_post=maternal_arv_post)
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
import sys
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
# Windows implementation: only hive-site.xml needs to be rendered.
# Fixed: the decorator and conf_dir lines were garbled by stray "|" separators.
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def hcat():
  import params

  XmlConfig("hive-site.xml",
            conf_dir = params.hive_conf_dir,
            configurations = params.config['configurations']['hive-site'],
            owner=params.hive_user,
            configuration_attributes=params.config['configuration_attributes']['hive-site']
  )
# Default (Linux) implementation: create conf/pid dirs, render hive-site.xml
# and the hcat environment script.
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hcat():
  import params

  # Configuration directories, owned by the hcat user.
  Directory(params.hive_conf_dir,
            create_parents = True,
            owner=params.hcat_user,
            group=params.user_group,
  )

  Directory(params.hcat_conf_dir,
            create_parents = True,
            owner=params.hcat_user,
            group=params.user_group,
  )

  # PID directory is owned by the webhcat user, which runs the daemon.
  Directory(params.hcat_pid_dir,
            owner=params.webhcat_user,
            create_parents = True
  )

  # Client-side hive-site.xml, world-readable (0644).
  XmlConfig("hive-site.xml",
            conf_dir=params.hive_client_conf_dir,
            configurations=params.hive_site_config,
            configuration_attributes=params.config['configuration_attributes']['hive-site'],
            owner=params.hive_user,
            group=params.user_group,
            mode=0644)

  File(format("{hcat_conf_dir}/hcat-env.sh"),
       owner=params.hcat_user,
       group=params.user_group,
       content=InlineTemplate(params.hcat_env_sh_template)
  )
|
from django.test import TestCase
from app_forum.models import Forum, Comment
from app_forum.forms import CommentForm, ThreadForm
# test for forms
class CommentFormTest(TestCase):
    """A comment form with content is valid.

    Fixed: one dict line was garbled by a stray "|" separator.
    """

    def test_comment_forms(self):
        form_data = {
            'comment_content': 'comment'
        }
        form = CommentForm(data=form_data)
        self.assertTrue(form.is_valid())
class ThreadFormTest(TestCase):
    """A thread form built from bare strings is expected to be invalid.

    NOTE(review): presumably 'forum_category' needs a valid related-object
    pk, which 'category' is not — confirm against ThreadForm.
    Fixed: the closing brace line was garbled by a stray "|" separator.
    """

    def test_thread_forms(self):
        thread_data = {
            'forum_title': 'title',
            'forum_category': 'category',
            'forum_content': 'content'
        }
        thread = ThreadForm(data=thread_data)
        self.assertFalse(thread.is_valid())
|
#!/usr/bin/env python
# coding: UTF-8
from __future__ import division
import numpy as np
# Fixed: the def line was garbled by a stray "|" separator.
def left_multiplication(g, x):
    """
    Multiplication action of a group and a vector: g @ x.
    """
    return np.dot(g, x)
def trans_adjoint(g, x):
    """Transpose-adjoint action: g @ x @ g.T."""
    gx = np.dot(g, x)
    return np.dot(gx, g.T)
class RungeKutta(object):
    """Runge-Kutta integrator driven by a `method` description.

    The method object supplies `movement` (mapping an algebra element to a
    group movement) and `edges`, a list of (i, j, transition) triples
    describing how stage i is computed from stage j.
    Fixed: a docstring line was garbled by a stray "|" separator, and the
    classmethod's first parameter was misleadingly named `self`.
    """

    def __init__(self, method):
        self.method = method
        self.movement = self.method.movement
        # One more stage than there are edges in the method graph.
        self.nb_stages = len(self.method.edges) + 1

    def compute_vectors(self, movement_field, stages):
        """
        Compute the Lie algebra elements for the stages.
        """
        return np.array([movement_field(stage) for stage in stages])

    def get_iterate(self, movement_field, action):
        """Build the fixed-point iteration map over the stage array."""
        def evol(stages):
            new_stages = stages.copy()
            for (i, j, transition) in self.method.edges:
                # inefficient as a) only some vectors are needed b) recomputed for each edge
                vects = self.compute_vectors(movement_field, new_stages)
                # the order of the edges matters; the goal is that explicit method need only one iteration
                new_stages[i] = action(self.movement(transition(vects)), new_stages[j])
            return new_stages
        return evol

    @classmethod
    def fix(cls, iterate, z):
        """
        Find a fixed point to the iterating function `iterate`.

        Returns (fixed_point, iterations); raises if not converged in 30.
        """
        for i in range(30):
            new_z = iterate(z)
            if np.allclose(z, new_z, atol=1e-10, rtol=1e-16):
                break
            z = new_z
        else:
            raise Exception("No convergence after {} steps".format(i))
        return z, i

    def step(self, movement_field, x0, action=None):
        """Advance one step from x0; the result is the last stage."""
        if action is None:
            action = left_multiplication
        iterate = self.get_iterate(movement_field, action)
        z0 = np.array([x0]*self.nb_stages)  # initial guess
        z, i = self.fix(iterate, z0)
        return z[-1]
|
from ete3 import Tree, TreeStyle, TextFace

# Render the tagfrog phylogeny to PDF with small node markers.
# Fixed: two lines were garbled by stray "|" separators.
t = Tree('tagfrog.phy')
for node in t.traverse():
    node.img_style['size'] = 3
    if node.is_leaf():
        # NOTE(review): name_face is built but never attached (no add_face
        # call), and ts is never passed to render() — confirm intent.
        name_face = TextFace(node.name)

ts = TreeStyle()
ts.show_scale = True
t.render('tagfrog.pdf')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.