commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
cd07f1801f76b8481f2485f92dafb63bf3359c43 | add filter sim | liyi193328/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq | run_scripts/filter_sim_json.py | run_scripts/filter_sim_json.py | #encoding=utf-8
import sys
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding("utf-8")
import os
import codecs
import json
import click
@click.command()
@click.argument("json_path", "json path from get_q2q_sim.py, every cell contain source, predict, score")
@click.argument("save_path", "store filter result path, every line is tab.join(s,p,score)")
@click.option("--sim_threshold", default=0.95,help="lowest sim threshold[0.95]")
def filter_low_sim_from_json(json_path, save_path, sim_threshold=0.95):
data = json.load(codecs.open(json_path, "r", "utf-8"))
fout = codecs.open(save_path, "w", "utf-8")
for i, cell in enumerate(data):
if i % 100000 == 0:
print("finished {]".format(i/len(data)))
try:
if cell["score"] >= sim_threshold:
s = "\t".join([cell["source"], cell["predict"], str(cell["score"])]) + "\n"
fout.write(s)
except KeyError:
print(cell)
continue
fout.close()
if __name__ == "__main__":
filter_low_sim_from_json() | #encoding=utf-8
import sys
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding("utf-8")
import os
import codecs
import json
import click
@click.command()
@click.argument("json_path", "json path from get_q2q_sim.py, every cell contain source, predict, score")
@click.argument("save_path", "store filter result path, every line is tab.join(s,p,score)")
@click.option("--sim_threshold", default=0.95,help="lowest sim threshold[0.95]")
def filter_low_sim_from_json(json_path, save_path, sim_threshold=0.95):
data = json.load(codecs.open(json_path, "r", "utf-8"))
fout = codecs.open(save_path, "w", "utf-8")
for i, cell in enumerate(data):
if cell["score"] >= sim_threshold:
s = "\t".join([cell["source"], cell["predict"], str(cell["score"])]) + "\n"
fout.write(s)
fout.close()
if __name__ == "__main__":
filter_low_sim_from_json() | apache-2.0 | Python |
caec3ea0bb901103a228fd767c32ca90d5632210 | Remove mention of `core` from `print_columnized()`-docstring. | seblin/shcol | shcol/highlevel.py | shcol/highlevel.py | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2015, Sebastian Linke
# Released under the Simplified BSD license
# (see LICENSE file for details).
"""
Highlevel functions to support some cases where columnizing can be useful.
"""
from __future__ import print_function
from . import config, core, helpers
__all__ = ['print_columnized', 'print_attr_names', 'print_filenames']
def print_columnized(items, output_stream=config.OUTPUT_STREAM, **options):
"""
Shorthand for writing columnized `items` to `output_stream`.
`items` can be a sequence or a dictionary. In case of being a dictionary
the result will be a string with two columns (i.e. one for the keys and one
for the values). In case of a sequence the resulting number of columns is
calculated by the underlying algorithm.
`output_stream` should be a file-like object that provides at least a
`.write()`-method.
Additional `options` are passed as-is to the `columnize()`-function and are
interpreted there. See `columnize()`-documentation for details.
"""
result = core.columnize(items, **options)
print(result, file=output_stream)
def print_attr_names(obj, pattern=None, **options):
"""
Like `print_columnized()` but columnizes the attribute names of `obj`.
If `pattern` is not `None` then the resulting names are filtered by using
the expression defined by `pattern`. This works like matching filenames in
a shell (e.g. using "get_*" will only columnize attribute names starting
with "get_").
"""
names = dir(obj)
if pattern is not None:
names = helpers.filter_names(names, pattern)
print_columnized(names, sort_items=True, **options)
def print_filenames(path='.', hide_dotted=False, **options):
"""
Like `print_columnized()` but columnizes the filenames living in given
`path`. If `hide_dotted` is `True` then all filenames starting with a "."
are excluded from the result. Note that this function does shell-like
expansion of symbols such as "*", "?" or even "~" (user's home).
"""
filenames = helpers.get_filenames(path, hide_dotted)
print_columnized(filenames, sort_items=True, **options)
| # -*- coding: utf-8 -*-
# Copyright (c) 2013-2015, Sebastian Linke
# Released under the Simplified BSD license
# (see LICENSE file for details).
"""
Highlevel functions to support some cases where columnizing can be useful.
"""
from __future__ import print_function
from . import config, core, helpers
__all__ = ['print_columnized', 'print_attr_names', 'print_filenames']
def print_columnized(items, output_stream=config.OUTPUT_STREAM, **options):
"""
Shorthand for writing columnized `items` to `output_stream`.
`items` can be a sequence or a dictionary. In case of being a dictionary
the result will be a string with two columns (i.e. one for the keys and one
for the values). In case of a sequence the resulting number of columns is
calculated by the underlying algorithm.
`output_stream` should be a file-like object that provides at least a
`.write()`-method.
Additional `options` are passed as-is to the `columnize()`-function and are
interpreted there. See `core.columnize()`-documentation for details.
"""
result = core.columnize(items, **options)
print(result, file=output_stream)
def print_attr_names(obj, pattern=None, **options):
"""
Like `print_columnized()` but columnizes the attribute names of `obj`.
If `pattern` is not `None` then the resulting names are filtered by using
the expression defined by `pattern`. This works like matching filenames in
a shell (e.g. using "get_*" will only columnize attribute names starting
with "get_").
"""
names = dir(obj)
if pattern is not None:
names = helpers.filter_names(names, pattern)
print_columnized(names, sort_items=True, **options)
def print_filenames(path='.', hide_dotted=False, **options):
"""
Like `print_columnized()` but columnizes the filenames living in given
`path`. If `hide_dotted` is `True` then all filenames starting with a "."
are excluded from the result. Note that this function does shell-like
expansion of symbols such as "*", "?" or even "~" (user's home).
"""
filenames = helpers.get_filenames(path, hide_dotted)
print_columnized(filenames, sort_items=True, **options)
| bsd-2-clause | Python |
1abeef7014481083da23caf0324dfbc0e05ceece | fix view | wolfg1969/my-wechat-app | wechat_app.py | wechat_app.py | import io
from flask import Flask, request, send_file
from flask_redis import Redis
from wechat_sdk import WechatConf, WechatBasic
import wechat_message_handler
app = Flask(__name__)
app.config.from_envvar('MY_WECHAT_APP_SETTINGS')
redis = Redis(app)
wechat_conf = WechatConf(
token=app.config['WX_TOKEN'],
appid=app.config['WX_APP_ID'],
appsecret=app.config['WX_APP_SECRET'],
encrypt_mode=app.config['WX_ENCRYPT_MODE'],
encoding_aes_key=app.config['WX_ENCODING_AES_KEY']
)
@app.route('/wx', methods=['GET', 'POST'])
def handle_wechat_msg():
app.logger.info('handle_wechat_msg')
wechat = WechatBasic(conf=wechat_conf)
handler_name = '%s_handler' % request.method.lower()
dispatch_method = getattr(wechat_message_handler, handler_name)
return dispatch_method(request, wechat)
@app.route('/apod.jpg', methods=['GET'])
def apod_image():
redis_conn = app.config.extensions['redis']
apod_image_message = redis_conn.get(app.config['APOD_CACHE_KEY'])
if not apod_image_message:
return 'APOD Not Found', 404
# response = make_response(image_binary)
# response.headers['Content-Type'] = 'image/jpeg'
# response.headers['Content-Disposition'] = 'attachment; filename=apod.jpg'
# return response
return send_file(io.BytesIO(apod_image_message['picdata']))
if __name__ == '__main__':
app.run()
| import io
from flask import Flask, request, send_file
from flask_redis import Redis
from wechat_sdk import WechatConf, WechatBasic
import wechat_message_handler
app = Flask(__name__)
app.config.from_envvar('MY_WECHAT_APP_SETTINGS')
redis = Redis(app)
wechat_conf = WechatConf(
token=app.config['WX_TOKEN'],
appid=app.config['WX_APP_ID'],
appsecret=app.config['WX_APP_SECRET'],
encrypt_mode=app.config['WX_ENCRYPT_MODE'],
encoding_aes_key=app.config['WX_ENCODING_AES_KEY']
)
@app.route('/wx', methods=['GET', 'POST'])
def handle_wechat_msg():
app.logger.info('handle_wechat_msg')
wechat = WechatBasic(conf=wechat_conf)
handler_name = '%s_handler' % request.method.lower()
dispatch_method = getattr(wechat_message_handler, handler_name)
return dispatch_method(request, wechat)
@app.route('/apod.jpg', methods=['GET'])
def apod_image():
redis = app.config.extensions['redis']
apod_image_message = redis.get(app.config['APOD_CACHE_KEY'])
if not apod_image_message:
return 'APOD Not Found', 404
# response = make_response(image_binary)
# response.headers['Content-Type'] = 'image/jpeg'
# response.headers['Content-Disposition'] = 'attachment; filename=apod.jpg'
# return response
return send_file(io.BytesIO(apod_image_message['picdata']))
if __name__ == '__main__':
app.run()
| mit | Python |
854d4246fc559de85379bf8ff6df42c456ee1592 | simplify migration from pre-ids | paulfitz/sheetsite,paulfitz/sheetsite | sheetsite/chain.py | sheetsite/chain.py | import daff
import os
from sheetsite.ids import process_ids
from sheetsite.site import Site
from sheetsite.source import read_source
from sheetsite.destination import write_destination
import shutil
def apply_chain(site, path):
if not(os.path.exists(path)):
os.makedirs(path)
source = site['source']
destination = site['destination']
wb = read_source(source)
ss = Site(wb, os.path.join(path, 'geocache.sqlite'))
if 'flags' in site:
ss.configure(site['flags'])
raw_file = os.path.join(path, 'raw.json')
output_file = os.path.join(path, 'public.json')
prev_raw_file = os.path.join(path, 'prev_raw.json')
private_output_file = os.path.join(path, 'private.json')
id_file = os.path.join(path, 'ids.json')
prev_id_file = os.path.join(path, 'prev_ids.json')
if os.path.exists(raw_file):
shutil.copyfile(raw_file, prev_raw_file)
if os.path.exists(id_file):
shutil.copyfile(id_file, prev_id_file)
ss.save_local(raw_file, enhance=False)
ss.add_ids(process_ids(prev_raw_file, raw_file, prev_id_file, id_file))
ss.save_local(output_file)
if not os.path.exists(prev_raw_file):
# once daff can cope with blank tables correctly, switch to this
# with open(prev_raw_file, 'w') as fout:
# fout.write('{ "names": [], "tables": [] }')
shutil.copyfile(raw_file, prev_raw_file)
shutil.copyfile(id_file, prev_id_file)
ss.save_local(private_output_file, private_sheets=True)
state = {
'path': path,
'output_file': output_file,
'workbook': ss.public_workbook()
}
write_destination(destination, state)
return {
'prev_raw_file': prev_raw_file,
'raw_file': raw_file
}
def compute_diff(files, format='html'):
io = daff.TableIO()
dapp = daff.Coopy(io)
t1 = dapp.loadTable(files['prev_raw_file'])
t2 = dapp.loadTable(files['raw_file'])
if format == 'both':
r1 = daff.diffAsHtml(t1, t2)
r2 = daff.diffAsAnsi(t1, t2)
return (r1, r2)
if format == 'html':
return daff.diffAsHtml(t1, t2)
return daff.diffAsAnsi(t1, t2)
| import daff
import os
from sheetsite.ids import process_ids
from sheetsite.site import Site
from sheetsite.source import read_source
from sheetsite.destination import write_destination
import shutil
def apply_chain(site, path):
if not(os.path.exists(path)):
os.makedirs(path)
source = site['source']
destination = site['destination']
wb = read_source(source)
ss = Site(wb, os.path.join(path, 'geocache.sqlite'))
if 'flags' in site:
ss.configure(site['flags'])
raw_file = os.path.join(path, 'raw.json')
output_file = os.path.join(path, 'public.json')
prev_raw_file = os.path.join(path, 'prev_raw.json')
private_output_file = os.path.join(path, 'private.json')
id_file = os.path.join(path, 'ids.json')
prev_id_file = os.path.join(path, 'prev_ids.json')
if os.path.exists(raw_file):
shutil.copyfile(raw_file, prev_raw_file)
shutil.copyfile(id_file, prev_id_file)
ss.save_local(raw_file, enhance=False)
ss.add_ids(process_ids(prev_raw_file, raw_file, prev_id_file, id_file))
ss.save_local(output_file)
if not os.path.exists(prev_raw_file):
# once daff can cope with blank tables correctly, switch to this
# with open(prev_raw_file, 'w') as fout:
# fout.write('{ "names": [], "tables": [] }')
shutil.copyfile(raw_file, prev_raw_file)
shutil.copyfile(id_file, prev_id_file)
ss.save_local(private_output_file, private_sheets=True)
state = {
'path': path,
'output_file': output_file,
'workbook': ss.public_workbook()
}
write_destination(destination, state)
return {
'prev_raw_file': prev_raw_file,
'raw_file': raw_file
}
def compute_diff(files, format='html'):
io = daff.TableIO()
dapp = daff.Coopy(io)
t1 = dapp.loadTable(files['prev_raw_file'])
t2 = dapp.loadTable(files['raw_file'])
if format == 'both':
r1 = daff.diffAsHtml(t1, t2)
r2 = daff.diffAsAnsi(t1, t2)
return (r1, r2)
if format == 'html':
return daff.diffAsHtml(t1, t2)
return daff.diffAsAnsi(t1, t2)
| mit | Python |
df5dd28f19a6e82164ecda9278cd73dfef78f048 | Use strict template rendering. | PaulMcMillan/sherry,PaulMcMillan/sherry | sherry/__init__.py | sherry/__init__.py | import jinja2
from flask import Flask
from sherry import converters
app = Flask(__name__)
app.config.from_object('sherry.default_settings')
app.url_map.converters['mac'] = converters.MacConverter
app.jinja_env.undefined = jinja2.StrictUndefined
import sherry.views
| from flask import Flask
app = Flask(__name__)
app.config.from_object('sherry.default_settings')
import sherry.views
| bsd-2-clause | Python |
62c18241f0da960c44210445a850a834a181e6cd | Add support for Django >=1.6, which dropped django.conf.urls.defaults | KonstantinSchubert/django-shibboleth-adapter,ties/django-shibboleth-remoteuser,abhishekshivanna/django-shibboleth-remoteuser,trevoriancox/django-shibboleth-remoteuser,UCL-RITS/django-shibboleth-remoteuser,ties/django-shibboleth-remoteuser,KonstantinSchubert/django-shibboleth-adapter,CloudComputingCourse/django-shibboleth-remoteuser,denisvlr/django-shibboleth-remoteuser,CloudComputingCourse/django-shibboleth-remoteuser,kennydude/django-shibboleth-remoteuser,uchicago-library/django-shibboleth-remoteuser,kennydude/django-shibboleth-remoteuser,UCL-RITS/django-shibboleth-remoteuser,denisvlr/django-shibboleth-remoteuser,Brown-University-Library/django-shibboleth-remoteuser,uchicago-library/django-shibboleth-remoteuser,trevoriancox/django-shibboleth-remoteuser,abhishekshivanna/django-shibboleth-remoteuser,Brown-University-Library/django-shibboleth-remoteuser | shibboleth/urls.py | shibboleth/urls.py | from distutils.version import StrictVersion
import django
if StrictVersion(django.get_version()) >= StrictVersion('1.6'):
from django.conf.urls import patterns, url, include
else:
from django.conf.urls.defaults import *
from views import ShibbolethView, ShibbolethLogoutView, ShibbolethLoginView
urlpatterns = patterns('',
url(r'^login/$', ShibbolethLoginView.as_view(), name='login'),
url(r'^logout/$', ShibbolethLogoutView.as_view(), name='logout'),
url(r'^$', ShibbolethView.as_view(), name='info'),
) | from django.conf.urls.defaults import *
from views import ShibbolethView, ShibbolethLogoutView, ShibbolethLoginView
urlpatterns = patterns('',
url(r'^login/$', ShibbolethLoginView.as_view(), name='login'),
url(r'^logout/$', ShibbolethLogoutView.as_view(), name='logout'),
url(r'^$', ShibbolethView.as_view(), name='info'),
) | mit | Python |
28adc5bd5d51fac404d7e8ae6de2d67c5dbc1a5f | Bump version. | concordusapps/python-shield | shield/_version.py | shield/_version.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
__version_info__ = (0, 2, 2)
__version__ = '.'.join(map(str, __version_info__))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
__version_info__ = (0, 2, 1)
__version__ = '.'.join(map(str, __version_info__))
| mit | Python |
17ddd05e35f7cff90530cdb2df0c4971b97e7302 | Update logging to log async functions properly | festinuz/cmcb,festinuz/cmcb | cmcb/utils.py | cmcb/utils.py | import sys
import inspect
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
result = function(*args, **kwargs)
if log:
print(function.__name__, args, kwargs, result, file=out)
return result
return wrapped_function
def async_wrapper(async_function):
@wraps(async_function)
async def wrapped_async_function(*args, **kwargs):
result = await async_function(*args, **kwargs)
if log:
print(async_function.__name__, args, kwargs, result, file=out)
return result
return wrapped_async_function
def cool_wrapper(function):
is_async_function = inspect.iscoroutinefunction(function)
if is_async_function:
return async_wrapper(function)
else:
return wrapper(function)
return cool_wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
| import sys
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if log:
print('calling', function.__name__, args, kwargs, file=out)
result = function(*args, **kwargs)
if log:
print('result', function.__name__, result, file=out)
return result
return wrapped_function
return wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
| mit | Python |
89af9edbfd9651e93c910b19e6c77bfd0b91394a | Update d.py | xsthunder/acm,xsthunder/acm,xsthunder/a,xsthunder/a,xsthunder/a,xsthunder/acm,xsthunder/a,xsthunder/a,xsthunder/acm | at/abc126/d.py | at/abc126/d.py | # 二分图染色,本题中图是树
read = input
n = int(read())
v = [[] for i in range(n+1)] # 邻接
used = [0 for i in range(n+1)] # 节点 1~n
def add(f,t,w):
v[f].append((t, w))
for i in range(n-1):
f,t,w = map(int, read().split())
add(f,t,w)
add(t,f,w)
# 从1出发,贪心即可,1画1,理论不会有回环
# def dfs(f, s, fa=-1):
# if s % 2 == 0:
# used[f] = 1
# for t, w in v[f]:
# # print('%d->%d->%d'%(fa, f, t,), s, w)
# if t == fa:continue
# dfs(t, w + s, f)
from queue import Queue
q = Queue() # 队列
#def dfs(f, s, fa=-1):
# if s % 2 == 0:
# used[f] = 1
# for t, w in v[f]:
## print('%d->%d->%d'%(fa, f, t,), s, w)
# if t == fa:continue
# dfs(t, w + s, f)
#
#
#dfs(1,0)
q.put_nowait((1, 0, -1))
while not q.empty():
f, s, fa = q.get_nowait()
if s % 2 == 0:
used[f] = 1
for t, w in v[f]:
# print('%d->%d->%d'%(fa, f, t,), s, w)
if t == fa:continue
q.put_nowait(
(t, w + s, f)
)
for i in used[1:]:
print(used[i])
| read = input
n = int(read())
v = [[] for i in range(n+1)] # 邻接
used = [0 for i in range(n+1)] # 节点 1~n
def add(f,t,w):
v[f].append((t, w))
for i in range(n-1):
f,t,w = map(int, read().split())
add(f,t,w)
add(t,f,w)
# 从1出发,贪心即可,1画1,理论不会有回环
# def dfs(f, s, fa=-1):
# if s % 2 == 0:
# used[f] = 1
# for t, w in v[f]:
# # print('%d->%d->%d'%(fa, f, t,), s, w)
# if t == fa:continue
# dfs(t, w + s, f)
from queue import Queue
q = Queue() # 队列
#def dfs(f, s, fa=-1):
# if s % 2 == 0:
# used[f] = 1
# for t, w in v[f]:
## print('%d->%d->%d'%(fa, f, t,), s, w)
# if t == fa:continue
# dfs(t, w + s, f)
#
#
#dfs(1,0)
q.put_nowait((1, 0, -1))
while not q.empty():
f, s, fa = q.get_nowait()
if s % 2 == 0:
used[f] = 1
for t, w in v[f]:
# print('%d->%d->%d'%(fa, f, t,), s, w)
if t == fa:continue
q.put_nowait(
(t, w + s, f)
)
for i in used[1:]:
print(used[i])
| mit | Python |
ff77fd14081b7828e9ba6a60b5d9b3e0580d2b54 | use call instead of run to make python2 compatible | NeurotechBerkeley/bci-course,NeurotechBerkeley/bci-course | lab7/paradigm/play_tag_movie_new.py | lab7/paradigm/play_tag_movie_new.py | # from kivy.app import App
# from kivy.uix.video import Video
import sys
import time
from pylsl import StreamInfo, StreamOutlet
import subprocess
try:
input = raw_input
except NameError:
pass
info = StreamInfo('Ganglion_EEG', 'Markers', 1, 0.0, 'int32',
'marker')
outlet = StreamOutlet(info)
if len(sys.argv) != 2:
print("usage: %s file" % sys.argv[0])
sys.exit(1)
outlet.push_sample([-1], time.time())
_ = input('\nStart recording and press Enter to start')
outlet.push_sample([1], time.time())
subprocess.call(['mpv',sys.argv[1]])
outlet.push_sample([2], time.time())
| # from kivy.app import App
# from kivy.uix.video import Video
import sys
import time
from pylsl import StreamInfo, StreamOutlet
import subprocess
try:
input = raw_input
except NameError:
pass
info = StreamInfo('Ganglion_EEG', 'Markers', 1, 0.0, 'int32',
'marker')
outlet = StreamOutlet(info)
if len(sys.argv) != 2:
print("usage: %s file" % sys.argv[0])
sys.exit(1)
outlet.push_sample([-1], time.time())
_ = input('\nStart recording and press Enter to start')
outlet.push_sample([1], time.time())
subprocess.run(['mpv',sys.argv[1]])
outlet.push_sample([2], time.time())
| mit | Python |
40a18d111952a6bac82c240a008d6f423b5e6838 | Use unicode literals | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/utils/decorators/state.py | salt/utils/decorators/state.py | # -*- coding: utf-8 -*-
'''
Decorators for salt.state
:codeauthor: :email:`Bo Maryniuk (bo@suse.de)`
'''
from __future__ import absolute_import, unicode_literals
from salt.exceptions import SaltException
def state_output_check(func):
'''
Checks for specific types in the state output.
Raises an Exception in case particular rule is broken.
:param func:
:return:
'''
def _func(*args, **kwargs):
'''
Ruleset.
'''
result = func(*args, **kwargs)
if not isinstance(result, dict):
err_msg = 'Malformed state return, return must be a dict.'
elif not isinstance(result.get('changes'), dict):
err_msg = "'Changes' should be a dictionary."
else:
missing = []
for val in ['name', 'result', 'changes', 'comment']:
if val not in result:
missing.append(val)
if missing:
err_msg = 'The following keys were not present in the state return: {0}.'.format(', '.join(missing))
else:
err_msg = None
if err_msg:
raise SaltException(err_msg)
return result
return _func
def state_output_unifier(func):
'''
While comments as a list are allowed,
comments needs to be strings for backward compatibility.
See such claim here: https://github.com/saltstack/salt/pull/43070
Rules applied:
- 'comment' is joined into a multi-line string, in case the value is a list.
- 'result' should be always either True, False or None.
:param func: module function
:return: Joins 'comment' list into a multi-line string
'''
def _func(*args, **kwargs):
result = func(*args, **kwargs)
if isinstance(result.get('comment'), list):
result['comment'] = '\n'.join([str(elm) for elm in result['comment']])
if result.get('result') is not None:
result['result'] = bool(result['result'])
return result
return _func
| # -*- coding: utf-8 -*-
'''
Decorators for salt.state
:codeauthor: :email:`Bo Maryniuk (bo@suse.de)`
'''
from __future__ import absolute_import
from salt.exceptions import SaltException
def state_output_check(func):
'''
Checks for specific types in the state output.
Raises an Exception in case particular rule is broken.
:param func:
:return:
'''
def _func(*args, **kwargs):
'''
Ruleset.
'''
result = func(*args, **kwargs)
if not isinstance(result, dict):
err_msg = 'Malformed state return, return must be a dict.'
elif not isinstance(result.get('changes'), dict):
err_msg = "'Changes' should be a dictionary."
else:
missing = []
for val in ['name', 'result', 'changes', 'comment']:
if val not in result:
missing.append(val)
if missing:
err_msg = 'The following keys were not present in the state return: {0}.'.format(', '.join(missing))
else:
err_msg = None
if err_msg:
raise SaltException(err_msg)
return result
return _func
def state_output_unifier(func):
'''
While comments as a list are allowed,
comments needs to be strings for backward compatibility.
See such claim here: https://github.com/saltstack/salt/pull/43070
Rules applied:
- 'comment' is joined into a multi-line string, in case the value is a list.
- 'result' should be always either True, False or None.
:param func: module function
:return: Joins 'comment' list into a multi-line string
'''
def _func(*args, **kwargs):
result = func(*args, **kwargs)
if isinstance(result.get('comment'), list):
result['comment'] = '\n'.join([str(elm) for elm in result['comment']])
if result.get('result') is not None:
result['result'] = bool(result['result'])
return result
return _func
| apache-2.0 | Python |
f8a721108a593c2ae277641ec3ebe315749070bd | Check array data types. | eliteraspberries/avena | avena/xcor2.py | avena/xcor2.py | #!/usr/bin/env python
"""Cross-correlation of image arrays."""
import numpy
from numpy import fft
from . import filter, image, np, tile
_DETREND_FACTOR = 0.10
def _detrend_filter(array):
m, n = array.shape
r = int(numpy.sqrt(m * n) * _DETREND_FACTOR)
f = filter._high_pass_filter((m, n), r)
numpy.multiply(array, f, out=array)
def _xcor2_shape(shapes):
shape1, shape2 = shapes
a, b = shape1
c, d = shape2
return (a + c, b + d)
def _center(array, shape):
m, n = array.shape
a, b = shape
i, j = (m - a) // 2, (n - b) // 2
return array[i:(i + a), j:(j + b)]
def _xcor2(array1, array2):
x = tile.tile9_periodic(array1)
a, b = x.shape
y = array2[::-1, ::-1]
c, d = y.shape
m, n = _xcor2_shape(((a, b), (c, d)))
x = np._zeropad(x, (m, n))
y = np._zeropad(y, (m, n))
X = fft.rfft2(x)
Y = fft.rfft2(y)
X = fft.fftshift(X)
Y = fft.fftshift(Y)
_detrend_filter(X)
_detrend_filter(Y)
numpy.multiply(X, Y, out=X)
X = fft.ifftshift(X)
x = fft.irfft2(X, s=(m, n))
z = _center(x, (a // 3 + c, b // 3 + d))
z = _center(z, (a // 3, b // 3))
return z
def xcor2(array1, array2):
"""Compute the cross-correlation of two image arrays."""
assert array1.dtype == array2.dtype
z = numpy.ones(array1.shape[:2], dtype=array1.dtype)
channel_pairs = list(zip(
image.get_channels(array1),
image.get_channels(array2),
))
for (xi, yi) in channel_pairs:
xcori = _xcor2(xi, yi)
numpy.multiply(z, xcori, out=z)
return z
if __name__ == '__main__':
pass
| #!/usr/bin/env python
"""Cross-correlation of image arrays."""
import numpy
from numpy import fft
from . import filter, image, np, tile
_DETREND_FACTOR = 0.10
def _detrend_filter(array):
m, n = array.shape
r = int(numpy.sqrt(m * n) * _DETREND_FACTOR)
f = filter._high_pass_filter((m, n), r)
numpy.multiply(array, f, out=array)
def _xcor2_shape(shapes):
shape1, shape2 = shapes
a, b = shape1
c, d = shape2
return (a + c, b + d)
def _center(array, shape):
m, n = array.shape
a, b = shape
i, j = (m - a) // 2, (n - b) // 2
return array[i:(i + a), j:(j + b)]
def _xcor2(array1, array2):
x = tile.tile9_periodic(array1)
a, b = x.shape
y = array2[::-1, ::-1]
c, d = y.shape
m, n = _xcor2_shape(((a, b), (c, d)))
x = np._zeropad(x, (m, n))
y = np._zeropad(y, (m, n))
X = fft.rfft2(x)
Y = fft.rfft2(y)
X = fft.fftshift(X)
Y = fft.fftshift(Y)
_detrend_filter(X)
_detrend_filter(Y)
numpy.multiply(X, Y, out=X)
X = fft.ifftshift(X)
x = fft.irfft2(X, s=(m, n))
z = _center(x, (a // 3 + c, b // 3 + d))
z = _center(z, (a // 3, b // 3))
return z
def xcor2(array1, array2):
"""Compute the cross-correlation of two image arrays."""
z = numpy.ones(array1.shape[:2])
channel_pairs = list(zip(
image.get_channels(array1),
image.get_channels(array2),
))
for (xi, yi) in channel_pairs:
xcori = _xcor2(xi, yi)
numpy.multiply(z, xcori, out=z)
return z
if __name__ == '__main__':
pass
| isc | Python |
9e03c98c3c890b65d497be17eb05648fcd6a355a | Set id on start | openprocurement/openprocurement.buildout | aws_startup.py | aws_startup.py | import argparse
import urlparse
import os
import ConfigParser
import subprocess
from requests import Session
ZONE_TO_ID = {
'eu-west-1a': 'a',
'eu-west-1b': 'b',
'eu-west-1c': 'c'
}
cur_dir = os.path.dirname(__file__)
parser = argparse.ArgumentParser(description='------ AWS Startup Script ------')
parser.add_argument('api_dest', type=str, help='Destination to database')
params = parser.parse_args()
api_ini_file_path = os.path.join(cur_dir, 'etc/openprocurement.api.ini')
session = Session()
resp = session.get('http://169.254.169.254/latest/meta-data/placement/availability-zone')
if resp.status_code == 200:
zone = resp.text
domain = '{}.{}'.format(zone, params.api_dest)
if os.path.isfile(api_ini_file_path):
config = ConfigParser.ConfigParser()
config.read([api_ini_file_path])
for k in ['couchdb.url', 'couchdb.admin_url']:
value = config.get('app:api', k)
url = urlparse.urlparse(value)
if url.username:
url = url._replace(netloc='{}:{}@{}:{}'.format(url.username, url.password,
domain, url.port))
else:
url = url._replace(netloc='{}:{}'.format(domain, url.port))
config.set('app:api', k, url.geturl())
if zone in ZONE_TO_ID:
config.set('app:api', 'id', ZONE_TO_ID[zone])
with open(api_ini_file_path, 'wb') as configfile:
config.write(configfile)
subprocess.check_call([os.path.join(cur_dir, 'bin/circusd'), "--daemon"])
| import argparse
import urlparse
import os
import ConfigParser
import subprocess
from requests import Session
cur_dir = os.path.dirname(__file__)
parser = argparse.ArgumentParser(description='------ AWS Startup Script ------')
parser.add_argument('api_dest', type=str, help='Destination to database')
params = parser.parse_args()
api_ini_file_path = os.path.join(cur_dir, 'etc/openprocurement.api.ini')
session = Session()
resp = session.get('http://169.254.169.254/latest/meta-data/placement/availability-zone')
if resp.status_code == 200:
zone = resp.text
domain = '{}.{}'.format(zone, params.api_dest)
if os.path.isfile(api_ini_file_path):
config = ConfigParser.ConfigParser()
config.read([api_ini_file_path])
for k in ['couchdb.url', 'couchdb.admin_url']:
value = config.get('app:api', k)
url = urlparse.urlparse(value)
if url.username:
url = url._replace(netloc='{}:{}@{}:{}'.format(url.username, url.password,
domain, url.port))
else:
url = url._replace(netloc='{}:{}'.format(domain, url.port))
config.set('app:api', k, url.geturl())
with open(api_ini_file_path, 'wb') as configfile:
config.write(configfile)
subprocess.check_call([os.path.join(cur_dir, 'bin/circusd'), "--daemon"])
| apache-2.0 | Python |
c039e4a2e322ee4e0a173f164be598dc630d3579 | fix ProxyError inheritance | mitmproxy/mitmproxy,tekii/mitmproxy,meizhoubao/mitmproxy,dxq-git/mitmproxy,ikoz/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,mosajjal/mitmproxy,zbuc/mitmproxy,liorvh/mitmproxy,sethp-jive/mitmproxy,ikoz/mitmproxy,onlywade/mitmproxy,macmantrl/mitmproxy,zbuc/mitmproxy,MatthewShao/mitmproxy,ddworken/mitmproxy,Kriechi/mitmproxy,claimsmall/mitmproxy,onlywade/mitmproxy,vhaupert/mitmproxy,gzzhanghao/mitmproxy,jvillacorta/mitmproxy,xtso520ok/mitmproxy,legendtang/mitmproxy,rauburtin/mitmproxy,0xwindows/InfoLeak,meizhoubao/mitmproxy,devasia1000/anti_adblock,dufferzafar/mitmproxy,macmantrl/mitmproxy,ryoqun/mitmproxy,tfeagle/mitmproxy,fimad/mitmproxy,owers19856/mitmproxy,ujjwal96/mitmproxy,0x0mar/mitmproxy,bazzinotti/mitmproxy,sethp-jive/mitmproxy,scriptmediala/mitmproxy,mosajjal/mitmproxy,xtso520ok/mitmproxy,inscriptionweb/mitmproxy,azureplus/mitmproxy,devasia1000/mitmproxy,bazzinotti/mitmproxy,StevenVanAcker/mitmproxy,jvillacorta/mitmproxy,syjzwjj/mitmproxy,noikiy/mitmproxy,laurmurclar/mitmproxy,fimad/mitmproxy,cortesi/mitmproxy,tdickers/mitmproxy,dufferzafar/mitmproxy,0x0mar/mitmproxy,claimsmall/mitmproxy,guiquanz/mitmproxy,ADemonisis/mitmproxy,sethp-jive/mitmproxy,noikiy/mitmproxy,gzzhanghao/mitmproxy,tdickers/mitmproxy,ddworken/mitmproxy,rauburtin/mitmproxy,mhils/mitmproxy,onlywade/mitmproxy,ParthGanatra/mitmproxy,gzzhanghao/mitmproxy,Endika/mitmproxy,liorvh/mitmproxy,ddworken/mitmproxy,guiquanz/mitmproxy,StevenVanAcker/mitmproxy,ikoz/mitmproxy,zlorb/mitmproxy,cortesi/mitmproxy,xbzbing/mitmproxy,elitest/mitmproxy,ADemonisis/mitmproxy,xaxa89/mitmproxy,xtso520ok/mitmproxy,0xwindows/InfoLeak,dwfreed/mitmproxy,mhils/mitmproxy,mhils/mitmproxy,guiquanz/mitmproxy,0x0mar/mitmproxy,MatthewShao/mitmproxy,bltb/mitmproxy,mitmproxy/mitmproxy,laurmurclar/mitmproxy,vhaupert/mitmproxy,elitest/mitmproxy,ddworken/mitmproxy,macmantrl/mitmproxy,Fuzion24/mitmproxy,byt3bl33d3r/mitmproxy,MatthewShao/mitmproxy,ParthGanatra/mitmproxy,ccccccccccc/mitmproxy,inscriptionweb/mitmproxy,ujjwal96/mitmproxy,ZeYt/mitmproxy,syjzwjj/mitmproxy,scriptmediala/mitmproxy,byt3bl33d3r/mitmproxy,Fuzion24/mitmproxy,ZeYt/mitmproxy,laurmurclar/mitmproxy,pombredanne/mitmproxy,tfeagle/mitmproxy,ryoqun/mitmproxy,azureplus/mitmproxy,cortesi/mitmproxy,Endika/mitmproxy,ZeYt/mitmproxy,mosajjal/mitmproxy,cortesi/mitmproxy,azureplus/mitmproxy,zlorb/mitmproxy,xbzbing/mitmproxy,macmantrl/mitmproxy,zlorb/mitmproxy,byt3bl33d3r/mitmproxy,tdickers/mitmproxy,dweinstein/mitmproxy,tfeagle/mitmproxy,Kriechi/mitmproxy,elitest/mitmproxy,syjzwjj/mitmproxy,ikoz/mitmproxy,inscriptionweb/mitmproxy,sethp-jive/mitmproxy,Kriechi/mitmproxy,guiquanz/mitmproxy,laurmurclar/mitmproxy,xbzbing/mitmproxy,dweinstein/mitmproxy,dxq-git/mitmproxy,ParthGanatra/mitmproxy,legendtang/mitmproxy,dwfreed/mitmproxy,ujjwal96/mitmproxy,liorvh/mitmproxy,byt3bl33d3r/mitmproxy,devasia1000/mitmproxy,owers19856/mitmproxy,rauburtin/mitmproxy,StevenVanAcker/mitmproxy,fimad/mitmproxy,devasia1000/mitmproxy,mitmproxy/mitmproxy,mitmproxy/mitmproxy,tekii/mitmproxy,dwfreed/mitmproxy,jpic/mitmproxy,vhaupert/mitmproxy,rauburtin/mitmproxy,dwfreed/mitmproxy,scriptmediala/mitmproxy,ccccccccccc/mitmproxy,bltb/mitmproxy,xaxa89/mitmproxy,liorvh/mitmproxy,fimad/mitmproxy,tekii/mitmproxy,bltb/mitmproxy,ZeYt/mitmproxy,ccccccccccc/mitmproxy,dxq-git/mitmproxy,xaxa89/mitmproxy,ryoqun/mitmproxy,tekii/mitmproxy,tfeagle/mitmproxy,Endika/mitmproxy,scriptmediala/mitmproxy,pombredanne/mitmproxy,bltb/mitmproxy,devasia1000/mitmproxy,onlywade/mitmproxy,pombredanne/mitmproxy,Fuzion24/mitmproxy,syjzwjj/mitmproxy,azureplus/mitmproxy,tdickers/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,dufferzafar/mitmproxy,jpic/mitmproxy,xaxa89/mitmproxy,zbuc/mitmproxy,ADemonisis/mitmproxy,Kriechi/mitmproxy,legendtang/mitmproxy,claimsmall/mitmproxy,noikiy/mitmproxy,0xwindows/InfoLeak,noikiy/mitmproxy,devasia1000/anti_adblock,gzzhanghao/mitmproxy,zbuc/mitmproxy,MatthewShao/mitmproxy,ryoqun/mitmproxy,Fuzion24/mitmproxy,pombredanne/mitmproxy,owers19856/mitmproxy,jvillacorta/mitmproxy,elitest/mitmproxy,0xwindows/InfoLeak,bazzinotti/mitmproxy,vhaupert/mitmproxy,jpic/mitmproxy,meizhoubao/mitmproxy,StevenVanAcker/mitmproxy,legendtang/mitmproxy,jvillacorta/mitmproxy,dufferzafar/mitmproxy,meizhoubao/mitmproxy,claimsmall/mitmproxy,mhils/mitmproxy,dweinstein/mitmproxy,devasia1000/anti_adblock,xbzbing/mitmproxy,owers19856/mitmproxy,bazzinotti/mitmproxy,ccccccccccc/mitmproxy,ParthGanatra/mitmproxy,ADemonisis/mitmproxy,mitmproxy/mitmproxy,dxq-git/mitmproxy,Endika/mitmproxy,dweinstein/mitmproxy,inscriptionweb/mitmproxy,jpic/mitmproxy | libmproxy/proxy/primitives.py | libmproxy/proxy/primitives.py | from __future__ import absolute_import
class ProxyError(Exception):
def __init__(self, code, message, headers=None):
super(ProxyError, self).__init__(self, message)
self.code, self.headers = code, headers
class ConnectionTypeChange(Exception):
"""
Gets raised if the connection type has been changed (e.g. after HTTP/1.1 101 Switching Protocols).
It's up to the raising ProtocolHandler to specify the new conntype before raising the exception.
"""
pass
class ProxyServerError(Exception):
pass
class UpstreamServerResolver(object):
def __call__(self, conn):
"""
Returns the address of the server to connect to.
"""
raise NotImplementedError # pragma: nocover
class ConstUpstreamServerResolver(UpstreamServerResolver):
def __init__(self, dst):
self.dst = dst
def __call__(self, conn):
return self.dst
class TransparentUpstreamServerResolver(UpstreamServerResolver):
def __init__(self, resolver, sslports):
self.resolver = resolver
self.sslports = sslports
def __call__(self, conn):
dst = self.resolver.original_addr(conn)
if not dst:
raise ProxyError(502, "Transparent mode failure: could not resolve original destination.")
if dst[1] in self.sslports:
ssl = True
else:
ssl = False
return [ssl, ssl] + list(dst)
class AddressPriority(object):
"""
Enum that signifies the priority of the given address when choosing the destination host.
Higher is better (None < i)
"""
MANUALLY_CHANGED = 3
"""user changed the target address in the ui"""
FROM_SETTINGS = 2
"""upstream server from arguments (reverse proxy, upstream proxy or from transparent resolver)"""
FROM_PROTOCOL = 1
"""derived from protocol (e.g. absolute-form http requests)"""
class Log:
def __init__(self, msg, level="info"):
self.msg = msg
self.level = level | from __future__ import absolute_import
class ProxyError(Exception):
def __init__(self, code, msg, headers=None):
self.code, self.msg, self.headers = code, msg, headers
def __str__(self):
return "ProxyError(%s, %s)" % (self.code, self.msg)
class ConnectionTypeChange(Exception):
"""
Gets raised if the connection type has been changed (e.g. after HTTP/1.1 101 Switching Protocols).
It's up to the raising ProtocolHandler to specify the new conntype before raising the exception.
"""
pass
class ProxyServerError(Exception):
pass
class UpstreamServerResolver(object):
def __call__(self, conn):
"""
Returns the address of the server to connect to.
"""
raise NotImplementedError # pragma: nocover
class ConstUpstreamServerResolver(UpstreamServerResolver):
def __init__(self, dst):
self.dst = dst
def __call__(self, conn):
return self.dst
class TransparentUpstreamServerResolver(UpstreamServerResolver):
def __init__(self, resolver, sslports):
self.resolver = resolver
self.sslports = sslports
def __call__(self, conn):
dst = self.resolver.original_addr(conn)
if not dst:
raise ProxyError(502, "Transparent mode failure: could not resolve original destination.")
if dst[1] in self.sslports:
ssl = True
else:
ssl = False
return [ssl, ssl] + list(dst)
class AddressPriority(object):
"""
Enum that signifies the priority of the given address when choosing the destination host.
Higher is better (None < i)
"""
MANUALLY_CHANGED = 3
"""user changed the target address in the ui"""
FROM_SETTINGS = 2
"""upstream server from arguments (reverse proxy, upstream proxy or from transparent resolver)"""
FROM_PROTOCOL = 1
"""derived from protocol (e.g. absolute-form http requests)"""
class Log:
def __init__(self, msg, level="info"):
self.msg = msg
self.level = level | mit | Python |
ff492e1acb78f43677c926b231a04da7ac1012b4 | use full path of library insted of the relative path, when import a module. | zordsdavini/qtile,zordsdavini/qtile | libqtile/widget/quick_exit.py | libqtile/widget/quick_exit.py | # Copyright (c) 2019, Shunsuke Mie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from enum import Enum, auto
from libqtile.widget import base
from libqtile import bar
class State(Enum):
Neutral = auto()
Counting = auto()
class QuickExit(base._TextBox):
"""
A button of exiting the running qtile easily. When clicked this button, a countdown
start. If the button pushed with in the countdown again, the qtile shutdown.
"""
defaults = [
('default_text', '[ shutdown ]', 'A text displayed as a button'),
('countdown_format', '[ {} seconds ]', 'This text is showed when counting down.'),
('timer_interval', 1, 'A countdown interval.'),
('countdown_start', 5, 'Time to accept the second pushing.'),
]
def __init__(self, widget=bar.CALCULATED, **config):
base._TextBox.__init__(self, '', widget, **config)
self.add_defaults(QuickExit.defaults)
self.state = State.Neutral
self.text = self.default_text
self.countdown = self.countdown_start
def update(self):
if self.state == State.Neutral:
return
self.countdown -= 1
if self.countdown < 0:
self.state = State.Neutral
self.countdown = self.countdown_start
self.text = self.default_text
self.draw()
return
self.text = self.countdown_format.format(self.countdown)
self.timeout_add(self.timer_interval, self.update)
self.draw()
def button_press(self, x, y, button):
if self.state == State.Neutral:
self.state = State.Counting
self.update()
return
if self.state == State.Counting:
self.qtile.stop()
| # Copyright (c) 2019, Shunsuke Mie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from enum import Enum, auto
from . import base
from .. import bar
class State(Enum):
Neutral = auto()
Counting = auto()
class QuickExit(base._TextBox):
"""
A button of exiting the running qtile easily. When clicked this button, a countdown
start. If the button pushed with in the countdown again, the qtile shutdown.
"""
defaults = [
('default_text', '[ shutdown ]', 'A text displayed as a button'),
('countdown_format', '[ {} seconds ]', 'This text is showed when counting down.'),
('timer_interval', 1, 'A countdown interval.'),
('countdown_start', 5, 'Time to accept the second pushing.'),
]
def __init__(self, widget=bar.CALCULATED, **config):
base._TextBox.__init__(self, '', widget, **config)
self.add_defaults(QuickExit.defaults)
self.state = State.Neutral
self.text = self.default_text
self.countdown = self.countdown_start
def update(self):
if self.state == State.Neutral:
return
self.countdown -= 1
if self.countdown < 0:
self.state = State.Neutral
self.countdown = self.countdown_start
self.text = self.default_text
self.draw()
return
self.text = self.countdown_format.format(self.countdown)
self.timeout_add(self.timer_interval, self.update)
self.draw()
def button_press(self, x, y, button):
if self.state == State.Neutral:
self.state = State.Counting
self.update()
return
if self.state == State.Counting:
self.qtile.stop()
| mit | Python |
a40f96ba76657ee1180089447ce6f35d99c00e3d | fix python dependance | iw3hxn/LibrERP,iw3hxn/LibrERP,iw3hxn/LibrERP,iw3hxn/LibrERP,iw3hxn/LibrERP | l10n_it_account/__openerp__.py | l10n_it_account/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010-2013 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Italian Localisation',
'version': '2.14.28.31',
'category': 'Localisation/Italy',
'description': """This module customizes OpenERP in order to fit italian laws and mores - Account version
Functionalities:
- Fiscal code computation for partner, and fiscal code check
- Check invoice date consistency
- CIG on invoice
""",
'author': 'OpenERP Italian Community, Didotech srl',
'website': 'http://www.openerp-italia.org, http://www.didotech.com',
'license': 'AGPL-3',
'depends': [
'account',
'base_vat',
'account_chart',
'base_iban',
'l10n_it_base',
'account_voucher',
'sale_order_confirm',
#'account_invoice_entry_date', not possible for use of a field defined here invoice_supplier_number
],
'data': [
'account/partner_view.xml',
'account/fiscal_position_view.xml',
'account/account_sequence.xml',
'account/invoice_view.xml',
'account/voucher_view.xml',
'account/payment_type_view.xml',
'wizard/select_fiscal_position_view.xml',
'data/bank_iban_data.xml',
'account/account_move.xml',
'account/res_bank_view.xml',
'account/account_tax_view.xml',
'account/res_company_view.xml',
'account/account_invoice_workflow.xml',
],
'demo': [],
'active': False,
'installable': True
'external_dependencies': {
'python': ['codicefiscale'],
}
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010-2013 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Italian Localisation',
'version': '2.14.28.31',
'category': 'Localisation/Italy',
'description': """This module customizes OpenERP in order to fit italian laws and mores - Account version
Functionalities:
- Fiscal code computation for partner
- Check invoice date consistency
- CIG on invoice
""",
'author': 'OpenERP Italian Community, Didotech srl',
'website': 'http://www.openerp-italia.org, http://www.didotech.com',
'license': 'AGPL-3',
'depends': [
'account',
'base_vat',
'account_chart',
'base_iban',
'l10n_it_base',
'account_voucher',
'sale_order_confirm',
#'account_invoice_entry_date', not possible for use of a field defined here invoice_supplier_number
],
'data': [
'account/partner_view.xml',
'account/fiscal_position_view.xml',
'account/account_sequence.xml',
'account/invoice_view.xml',
'account/voucher_view.xml',
'account/payment_type_view.xml',
'wizard/select_fiscal_position_view.xml',
'data/bank_iban_data.xml',
'account/account_move.xml',
'account/res_bank_view.xml',
'account/account_tax_view.xml',
'account/res_company_view.xml',
'account/account_invoice_workflow.xml',
],
'demo': [],
'active': False,
'installable': True
}
| agpl-3.0 | Python |
f04667fc229e8b9be2567b9687a6cc5ec5980ab1 | remove TL from master component __init__ | landlab/landlab,ManuSchmi88/landlab,Carralex/landlab,ManuSchmi88/landlab,RondaStrauch/landlab,ManuSchmi88/landlab,Carralex/landlab,RondaStrauch/landlab,amandersillinois/landlab,amandersillinois/landlab,cmshobe/landlab,landlab/landlab,SiccarPoint/landlab,RondaStrauch/landlab,Carralex/landlab,landlab/landlab,laijingtao/landlab,laijingtao/landlab,SiccarPoint/landlab,csherwood-usgs/landlab,csherwood-usgs/landlab,cmshobe/landlab,cmshobe/landlab | landlab/components/__init__.py | landlab/components/__init__.py | from .craters import CratersComponent
from .chi_index import ChiFinder
from .diffusion import LinearDiffuser
from .fire_generator import FireGenerator
from .flexure import Flexure
from .flow_accum import AccumFlow
from .flow_routing import FlowRouter, DepressionFinderAndRouter
from .glacier_thin_ice_model import Glacier
from .nonlinear_diffusion import PerronNLDiffuse
from .overland_flow import OverlandFlowBates, OverlandFlow
from .pet import PotentialEvapotranspiration
from .potentiality_flowrouting import PotentialityFlowRouter
from .radiation import Radiation
from .sed_trp_shallow_flow import SurfaceFlowTransport
from .single_vegetation import Vegetation
from .sink_fill import SinkFiller
from .soil_moisture import SoilMoisture
from .steepness_index import SteepnessFinder
from .stream_power import StreamPowerEroder, FastscapeEroder, SedDepEroder
from .uniform_precip import PrecipitationDistribution
from .vegetation_ca import VegCA
from .gflex import gFlex
COMPONENTS = [CratersComponent, ChiFinder, LinearDiffuser, FireGenerator,
Flexure, AccumFlow, FlowRouter, DepressionFinderAndRouter,
Glacier, PerronNLDiffuse, OverlandFlowBates,
OverlandFlow, PotentialEvapotranspiration,
PotentialityFlowRouter, Radiation, SurfaceFlowTransport,
Vegetation, SinkFiller, SoilMoisture,
StreamPowerEroder, FastscapeEroder, SedDepEroder,
SteepnessFinder,
PrecipitationDistribution, VegCA, gFlex]
__all__ = [cls.__name__ for cls in COMPONENTS]
| from .craters import CratersComponent
from .chi_index import ChiFinder
from .diffusion import LinearDiffuser
from .fire_generator import FireGenerator
from .flexure import Flexure
from .flow_accum import AccumFlow
from .flow_routing import FlowRouter, DepressionFinderAndRouter
from .glacier_thin_ice_model import Glacier
from .nonlinear_diffusion import PerronNLDiffuse
from .overland_flow import OverlandFlowBates, OverlandFlow
from .pet import PotentialEvapotranspiration
from .potentiality_flowrouting import PotentialityFlowRouter
from .radiation import Radiation
from .sed_trp_shallow_flow import SurfaceFlowTransport
from .single_vegetation import Vegetation
from .sink_fill import SinkFiller
from .soil_moisture import SoilMoisture
from .steepness_index import SteepnessFinder
from .stream_power import StreamPowerEroder, FastscapeEroder, SedDepEroder
from .transport_limited_fluvial import TransportLimitedEroder
from .uniform_precip import PrecipitationDistribution
from .vegetation_ca import VegCA
from .gflex import gFlex
COMPONENTS = [CratersComponent, ChiFinder, LinearDiffuser, FireGenerator,
Flexure, AccumFlow, FlowRouter, DepressionFinderAndRouter,
Glacier, PerronNLDiffuse, OverlandFlowBates,
OverlandFlow, PotentialEvapotranspiration,
PotentialityFlowRouter, Radiation, SurfaceFlowTransport,
Vegetation, SinkFiller, SoilMoisture,
StreamPowerEroder, FastscapeEroder, SedDepEroder,
TransportLimitedEroder, SteepnessFinder,
PrecipitationDistribution, VegCA, gFlex]
__all__ = [cls.__name__ for cls in COMPONENTS]
| mit | Python |
85afc6bdae871d9ffe90a50b7b7232938e6f6beb | bump revision number | beni55/gunicorn,pschanely/gunicorn,jamesblunt/gunicorn,urbaniak/gunicorn,wong2/gunicorn,malept/gunicorn,alex/gunicorn,prezi/gunicorn,pschanely/gunicorn,keakon/gunicorn,ccl0326/gunicorn,ccl0326/gunicorn,1stvamp/gunicorn,prezi/gunicorn,jamesblunt/gunicorn,mvaled/gunicorn,mvaled/gunicorn,z-fork/gunicorn,GitHublong/gunicorn,tempbottle/gunicorn,elelianghh/gunicorn,urbaniak/gunicorn,alex/gunicorn,gtrdotmcs/gunicorn,alex/gunicorn,mvaled/gunicorn,WSDC-NITWarangal/gunicorn,1stvamp/gunicorn,wong2/gunicorn,zhoucen/gunicorn,gtrdotmcs/gunicorn,malept/gunicorn,1stvamp/gunicorn,harrisonfeng/gunicorn,wong2/gunicorn,zhoucen/gunicorn,gtrdotmcs/gunicorn,ammaraskar/gunicorn,pschanely/gunicorn,jamesblunt/gunicorn,urbaniak/gunicorn,ephes/gunicorn,tejasmanohar/gunicorn,MrKiven/gunicorn,ccl0326/gunicorn,zhoucen/gunicorn,malept/gunicorn,prezi/gunicorn | gunicorn/__init__.py | gunicorn/__init__.py | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (0, 7, 1)
__version__ = ".".join(map(str, version_info))
| # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (0, 7, 0)
__version__ = ".".join(map(str, version_info))
| mit | Python |
e918ef8240f0f532d10ee3694816b0c9899d3f30 | add tag property to DomainRecord | linode/python-linode-api | linode_api4/objects/domain.py | linode_api4/objects/domain.py | from __future__ import absolute_import
from linode_api4.errors import UnexpectedResponseError
from linode_api4.objects import Base, DerivedBase, Property
class DomainRecord(DerivedBase):
api_endpoint = "/domains/{domain_id}/records/{id}"
derived_url_path = "records"
parent_id_name = "domain_id"
properties = {
'id': Property(identifier=True),
'domain_id': Property(identifier=True),
'type': Property(),
'name': Property(mutable=True, filterable=True),
'target': Property(mutable=True, filterable=True),
'priority': Property(mutable=True),
'weight': Property(mutable=True),
'port': Property(mutable=True),
'service': Property(mutable=True),
'protocol': Property(mutable=True),
'ttl_sec': Property(mutable=True),
'tag': Property(mutable=True),
}
class Domain(Base):
api_endpoint = "/domains/{id}"
properties = {
'id': Property(identifier=True),
'domain': Property(mutable=True, filterable=True),
'group': Property(mutable=True, filterable=True),
'description': Property(mutable=True),
'status': Property(mutable=True),
'soa_email': Property(mutable=True),
'retry_sec': Property(mutable=True),
'master_ips': Property(mutable=True, filterable=True),
'axfr_ips': Property(mutable=True),
'expire_sec': Property(mutable=True),
'refresh_sec': Property(mutable=True),
'ttl_sec': Property(mutable=True),
'records': Property(derived_class=DomainRecord),
'type': Property(mutable=True),
'tags': Property(mutable=True),
}
def record_create(self, record_type, **kwargs):
params = {
"type": record_type,
}
params.update(kwargs)
result = self._client.post("{}/records".format(Domain.api_endpoint), model=self, data=params)
self.invalidate()
if not 'id' in result:
raise UnexpectedResponseError('Unexpected response creating domain record!', json=result)
zr = DomainRecord(self._client, result['id'], self.id, result)
return zr
| from __future__ import absolute_import
from linode_api4.errors import UnexpectedResponseError
from linode_api4.objects import Base, DerivedBase, Property
class DomainRecord(DerivedBase):
api_endpoint = "/domains/{domain_id}/records/{id}"
derived_url_path = "records"
parent_id_name = "domain_id"
properties = {
'id': Property(identifier=True),
'domain_id': Property(identifier=True),
'type': Property(),
'name': Property(mutable=True, filterable=True),
'target': Property(mutable=True, filterable=True),
'priority': Property(mutable=True),
'weight': Property(mutable=True),
'port': Property(mutable=True),
'service': Property(mutable=True),
'protocol': Property(mutable=True),
'ttl_sec': Property(mutable=True),
}
class Domain(Base):
api_endpoint = "/domains/{id}"
properties = {
'id': Property(identifier=True),
'domain': Property(mutable=True, filterable=True),
'group': Property(mutable=True, filterable=True),
'description': Property(mutable=True),
'status': Property(mutable=True),
'soa_email': Property(mutable=True),
'retry_sec': Property(mutable=True),
'master_ips': Property(mutable=True, filterable=True),
'axfr_ips': Property(mutable=True),
'expire_sec': Property(mutable=True),
'refresh_sec': Property(mutable=True),
'ttl_sec': Property(mutable=True),
'records': Property(derived_class=DomainRecord),
'type': Property(mutable=True),
'tags': Property(mutable=True),
}
def record_create(self, record_type, **kwargs):
params = {
"type": record_type,
}
params.update(kwargs)
result = self._client.post("{}/records".format(Domain.api_endpoint), model=self, data=params)
self.invalidate()
if not 'id' in result:
raise UnexpectedResponseError('Unexpected response creating domain record!', json=result)
zr = DomainRecord(self._client, result['id'], self.id, result)
return zr
| bsd-3-clause | Python |
3f9cb658d1b3070534b00e32316284b92748b3e9 | Use roots_approx_equal | rparini/cxroots,rparini/cxroots | cxroots/tests/test_simplerootfinding.py | cxroots/tests/test_simplerootfinding.py | import unittest
import numpy as np
from scipy import pi
from cxroots import Circle, Rectangle, PolarRect
from cxroots.tests.ApproxEqual import roots_approx_equal
class TestRootfindingPolynomial(unittest.TestCase):
def setUp(self):
self.roots = roots = [-1.234, 0, 1+1j, 1-1j, 2.345]
self.multiplicities = [1,1,1,1,1]
self.f = lambda z: (z-roots[0])*(z-roots[1])*(z-roots[2])*(z-roots[3])*(z-roots[4])
self.df = lambda z: (z-roots[1])*(z-roots[2])*(z-roots[3])*(z-roots[4]) + (z-roots[0])*(z-roots[2])*(z-roots[3])*(z-roots[4]) + (z-roots[0])*(z-roots[1])*(z-roots[3])*(z-roots[4]) + (z-roots[0])*(z-roots[1])*(z-roots[2])*(z-roots[4]) + (z-roots[0])*(z-roots[1])*(z-roots[2])*(z-roots[3])
self.Circle = Circle(0,3)
self.Rectangle = Rectangle([-2,2],[-2,2])
self.halfAnnulus = PolarRect(0, [0.5,3], [-pi/2, pi/2])
def test_rootfinding_polynomial_circle_fdf(self):
roots_approx_equal(self.Circle.roots(self.f, self.df), (self.roots, self.multiplicities), decimal=7)
def test_rootfinding_polynomial_circle_f(self):
roots_approx_equal(self.Circle.roots(self.f, self.df), (self.roots, self.multiplicities), decimal=7)
def test_rootfinding_polynomial_rectangle_fdf(self):
roots_approx_equal(self.Rectangle.roots(self.f, self.df), (self.roots[:-1], self.multiplicities[:-1]), decimal=7)
def test_rootfinding_polynomial_rectangle_f(self):
roots_approx_equal(self.Rectangle.roots(self.f, self.df), (self.roots[:-1], self.multiplicities[:-1]), decimal=7)
def test_rootfinding_polynomial_halfAnnulus_fdf(self):
roots_approx_equal(self.halfAnnulus.roots(self.f, self.df), (self.roots[2:], self.multiplicities[2:]), decimal=7)
def test_rootfinding_polynomial_halfAnnulus_f(self):
roots_approx_equal(self.halfAnnulus.roots(self.f, self.df), (self.roots[2:], self.multiplicities[2:]), decimal=7)
if __name__ == '__main__':
unittest.main(verbosity=3)
| import unittest
import numpy as np
from scipy import pi
from cxroots import Circle, Rectangle, PolarRect
from cxroots.tests.SetsApproxEqual import sets_approx_equal
class TestRootfindingPolynomial(unittest.TestCase):
def setUp(self):
self.roots = roots = [-1.234, 0, 1+1j, 1-1j, 2.345]
self.f = lambda z: (z-roots[0])*(z-roots[1])*(z-roots[2])*(z-roots[3])*(z-roots[4])
self.df = lambda z: (z-roots[1])*(z-roots[2])*(z-roots[3])*(z-roots[4]) + (z-roots[0])*(z-roots[2])*(z-roots[3])*(z-roots[4]) + (z-roots[0])*(z-roots[1])*(z-roots[3])*(z-roots[4]) + (z-roots[0])*(z-roots[1])*(z-roots[2])*(z-roots[4]) + (z-roots[0])*(z-roots[1])*(z-roots[2])*(z-roots[3])
self.Circle = Circle(0,3)
self.Rectangle = Rectangle([-2,2],[-2,2])
self.halfAnnulus = PolarRect(0, [0.5,3], [-pi/2, pi/2])
def test_rootfinding_polynomial_circle_fdf(self):
approxRoots, multiplicities = self.Circle.roots(self.f, self.df)
sets_approx_equal(approxRoots, self.roots, decimal=7)
def test_rootfinding_polynomial_circle_f(self):
approxRoots, multiplicities = self.Circle.roots(self.f)
sets_approx_equal(approxRoots, self.roots, decimal=7)
def test_rootfinding_polynomial_rectangle_fdf(self):
approxRoots, multiplicities = self.Rectangle.roots(self.f, self.df)
sets_approx_equal(approxRoots, self.roots[:-1], decimal=7)
def test_rootfinding_polynomial_rectangle_f(self):
approxRoots, multiplicities = self.Rectangle.roots(self.f)
sets_approx_equal(approxRoots, self.roots[:-1], decimal=7)
def test_rootfinding_polynomial_halfAnnulus_fdf(self):
approxRoots, multiplicities = self.halfAnnulus.roots(self.f, self.df)
sets_approx_equal(approxRoots, self.roots[2:], decimal=7)
def test_rootfinding_polynomial_halfAnnulus_f(self):
approxRoots, multiplicities = self.halfAnnulus.roots(self.f)
sets_approx_equal(approxRoots, self.roots[2:], decimal=7)
if __name__ == '__main__':
unittest.main(verbosity=3)
| bsd-3-clause | Python |
fa772e8a4abe0e9fba10720a0ebaadb8d240dbde | allow reading only first n rows in amazon data | Evfro/polara | polara/datasets/amazon.py | polara/datasets/amazon.py | from ast import literal_eval
import gzip
import pandas as pd
def parse_meta(path):
with gzip.open(path, 'rt') as gz:
for line in gz:
yield literal_eval(line)
def get_amazon_data(path=None, meta_path=None, nrows=None):
res = []
if path:
data = pd.read_csv(path, header=None,
names=['userid', 'asin', 'rating', 'timestamp'],
usecols=['userid', 'asin', 'rating'],
nrows=nrows)
res.append(data)
if meta_path:
meta = pd.DataFrame.from_records(parse_meta(meta_path), nrows=nrows)
res.append(meta)
if len(res) == 1:
res = res[0]
return res
| from ast import literal_eval
import gzip
import pandas as pd
def parse_meta(path):
with gzip.open(path, 'rt') as gz:
for line in gz:
yield literal_eval(line)
def get_amazon_data(path=None, meta_path=None):
res = []
if path:
data = pd.read_csv(path, header=None,
names=['userid', 'asin', 'rating', 'timestamp'],
usecols=['userid', 'asin', 'rating'])
res.append(data)
if meta_path:
meta = pd.DataFrame.from_records(parse_meta(meta_path))
res.append(meta)
if len(res) == 1:
res = res[0]
return res
| mit | Python |
dd65ea98f7fa00394a302ac0e5e0bc32b6298277 | Set base | mywaystar/sublime-42-norminette | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Baptiste JAMIN
# Copyright (c) 2016 Baptiste JAMIN
#
# License: MIT
#
"""This module exports the 42Norminette plugin class."""
import shlex
from SublimeLinter.lint import Linter, persist
import sublime
import os
import string
class Norminette(Linter):
"""Provides an interface to norminette."""
syntax = 'c'
executable = 'norminette'
regex = r'''(?xi)
^^(?:(?P<error>Error)|(?P<warning>Warning)) # Error
# Norminette emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:(.+?(?P<line>\d+)))?
(?:(.+?(?P<col>\d+)))?
(?:(?P<message>.+))
'''
line_col_base = (1, 0)
multiline = True
error_stream = util.STREAM_BOTH
selectors = {}
defaults = {}
def split_match(self, match):
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
def cmd(self):
result = self.executable
return result + ' ' + sublime.active_window().active_view().file_name()
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Baptiste JAMIN
# Copyright (c) 2016 Baptiste JAMIN
#
# License: MIT
#
"""This module exports the 42Norminette plugin class."""
import shlex
from SublimeLinter.lint import Linter, persist
import sublime
import os
import string
class Norminette(Linter):
"""Provides an interface to norminette."""
syntax = 'c'
executable = 'norminette'
regex = r'''(?xi)
^^(?:(?P<error>Error)|(?P<warning>Warning)) # Error
# Norminette emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:(.+?(?P<line>\d+)))?
(?:(.+?(?P<col>\d+)))?
(?:(?P<message>.+))
'''
multiline = True
error_stream = util.STREAM_BOTH
selectors = {}
defaults = {}
def split_match(self, match):
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
def cmd(self):
result = self.executable
return result + ' ' + sublime.active_window().active_view().file_name()
| mit | Python |
c545b2cac93e18be11efeacad28bb0d7e0d4bef8 | Reposition W293 | SublimeLinter/SublimeLinter-flake8 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013-2014 Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
#
# License: MIT
#
"""This module exports the Flake8 plugin linter class."""
from SublimeLinter.lint import PythonLinter
class Flake8(PythonLinter):
"""Provides an interface to the flake8 python module/script."""
syntax = ('python', 'python3')
cmd = ('flake8', '--format', 'default', '${args}', '-')
# The following regex marks these pyflakes and pep8 codes as errors.
# All other codes are marked as warnings.
#
# Pyflake Errors:
# - F402 import module from line N shadowed by loop variable
# - F404 future import(s) name after other statements
# - F812 list comprehension redefines name from line N
# - F823 local variable name ... referenced before assignment
# - F831 duplicate argument name in function definition
# - F821 undefined name name
# - F822 undefined name name in __all__
#
# Pep8 Errors:
# - E112 expected an indented block
# - E113 unexpected indentation
# - E901 SyntaxError or IndentationError
# - E902 IOError
# - E999 SyntaxError
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>(?:F(?:40[24]|8(?:12|2[123]|31))|E(?:11[23]|90[12]|999)))|'
r'(?P<warning>\w\d+)) '
r'(?P<message>\'(.*\.)?(?P<near>.+)\' imported but unused|.*)'
)
multiline = True
def split_match(self, match):
"""
Extract and return values from match.
We override this method because sometimes we capture near,
and a column will always override near.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if near:
col = None
return match, line, col, error, warning, message, near
def reposition_match(self, line, col, m, virtual_view):
code = m.error or m.warning
if code in ('W291', 'W293'):
txt = virtual_view.select_line(line).rstrip('\n')
return (line, col, len(txt))
return super().reposition_match(line, col, m, virtual_view)
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013-2014 Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
#
# License: MIT
#
"""This module exports the Flake8 plugin linter class."""
from SublimeLinter.lint import PythonLinter
class Flake8(PythonLinter):
"""Provides an interface to the flake8 python module/script."""
syntax = ('python', 'python3')
cmd = ('flake8', '--format', 'default', '${args}', '-')
# The following regex marks these pyflakes and pep8 codes as errors.
# All other codes are marked as warnings.
#
# Pyflake Errors:
# - F402 import module from line N shadowed by loop variable
# - F404 future import(s) name after other statements
# - F812 list comprehension redefines name from line N
# - F823 local variable name ... referenced before assignment
# - F831 duplicate argument name in function definition
# - F821 undefined name name
# - F822 undefined name name in __all__
#
# Pep8 Errors:
# - E112 expected an indented block
# - E113 unexpected indentation
# - E901 SyntaxError or IndentationError
# - E902 IOError
# - E999 SyntaxError
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>(?:F(?:40[24]|8(?:12|2[123]|31))|E(?:11[23]|90[12]|999)))|'
r'(?P<warning>\w\d+)) '
r'(?P<message>\'(.*\.)?(?P<near>.+)\' imported but unused|.*)'
)
multiline = True
def split_match(self, match):
"""
Extract and return values from match.
We override this method because sometimes we capture near,
and a column will always override near.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if near:
col = None
return match, line, col, error, warning, message, near
def reposition_match(self, line, col, m, vv):
code = m.error or m.warning
if code == 'W291':
start, end = vv.full_line(line)
return (line, col, end - start - 1)
return super().reposition_match(line, col, m, vv)
| mit | Python |
564bca1e051f6b1cc068d1dd53de55fcf4dc7c6f | Use SublimeLinter-javac as a base | jawshooah/SublimeLinter-contrib-scalac | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = 'scala'
executable = 'scalac'
version_args = '-version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.11'
regex = (
r'^(?P<file>.+?):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning)): '
r'(?:\[.+?\] )?(?P<message>[^\r\n]+)\r?\n'
r'[^\r\n]+\r?\n'
r'(?P<col>[^\^]*)\^'
)
multiline = True
# line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
# selectors = {}
# word_re = None
defaults = {
'lint': ''
}
inline_settings = 'lint'
# inline_overrides = None
comment_re = r'\s*/[/*]'
def cmd(self):
"""
Return the command line to execute.
We override this because we have to munge the -Xlint argument
based on the 'lint' setting.
"""
xlint = '-Xlint'
settings = self.get_view_settings()
options = settings.get('lint')
if options:
xlint += ':' + options
return (self.executable_path, xlint, '-encoding', 'UTF8', '*')
def split_match(self, match):
"""
Return the components of the match.
We override this because scalac lints all referenced files,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
match = None
return super().split_match(match) | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = ''
cmd = 'scalac'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r''
multiline = False
line_col_base = (1, 1)
tempfile_suffix = None
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None
| mit | Python |
1262366714500b34a943ae5c481f6489d7c44563 | Rename environment variable to match slim-lint | elstgav/SublimeLinter-slim-lint | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Gavin Elster
# Copyright (c) 2015 Gavin Elster
#
# License: MIT
#
"""This module exports the SlimLint plugin class."""
import os
from SublimeLinter.lint import RubyLinter, util
class SlimLint(RubyLinter):
"""Provides an interface to slim-lint."""
syntax = 'ruby slim'
cmd = 'slim-lint'
tempfile_suffix = '-'
config_file = ('--config', '.slim-lint.yml', '~')
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ' >= 0.4.0'
regex = (
r'^.+?:(?P<line>\d+) '
r'(?:(?P<error>\[E\])|(?P<warning>\[W\])) '
r'(?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)'
)
def build_args(self, settings):
"""
Return a list of args to add to cls.cmd.
We hook into this method to find the rubocop config and set it as an
environment variable for the rubocop linter to pick up.
"""
if self.filename:
config = util.find_file(
os.path.dirname(self.filename),
'.rubocop.yml',
aux_dirs='~'
)
if config:
self.env["SLIM_LINT_RUBOCOP_CONF"] = config
return super().build_args(settings)
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Gavin Elster
# Copyright (c) 2015 Gavin Elster
#
# License: MIT
#
"""This module exports the SlimLint plugin class."""
import os
from SublimeLinter.lint import RubyLinter, util
class SlimLint(RubyLinter):
"""Provides an interface to slim-lint."""
syntax = 'ruby slim'
cmd = 'slim-lint'
tempfile_suffix = '-'
config_file = ('--config', '.slim-lint.yml', '~')
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ' >= 0.4.0'
regex = (
r'^.+?:(?P<line>\d+) '
r'(?:(?P<error>\[E\])|(?P<warning>\[W\])) '
r'(?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)'
)
def build_args(self, settings):
"""
Return a list of args to add to cls.cmd.
We hook into this method to find the rubocop config and set it as an
environment variable for the rubocop linter to pick up.
"""
if self.filename:
config = util.find_file(
os.path.dirname(self.filename),
'.rubocop.yml',
aux_dirs='~'
)
if config:
self.env["RUBOCOP_CONFIG"] = config
return super().build_args(settings)
| mit | Python |
628d65a15b5c51cb7d4a68e1e6babc01a712a538 | Add initial file for GUI | cgsheeh/SFWR3XA3_Redevelopment,cgsheeh/SFWR3XA3_Redevelopment | src/redevbazaar.py | src/redevbazaar.py | import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
self.mainmenu = [
{ 'Home': [
{'My Listings': self.__getlistingsEvent},
'-',
{'My Orders': self.__getordersEvent},
{'Settings': self.__getsettingsEvent}
]
},
{ 'Messages': [
{'Inbox': self.__getinboxEvent},
{'Send a message': self.__sendmessageEvent},
{'Sent Messages': self.__getoutboxEvent}
]
}
]
def __getlistingsEvent(self):
print "hey"
def __getordersEvent(self):
print "hey"
def __getsettingsEvent(self):
print "hey"
def __getinboxEvent(self):
print "hey"
def __sendmessageEvent(self):
print "hey"
def __getoutboxEvent(self):
print "hey"
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen) | import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen) | mit | Python |
e94f66f74f2f93f5d9b8f359a709000a3f529b36 | add mp image url | MashSoftware/petitions,MashSoftware/petitions,MashSoftware/petitions | application/routes.py | application/routes.py | import json
import requests
from application import app
from application.utils import get_mp, constituency_extent, constituency_collection
from flask import render_template, request
from operator import itemgetter
@app.route('/', methods=["GET"])
def index():
return render_template('index.html')
@app.route('/petitions', methods=["GET"])
def petitions():
args = request.args.items()
url = 'https://petition.parliament.uk/petitions.json?' + ''.join("%s=%s&" % tup for tup in args)
page = request.args.get('page')
r = requests.get(url)
data = json.loads(r.text)
return render_template('petitions.html', data=data, page=page, args=args)
@app.route('/petitions/<id>', methods=["GET"])
def petition(id):
url = 'https://petition.parliament.uk/petitions/' + id + '.json'
r = requests.get(url)
data = json.loads(r.text)
countries = data['data']['attributes']['signatures_by_country']
sorted_countries = sorted(countries, key=itemgetter('signature_count'), reverse=True)
constituencies = data['data']['attributes']['signatures_by_constituency']
sorted_constituencies = sorted(constituencies, key=itemgetter('signature_count'), reverse=True)
for constituency in sorted_constituencies [:10]:
mp = get_mp(constituency['name'])
constituency['party'] = mp['party']
constituency['url'] = mp['url']
if 'image' in mp:
constituency['mp_image'] = mp['image']
extents = constituency_collection(sorted_constituencies)
return render_template('petition.html',
data=data,
countries=sorted_countries,
constituencies=sorted_constituencies,
extents=extents)
@app.route('/petitions/<id>/map', methods=["GET"])
def map(id):
url = 'https://petition.parliament.uk/petitions/' + id + '.json'
r = requests.get(url)
data = json.loads(r.text)
constituencies = data['data']['attributes']['signatures_by_constituency']
sorted_constituencies = sorted(constituencies, key=itemgetter('signature_count'), reverse=True)
for constituency in sorted_constituencies [:10]:
mp = get_mp(constituency['name'])
constituency['party'] = mp['party']
constituency['url'] = mp['url']
extents = constituency_collection(sorted_constituencies)
return render_template('map.html',
extents=extents)
| import json
import requests
from application import app
from application.utils import get_mp, constituency_extent, constituency_collection
from flask import render_template, request
from operator import itemgetter
@app.route('/', methods=["GET"])
def index():
return render_template('index.html')
@app.route('/petitions', methods=["GET"])
def petitions():
args = request.args.items()
url = 'https://petition.parliament.uk/petitions.json?' + ''.join("%s=%s&" % tup for tup in args)
page = request.args.get('page')
r = requests.get(url)
data = json.loads(r.text)
return render_template('petitions.html', data=data, page=page, args=args)
@app.route('/petitions/<id>', methods=["GET"])
def petition(id):
url = 'https://petition.parliament.uk/petitions/' + id + '.json'
r = requests.get(url)
data = json.loads(r.text)
countries = data['data']['attributes']['signatures_by_country']
sorted_countries = sorted(countries, key=itemgetter('signature_count'), reverse=True)
constituencies = data['data']['attributes']['signatures_by_constituency']
sorted_constituencies = sorted(constituencies, key=itemgetter('signature_count'), reverse=True)
for constituency in sorted_constituencies [:10]:
mp = get_mp(constituency['name'])
constituency['party'] = mp['party']
constituency['url'] = mp['url']
extents = constituency_collection(sorted_constituencies)
print sorted_countries
return render_template('petition.html',
data=data,
countries=sorted_countries,
constituencies=sorted_constituencies,
extents=extents)
@app.route('/petitions/<id>/map', methods=["GET"])
def map(id):
url = 'https://petition.parliament.uk/petitions/' + id + '.json'
r = requests.get(url)
data = json.loads(r.text)
constituencies = data['data']['attributes']['signatures_by_constituency']
sorted_constituencies = sorted(constituencies, key=itemgetter('signature_count'), reverse=True)
for constituency in sorted_constituencies [:10]:
mp = get_mp(constituency['name'])
constituency['party'] = mp['party']
constituency['url'] = mp['url']
extents = constituency_collection(sorted_constituencies)
return render_template('map.html',
extents=extents)
| mit | Python |
2724a345c4a722f493425e2853e3a4ba3cbff8ff | Add a spot for the city. | lectroidmarc/SacTraffic,lectroidmarc/SacTraffic | appengine/models.py | appengine/models.py | """Model classes for SacTraffic."""
from datetime import datetime, timedelta
from google.appengine.ext import db
from google.appengine.api import memcache
class CHPData(db.Model):
"""Holds the last successful CHP data fetch."""
data = db.BlobProperty(required=True)
updated = db.DateTimeProperty(auto_now=True)
def put(self):
"""Stick the updated date into memcache on put()."""
memcache.set("%s-updated" % self.key().id_or_name(), self.updated)
db.Model.put(self)
class CHPIncident(db.Model):
"""Represents a CHP Incident."""
CenterID = db.StringProperty(required=True)
DispatchID = db.StringProperty(required=True)
LogID = db.StringProperty(required=True)
LogTime = db.DateTimeProperty()
LogType = db.StringProperty()
LogTypeID = db.StringProperty()
Location = db.StringProperty()
Area = db.StringProperty()
ThomasBrothers = db.StringProperty()
TBXY = db.StringProperty()
LogDetails = db.BlobProperty()
geolocation = db.GeoPtProperty()
city = db.StringProperty()
updated = db.DateTimeProperty(auto_now=True)
@property
def status(self):
if self.LogTime > datetime.utcnow() - timedelta(minutes=5):
# less than 5 min old == new
return 'new'
chp_data_last_updated = memcache.get("chp_data-updated")
if chp_data_last_updated is None:
chp_data = CHPData.get_by_key_name("chp_data")
memcache.add("chp_data-updated", chp_data.updated)
chp_data_last_updated = chp_data.updated
if self.updated < chp_data_last_updated - timedelta(minutes=15):
# not updated w/in 15 min of the last successful update == inactive
# 15 min assumes 3 misses on a 5 min cron cycle.
return 'inactive'
# what's left... active
return 'active'
class Camera(db.Model):
"""Represents a live camera."""
name = db.StringProperty()
url = db.LinkProperty()
geolocation = db.GeoPtProperty()
width = db.IntegerProperty()
height = db.IntegerProperty()
is_online = db.BooleanProperty()
updated = db.DateTimeProperty(auto_now=True)
| """Model classes for SacTraffic."""
from datetime import datetime, timedelta
from google.appengine.ext import db
from google.appengine.api import memcache
class CHPData(db.Model):
"""Holds the last successful CHP data fetch."""
data = db.BlobProperty(required=True)
updated = db.DateTimeProperty(auto_now=True)
def put(self):
"""Stick the updated date into memcache on put()."""
memcache.set("%s-updated" % self.key().id_or_name(), self.updated)
db.Model.put(self)
class CHPIncident(db.Model):
"""Represents a CHP Incident."""
CenterID = db.StringProperty(required=True)
DispatchID = db.StringProperty(required=True)
LogID = db.StringProperty(required=True)
LogTime = db.DateTimeProperty()
LogType = db.StringProperty()
LogTypeID = db.StringProperty()
Location = db.StringProperty()
Area = db.StringProperty()
ThomasBrothers = db.StringProperty()
TBXY = db.StringProperty()
LogDetails = db.BlobProperty()
geolocation = db.GeoPtProperty()
updated = db.DateTimeProperty(auto_now=True)
@property
def status(self):
if self.LogTime > datetime.utcnow() - timedelta(minutes=5):
# less than 5 min old == new
return 'new'
chp_data_last_updated = memcache.get("chp_data-updated")
if chp_data_last_updated is None:
chp_data = CHPData.get_by_key_name("chp_data")
memcache.add("chp_data-updated", chp_data.updated)
chp_data_last_updated = chp_data.updated
if self.updated < chp_data_last_updated - timedelta(minutes=15):
# not updated w/in 15 min of the last successful update == inactive
# 15 min assumes 3 misses on a 5 min cron cycle.
return 'inactive'
# what's left... active
return 'active'
class Camera(db.Model):
"""Represents a live camera."""
name = db.StringProperty()
url = db.LinkProperty()
geolocation = db.GeoPtProperty()
width = db.IntegerProperty()
height = db.IntegerProperty()
is_online = db.BooleanProperty()
updated = db.DateTimeProperty(auto_now=True)
| isc | Python |
1f5930b4ac9b91562768c01c8ff32ad6ceaa721e | Add docstring to app_engine_config.py | tstillwell2k17/GAE-BLOGAPP-MULTI_USER-DEMO,tstillwell2k17/GAE-BLOGAPP-MULTI_USER-DEMO,tstillwell2k17/GAE-BLOGAPP-MULTI_USER-DEMO | appengine_config.py | appengine_config.py | """ This module is used to add any third party libraries to
the project codebase. See the libs folder and the README file.
This file is parsed when deploying the application or running the
local development server.
"""
# appengine_config.py
from google.appengine.ext import vendor
# Add any libraries install in the "libs" folder.
vendor.add('libs')
| # appengine_config.py
from google.appengine.ext import vendor
# Add any libraries install in the "libs" folder.
vendor.add('libs')
| mit | Python |
579b1a2052959edc853bc8cfbbddd60c5c70a196 | use redis to lock updates | oremj/freddo | apps/updater/tasks.py | apps/updater/tasks.py | import json
from subprocess import Popen, PIPE, STDOUT
from celery.task import task
from django.conf import settings
import redisutils
def run(script):
p = Popen(script, stdout=PIPE, stderr=STDOUT, shell=True)
out, err = p.communicate()
return p.returncode, out, err
@task(ignore_result=True)
def update_app(name, payload):
redis = redisutils.connections['master']
lock = redis.lock('updater.%s' % name, timeout=300)
try:
app_config = settings.APP_UPDATERS[name]
except KeyError:
return
if not lock.acquire(False):
update_app.retry()
return
script = app_config['script']
outfile = app_config.get('outfile')
l = update_app.get_logger()
l.info("Running: %s" % script)
redis.publish('update.%s' % name, json.dumps(payload))
rv = run(script)
if outfile:
with open(outfile, 'w') as f:
f.write(rv[1])
redis.publish('update.%s' % name, json.dumps(rv + (payload,)))
l.info("Finished updating.")
lock.release()
| import json
from subprocess import Popen, PIPE, STDOUT
from celery.task import task
from django.conf import settings
import redisutils
def run(script):
p = Popen(script, stdout=PIPE, stderr=STDOUT, shell=True)
out, err = p.communicate()
return p.returncode, out, err
@task(ignore_result=True)
def update_app(name, payload):
redis = redisutils.connections['master']
try:
app_config = settings.APP_UPDATERS[name]
except KeyError:
return
script = app_config['script']
outfile = app_config.get('outfile')
l = update_app.get_logger()
l.info("Running: %s" % script)
redis.publish('update.%s' % name, json.dumps(payload))
rv = run(script)
if outfile:
with open(outfile, 'w') as f:
f.write(rv[1])
redis.publish('update.%s' % name, json.dumps(rv + (payload,)))
l.info("Finished updating.")
| mpl-2.0 | Python |
7fc57f554dc199564cd66257078a9d67180a61f6 | Add a match word. | shinshin86/little-magnifying-py-glass,shinshin86/little-magnifying-py-glass | word_match.py | word_match.py | # -*- coding: utf-8 -*-
import re
def word_check(check_word):
# Not process case
# => TODO this function
if check_word == "。":
return True
elif check_word == "、":
return True
elif check_word == " ":
return True
elif check_word == "":
return True
elif check_word == "の":
return True
elif check_word == "・":
return True
elif check_word == "!":
return True
elif check_word == "?":
return True
return False
| # -*- coding: utf-8 -*-
import re
def word_check(check_word):
# Not process case
# => TODO this function
if check_word == "。":
return True
elif check_word == "、":
return True
elif check_word == " ":
return True
elif check_word == "":
return True
elif check_word == "の":
return True
else:
return False
| mit | Python |
8b331a126d15665e22004304eb8c8b34e248b736 | Add images to articles | mfitzp/django-golifescience | apps/blog/models.py | apps/blog/models.py | import os.path
import datetime
# Django
from django.conf import settings
from django.db import models
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse as django_reverse
# Externals
from taggit.models import Tag
from taggit.managers import TaggableManager
from autoslug.fields import AutoSlugField
from subdomains.utils import reverse
# Methodmint
from core.actions import object_saved
def article_file_path(instance=None, filename=None):
return os.path.join('article', str(instance.id), filename)
# Blog article
class Article(models.Model):
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('article',kwargs={'article_id':str(self.id),'article_slug':str(self.slug)},
subdomain=None)
def get_absolute_path(self):
return django_reverse('article',kwargs={'article_id':str(self.id)})
title = models.CharField('Title', max_length = 80, blank = False)
tagline = models.CharField('Tagline', max_length = 200, blank = False)
slug = AutoSlugField(populate_from='title')
image = ThumbnailerImageField(max_length=255, upload_to=article_file_path, blank=True)
content = models.TextField(blank = True)
tags = TaggableManager() #through=TaggedArticle)
created_by = models.ForeignKey(User, related_name='authored_articles') # Author originally submitted article
edited_by = models.ForeignKey(User, related_name='edited_articles', blank=True, null=True) # Author of current version/sets
objects = models.Manager()
created_at = models.DateTimeField(auto_now_add = True)
updated_at = models.DateTimeField(auto_now = True)
class Meta:
ordering = ['created_at']
# Action Stream
post_save.connect(object_saved, sender=Article)
| import os.path
import datetime
# Django
from django.conf import settings
from django.db import models
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse as django_reverse
# Externals
from taggit.models import Tag
from taggit.managers import TaggableManager
from autoslug.fields import AutoSlugField
from subdomains.utils import reverse
# Methodmint
from core.actions import object_saved
# Blog article
class Article(models.Model):
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('article',kwargs={'article_id':str(self.id),'article_slug':str(self.slug)},
subdomain=None)
def get_absolute_path(self):
return django_reverse('article',kwargs={'article_id':str(self.id)})
title = models.CharField('Title', max_length = 80, blank = False)
tagline = models.CharField('Tagline', max_length = 200, blank = False)
slug = AutoSlugField(populate_from='title')
content = models.TextField(blank = True)
tags = TaggableManager() #through=TaggedArticle)
created_by = models.ForeignKey(User, related_name='authored_articles') # Author originally submitted article
edited_by = models.ForeignKey(User, related_name='edited_articles', blank=True, null=True) # Author of current version/sets
objects = models.Manager()
created_at = models.DateTimeField(auto_now_add = True)
updated_at = models.DateTimeField(auto_now = True)
class Meta:
ordering = ['created_at']
# Action Stream
post_save.connect(object_saved, sender=Article)
| bsd-3-clause | Python |
13b602c50f3be62b2a3a8b267ba00b685fc0c7fe | Update data migration with new CPS description | smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning | python/ecep/portal/migrations/0011_auto_20160518_1211.py | python/ecep/portal/migrations/0011_auto_20160518_1211.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Chicago Public Schools early childhood school based preschool programs work to ensure children ages 3 and 4 years old, particularly those most in need, have access to high-quality programs. Schools are committed to creating an engaging, developmentally appropriate learning environment that supports and respects the unique potential of each individual child through best professional practices, parent engagement, and community involvement.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Visit a child-friendly location near you:</p><ul><li><strong>Loop</strong> 42 W. Madison Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Colman</strong> 4655 S. Dearborn Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Hall Mall</strong> 4638 W. Diversey Avenue Hours 8:00 AM - 5:00 PM</li></ul><p>All sites are open until 7:00 PM on Wednesdays!</p><p>Many people find it helpful to make a plan to visit. You can make your plan <a href="/static/files/enrollment-plan-cps.pdf">here</a>.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
| mit | Python |
a66a6bb3f4ba6db11a762e44621c6752b4f9f5ca | Fix for Window.onError not using $pyjs | pombredanne/pyjs,lovelysystems/pyjamas,lovelysystems/pyjamas,anandology/pyjamas,lancezlin/pyjs,andreyvit/pyjamas,minghuascode/pyj,minghuascode/pyj,Hasimir/pyjs,anandology/pyjamas,certik/pyjamas,spaceone/pyjs,gpitel/pyjs,Hasimir/pyjs,lovelysystems/pyjamas,gpitel/pyjs,Hasimir/pyjs,pyjs/pyjs,minghuascode/pyj,Hasimir/pyjs,certik/pyjamas,gpitel/pyjs,anandology/pyjamas,andreyvit/pyjamas,anandology/pyjamas,minghuascode/pyj,spaceone/pyjs,lovelysystems/pyjamas,pombredanne/pyjs,lancezlin/pyjs,andreyvit/pyjamas,certik/pyjamas,pombredanne/pyjs,spaceone/pyjs,certik/pyjamas,lancezlin/pyjs,pyjs/pyjs,andreyvit/pyjamas,pyjs/pyjs,lancezlin/pyjs,pombredanne/pyjs,pyjs/pyjs,gpitel/pyjs,spaceone/pyjs | library/pyjamas/platform/WindowPyJS.py | library/pyjamas/platform/WindowPyJS.py |
def setOnError(onError):
if (not callable(onError)):
raise TypeError("object is not callable")
JS("""\
$wnd.onerror=function(msg, url, linenumber){
return onError(msg, url, linenumber);
}
""")
def onError(msg, url, linenumber):
dialog=doc().createElement("div")
dialog.className='errordialog'
# Note: $pyjs.trackstack is a global javascript array
tracestr = sys.trackstackstr(JS("$pyjs.trackstack.slice(0,-1)"))
tracestr = tracestr.replace("\n", "<br />\n ")
dialog.innerHTML=' <b style="color:red">JavaScript Error: </b>' + \
msg +' at line number ' + linenumber +'. Please inform webmaster.' + \
'<br /> ' + tracestr
doc().body.appendChild(dialog)
return True
def alert(msg):
wnd().alert(msg)
def confirm(msg):
return wnd().confirm(msg)
def prompt(msg, defaultReply=""):
return wnd().prompt(msg, defaultReply)
def init_listeners():
global closingListeners
global resizeListeners
if not closingListeners:
closingListeners = []
if not resizeListeners:
resizeListeners = []
def init():
global onError
init_listeners()
JS("""
$wnd.__pygwt_initHandlers(
function() {
Window.onResize();
},
function() {
return Window.onClosing();
},
function() {
Window.onClosed();
/*$wnd.onresize = null;
$wnd.onbeforeclose = null;
$wnd.onclose = null;*/
}
);
""")
setOnError(onError)
|
def setOnError(onError):
if (not callable(onError)):
raise TypeError("object is not callable")
JS("""\
$wnd.onerror=function(msg, url, linenumber){
return onError(msg, url, linenumber);
}
""")
def onError(msg, url, linenumber):
dialog=JS("""$doc.createElement("div")""")
dialog.className='errordialog'
# Note: trackstackstr is a global javascript array
tracestr = sys.trackstackstr(trackstack.slice(0,-1))
tracestr = tracestr.replace("\n", "<br />\n ")
dialog.innerHTML=' <b style="color:red">JavaScript Error: </b>' + \
msg +' at line number ' + linenumber +'. Please inform webmaster.' + \
'<br /> ' + tracestr
JS("""$doc.body.appendChild(dialog)""")
return True
def alert(msg):
wnd().alert(msg)
def confirm(msg):
return wnd().confirm(msg)
def prompt(msg, defaultReply=""):
return wnd().prompt(msg, defaultReply)
def init_listeners():
global closingListeners
global resizeListeners
if not closingListeners:
closingListeners = []
if not resizeListeners:
resizeListeners = []
def init():
global onError
init_listeners()
JS("""
$wnd.__pygwt_initHandlers(
function() {
Window.onResize();
},
function() {
return Window.onClosing();
},
function() {
Window.onClosed();
/*$wnd.onresize = null;
$wnd.onbeforeclose = null;
$wnd.onclose = null;*/
}
);
""")
setOnError(onError)
| apache-2.0 | Python |
0ff8211cd69d3c957c09e95f612eb842f1edb335 | add comment | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft | tests/syft/lib/numpy/array_test.py | tests/syft/lib/numpy/array_test.py | # third party
import numpy as np
import pytest
# syft absolute
import syft as sy
ExampleArray = [
np.array([1, 2, -3], dtype=np.int8),
np.array([1, 2, -3], dtype=np.int16),
np.array([1, 2, -3], dtype=np.int32),
np.array([1, 2, -3], dtype=np.int64),
np.array([1, 2, 3], dtype=np.uint8),
# np.array([1, 2, 3], dtype=np.uint16),
# np.array([1, 2, 3], dtype=np.uint32),
# np.array([1, 2, 3], dtype=np.uint64),
np.array([1.2, 2.2, 3.0], dtype=np.float16),
np.array([1.2, 2.2, 3.0], dtype=np.float32),
np.array([1.2, 2.2, 3.0], dtype=np.float64),
# np.array([1 + 2j, 3 + 4j, 5 + 0j], dtype=np.complex64),
# np.array([1 + 2j, 3 + 4j, 5 + 0j], dtype=np.complex128),
np.array([True, False, True], dtype=np.bool_),
]
@pytest.mark.vendor(lib="numpy")
def test_remote_numpy_array() -> None:
sy.load("numpy")
vm = sy.VirtualMachine()
client = vm.get_root_client()
for test_array in ExampleArray:
remote_array = test_array.send(client)
received_array = remote_array.get()
assert all(test_array == received_array)
| # third party
import numpy as np
import pytest
# syft absolute
import syft as sy
ExampleArray = [
np.array([1, 2, -3], dtype=np.int8),
np.array([1, 2, -3], dtype=np.int16),
np.array([1, 2, -3], dtype=np.int32),
np.array([1, 2, -3], dtype=np.int64),
np.array([1, 2, 3], dtype=np.uint8),
# np.array([1, 2, 3], dtype=np.uint16),
# np.array([1, 2, 3], dtype=np.uint32),
# np.array([1, 2, 3], dtype=np.uint64),
np.array([1.2, 2.2, 3.0], dtype=np.float16),
np.array([1.2, 2.2, 3.0], dtype=np.float32),
np.array([1.2, 2.2, 3.0], dtype=np.float64),
# np.array([1 + 2j, 3 + 4j, 5 + 0j], dtype=np.complex64),
np.array([True, False, True], dtype=np.bool_),
]
@pytest.mark.vendor(lib="numpy")
def test_remote_numpy_array() -> None:
sy.load("numpy")
vm = sy.VirtualMachine()
client = vm.get_root_client()
for test_array in ExampleArray:
remote_array = test_array.send(client)
received_array = remote_array.get()
assert all(test_array == received_array)
| apache-2.0 | Python |
c2b1ba76beebef7030ee1a9a063db95cd843674f | Update venv | snipsco/snipsskills,snipsco/snipsskills,snipsco/snipsskills,snipsco/snipsskills | snipsskills/commands/install/skill.py | snipsskills/commands/install/skill.py | # -*-: coding utf-8 -*-
import os
from ..base import Base
from ...utils.pip_installer import PipInstaller
from snipsskillscore import pretty_printer as pp
class SkillInstallerException(Exception):
pass
class SkillInstallerWarning(Exception):
pass
class SkillInstaller(Base):
def run(self, force_download=False):
url_or_pip = self.options['<skill_url>']
try:
SkillInstaller.install(url_or_pip, force_download=force_download)
except SkillInstallerWarning as e:
pp.pwarning(str(e))
except Exception as e:
pp.perror(str(e))
@staticmethod
def install(url_or_pip, force_download=False):
message = pp.ConsoleMessage("Installing skill: $GREEN{}$RESET".format(url_or_pip))
message.start()
# try:
PipInstaller.install(url_or_pip, force_download=force_download)
message.done()
# except Exception as e:
# message.error()
# raise SkillInstallerWarning("Error installing skill {}: make sure you have the required access rights, and that the module is available".format(url_or_pip))
| # -*-: coding utf-8 -*-
import os
from ..base import Base
from ...utils.pip_installer import PipInstaller
from snipsskillscore import pretty_printer as pp
class SkillInstallerException(Exception):
pass
class SkillInstallerWarning(Exception):
pass
class SkillInstaller(Base):
def run(self, force_download=False):
url_or_pip = self.options['<skill_url>']
try:
SkillInstaller.install(url_or_pip, force_download=force_download)
except SkillInstallerWarning as e:
pp.pwarning(str(e))
except Exception as e:
pp.perror(str(e))
@staticmethod
def install(url_or_pip, force_download=False):
message = pp.ConsoleMessage("Installing skill: $GREEN{}$RESET".format(url_or_pip))
message.start()
try:
PipInstaller.install(url_or_pip, force_download=force_download)
message.done()
except Exception as e:
message.error()
raise SkillInstallerWarning("Error installing skill {}: make sure you have the required access rights, and that the module is available".format(url_or_pip))
| mit | Python |
ee90073152bb3e147fa619d91d3cb74ee7bc47e9 | Add dequeue method | derekmpham/interview-prep,derekmpham/interview-prep | array/create-queue.py | array/create-queue.py | # Create queue using a list
class Queue:
def __init__(self, head=None):
self.storage = [head]
def enqueue(self, new_element):
self.storage.append(new_element)
def dequeue(self):
return self.storage.pop(0)
| # Create queue using a list
class Queue:
def __init__(self, head=None):
self.storage = [head]
def enqueue(self, new_element):
self.storage.append(new_element)
| mit | Python |
6f94526a2cbc5acbc60e869c2cce88707df6ada5 | update after StructIntf implementation | Nic30/hwtLib,Nic30/hwtLib | hwtLib/samples/iLvl/axi/simpleAxiRegs.py | hwtLib/samples/iLvl/axi/simpleAxiRegs.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from hwt.code import If, connect
from hwt.hdlObjects.typeShortcuts import vecT
from hwt.interfaces.utils import addClkRstn, propagateClkRstn
from hwt.synthesizer.interfaceLevel.unit import Unit
from hwt.synthesizer.param import Param
from hwtLib.amba.axiLite import AxiLite
from hwtLib.amba.axiLite_comp.endpoint import AxiLiteEndpoint
from hwt.hdlObjects.types.struct import HStruct
from hwtLib.types.ctypes import uint32_t
class SimpleAxiRegs(Unit):
"""
Axi litle mapped registers example,
0x0 - reg0
0x4 - reg1
"""
def _config(self):
self.ADDR_WIDTH = Param(8)
self.DATA_WIDTH = Param(32)
def _declr(self):
addClkRstn(self)
with self._paramsShared():
self.axi = AxiLite()
with self._paramsShared():
# this structure is configuration of interfaces
# fields can also be arrays and metaclass can be used
# to specify field interface and R/W access to field
self.conv = AxiLiteEndpoint(
HStruct((uint32_t, "reg0"),
(uint32_t, "reg1")
))
def _impl(self):
propagateClkRstn(self)
connect(self.axi, self.conv.bus, fit=True)
reg0 = self._reg("reg0", vecT(32), defVal=0)
reg1 = self._reg("reg1", vecT(32), defVal=1)
conv = self.conv
def connectRegToConveror(convPort, reg):
If(convPort.dout.vld,
reg ** convPort.dout.data
)
convPort.din ** reg
connectRegToConveror(conv.decoded.reg0, reg0)
connectRegToConveror(conv.decoded.reg1, reg1)
if __name__ == "__main__":
from hwt.synthesizer.shortcuts import toRtl
u = SimpleAxiRegs()
print(toRtl(u))
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from hwt.code import If, connect
from hwt.hdlObjects.typeShortcuts import vecT
from hwt.interfaces.utils import addClkRstn, propagateClkRstn
from hwt.synthesizer.interfaceLevel.unit import Unit
from hwt.synthesizer.param import Param
from hwtLib.amba.axiLite import AxiLite
from hwtLib.amba.axiLite_comp.endpoint import AxiLiteEndpoint
from hwt.hdlObjects.types.struct import HStruct
from hwtLib.types.ctypes import uint32_t
class SimpleAxiRegs(Unit):
"""
Axi litle mapped registers example,
0x0 - reg0
0x4 - reg1
"""
def _config(self):
self.ADDR_WIDTH = Param(8)
self.DATA_WIDTH = Param(32)
def _declr(self):
addClkRstn(self)
with self._paramsShared():
self.axi = AxiLite()
with self._paramsShared():
# this structure is configuration of interfaces
# fields can also be arrays and metaclass can be used
# to specify field interface and R/W access to field
self.conv = AxiLiteEndpoint(
HStruct((uint32_t, "reg0"),
(uint32_t, "reg1")
))
def _impl(self):
propagateClkRstn(self)
connect(self.axi, self.conv.bus, fit=True)
reg0 = self._reg("reg0", vecT(32), defVal=0)
reg1 = self._reg("reg1", vecT(32), defVal=1)
conv = self.conv
def connectRegToConveror(convPort, reg):
If(convPort.dout.vld,
reg ** convPort.dout.data
)
convPort.din ** reg
connectRegToConveror(conv.reg0, reg0)
connectRegToConveror(conv.reg1, reg1)
if __name__ == "__main__":
from hwt.synthesizer.shortcuts import toRtl
u = SimpleAxiRegs()
print(toRtl(u))
| mit | Python |
c19a38b0c09172fe74ed92646723a2c36583bb87 | reduce processes. | nbrady-techempower/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sxend/FrameworkBenchmarks,valyala/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,methane/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,valyala/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,doom369/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,jamming/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,joshk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,khellang/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sxend/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,testn/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,doom369/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Verber/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,khellang/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sgml/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,methane/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jamming/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zapov/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,grob/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,grob/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,joshk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,herloct/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,testn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zapov/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,denkab/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zloster/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Verber/FrameworkBenchmarks,methane/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,grob/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,grob/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,testn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,methane/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,grob/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,grob/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,valyala/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,denkab/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,herloct/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,testn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,methane/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,actframework/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,torhve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,grob/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,actframework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,denkab/FrameworkBenchmarks,sxend/FrameworkBenchmarks,denkab/FrameworkBenchmarks,valyala/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,methane/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zloster/FrameworkBenchmarks,valyala/FrameworkBenchmarks,methane/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,khellang/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,grob/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,valyala/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,herloct/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,denkab/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,khellang/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sgml/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,doom369/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sgml/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sgml/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,methane/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Verber/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,torhve/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zapov/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Verber/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,torhve/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,denkab/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,doom369/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zapov/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jamming/FrameworkBenchmarks,actframework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jamming/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jamming/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,methane/FrameworkBenchmarks,khellang/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Verber/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,jamming/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,grob/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,denkab/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,testn/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,actframework/FrameworkBenchmarks,testn/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,denkab/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,herloct/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,joshk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,methane/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,grob/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,denkab/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,grob/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zloster/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,denkab/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,denkab/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks | wsgi/setup.py | wsgi/setup.py | import subprocess
import setup_util
import multiprocessing
import os
bin_dir = os.path.expanduser('~/FrameworkBenchmarks/installs/py2/bin')
NCPU = multiprocessing.cpu_count()
CIRCUS_INI = """\
[watcher:app]
cmd = {BIN}/chaussette --fd=$(circus.sockets.app) --backend=meinheld hello.app
use_sockets = True
numprocesses = {PROCS}
[socket:app]
host = 0.0.0.0
port = 8080
"""
proc = None
def start(args, logfile, errfile):
global proc
subprocess.check_call(bin_dir + "/pip install -r requirements.txt",
cwd="wsgi", stderr=errfile, stdout=logfile, shell=True)
with open("wsgi/circus.ini", "w") as f:
f.write(CIRCUS_INI.format(BIN=bin_dir, PROCS=NCPU*2))
proc = subprocess.Popen([bin_dir + "/circusd", "circus.ini"],
cwd="wsgi", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
global proc
if proc is None:
return 0
proc.terminate()
proc.wait()
proc = None
return 0
| import subprocess
import setup_util
import multiprocessing
import os
bin_dir = os.path.expanduser('~/FrameworkBenchmarks/installs/py2/bin')
NCPU = multiprocessing.cpu_count()
CIRCUS_INI = """\
[watcher:app]
cmd = {BIN}/chaussette --fd=$(circus.sockets.app) --backend=meinheld hello.app
use_sockets = True
numprocesses = {PROCS}
[socket:app]
host = 0.0.0.0
port = 8080
"""
proc = None
def start(args, logfile, errfile):
global proc
subprocess.check_call(bin_dir + "/pip install -r requirements.txt",
cwd="wsgi", stderr=errfile, stdout=logfile, shell=True)
with open("wsgi/circus.ini", "w") as f:
f.write(CIRCUS_INI.format(BIN=bin_dir, PROCS=NCPU*3))
proc = subprocess.Popen([bin_dir + "/circusd", "circus.ini"],
cwd="wsgi", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
global proc
if proc is None:
return 0
proc.terminate()
proc.wait()
proc = None
return 0
| bsd-3-clause | Python |
327fa87b7b6f2ac204312918c27d5437080e600e | make pretty | RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software | soccer/gameplay/positions/position.py | soccer/gameplay/positions/position.py | # See PEP 0563 : Enabling the future behavior in Python 3.7
# In this current version, it is not allowed to have a return
# type of the current class, which has not fully been defined yet
# pylint: disable=no-name-in-module
from __future__ import annotations # type: ignore
import robocup
import single_robot_composite_behavior
import enum
import typing
## Parent class of any position
# Deals with relative positions as well
# as the generic pass options
class Position(single_robot_composite_behavior.SingleRobotCompositeBehavior):
class Type(enum.Enum):
Striker = 0
Midfielder = 1
Defender = 2
Goalie = 3
def __init__(self, position_class: enum.Enum, name: str) -> None:
super().__init__(continuous=True)
self._position_class = position_class
self._str_name = name
self._relative_pos = None
self._pass_options = [] # type: typing.List[Position]
# Actual location the controller wants in field XY terms
# None follows the same rules as the `relative_pos`
self.target_pos = None
## What type of position this is
# (Striker/Midfielder/Defender/Goalie etc)
@property
def position_class(self) -> enum.Enum:
return self._position_class
## String name of the position (Left/Right/Center etc)
@property
def str_name(self) -> str:
return self._str_name
## Where the controller wants this position to be
# Set by controller in init and should not be touched
# None when the position is goalie and a relative position
# doesn't make sense
@property
def relative_pos(self) -> robocup.Point:
return self._relative_pos
@relative_pos.setter
def relative_pos(self, pos):
self._relative_pos = pos
## List of other positions that we can pass to
# These are the "triangles" that are naturally formed on the field
# In general, those in this list are the only ones who need to get
# open when I have the ball
# This is sorted from most "forward" option to furthest "back" option
# from left to right in formation
@property
def pass_options(self) -> typing.List[Position]:
return self._pass_options
@pass_options.setter
def pass_options(self, options: typing.List[Position]):
self._pass_options = options
def __str__(self):
desc = super().__str__()
desc += "\n " + self._str_name
return desc
| # See PEP 0563 : Enabling the future behavior in Python 3.7
# In this current version, it is not allowed to have a return
# type of the current class, which has not fully been defined yet
# pylint: disable=no-name-in-module
from __future__ import annotations # type: ignore
import robocup
import single_robot_composite_behavior
import enum
import typing
## Parent class of any position
# Deals with relative positions as well
# as the generic pass options
class Position(single_robot_composite_behavior.SingleRobotCompositeBehavior):
class Type(enum.Enum):
Striker = 0
Midfielder = 1
Defender = 2
Goalie = 3
def __init__(self, position_class: enum.Enum, name: str) -> None:
super().__init__(continuous=True)
self._position_class = position_class
self._str_name = name
self._relative_pos = None
self._pass_options = [] # type: typing.List[Position]
# Actual location the controller wants in field XY terms
# None follows the same rules as the `relative_pos`
self.target_pos = None
## What type of position this is
# (Striker/Midfielder/Defender/Goalie etc)
@property
def position_class(self) -> enum.Enum:
return self._position_class
## String name of the position (Left/Right/Center etc)
@property
def str_name(self) -> str:
return self._str_name
## Where the controller wants this position to be
# Set by controller in init and should not be touched
# None when the position is goalie and a relative position
# doesn't make sense
@property
def relative_pos(self) -> robocup.Point:
return self._relative_pos
@relative_pos.setter
def relative_pos(self, pos):
self._relative_pos = pos
## List of other positions that we can pass to
# These are the "triangles" that are naturally formed on the field
# In general, those in this list are the only ones who need to get
# open when I have the ball
# This is sorted from most "forward" option to furthest "back" option
# from left to right in formation
@property
def pass_options(self) -> typing.List[Position]:
return self._pass_options
@pass_options.setter
def pass_options(self, options: typing.List[Position]):
self._pass_options = options
def __str__(self):
desc = super().__str__()
desc += "\n " + self._str_name
return desc
| apache-2.0 | Python |
a968156d8a96cd4685c3973657b368d8f310ea74 | Add docstring | ironman5366/W.I.L.L,ironman5366/W.I.L.L | basicclient.py | basicclient.py | #An EXTREMELY basic client for W.I.L.L., mainly used for light debugging
import easygui
import urllib
import sys
def main(command):
'''A basic debugging oriented client using easygui'''
command=urllib.urlencode({"command":command})
answer=urllib.urlopen("http://127.0.0.1:5000?context=command&%s"%command).read()
easygui.msgbox(answer)
while True:
command=easygui.enterbox(title="W.I.L.L.", msg="Please enter a command")
if command=="exit":
sys.exit()
elif command==None:
sys.exit()
else:
main(command)
| #An EXTREMELY basic client for W.I.L.L., mainly used for light debugging
import easygui
import urllib
import sys
def main(command):
command=urllib.urlencode({"command":command})
answer=urllib.urlopen("http://127.0.0.1:5000?context=command&%s"%command).read()
easygui.msgbox(answer)
while True:
command=easygui.enterbox(title="W.I.L.L.", msg="Please enter a command")
if command=="exit":
sys.exit()
elif command==None:
sys.exit()
else:
main(command)
| mit | Python |
b286e03f96cce8518dd60b74ff8dac6d7b7c5a97 | Support users with no name | LABHR/octohatrack,glasnt/octohat | octohatrack/helpers.py | octohatrack/helpers.py | #!/usr/bin/env python
import sys
def _sort_by_name(contributor):
if contributor.get('name'):
return contributor['name'].lower()
return contributor['user_name']
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get('name'):
key = user['name']
else:
key = user['user_name']
if key not in seen:
seen.append(key)
if key != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
| #!/usr/bin/env python
import sys
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=lambda k: k['name'].lower()):
if user["name"] not in seen:
seen.append(user["name"])
if user["name"] != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
| bsd-3-clause | Python |
e59118b9f72d060b6386301a984989ee7bb195eb | Remove an unused import | drj11/pypng,drj11/pypng | code/mkiccp.py | code/mkiccp.py | #!/usr/bin/env python
# $URL$
# $Rev$
# Make ICC Profile
# References
#
# [ICC 2001] ICC Specification ICC.1:2001-04 (Profile version 2.4.0)
# [ICC 2004] ICC Specification ICC.1:2004-10 (Profile version 4.2.0.0)
# Local module.
import iccp
def black(m):
"""Return a function that maps all values from [0.0,m] to 0, and maps
the range [m,1.0] into [0.0, 1.0] linearly.
"""
m = float(m)
def f(x):
if x <= m:
return 0.0
return (x-m)/(1.0-m)
return f
# For monochrome input the required tags are (See [ICC 2001] 6.3.1.1):
# profileDescription [ICC 2001] 6.4.32
# grayTRC [ICC 2001] 6.4.19
# mediaWhitePoint [ICC 2001] 6.4.25
# copyright [ICC 2001] 6.4.13
def agreyprofile(out):
it = iccp.Profile().greyInput()
it.addTags(kTRC=black(0.07))
it.write(out)
def main():
import sys
agreyprofile(sys.stdout)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# $URL$
# $Rev$
# Make ICC Profile
# References
#
# [ICC 2001] ICC Specification ICC.1:2001-04 (Profile version 2.4.0)
# [ICC 2004] ICC Specification ICC.1:2004-10 (Profile version 4.2.0.0)
import struct
# Local module.
import iccp
def black(m):
"""Return a function that maps all values from [0.0,m] to 0, and maps
the range [m,1.0] into [0.0, 1.0] linearly.
"""
m = float(m)
def f(x):
if x <= m:
return 0.0
return (x-m)/(1.0-m)
return f
# For monochrome input the required tags are (See [ICC 2001] 6.3.1.1):
# profileDescription [ICC 2001] 6.4.32
# grayTRC [ICC 2001] 6.4.19
# mediaWhitePoint [ICC 2001] 6.4.25
# copyright [ICC 2001] 6.4.13
def agreyprofile(out):
it = iccp.Profile().greyInput()
it.addTags(kTRC=black(0.07))
it.write(out)
def main():
import sys
agreyprofile(sys.stdout)
if __name__ == '__main__':
main()
| mit | Python |
a6bdf63816243b00c9f463b174886538b3705b91 | Update addons version | OCA/l10n-switzerland,OCA/l10n-switzerland | l10n_ch_base_bank/__openerp__.py | l10n_ch_base_bank/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - Bank type',
'summary': 'Types and number validation for swiss electronic pmnt. DTA, ESR',
'description': """
Swiss bank type and fields
==========================
This addons will add different bank types required by specific swiss electronic
payment like DTA and ESR. It allows to manage both Post and Bank systems.
It'll perform some validation when entring bank account number or ESR number
in invoice and add some Swiss specific fields on bank.
This module is required if you want to use electornic payment in Switzerland.
""",
'version': '1.2',
'author': 'Camptocamp',
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'depends': ['account'],
'data': ['bank_view.xml', 'bank_data.xml'],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - Bank type',
'summary': 'Types and number validation for swiss electronic pmnt. DTA, ESR',
'description': """
Swiss bank type and fields
==========================
This addons will add different bank types required by specific swiss electronic
payment like DTA and ESR. It allows to manage both Post and Bank systems.
It'll perform some validation when entring bank account number or ESR number
in invoice and add some Swiss specific fields on bank.
This module is required if you want to use electornic payment in Switzerland.
""",
'version': '1.1',
'author': 'Camptocamp',
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'depends': ['account'],
'data': ['bank_view.xml', 'bank_data.xml'],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
| agpl-3.0 | Python |
1c883c1931dbadc2b786db1d11968908f9b3201f | Fix broken SQL | fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver | src/ctf_gameserver/controller/database.py | src/ctf_gameserver/controller/database.py | from ctf_gameserver.lib.database import transaction_cursor
from ctf_gameserver.lib.date_time import ensure_utc_aware
from ctf_gameserver.lib.exceptions import DBDataError
def get_control_info(db_conn, prohibit_changes=False):
"""
Returns a dictionary containing relevant information about the competion, as stored in the database.
"""
with transaction_cursor(db_conn, prohibit_changes) as cursor:
cursor.execute('SELECT start, "end", tick_duration, current_tick FROM scoring_gamecontrol')
result = cursor.fetchone()
if result is None:
raise DBDataError('Game control information has not been configured')
start, end, duration, tick = result
return {
'start': ensure_utc_aware(start),
'end': ensure_utc_aware(end),
'tick_duration': duration,
'current_tick': tick
}
def increase_tick(db_conn, prohibit_changes=False):
with transaction_cursor(db_conn, prohibit_changes) as cursor:
cursor.execute('UPDATE scoring_gamecontrol SET current_tick = current_tick + 1')
# Create flags for every service and team in the new tick
cursor.execute('INSERT INTO scoring_flag (service_id, protecting_team_id, tick)'
' SELECT service.id, team.user_id, control.current_tick'
' FROM scoring_service service, auth_user, registration_team team,'
' scoring_gamecontrol control'
' WHERE auth_user.id = team.user_id AND auth_user.is_active')
def update_scoring(db_conn):
with transaction_cursor(db_conn) as cursor:
cursor.execute('UPDATE scoring_flag as outerflag'
' SET bonus = 1 / ('
' SELECT greatest(1, count(*))'
' FROM scoring_flag'
' LEFT OUTER JOIN scoring_capture ON scoring_capture.flag_id = scoring_flag.id'
' WHERE scoring_capture.flag_id = outerflag.id)'
' FROM scoring_gamecontrol'
' WHERE outerflag.tick + scoring_gamecontrol.valid_ticks < '
' scoring_gamecontrol.current_tick AND outerflag.bonus IS NULL')
cursor.execute('REFRESH MATERIALIZED VIEW "scoring_scoreboard"')
| from ctf_gameserver.lib.database import transaction_cursor
from ctf_gameserver.lib.date_time import ensure_utc_aware
from ctf_gameserver.lib.exceptions import DBDataError
def get_control_info(db_conn, prohibit_changes=False):
"""
Returns a dictionary containing relevant information about the competion, as stored in the database.
"""
with transaction_cursor(db_conn, prohibit_changes) as cursor:
cursor.execute('SELECT start, "end", tick_duration, current_tick FROM scoring_gamecontrol')
result = cursor.fetchone()
if result is None:
raise DBDataError('Game control information has not been configured')
start, end, duration, tick = result
return {
'start': ensure_utc_aware(start),
'end': ensure_utc_aware(end),
'tick_duration': duration,
'current_tick': tick
}
def increase_tick(db_conn, prohibit_changes=False):
with transaction_cursor(db_conn, prohibit_changes) as cursor:
cursor.execute('UPDATE scoring_gamecontrol SET current_tick = current_tick + 1')
# Create flags for every service and team in the new tick
cursor.execute('INSERT INTO scoring_flag (service_id, protecting_team_id, tick)'
' SELECT service.id, user_id, current_tick'
' FROM scoring_service service, auth_user user, registration_team,'
' scoring_gamecontrol'
' WHERE user.id = user_id AND user.is_active;')
def update_scoring(db_conn):
with transaction_cursor(db_conn) as cursor:
cursor.execute('UPDATE scoring_flag as outerflag'
' SET bonus = 1 / ('
' SELECT greatest(1, count(*))'
' FROM scoring_flag'
' LEFT OUTER JOIN scoring_capture ON scoring_capture.flag_id = scoring_flag.id'
' WHERE scoring_capture.flag_id = outerflag.id)'
' FROM scoring_gamecontrol'
' WHERE outerflag.tick + scoring_gamecontrol.valid_ticks < '
' scoring_gamecontrol.current_tick AND outerflag.bonus IS NULL')
cursor.execute('REFRESH MATERIALIZED VIEW "scoring_scoreboard"')
| isc | Python |
ac2f37f88475998197d619a584824e9c2c30eea1 | Update __init__.py | stiphyMT/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,danforthcenter/plantcv | plantcv/plantcv/transform/__init__.py | plantcv/plantcv/transform/__init__.py | from plantcv.plantcv.transform.color_correction import get_color_matrix
from plantcv.plantcv.transform.color_correction import get_matrix_m
from plantcv.plantcv.transform.color_correction import calc_transformation_matrix
from plantcv.plantcv.transform.color_correction import apply_transformation_matrix
from plantcv.plantcv.transform.color_correction import save_matrix
from plantcv.plantcv.transform.color_correction import load_matrix
from plantcv.plantcv.transform.color_correction import correct_color
from plantcv.plantcv.transform.color_correction import create_color_card_mask
from plantcv.plantcv.transform.color_correction import quick_color_check
from plantcv.plantcv.transform.color_correction import find_color_card
from plantcv.plantcv.transform.rescale import rescale
from plantcv.plantcv.transform.rotate import rotate
from plantcv.plantcv.transform.nonuniform_illumination import nonuniform_illumination
from plantcv.plantcv.transform.resize import resize, resize_factor
from plantcv.plantcv.transform.warp import warp, warp_align
from plantcv.plantcv.transform.gamma_correct import gamma_correct
__all__ = ["get_color_matrix", "get_matrix_m", "calc_transformation_matrix", "apply_transformation_matrix",
"save_matrix", "load_matrix", "correct_color", "create_color_card_mask", "quick_color_check",
"find_color_card", "rescale", "nonuniform_illumination", "resize", "resize_factor",
"warp", "rotate", "warp", "warp_align", "gamma_correct"]
| from plantcv.plantcv.transform.color_correction import get_color_matrix
from plantcv.plantcv.transform.color_correction import get_matrix_m
from plantcv.plantcv.transform.color_correction import calc_transformation_matrix
from plantcv.plantcv.transform.color_correction import apply_transformation_matrix
from plantcv.plantcv.transform.color_correction import save_matrix
from plantcv.plantcv.transform.color_correction import load_matrix
from plantcv.plantcv.transform.color_correction import correct_color
from plantcv.plantcv.transform.color_correction import create_color_card_mask
from plantcv.plantcv.transform.color_correction import quick_color_check
from plantcv.plantcv.transform.color_correction import find_color_card
from plantcv.plantcv.transform.rescale import rescale
from plantcv.plantcv.transform.rotate import rotate
from plantcv.plantcv.transform.nonuniform_illumination import nonuniform_illumination
from plantcv.plantcv.transform.resize import resize, resize_factor
from plantcv.plantcv.transform.warp import warp, warp_align
__all__ = ["get_color_matrix", "get_matrix_m", "calc_transformation_matrix", "apply_transformation_matrix",
"save_matrix", "load_matrix", "correct_color", "create_color_card_mask", "quick_color_check",
"find_color_card", "rescale", "nonuniform_illumination", "resize", "resize_factor",
"warp", "rotate", "warp", "warp_align"]
| mit | Python |
8e747655f08fb56bcdd5b272c2b3a659afe228a9 | Add cors headers to the signing servlet | matrix-org/sydent,matrix-org/sydent,matrix-org/sydent | sydent/http/servlets/blindlysignstuffservlet.py | sydent/http/servlets/blindlysignstuffservlet.py | # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import signedjson.key
import signedjson.sign
from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.servlets import require_args, jsonwrap, send_cors
class BlindlySignStuffServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.server_name = syd.server_name
self.tokenStore = JoinTokenStore(syd)
def render_POST(self, request):
send_cors(request)
err = require_args(request, ("private_key", "token", "mxid"))
if err:
return json.dumps(err)
private_key_base64 = request.args['private_key'][0]
token = request.args['token'][0]
mxid = request.args['mxid'][0]
sender = self.tokenStore.getSenderForToken(token)
if sender is None:
request.setResponseCode(404)
return json.dumps({
"errcode": "M_UNRECOGNIZED",
"error": "Didn't recognized token",
})
to_sign = {
"mxid": mxid,
"sender": sender,
"token": token,
}
try:
private_key = signedjson.key.decode_signing_key_base64(
"ed25519",
"0",
private_key_base64
)
signed = signedjson.sign.sign_json(
to_sign,
self.server_name,
private_key
)
except:
return json.dumps({
"errcode": "M_UNKNOWN",
})
return json.dumps(signed)
@jsonwrap
def render_OPTIONS(self, request):
send_cors(request)
request.setResponseCode(200)
return {}
| # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import signedjson.key
import signedjson.sign
from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.servlets import require_args
class BlindlySignStuffServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.server_name = syd.server_name
self.tokenStore = JoinTokenStore(syd)
def render_POST(self, request):
err = require_args(request, ("private_key", "token", "mxid"))
if err:
return json.dumps(err)
private_key_base64 = request.args['private_key'][0]
token = request.args['token'][0]
mxid = request.args['mxid'][0]
sender = self.tokenStore.getSenderForToken(token)
if sender is None:
request.setResponseCode(404)
return json.dumps({
"errcode": "M_UNRECOGNIZED",
"error": "Didn't recognized token",
})
to_sign = {
"mxid": mxid,
"sender": sender,
"token": token,
}
try:
private_key = signedjson.key.decode_signing_key_base64(
"ed25519",
"0",
private_key_base64
)
signed = signedjson.sign.sign_json(
to_sign,
self.server_name,
private_key
)
except:
return json.dumps({
"errcode": "M_UNKNOWN",
})
return json.dumps(signed)
| apache-2.0 | Python |
d6f0bcaf272592dae046aa2d4fbf0e304da73d8e | fix bug | cloudorz/apple | launch.py | launch.py | # coding: utf-8
'''
'''
import os.path
import tornado.web
import tornado.httpserver
import tornado.database
import tornado.options
import tornado.ioloop
from tornado.options import define, options
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from apps.loud import LoudHandler, LoudSearchHandler, LoudManageHandler
from apps.user import UserHandler, AuthHandler, PasswordHandler, UploadHandler, SendCodeHandler
from utils.coredb import sql_db
# server
define('port', default=8888, help="run on the given port", type=int)
# database
define('db_uri', default="mysql://root:123@localhost/apple?charset=utf8", type=str, help="connect to mysql")
# avatar dir path
define('path', default="/data/web/static/i/", type=str, help="recommend default one")
# app key
define("app_name", default="apple", help="app name")
define("app_key", default="12345678", help="app key")
define("app_secret", default="jkafldjaklfjda978-=-^**&", help="app secret")
# main logic
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r'^/l/(?P<lid>\d+|)$', LoudHandler),
(r'^/l/list$', LoudSearchHandler),
(r'^/l/dels$', LoudManageHandler),
(r'^/u/(?P<phn>\d{11}|)$', UserHandler),
(r"^/u/(?P<phn>\d{11}|)/passwd$", PasswordHandler),
(r"^/auth$", AuthHandler),
(r"^/code$", SendCodeHandler),
(r"^/upload$", UploadHandler),
]
settings = dict(
static_path=os.path.join(os.path.dirname(__file__), 'static'),
xsrf_cookies=False,
cookie_secret='c8f48f9777f411e09fcd109add59054a',
debug=True,
)
super(Application, self).__init__(handlers, **settings)
# sqlalchemy session 'db'
self.db_session = (scoped_session(sessionmaker(autoflush=True, bind=create_engine(options.db_uri))))()
def main():
tornado.options.parse_command_line()
# ssl_options TODO
app = Application()
# init the modual
sql_db.init_app(app)
# server
http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == '__main__':
main()
| # coding: utf-8
'''
'''
import os.path
import tornado.web
import tornado.httpserver
import tornado.database
import tornado.options
import tornado.ioloop
from tornado.options import define, options
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from apps.loud import LoudHandler, LoudSearchHandler, LoudManageHandler
from apps.user import UserHandler, AuthHandler, PasswordHandler, UploadHandler, SendCodeHandler
from utils.coredb import sql_db
# server
define('port', default=8888, help="run on the given port", type=int)
# database
define('db_uri', default="mysql://root:123@localhost/apple?charset=utf8", type=str, help="connect to mysql")
# avatar dir path
define('path', default="/data/web/static/i/", type=str, help="recommend default one")
# app key
define("app_name", default="apple", help="app name")
define("app_key", default="12345678", help="app key")
define("app_secret", default="jkafldjaklfjda978-=-^**&", help="app secret")
# main logic
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r'^/l/(?P<lid>\d+|)$', LoudHandler, {}, 'loud'),
(r'^/l/list$', LoudSearchHandler),
(r'^/l/dels$', LoudManageHandler),
(r'^/u/(?P<phn>\d{11}|)$', UserHandler, {}, 'user'),
(r"^/u/(?P<phn>\d{11}|)/passwd$", PasswordHandler),
(r"^/auth$", AuthHandler),
(r"^/code$", SendCodeHandler),
(r"^/upload$", UploadHandler),
]
settings = dict(
static_path=os.path.join(os.path.dirname(__file__), 'static'),
xsrf_cookies=False,
cookie_secret='c8f48f9777f411e09fcd109add59054a',
debug=True,
)
super(Application, self).__init__(handlers, **settings)
# sqlalchemy session 'db'
self.db_session = (scoped_session(sessionmaker(autoflush=True, bind=create_engine(options.db_uri))))()
def main():
tornado.options.parse_command_line()
# ssl_options TODO
app = Application()
# init the modual
sql_db.init_app(app)
# server
http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
0566f69d12c80dfb50be20da5352f14457d00819 | fix the ptoblem about the observed monomass and calculated monomass, tagDTASelect.py | wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE,wangchulab/CIMAGE | python/tagDTASelect.py | python/tagDTASelect.py | #!/usr/bin/env python
#
# tag each peptide line in DTASelect output file with its IPI name at beginning
import sys
from sys import argv
if len(argv) != 2:
print 'Usage: %s <DTASelect-filter.txt>'%argv[0]
print 'tag each peptide line in DTASelect output file with its IPI name at beginning'
sys.exit(-1)
## list to save ipi and peptide lines in case multiple protein with the same peptides
ipi_lines = []
pep_lines = []
tag=''
last_line_is_peptide = False
## control header and tail printing
print_on = True
for line in open(argv[1]):
line = line.rstrip()
words = line.split()
if len(words) <= 3:
print
continue
# none peptide entry line
if words[3].find('%') != -1 or words[0]=='Proteins':
if words[0]=='Proteins':
print_on = True
else:
print_on = False
if last_line_is_peptide:
# print out saved lines
for ipi in ipi_lines:
print 'cimageipi-'+ipi
# find tag
ipi_words = ipi.split()
tmp_word = ipi_words[0].replace("IPI:", "").replace("sp|","").replace("tr|","")
i = tmp_word.find("|")
if i>0:
tmp2 = tmp_word.split("|")
tag = tmp2[0]
else:
tag = ipi_words[0]
# print out tagged lines
for pep in pep_lines:
linesplit = pep.split("\t")
if float(linesplit[6]) < float(linesplit[5]) + 0.5 :
print float(linesplit[6])
print float(linesplit[5])
firsttab=pep.index("\t")+1
print 'cimagepep-'+tag, pep[firsttab:]
# emtpy lists
ipi_lines = []
pep_lines = []
if not print_on:
ipi_lines.append(line)
last_line_is_peptide = False
else:
if not print_on:
pep_lines.append(line)
last_line_is_peptide = True
if print_on:
print line
| #!/usr/bin/env python
#
# tag each peptide line in DTASelect output file with its IPI name at beginning
import sys
from sys import argv
if len(argv) != 2:
print 'Usage: %s <DTASelect-filter.txt>'%argv[0]
print 'tag each peptide line in DTASelect output file with its IPI name at beginning'
sys.exit(-1)
## list to save ipi and peptide lines in case multiple protein with the same peptides
ipi_lines = []
pep_lines = []
tag=''
last_line_is_peptide = False
## control header and tail printing
print_on = True
for line in open(argv[1]):
line = line.rstrip()
words = line.split()
if len(words) <= 3:
print
continue
# none peptide entry line
if words[3].find('%') != -1 or words[0]=='Proteins':
if words[0]=='Proteins':
print_on = True
else:
print_on = False
if last_line_is_peptide:
# print out saved lines
for ipi in ipi_lines:
print 'cimageipi-'+ipi
# find tag
ipi_words = ipi.split()
tmp_word = ipi_words[0].replace("IPI:", "").replace("sp|","").replace("tr|","")
i = tmp_word.find("|")
if i>0:
tmp2 = tmp_word.split("|")
tag = tmp2[0]
else:
tag = ipi_words[0]
# print out tagged lines
for pep in pep_lines:
linesplit = pep.split("\t")
if linesplit[6] < linesplit[5]:
firsttab=pep.index("\t")+1
print 'cimagepep-'+tag, pep[firsttab:]
# emtpy lists
ipi_lines = []
pep_lines = []
if not print_on:
ipi_lines.append(line)
last_line_is_peptide = False
else:
if not print_on:
pep_lines.append(line)
last_line_is_peptide = True
if print_on:
print line
| mit | Python |
6dc5d39f5fb075a5a810e18506a714d89a57a016 | Change the ipython printing test to use init_printing(). | yashsharan/sympy,debugger22/sympy,dqnykamp/sympy,MechCoder/sympy,lidavidm/sympy,toolforger/sympy,Sumith1896/sympy,skidzo/sympy,dqnykamp/sympy,sahmed95/sympy,kaushik94/sympy,hrashk/sympy,vipulroxx/sympy,farhaanbukhsh/sympy,Davidjohnwilson/sympy,madan96/sympy,shikil/sympy,MridulS/sympy,rahuldan/sympy,wanglongqi/sympy,ahhda/sympy,rahuldan/sympy,beni55/sympy,rahuldan/sympy,iamutkarshtiwari/sympy,sampadsaha5/sympy,atsao72/sympy,farhaanbukhsh/sympy,sahilshekhawat/sympy,Curious72/sympy,MridulS/sympy,sahilshekhawat/sympy,saurabhjn76/sympy,oliverlee/sympy,atreyv/sympy,pandeyadarsh/sympy,pandeyadarsh/sympy,Sumith1896/sympy,yashsharan/sympy,ahhda/sympy,kaushik94/sympy,mafiya69/sympy,hrashk/sympy,Arafatk/sympy,ahhda/sympy,mafiya69/sympy,kaichogami/sympy,liangjiaxing/sympy,grevutiu-gabriel/sympy,hargup/sympy,wyom/sympy,skidzo/sympy,aktech/sympy,postvakje/sympy,mafiya69/sympy,maniteja123/sympy,postvakje/sympy,cswiercz/sympy,Davidjohnwilson/sympy,jbbskinny/sympy,emon10005/sympy,lindsayad/sympy,aktech/sympy,shikil/sympy,MechCoder/sympy,kevalds51/sympy,kumarkrishna/sympy,souravsingh/sympy,pandeyadarsh/sympy,shipci/sympy,ga7g08/sympy,AunShiLord/sympy,atreyv/sympy,Titan-C/sympy,Mitchkoens/sympy,sahmed95/sympy,kumarkrishna/sympy,kevalds51/sympy,hargup/sympy,sahilshekhawat/sympy,jbbskinny/sympy,beni55/sympy,meghana1995/sympy,yukoba/sympy,vipulroxx/sympy,jaimahajan1997/sympy,Titan-C/sympy,wyom/sympy,kaichogami/sympy,toolforger/sympy,saurabhjn76/sympy,postvakje/sympy,pbrady/sympy,shipci/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,kaichogami/sympy,abhiii5459/sympy,beni55/sympy,iamutkarshtiwari/sympy,chaffra/sympy,liangjiaxing/sympy,cswiercz/sympy,kmacinnis/sympy,jbbskinny/sympy,Curious72/sympy,moble/sympy,Designist/sympy,jerli/sympy,kmacinnis/sympy,moble/sympy,skidzo/sympy,jamesblunt/sympy,jerli/sympy,dqnykamp/sympy,moble/sympy,kaushik94/sympy,abloomston/sympy,souravsingh/sympy,toolforger/sympy,vipulroxx/sympy,wanglongqi/sympy,lindsayad/sympy,wanglongqi/sympy,Vishluck/sympy,cccfran/sympy,lidavidm/sympy,yashsharan/sympy,drufat/sympy,sahmed95/sympy,maniteja123/sympy,meghana1995/sympy,madan96/sympy,ga7g08/sympy,AunShiLord/sympy,cswiercz/sympy,Mitchkoens/sympy,grevutiu-gabriel/sympy,garvitr/sympy,shipci/sympy,abloomston/sympy,chaffra/sympy,ChristinaZografou/sympy,diofant/diofant,hargup/sympy,wyom/sympy,Shaswat27/sympy,lidavidm/sympy,sunny94/temp,cccfran/sympy,pbrady/sympy,Vishluck/sympy,asm666/sympy,AkademieOlympia/sympy,hrashk/sympy,Designist/sympy,jaimahajan1997/sympy,maniteja123/sympy,bukzor/sympy,sampadsaha5/sympy,kumarkrishna/sympy,AkademieOlympia/sympy,yukoba/sympy,jerli/sympy,aktech/sympy,ChristinaZografou/sympy,garvitr/sympy,debugger22/sympy,kmacinnis/sympy,abhiii5459/sympy,AunShiLord/sympy,bukzor/sympy,abhiii5459/sympy,shikil/sympy,VaibhavAgarwalVA/sympy,atsao72/sympy,garvitr/sympy,grevutiu-gabriel/sympy,ga7g08/sympy,ChristinaZografou/sympy,Shaswat27/sympy,drufat/sympy,saurabhjn76/sympy,madan96/sympy,sampadsaha5/sympy,yukoba/sympy,skirpichev/omg,MridulS/sympy,jaimahajan1997/sympy,sunny94/temp,sunny94/temp,abloomston/sympy,Designist/sympy,Gadal/sympy,pbrady/sympy,chaffra/sympy,Arafatk/sympy,cccfran/sympy,Arafatk/sympy,atreyv/sympy,lindsayad/sympy,jamesblunt/sympy,drufat/sympy,Vishluck/sympy,asm666/sympy,emon10005/sympy,Mitchkoens/sympy,Gadal/sympy,iamutkarshtiwari/sympy,emon10005/sympy,jamesblunt/sympy,mcdaniel67/sympy,mcdaniel67/sympy,oliverlee/sympy,mcdaniel67/sympy,VaibhavAgarwalVA/sympy,kevalds51/sympy,asm666/sympy,Davidjohnwilson/sympy,Shaswat27/sympy,Titan-C/sympy,bukzor/sympy,meghana1995/sympy,atsao72/sympy,Sumith1896/sympy,Gadal/sympy,liangjiaxing/sympy,souravsingh/sympy,debugger22/sympy,Curious72/sympy,oliverlee/sympy,MechCoder/sympy,farhaanbukhsh/sympy | sympy/interactive/tests/test_ipythonprinting.py | sympy/interactive/tests/test_ipythonprinting.py | """Tests that the IPython printing module is properly loaded. """
from sympy.interactive.session import init_ipython_session
from sympy.external import import_module
# run_cell was added in IPython 0.11
ipython = import_module("IPython", min_module_version="0.11")
# disable tests if ipython is not present
if not ipython:
disabled = True
def test_ipythonprinting():
# Initialize and setup IPython session
app = init_ipython_session()
app.run_cell("ip = get_ipython()")
app.run_cell("inst = ip.instance()")
app.run_cell("format = inst.display_formatter.format")
app.run_cell("from sympy import Symbol")
# Printing without printing extension
app.run_cell("a = format(Symbol('pi'))")
app.run_cell("a2 = format(Symbol('pi')**2)")
# Deal with API change starting at IPython 1.0
if int(ipython.__version__.split(".")[0]) < 1:
assert app.user_ns['a']['text/plain'] == "pi"
assert app.user_ns['a2']['text/plain'] == "pi**2"
else:
assert app.user_ns['a'][0]['text/plain'] == "pi"
assert app.user_ns['a2'][0]['text/plain'] == "pi**2"
# Load printing extension
app.run_cell("from sympy import init_printing")
app.run_cell("init_printing()")
# Printing with printing extension
app.run_cell("a = format(Symbol('pi'))")
app.run_cell("a2 = format(Symbol('pi')**2)")
# Deal with API change starting at IPython 1.0
if int(ipython.__version__.split(".")[0]) < 1:
assert app.user_ns['a']['text/plain'] in (u'\u03c0', 'pi')
assert app.user_ns['a2']['text/plain'] in (u' 2\n\u03c0 ', ' 2\npi ')
else:
assert app.user_ns['a'][0]['text/plain'] in (u'\u03c0', 'pi')
assert app.user_ns['a2'][0]['text/plain'] in (u' 2\n\u03c0 ', ' 2\npi ')
| """Tests that the IPython printing module is properly loaded. """
from sympy.interactive.session import init_ipython_session
from sympy.external import import_module
# run_cell was added in IPython 0.11
ipython = import_module("IPython", min_module_version="0.11")
# disable tests if ipython is not present
if not ipython:
disabled = True
def test_ipythonprinting():
# Initialize and setup IPython session
app = init_ipython_session()
app.run_cell("ip = get_ipython()")
app.run_cell("inst = ip.instance()")
app.run_cell("format = inst.display_formatter.format")
app.run_cell("from sympy import Symbol")
# Printing without printing extension
app.run_cell("a = format(Symbol('pi'))")
app.run_cell("a2 = format(Symbol('pi')**2)")
# Deal with API change starting at IPython 1.0
if int(ipython.__version__.split(".")[0]) < 1:
assert app.user_ns['a']['text/plain'] == "pi"
assert app.user_ns['a2']['text/plain'] == "pi**2"
else:
assert app.user_ns['a'][0]['text/plain'] == "pi"
assert app.user_ns['a2'][0]['text/plain'] == "pi**2"
# Load printing extension
app.run_cell("%load_ext sympy.interactive.ipythonprinting")
# Printing with printing extension
app.run_cell("a = format(Symbol('pi'))")
app.run_cell("a2 = format(Symbol('pi')**2)")
# Deal with API change starting at IPython 1.0
if int(ipython.__version__.split(".")[0]) < 1:
assert app.user_ns['a']['text/plain'] in (u'\u03c0', 'pi')
assert app.user_ns['a2']['text/plain'] in (u' 2\n\u03c0 ', ' 2\npi ')
else:
assert app.user_ns['a'][0]['text/plain'] in (u'\u03c0', 'pi')
assert app.user_ns['a2'][0]['text/plain'] in (u' 2\n\u03c0 ', ' 2\npi ')
| bsd-3-clause | Python |
1ba6f53fb0503f9443f13e5affea70c244ee98d1 | update to use the algorithm class | Xia0ben/IQPlayground | launch.py | launch.py | import os.path
import pickle
from files import InvertedFile, Reader
from algorithm import SimpleScanAlgorithm
file_path = "latimes/la100590"
pickle_path = "pickles/la100590"
if os.path.isfile(pickle_path):
with open(pickle_path, "rb") as file:
inv_file = pickle.load(file)
else:
documents = Reader.read_file(file_path)
print("Number of documents read : {}".format(len(documents)))
inv_file = InvertedFile(documents)
with open(pickle_path, "wb") as file:
pickle.dump(inv_file, file)
print("Loaded Inverted File - {} terms found".format(len(inv_file.vocabulary_of_term)))
algorithm = SimpleScanAlgorithm()
while True:
query = input("Query ? ")
print("Your query is : {}".format(query))
documents = algorithm.execute(query, inv_file, 5)
if documents is not None:
print("You may be interested by the following documents:")
print("\tscore\t |\tdocument")
for doc in documents:
print("\t{:8.5f} | {}".format(doc[1], doc[0]))
else:
print("Sorry no documents may be of interest to you. :(")
| import os.path
import pickle
from files import Inverted_File, Reader
file_path = "latimes/la100590"
pickle_path = "pickles/la100590"
if os.path.isfile(pickle_path):
with open(pickle_path, "rb") as file:
inv_file = pickle.load(file)
else:
documents = Reader.read_file(file_path)
print("Number of documents read : {}".format(len(documents)))
inv_file = Inverted_File(documents)
with open(pickle_path, "wb") as file:
pickle.dump(inv_file, file)
print("Loaded Inverted File - {} terms found".format(len(inv_file.vocabulary_of_term)))
while True:
query = input("Query ? ")
print("Your query is : {}".format(query))
documents = inv_file.scan(query)
if documents is not None:
print("You may be interested by the folowing documents:")
for doc in documents:
print("\t - {}".format(doc))
else:
print("Sorry no documents may be of interest to you. :(")
| mit | Python |
9128d9032dac95128a01f60ef62b448e8e807a27 | Optimise collegehumor.py | CJ-Jackson/django-mediaembedder | mediaembedder/services/collegehumor.py | mediaembedder/services/collegehumor.py | services = []
def collegehumor(self):
id = self.match.group('collegehumor_id')
width = 640
height = 360
if self.width:
width = self.width
elif 'og:video:width' in self.data['meta']:
width = int(self.data['meta']['og:video:width'])
if self.height:
height = self.height
elif 'og:video:height' in self.data['meta']:
height = int(self.data['meta']['og:video:height'])
return self.render('collegehumor.html', {
'id': id, 'width': width, 'height': height
})
services.append({
're': '^http(s?)://(?:www\.)?collegehumor.com/video/(?P<collegehumor_id>\d+)/(?P<collegehumor_name>[a-zA-Z0-9-_]+)',
'func': collegehumor
})
| services = []
def collegehumor(self):
id = self.match.group('collegehumor_id')
if self.width:
width = self.width
else:
try:
width = int(self.data['meta']['og:video:width'])
except:
width = 640
if self.height:
height = self.height
else:
try:
height = int(self.data['meta']['og:video:height'])
except:
height = 360
return self.render('collegehumor.html', {
'id': id, 'width': width, 'height': height
})
services.append({
're': '^http(s?)://(?:www\.)?collegehumor.com/video/(?P<collegehumor_id>\d+)/(?P<collegehumor_name>[a-zA-Z0-9-_]+)',
'func': collegehumor
})
| mit | Python |
027c6c696ef8c58856806eb916c85f9fde97f758 | Update FA icons. | michaelkuty/horizon-contrib,michaelkuty/horizon-contrib,michaelkuty/horizon-contrib | horizon_contrib/tables/filters.py | horizon_contrib/tables/filters.py |
from datetime import datetime
from django.utils.safestring import SafeString
def timestamp_to_datetime(value):
return datetime.fromtimestamp(value)
def nonbreakable_spaces(value):
return SafeString(value.replace(' ', ' '))
def unit_times(value):
return SafeString('%s%s' % (value, '×'))
def join_list_with_comma(value):
return ', '.join(value)
def join_list_with_newline(value):
return SafeString('<br />'.join(value))
def join_list(value):
if isinstance(value, list):
for item in value:
if isinstance(item, dict):
return value
return join_list_with_newline(value)
def status_icon(value):
if value is True:
return SafeString('<i class=\"icon-large fa fa-check\"></i>')
return SafeString('<i class=\"icon-large fa fa-times"></i>')
def filter_m2m(datum):
"""helper for aggregation of m2m relation
"""
items = []
for d in datum.all():
items.append(str(d))
return ", ".join(items)
|
from datetime import datetime
from django.utils.safestring import SafeString
def timestamp_to_datetime(value):
return datetime.fromtimestamp(value)
def nonbreakable_spaces(value):
return SafeString(value.replace(' ', ' '))
def unit_times(value):
return SafeString('%s%s' % (value, '×'))
def join_list_with_comma(value):
return ', '.join(value)
def join_list_with_newline(value):
return SafeString('<br />'.join(value))
def join_list(value):
if isinstance(value, list):
for item in value:
if isinstance(item, dict):
return value
return join_list_with_newline(value)
def status_icon(value):
if value is True:
return SafeString('<i class=\"icon-large fa fa-ok\"></i>')
return SafeString('<i class=\"icon-large fa fa-remove"></i>')
def filter_m2m(datum):
"""helper for aggregation of m2m relation
"""
items = []
for d in datum.all():
items.append(str(d))
return ", ".join(items)
| bsd-3-clause | Python |
c64d3f5701ee9e228d53f4228327ca2392fbb305 | Update repo created event | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon/event_manager/events/repo.py | polyaxon/event_manager/events/repo.py | from event_manager import event_actions, event_subjects
from event_manager.event import Attribute, Event
REPO_CREATED = '{}.{}'.format(event_subjects.REPO, event_actions.CREATED)
REPO_DOWNLOADED = '{}.{}'.format(event_subjects.REPO, event_actions.DOWNLOADED)
REPO_NEW_COMMIT = '{}.new_commit'.format(event_subjects.REPO)
class RepoCreatedEvent(Event):
event_type = REPO_CREATED
actor = True
attributes = (
Attribute('id'),
Attribute('project.id'),
Attribute('project.user.id'),
Attribute('external', attr_type=bool)
)
class RepoDownloadedEvent(Event):
event_type = REPO_DOWNLOADED
actor = True
attributes = (
Attribute('id'),
Attribute('project.id'),
Attribute('project.user.id'),
)
class RepoNewCommitEvent(Event):
event_type = REPO_NEW_COMMIT
actor = True
attributes = (
Attribute('id'),
Attribute('project.id'),
Attribute('project.user.id'),
)
| from event_manager import event_actions, event_subjects
from event_manager.event import Attribute, Event
REPO_CREATED = '{}.{}'.format(event_subjects.REPO, event_actions.CREATED)
REPO_DOWNLOADED = '{}.{}'.format(event_subjects.REPO, event_actions.DOWNLOADED)
REPO_NEW_COMMIT = '{}.new_commit'.format(event_subjects.REPO)
class RepoCreatedEvent(Event):
event_type = REPO_CREATED
actor = True
attributes = (
Attribute('id'),
Attribute('project.id'),
Attribute('project.user.id'),
)
class RepoDownloadedEvent(Event):
event_type = REPO_DOWNLOADED
actor = True
attributes = (
Attribute('id'),
Attribute('project.id'),
Attribute('project.user.id'),
)
class RepoNewCommitEvent(Event):
event_type = REPO_NEW_COMMIT
actor = True
attributes = (
Attribute('id'),
Attribute('project.id'),
Attribute('project.user.id'),
)
| apache-2.0 | Python |
9c47519a8327640124adf27e23c80327269ad745 | Tweak allocator | explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc | thinc/backends/_cupy_allocators.py | thinc/backends/_cupy_allocators.py | from typing import cast
from ..types import ArrayXd
from ..util import tensorflow2xp
try:
import tensorflow
except ImportError:
pass
try:
import torch
import torch.cuda
except ImportError:
pass
try:
from cupy.cuda.memory import MemoryPointer
from cupy.cuda.memory import UnownedMemory
except ImportError:
pass
def cupy_tensorflow_allocator(size_in_bytes: int):
"""Function that can be passed into cupy.cuda.set_allocator, to have cupy
allocate memory via TensorFlow. This is important when using the two libraries
together, as otherwise OOM errors can occur when there's available memory
sitting in the other library's pool.
"""
size_in_bytes = max(1024, size_in_bytes)
tensor = tensorflow.zeros((size_in_bytes // 4,), dtype=tensorflow.dtypes.float32)
# We convert to cupy via dlpack, so that we can get a memory pointer.
cupy_array = cast(ArrayXd, tensorflow2xp(tensor))
address = int(cupy_array.data)
# cupy has a neat class to help us here. Otherwise it will try to free.
memory = UnownedMemory(address, size_in_bytes, cupy_array)
# Now return a new memory pointer.
return MemoryPointer(memory, 0)
def cupy_pytorch_allocator(size_in_bytes: int):
"""Function that can be passed into cupy.cuda.set_allocator, to have cupy
allocate memory via PyTorch. This is important when using the two libraries
together, as otherwise OOM errors can occur when there's available memory
sitting in the other library's pool.
"""
# Cupy was having trouble with very small allocations?
size_in_bytes = max(1024, size_in_bytes)
# We use pytorch's underlying FloatStorage type to avoid overhead from
# creating a whole Tensor.
torch_storage = torch.cuda.FloatStorage(size_in_bytes // 4)
# cupy has a neat class to help us here. Otherwise it will try to free.
# I think this is a private API? It's not in the types.
address = torch_storage.data_ptr() # type: ignore
memory = UnownedMemory(address, size_in_bytes, torch_storage)
# Now return a new memory pointer.
return MemoryPointer(memory, 0)
| from typing import cast
from ..types import ArrayXd
from ..util import tensorflow2xp
try:
import tensorflow
except ImportError:
pass
try:
import torch
except ImportError:
pass
try:
from cupy.cuda.memory import MemoryPointer
from cupy.cuda.memory import UnownedMemory
except ImportError:
pass
def cupy_tensorflow_allocator(size_in_bytes: int):
"""Function that can be passed into cupy.cuda.set_allocator, to have cupy
allocate memory via TensorFlow. This is important when using the two libraries
together, as otherwise OOM errors can occur when there's available memory
sitting in the other library's pool.
"""
size_in_bytes = max(1024, size_in_bytes)
tensor = tensorflow.zeros((size_in_bytes // 4,), dtype=tensorflow.dtypes.float32)
# We convert to cupy via dlpack, so that we can get a memory pointer.
cupy_array = cast(ArrayXd, tensorflow2xp(tensor))
address = int(cupy_array.data)
# cupy has a neat class to help us here. Otherwise it will try to free.
memory = UnownedMemory(address, size_in_bytes, cupy_array)
# Now return a new memory pointer.
return MemoryPointer(memory, 0)
def cupy_pytorch_allocator(size_in_bytes: int):
"""Function that can be passed into cupy.cuda.set_allocator, to have cupy
allocate memory via PyTorch. This is important when using the two libraries
together, as otherwise OOM errors can occur when there's available memory
sitting in the other library's pool.
"""
# Cupy was having trouble with very small allocations?
size_in_bytes = max(1024, size_in_bytes)
# We use pytorch's underlying FloatStorage type to avoid overhead from
# creating a whole Tensor.
torch_storage = torch.FloatStorage(size_in_bytes // 4).cuda()
# cupy has a neat class to help us here. Otherwise it will try to free.
# I think this is a private API? It's not in the types.
address = torch_storage.data_ptr() # type: ignore
memory = UnownedMemory(address, size_in_bytes, torch_storage)
# Now return a new memory pointer.
return MemoryPointer(memory, 0)
| mit | Python |
7ec29c392a49c6ad4c2a7d2501d12c85c2480242 | insert data into mysql database | varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor | log-reader/log/LogDatabase.py | log-reader/log/LogDatabase.py | from simplemysql import SimpleMysql
class LogDatabase:
def __init__(self, **keyVals):
# saving database parameters
self.dbParams = keyVals
# table information
self.tablePrefix = 'zipkin_'
self.tables = ['spans', 'annotations']
# connect to database
self.db = SimpleMysql(\
host=keyVals['host'], \
db=keyVals['db'], \
user=keyVals['user'], \
passwd=keyVals['passwd'], \
keep_alive=keyVals['keep_alive'] \
)
if 'truncate_tables' in keyVals:
self.truncateTables()
def getParams(self):
return self.dbParams
def getDB(self):
return self.conn
def insert(self, tableName, rows):
table = self.tablePrefix + tableName
if len(rows) > 0:
for row in rows:
self.db.insert(table, row);
self.db.commit()
# truncate data in tables related to our application
def truncateTables(self):
print 'Truncating Tables:'
if self.db is not None and self.db.is_open():
for tableName in self.tables:
# table prefix + table name
table = self.tablePrefix + tableName
print 'truncating table -> ' + table
# delete table, and commit changes to database
self.db.delete(table)
self.db.commit()
| from simplemysql import SimpleMysql
class LogDatabase:
def __init__(self, **keyVals):
# saving database parameters
self.dbParams = keyVals
# table information
self.tablesPrefix = 'zipkin_'
self.tables = ['spans', 'annotations']
# connect to database
self.db = SimpleMysql(\
host=keyVals['host'], \
db=keyVals['db'], \
user=keyVals['user'], \
passwd=keyVals['passwd'], \
keep_alive=keyVals['keep_alive'] \
)
if 'truncate_tables' in keyVals:
self.truncateTables()
def getParams(self):
return self.dbParams
def getDB(self):
return self.conn
def insert(self, table, rows):
print "Table: " + table
print rows
# truncate data in tables related to our application
def truncateTables(self):
print 'Truncating Tables:'
if self.db is not None and self.db.is_open():
for tableName in self.tables:
# table prefix + table name
table = self.tablesPrefix + tableName
print 'truncating table -> ' + table
# delete table, and commit changes to database
self.db.delete(table)
self.db.commit()
| bsd-2-clause | Python |
b4d350e165fe7351cdb0ce1fc03e992da89cd185 | Make the ship shut up | cnlohr/bridgesim,cnlohr/bridgesim,cnlohr/bridgesim,cnlohr/bridgesim | src/server/Ship.py | src/server/Ship.py | from Entity import Entity
from Component import *
from Missile import Missile
import sys
class Ship(Entity):
def __init__(self, config, universe):
super().__init__(config, universe)
self.__dict__.update(config)
self.energy = self.maxEnergy
# How much power Engineering is giving to each component - [0, 1] normally
self.energySupply = {}
temp = []
for i in self.components:
comp = findComponent(i['type'])(self, i)
temp.append(comp)
self.energySupply[comp] = 1
self.components = temp
def collide(self, other):
print("I got hit!")
if type(other) is Missile:
self.takeDamage(other.getDamage())
def takeDamage(self, damage):
print("I'm hit!", damage)
for i in self.components:
print(i.type, "Took damage")
damage = i.takeDamage(damage)
if damage <= 0:
break
def tick(self, duration):
# Figure out how much each component wants and is being allowed
needed = {component: component.energyNeeded() * duration * self.energySupply[component] for component in self.components}
totalNeeded = sum(needed.values())
factor = 1 if totalNeeded <= self.energy else self.energy / totalNeeded
# do this before looping so nothing grabs power allotted to something else
self.energy -= totalNeeded * factor
for i in self.components:
i.energy = factor * duration * self.energySupply[i] * needed[i]
i.tick(duration)
def tock(self):
for i in self.components:
if not i.isDead():
return
print("Boom!")
self.destroy()
| from Entity import Entity
from Component import *
from Missile import Missile
import sys
class Ship(Entity):
def __init__(self, config, universe):
super().__init__(config, universe)
self.__dict__.update(config)
self.energy = self.maxEnergy
# How much power Engineering is giving to each component - [0, 1] normally
self.energySupply = {}
temp = []
for i in self.components:
comp = findComponent(i['type'])(self, i)
temp.append(comp)
self.energySupply[comp] = 1
self.components = temp
def collide(self, other):
print("I got hit!")
if type(other) is Missile:
self.takeDamage(other.getDamage())
def takeDamage(self, damage):
print("I'm hit!", damage)
for i in self.components:
print(i.type, "Took damage")
damage = i.takeDamage(damage)
if damage <= 0:
break
def tick(self, duration):
# Figure out how much each component wants and is being allowed
needed = {component: component.energyNeeded() * duration * self.energySupply[component] for component in self.components}
totalNeeded = sum(needed.values())
factor = 1 if totalNeeded <= self.energy else self.energy / totalNeeded
# do this before looping so nothing grabs power allotted to something else
self.energy -= totalNeeded * factor
print("Consumed", totalNeeded * factor, "power this tick; now", self.energy)
for i in self.components:
i.energy = factor * duration * self.energySupply[i] * needed[i]
#print("Gave ", i.energy, " power to ", type(i))
i.tick(duration)
def tock(self):
for i in self.components:
if not i.isDead():
return
print("Boom!")
self.destroy()
| mit | Python |
90e62a41b49a978b4bea3a5af87d26ef1fb23009 | Fix namespacing when retrieving remote URIs | redmatter/combine | combine/uri.py | combine/uri.py | # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import urllib2
import urlparse
from combine import Package, File
class URI:
def __init__(self, uri, package=None, format=None, target=None):
self.uri = uri
self.parse = urlparse.urlparse(uri)
self.package = package
self.format = format
self.target = target
self.handle = None
def __getitem__(self, key):
return self.parse.__getattribute__(key)
def fetch(self, target=None):
parse = self.parse
# local file
if parse.scheme == "file":
self.handle = File(parse.path, "r", format=self.format)
# package file
elif parse.scheme == "package":
if self.package is None:
raise Exception("No package specified")
filename = parse.path.lstrip("/")
self.handle = self.package.open(filename, "r", format=self.format)
# remote http resource
elif parse.scheme in ("http", "https"):
self.handle = urllib2.urlopen(self.uri)
else:
raise Exception("Unsupported URI scheme %s" % (parse.scheme))
# write directly to file if requested, and then open that
if target:
with File(target, "w") as fh:
fh.write(self.handle.read())
self.handle.close()
self.handle = File(target, "r")
return self.handle
def __enter__(self):
return self.fetch(target=self.target)
def __exit__(self, type, value, trace):
self.close()
def close(self):
if self.handle:
self.handle.close()
| # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import urllib2
import urlparse
from combine import Package, File
class URI:
def __init__(self, uri, package=None, format=None, target=None):
self.uri = uri
self.parse = urlparse.urlparse(uri)
self.package = package
self.format = format
self.target = target
self.handle = None
def __getitem__(self, key):
return self.parse.__getattribute__(key)
def fetch(self, target=None):
parse = self.parse
# local file
if parse.scheme == "file":
self.handle = File(parse.path, "r", format=self.format)
# package file
elif parse.scheme == "package":
if self.package is None:
raise Exception("No package specified")
filename = parse.path.lstrip("/")
self.handle = self.package.open(filename, "r", format=self.format)
# remote http resource
elif parse.scheme in ("http", "https"):
self.handle = urllib2.urlopen(uri)
else:
raise Exception("Unsupported URI scheme %s" % (parse.scheme))
# write directly to file if requested, and then open that
if target:
with File(target, "w") as fh:
fh.write(self.handle.read())
self.handle.close()
self.handle = File(target, "r")
return self.handle
def __enter__(self):
return self.fetch(target=self.target)
def __exit__(self, type, value, trace):
self.close()
def close(self):
if self.handle:
self.handle.close()
| mit | Python |
d14454e59f6ba6b2f7f2227e1a22e67c392c944e | make pyxl work when using codecs.decode in addition to codecs.streamreader | wfxiang08/pyxl,dropbox/pyxl,pyxl4/pyxl4,lez/pyxl3 | pyxl/codec/register.py | pyxl/codec/register.py | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import codecs, cStringIO, encodings, tokenize
import traceback
from encodings import utf_8
from pyxl.codec.tokenizer import pyxl_tokenize
def pyxl_transform(stream):
try:
output = tokenize.untokenize(pyxl_tokenize(stream.readline))
except Exception, ex:
print ex
raise
return output
def pyxl_decode(input, errors='strict'):
stream = cStringIO.StringIO(input)
return utf_8.decode(pyxl_transform(stream), errors)
class PyxlStreamReader(utf_8.StreamReader):
def __init__(self, *args, **kwargs):
codecs.StreamReader.__init__(self, *args, **kwargs)
self.stream = cStringIO.StringIO(pyxl_transform(self.stream))
def search_function(encoding):
if encoding != 'pyxl': return None
# Assume utf8 encoding
utf8=encodings.search_function('utf8')
return codecs.CodecInfo(
name = 'pyxl',
encode = utf8.encode,
decode = pyxl_decode,
incrementalencoder = utf8.incrementalencoder,
incrementaldecoder = utf8.incrementaldecoder,
streamreader = PyxlStreamReader,
streamwriter = utf8.streamwriter)
codecs.register(search_function)
| #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import codecs, cStringIO, encodings, tokenize
import traceback
from encodings import utf_8
from pyxl.codec.tokenizer import pyxl_tokenize
class PyxlStreamReader(utf_8.StreamReader):
def __init__(self, *args, **kwargs):
codecs.StreamReader.__init__(self, *args, **kwargs)
try:
data = tokenize.untokenize(pyxl_tokenize(self.stream.readline))
except Exception, ex:
print ex
raise
self.stream = cStringIO.StringIO(data)
def search_function(encoding):
if encoding != 'pyxl': return None
# Assume utf8 encoding
utf8=encodings.search_function('utf8')
return codecs.CodecInfo(
name = 'pyxl',
encode = utf8.encode,
decode = utf8.decode,
incrementalencoder = utf8.incrementalencoder,
incrementaldecoder = utf8.incrementaldecoder,
streamreader = PyxlStreamReader,
streamwriter = utf8.streamwriter)
codecs.register(search_function)
| apache-2.0 | Python |
4133f5f4c04ae75bb8fbe368d851957bceb844e3 | Update math_reverse_integer.py | ngovindaraj/Python | leetcode/math_reverse_integer.py | leetcode/math_reverse_integer.py | # @file Math Reverse Integer
# @brief Given a 32-bit signed integer, reverse digits of an integer.
# https://leetcode.com/problems/reverse-integer/
'''
Given a 32-bit signed integer, reverse digits of an integer.
Example 1:
Input: 123
Output: 321
Example 2:
Input: -123
Output: -321
Example 3:
Input: 120
Output: 21
Note:
Assume we are dealing with an environment which could only hold integers within
the 32-bit signed integer range. For the purpose of this problem, assume that
your function returns 0 when the reversed integer overflows.
'''
# Approach 1: Math
# time: O(n)
# space:
# Understand this as dequeue the last digit each time from input and enqueue the last digit into ans
# Dequeue: use mod to peek at the last digit and division by base(10) to shift number to right
# Enqueue: use multiply by base(10) to make space (shift to left) and addition to insert the last digit
# Overflows: Use the max_int and check if addition or multiplication can cause overflow
def reverse(self, x):
maxInt = 2**31-1 # Maximum positive integer
ans = 0
sign = -1 if x < 0 else 1
x = sign * x # Convert negative x to positive x
while x:
x, lastDigit = x // 10, x % 10
# ans = ans * 10 + lastDigit (with overflow checks)
# Do ans = ans * 10 while checking for overflow (a * 10 <= max ---- a <= max / 10)
if ans <= maxInt // 10:
ans *= 10
else:
return 0
# Do ans += lastDigit while checking overflow (a + 10 <= max ---- a <= max - 10)
if ans <= maxInt - lastDigit:
ans += lastDigit
else:
return 0
return sign * ans
# Approach 2: convert to string
def reverse(self, x):
sign = 1
if x < 0:
sign = -1
x = sign * x
return sign * int(str(x)[::-1])
| # @file Math Reverse Integer
# @brief Given a 32-bit signed integer, reverse digits of an integer.
# https://leetcode.com/problems/reverse-integer/
'''
Given a 32-bit signed integer, reverse digits of an integer.
Example 1:
Input: 123
Output: 321
Example 2:
Input: -123
Output: -321
Example 3:
Input: 120
Output: 21
Note:
Assume we are dealing with an environment which could only hold integers within
the 32-bit signed integer range. For the purpose of this problem, assume that
your function returns 0 when the reversed integer overflows.
'''
# Approach 1: Math
# time: O(n)
# space:
# Understand this as dequeue the last digit each time from input and enqueue the last digit into ans
# Dequeue: use mod to peek at the last digit and division by base(10) to shift number to right
# Enqueue: use multiply by base(10) to make space (shift to left) and addition to insert the last digit
# Overflows: Use the max_int and check if addition or multiplication can cause overflow
def reverse(self, x):
maxInt = 2**31-1 # Maximum positive integer
ans = 0
sign = -1 if x < 0 else 1
x = sign * x # Convert negative x to positive x
while x:
x, lastDigit = x // 10, x % 10
# ans = ans * 10 + lastDigit (with overflow checks)
# Do ans = ans * 10 while checking for overflow (a * 10 <= max ---- a <= max / 10)
if ans <= maxInt // 10:
ans *= 10
else:
return 0
# Do ans += lastDigit while checking overflow (a + 10 <= max ---- a <= max - 10)
if ans <= maxInt - lastDigit:
ans += lastDigit
else:
return 0
return sign * ans
| mit | Python |
12a89261e5b831096e734688edf4a9c1b6ee21b3 | Fix pub integration test on arm64 macs | dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk | tools/bots/pub_integration_test.py | tools/bots/pub_integration_test.py | #!/usr/bin/env python3
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import optparse
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
environment:
sdk: '>=2.10.0 <=3.0.0'
dependencies:
shelf:
test:
"""
def Main():
parser = optparse.OptionParser()
parser.add_option(
'--mode', action='store', dest='mode', type='string', default='release')
parser.add_option('--arch',
action='store',
dest='arch',
type='string',
default='x64')
(options, args) = parser.parse_args()
arch = 'ARM64' if options.arch == 'arm64' else 'X64'
mode = ('Debug' if options.mode == 'debug' else 'Release')
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
extension = '' if not sys.platform == 'win32' else '.bat'
pub = os.path.abspath('%s/%s%s/dart-sdk/bin/pub%s' %
(out_dir, mode, arch, extension))
print(pub)
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = os.environ.copy()
env['PUB_CACHE'] = pub_cache_dir
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code != 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code != 0:
return exit_code
finally:
shutil.rmtree(working_dir)
if __name__ == '__main__':
sys.exit(Main())
| #!/usr/bin/env python3
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import optparse
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
environment:
sdk: '>=2.10.0 <=3.0.0'
dependencies:
shelf:
test:
"""
def Main():
parser = optparse.OptionParser()
parser.add_option(
'--mode', action='store', dest='mode', type='string', default='release')
parser.add_option('--arch',
action='store',
dest='arch',
type='string',
default='x64')
(options, args) = parser.parse_args()
arch = 'XARM64' if options.arch == 'arm64' else 'X64'
mode = ('Debug' if options.mode == 'debug' else 'Release')
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
extension = '' if not sys.platform == 'win32' else '.bat'
pub = os.path.abspath('%s/%s%s/dart-sdk/bin/pub%s' %
(out_dir, mode, arch, extension))
print(pub)
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = os.environ.copy()
env['PUB_CACHE'] = pub_cache_dir
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code != 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code != 0:
return exit_code
finally:
shutil.rmtree(working_dir)
if __name__ == '__main__':
sys.exit(Main())
| bsd-3-clause | Python |
d701885655b2b459df3920e8dd10df195138b11e | Add CSV output via hooks (#36) | deepgram/kur | kur/model/hooks/output_hook.py | kur/model/hooks/output_hook.py | """
Copyright 2016 Deepgram
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pickle
import csv
import logging
from . import EvaluationHook
logger = logging.getLogger(__name__)
###############################################################################
class OutputHook(EvaluationHook):
""" Evaluation hook for saving to disk.
"""
###########################################################################
@classmethod
def get_name(cls):
""" Returns the name of the evaluation hook.
"""
return 'output'
###########################################################################
@staticmethod
def _save_as_pickle(target, data, truth=None):
""" Saves a file as a Python 3 pickle.
"""
logger.info('Saving model output as pickle: %s', target)
result = {'result' : data}
if truth is not None:
result['truth'] = truth
with open(target, 'wb') as fh:
pickle.dump(result, fh)
###########################################################################
@staticmethod
def _save_as_csv(target, data, truth=None):
""" Saves a file as a CSV.
"""
logger.info('Saving model output as csv: %s', target)
with open(target, 'w') as file_handle:
csv_file = csv.writer(file_handle)
# Only generate data for the first output
first_output = next(iter(data))
# Output CSV headers
row = list(data)
if truth is not None:
actuals = list(truth)
row += [actual + ' expected' for actual in actuals]
csv_file.writerow(row)
# For each row of data, output a CSV row
for index in range(len(data[first_output])):
row = []
for output in data.keys():
row += [data[output][index][0]]
if truth is not None:
for output in data.keys():
row += [truth[output][index]]
csv_file.writerow(row)
###########################################################################
def __init__(self, path=None, format=None, **kwargs): \
# pylint: disable=redefined-builtin
""" Creates a new output hook.
"""
super().__init__(**kwargs)
if path is None:
raise ValueError('No path specified in output hook.')
self.path = path
format = format or 'pkl'
savers = {
'pkl' : OutputHook._save_as_pickle,
'pickle' : OutputHook._save_as_pickle,
'csv' : OutputHook._save_as_csv
}
self.saver = savers.get(format)
if self.saver is None:
raise ValueError('No such handler for file format: {}'.format(
format))
###########################################################################
def apply(self, current, original, model=None):
""" Applies the hook to the data.
"""
data, truth = current
self.saver(
target=self.path,
data=data,
truth=truth
)
return current
### EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF
| """
Copyright 2016 Deepgram
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pickle
import logging
from . import EvaluationHook
logger = logging.getLogger(__name__)
###############################################################################
class OutputHook(EvaluationHook):
""" Evaluation hook for saving to disk.
"""
###########################################################################
@classmethod
def get_name(cls):
""" Returns the name of the evaluation hook.
"""
return 'output'
###########################################################################
@staticmethod
def _save_as_pickle(target, data, truth=None):
""" Saves a file as a Python 3 pickle.
"""
logger.info('Saving model output as pickle: %s', target)
result = {'result' : data}
if truth is not None:
result['truth'] = truth
with open(target, 'wb') as fh:
pickle.dump(result, fh)
###########################################################################
def __init__(self, path=None, format=None, **kwargs): \
# pylint: disable=redefined-builtin
""" Creates a new output hook.
"""
super().__init__(**kwargs)
if path is None:
raise ValueError('No path specified in output hook.')
self.path = path
format = format or 'pkl'
savers = {
'pkl' : OutputHook._save_as_pickle,
'pickle' : OutputHook._save_as_pickle
}
self.saver = savers.get(format)
if self.saver is None:
raise ValueError('No such handler for file format: {}'.format(
format))
###########################################################################
def apply(self, current, original, model=None):
""" Applies the hook to the data.
"""
data, truth = current
self.saver(
target=self.path,
data=data,
truth=truth
)
return current
### EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF
| apache-2.0 | Python |
ba69549405b86573f2996db8d0d4336e0c2a084b | Replace urllib2 w/ requests | lexifdev/crawlers,lexifdev/crawlers,teampopong/crawlers,majorika/crawlers,teampopong/crawlers,majorika/crawlers | bills/utils.py | bills/utils.py | #! /usr/bin/python2.7
# -*- coding: utf-8 -*-
import html5lib
import json
import os
import traceback
import requests
HEADERS = {
'Referer': 'http://likms.assembly.go.kr/bill/jsp/BillSearchResult.jsp',
}
def check_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def get_elems(page, x):
return page.xpath(x)
def get_elem_texts(page, x):
elems = page.xpath(x)
return [list(elem.itertext()) for elem in elems]
def get_webpage(url, outp):
try:
r = requests.get(url, headers=HEADERS, stream=True)
assert r.ok
except Exception as e:
import sys
traceback.print_exc(file=sys.stdout)
return
with open(outp, 'wb') as f:
for block in r.iter_content(1024):
if not block:
break
f.write(block)
def get_webpage_text(url):
r = requests.get(url, headers=HEADERS)
return r.text
def read_json(fname):
with open(fname, 'r') as f:
return json.load(f)
def read_webpage(filename):
with open(filename) as f:
page = html5lib.HTMLParser(\
tree=html5lib.treebuilders.getTreeBuilder("lxml"),\
namespaceHTMLElements=False)
p = page.parse(f)
return p
def write_json(data, fn):
with open(fn, 'w') as f:
json.dump(data, f, indent=2)
print 'Data written to ' + fn
| #! /usr/bin/python2.7
# -*- coding: utf-8 -*-
import html5lib
import json
import os
from shutil import copyfileobj
import urllib2
opener = urllib2.build_opener()
opener.addheaders.append(('Referer', 'http://likms.assembly.go.kr/bill/jsp/BillSearchResult.jsp'))
def check_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def get_elems(page, x):
return page.xpath(x)
def get_elem_texts(page, x):
elems = page.xpath(x)
return [list(elem.itertext()) for elem in elems]
def get_webpage(url, outp):
try:
r = opener.open(url)
except urllib2.URLError:
print 'URLError: %s' % url
return
with open(outp, 'w') as f:
copyfileobj(r, f)
def get_webpage_text(url):
return opener.open(url).read()
def read_json(fname):
with open(fname, 'r') as f:
return json.load(f)
def read_webpage(filename):
with open(filename) as f:
page = html5lib.HTMLParser(\
tree=html5lib.treebuilders.getTreeBuilder("lxml"),\
namespaceHTMLElements=False)
p = page.parse(f)
return p
def write_json(data, fn):
with open(fn, 'w') as f:
json.dump(data, f, indent=2)
print 'Data written to ' + fn
| agpl-3.0 | Python |
e64e86a68bae51cd5aefbff563112c0f4765065c | Set version number for v1.0.1 | ajdawson/gridfill | gridfill/__init__.py | gridfill/__init__.py | """Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
| """Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
| mit | Python |
866f95cfb0db14da0596efe41a128baf2a3a1cfe | Fix form PageForm needs updating. | ad-m/django-basic-tinymce-flatpages | django_basic_tinymce_flatpages/admin.py | django_basic_tinymce_flatpages/admin.py | from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
fields = '__all__'
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
| from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
| bsd-3-clause | Python |
04983c399cfdd70cdde121bbbf4e06b8e94370b1 | add get_available_languages(). | ulule/django-linguist | linguist/mixins.py | linguist/mixins.py | # -*- coding: utf-8 -*-
from .models import Translation
from .utils import get_cache_key
class LinguistMixin(object):
def clear_translations_cache(self):
self._linguist.clear()
@property
def language(self):
return self._linguist.language
@language.setter
def language(self, value):
self._linguist.language = value
def get_available_languages(self):
identifier = self._linguist.identifier
return (Translation.objects
.filter(identifier=identifier, object_id=self.pk)
.values_list('language', flat=True)
.distinct()
.order_by('language'))
def prefetch_translations(self):
identifier = self._linguist.identifier
translations = Translation.objects.filter(identifier=identifier, object_id=self.pk)
for translation in translations:
cache_key = get_cache_key(**{
'identifier': identifier,
'object_id': self.pk,
'language': translation.language,
'field_name': translation.field_name,
})
if cache_key not in self._linguist:
self._linguist[cache_key] = translation
| # -*- coding: utf-8 -*-
from .models import Translation
from .utils import get_cache_key
class LinguistMixin(object):
def clear_translations_cache(self):
self._linguist.clear()
@property
def language(self):
return self._linguist.language
@language.setter
def language(self, value):
self._linguist.language = value
def prefetch_translations(self):
identifier = self._linguist.identifier
translations = Translation.objects.filter(identifier=identifier, object_id=self.pk)
for translation in translations:
cache_key = get_cache_key(**{
'identifier': identifier,
'object_id': self.pk,
'language': translation.language,
'field_name': translation.field_name,
})
if cache_key not in self._linguist:
self._linguist[cache_key] = translation
| mit | Python |
c40d0450846e5f23413587e138792dbad8e2afd3 | fix AttributeError | onelab-eu/sfa,yippeecw/sfa,yippeecw/sfa,onelab-eu/sfa,yippeecw/sfa,onelab-eu/sfa | sfa/rspecs/elements/element.py | sfa/rspecs/elements/element.py | class Element(dict):
fields = {}
def __init__(self, fields={}, element=None, keys=None):
self.element = element
dict.__init__(self, dict.fromkeys(self.fields))
if not keys:
keys = fields.keys()
for key in keys:
if key in fields:
self[key] = fields[key]
@staticmethod
def get_elements(xml, xpath, element_class=None, fields=None):
"""
Search the specifed xml node for elements that match the
specified xpath query.
Returns a list of objectes instanced by the specfied element_class.
"""
if not element_class:
element_class = Element
if not fields and hasattr(element_class, fields):
fields = element_class.fields
elems = xml.xpath(xpath)
objs = []
for elem in elems:
if not fields:
obj = element_class(elem.attrib, elem)
else:
obj = element_class({}, elem)
for field in fields:
if field in elem.attrib:
obj[field] = elem.attrib[field]
objs.append(obj)
generic_elems = [element_class(elem.attrib, elem) for elem in elems]
return objs
@staticmethod
def add_elements(xml, name, objs, fields=None):
"""
Adds a child node to the specified xml node based on
the specified name , element class and object.
"""
if not isinstance(objs, list):
objs = [objs]
elems = []
for obj in objs:
if not obj:
continue
if not fields:
fields = obj.keys()
elem = xml.add_element(name)
for field in fields:
if field in obj and obj[field]:
elem.set(field, unicode(obj[field]))
elems.append(elem)
return elems
| class Element(dict):
fields = {}
def __init__(self, fields={}, element=None, keys=None):
self.element = element
dict.__init__(self, dict.fromkeys(self.fields))
if not keys:
keys = fields.keys()
for key in keys:
if key in fields:
self[key] = fields[key]
@staticmethod
def get_elements(xml, xpath, element_class=None, fields=None):
"""
Search the specifed xml node for elements that match the
specified xpath query.
Returns a list of objectes instanced by the specfied element_class.
"""
if not element_class:
element_class = Element
if not fields:
fields = element_class.fields.keys()
elems = xml.xpath(xpath)
objs = []
for elem in elems:
if not fields:
obj = element_class(elem.attrib, elem)
else:
obj = element_class({}, elem)
for field in fields:
if field in elem.attrib:
obj[field] = elem.attrib[field]
objs.append(obj)
generic_elems = [element_class(elem.attrib, elem) for elem in elems]
return objs
@staticmethod
def add_elements(xml, name, objs, fields=None):
"""
Adds a child node to the specified xml node based on
the specified name , element class and object.
"""
if not isinstance(objs, list):
objs = [objs]
elems = []
for obj in objs:
if not obj:
continue
if not fields:
fields = obj.keys()
elem = xml.add_element(name)
for field in fields:
if field in obj and obj[field]:
elem.set(field, unicode(obj[field]))
elems.append(elem)
return elems
| mit | Python |
857a7542c8e55b0162420e408f5175ae48c5f030 | Remove gaps for workspace widget. Change colors. | alberand/lemonbar,alberand/lemonbar,alberand/lemonbar | widgets/ws.py | widgets/ws.py | #!/usr/bin/env python3
import sys
import json
import subprocess
from utils import set_b_color, set_f_color, set_spacing
from widgets.widget import Widget
from widgets.config import colors, icons
class Workspaces(Widget):
'''
'''
def __init__(self, value=''):
'''
Params:
bg: background color
fg: foreground color
icon: icon
'''
Widget.__init__(self)
self.value = value
self.bg = None
self.fg = None
self.icon = None
self.gaps = (0, 0)
self.colors_rules = dict()
self.action = []
self.action_buttons = []
def update(self):
# Get information about workspace situations
cmd = 'i3-msg -t get_workspaces'
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
# Parse this infomation
info = json.loads(output.decode('UTF-8'))
info.sort(key=lambda data: data['num'])
# Generate resulting string
result = ''
for ws in info:
if ws['focused']:
result += self._chosen_ws(ws['num'])
else:
result += self._normal_ws(ws['num'])
# print('{}.'.format(result), file=sys.stderr)
self.value = result
def _chosen_ws(self, num):
num = ' {} '.format(num)
return set_f_color(
set_b_color(
set_spacing(num, (3, 3)), colors['c_gray']
), colors['c_white']
)
def _normal_ws(self, num):
num = ' {} '.format(num)
return set_f_color(set_spacing(num, (3, 3)), colors['c_white'])
if __name__ == '__main__':
a = Widget()
print(a.get_output())
| #!/usr/bin/env python3
import sys
import json
import subprocess
from utils import set_b_color, set_f_color, set_spacing
from widgets.widget import Widget
from widgets.config import colors, icons
class Workspaces(Widget):
'''
'''
def __init__(self, value=''):
'''
Params:
bg: background color
fg: foreground color
icon: icon
'''
Widget.__init__(self)
self.value = value
self.bg = None
self.fg = None
self.icon = None
self.gaps = (3, 3)
self.colors_rules = dict()
self.action = []
self.action_buttons = []
def update(self):
# Get information about workspace situations
cmd = 'i3-msg -t get_workspaces'
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
# Parse this infomation
info = json.loads(output.decode('UTF-8'))
info.sort(key=lambda data: data['num'])
# Generate resulting string
result = ''
for ws in info:
if ws['focused']:
result += self._chosen_ws(ws['num'])
else:
result += self._normal_ws(ws['num'])
# print('{}.'.format(result), file=sys.stderr)
self.value = result
def _chosen_ws(self, num):
num = ' {} '.format(num)
return set_f_color(
set_b_color(
set_spacing(num, (3, 3)), colors['c_black_l']
), colors['c_foreground']
)
def _normal_ws(self, num):
num = ' {} '.format(num)
return set_f_color(
set_b_color(
set_spacing(num, (3, 3)), colors['c_background']
), colors['c_foreground']
)
if __name__ == '__main__':
a = Widget()
print(a.get_output())
| mit | Python |
acc1131002d9c498042ce1bd4a737dee6b47a2dd | Remove redundant import. | bretth/django-pavlova-project,bretth/django-pavlova-project,bretth/django-pavlova-project | project/project_name/settings/base.py | project/project_name/settings/base.py |
from os.path import abspath, basename, dirname
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
|
from os.path import abspath, basename, dirname
from sys import path
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
| mit | Python |
d1008437dcf618700bce53913f3450aceda8a23f | Remove xadmin as it will not work with guardian. | weijia/djangoautoconf,weijia/djangoautoconf | djangoautoconf/auto_conf_admin_utils.py | djangoautoconf/auto_conf_admin_utils.py | from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | bsd-3-clause | Python |
23d1d4f11b081eb946df0895e8e622a2c53cfc72 | Update filter_genes_matrix.py | jfnavarro/st_analysis | scripts/filter_genes_matrix.py | scripts/filter_genes_matrix.py | #! /usr/bin/env python
"""
Script that takes a ST dataset (matrix of counts)
where the columns are genes and the rows
are spot coordinates
gene gene
XxY
XxY
And removes the columns of genes
matching the regular expression given as input.
@Author Jose Fernandez Navarro <jose.fernandez.navarro@scilifelab.se>
"""
import argparse
import sys
import os
import pandas as pd
import re
def main(counts_matrix, reg_exps, outfile):
if not os.path.isfile(counts_matrix):
sys.stderr.write("Error, input file not present or invalid format\n")
sys.exit(1)
if not outfile:
outfile = "filtered_{}.tsv".format(os.path.basename(counts_matrix).split(".")[0])
# Read the data frame (genes as columns)
counts_table = pd.read_csv(counts_matrix, sep="\t", header=0, index_col=0)
genes = counts_table.columns
# Filter out genes that match any of the reg-exps
genes = [gene for gene in genes if any([re.match(regex,gene) for regex in reg_exps])]
counts_table.drop(genes, axis=1, inplace=True)
# Write filtered table
counts_table.to_csv(outfile, sep='\t')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("--counts-matrix", required=True,
help="Matrix with gene counts (genes as columns)")
parser.add_argument("--outfile", help="Name of the output file")
parser.add_argument("--filter-genes", help="Regular expression for \
gene symbols to filter out. Can be given several times.",
default=None,
type=str,
action='append')
args = parser.parse_args()
main(args.counts_matrix, args.filter_genes, args.outfile)
| #! /usr/bin/env python
"""
Script that takes a ST dataset (matrix of counts)
where the columns are genes and the rows
are spot coordinates
gene gene
XxY
XxY
And removes the columns of genes
matching the regular expression given as input.
@Author Jose Fernandez Navarro <jose.fernandez.navarro@scilifelab.se>
"""
import argparse
import sys
import os
import pandas as pd
import re
def main(counts_matrix, reg_exps, outfile):
if not os.path.isfile(counts_matrix):
sys.stderr.write("Error, input file not present or invalid format\n")
sys.exit(1)
if not outfile:
outfile = "filtered_{}.tsv".format(os.path.basename(counts_matrix).split(".")[0])
# Read the data frame (genes as columns)
counts_table = pd.read_csv(counts_matrix, sep="\t", header=0, index_col=0)
genes = counts_table.columns
# Filter out genes that match any of the reg-exps
genes = [gene for gene in genes if any([re.fullmatch(regex,gene) for regex in reg_exps])]
counts_table.drop(genes, axis=1, inplace=True)
# Write filtered table
counts_table.to_csv(outfile, sep='\t')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("--counts-matrix", required=True,
help="Matrix with gene counts (genes as columns)")
parser.add_argument("--outfile", help="Name of the output file")
parser.add_argument("--filter-genes", help="Regular expression for \
gene symbols to filter out. Can be given several times.",
default=None,
type=str,
action='append')
args = parser.parse_args()
main(args.counts_matrix, args.filter_genes, args.outfile)
| mit | Python |
76e35d291ae6619aaf95636deec96247b2e64d62 | Add logging to Cloudformation | CloudHeads/lambda_utils | lambda_utils/cloudformation.py | lambda_utils/cloudformation.py | import json
import urllib2
from lambda_utils import Event, logging
class Cloudformation(Event):
event = None
status = None
response = None
reason = None
def wrapped_function(self, event, context):
logging.info(event)
self.event = self.extract_event(event)
self.status = 'FAILED'
self.response = None
try:
self.response = self.function(self.event, context)
self.status = 'SUCCESS'
return self.response
except Exception as ex:
self.reason = ex.message
raise
finally:
send_signal(self.event, self.status, self.reason, self.response)
def extract_event(self, event):
event = dict(event)
# SNS Topic to Custom Resource
if 'Records' in event:
event = json.loads(event['Records'][0]['Sns']['Message'])
return event
def send_signal(event, response_status, reason, response_data=None):
response_body = json.dumps(
{
'Status': response_status,
'Reason': reason or 'ReasonCanNotBeNone',
'PhysicalResourceId': event.get('PhysicalResourceId', event['LogicalResourceId']),
'StackId': event['StackId'],
'RequestId': event['RequestId'],
'LogicalResourceId': event['LogicalResourceId'],
'Data': response_data or {}
}
)
logging.info(response_body)
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(event['ResponseURL'], data=response_body)
request.add_header('Content-Type', '')
request.add_header('Content-Length', len(response_body))
request.get_method = lambda: 'PUT'
opener.open(request)
| import json
import urllib2
from lambda_utils import Event
class Cloudformation(Event):
event = None
status = None
response = None
reason = None
def wrapped_function(self, event, context):
self.event = self.extract_event(event)
self.status = 'FAILED'
self.response = None
try:
self.response = self.function(self.event, context)
self.status = 'SUCCESS'
return self.response
except Exception as ex:
self.reason = ex.message
raise
finally:
send_signal(self.event, self.status, self.reason, self.response)
def extract_event(self, event):
event = dict(event)
# SNS Topic to Custom Resource
if 'Records' in event:
event = json.loads(event['Records'][0]['Sns']['Message'])
return event
def send_signal(event, response_status, reason, response_data=None):
response_body = json.dumps(
{
'Status': response_status,
'Reason': reason or 'ReasonCanNotBeNone',
'PhysicalResourceId': event.get('PhysicalResourceId', event['LogicalResourceId']),
'StackId': event['StackId'],
'RequestId': event['RequestId'],
'LogicalResourceId': event['LogicalResourceId'],
'Data': response_data or {}
}
)
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(event['ResponseURL'], data=response_body)
request.add_header('Content-Type', '')
request.add_header('Content-Length', len(response_body))
request.get_method = lambda: 'PUT'
opener.open(request)
| mit | Python |
331da7f3004f295a120b3e61a9b525c16bb7f62a | Update version to 2.0-dev. | tsiegleauq/OpenSlides,rolandgeider/OpenSlides,OpenSlides/OpenSlides,ostcar/OpenSlides,normanjaeckel/OpenSlides,normanjaeckel/OpenSlides,CatoTH/OpenSlides,rolandgeider/OpenSlides,ostcar/OpenSlides,CatoTH/OpenSlides,jwinzer/OpenSlides,boehlke/OpenSlides,emanuelschuetze/OpenSlides,jwinzer/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,OpenSlides/OpenSlides,emanuelschuetze/OpenSlides,FinnStutzenstein/OpenSlides,normanjaeckel/OpenSlides,FinnStutzenstein/OpenSlides,emanuelschuetze/OpenSlides,ostcar/OpenSlides,CatoTH/OpenSlides,FinnStutzenstein/OpenSlides,tsiegleauq/OpenSlides,boehlke/OpenSlides,boehlke/OpenSlides,boehlke/OpenSlides,jwinzer/OpenSlides,rolandgeider/OpenSlides,emanuelschuetze/OpenSlides,CatoTH/OpenSlides,FinnStutzenstein/OpenSlides,tsiegleauq/OpenSlides,jwinzer/OpenSlides | openslides/__init__.py | openslides/__init__.py | __author__ = 'OpenSlides Team <support@openslides.org>'
__description__ = 'Presentation and assembly system'
__version__ = '2.0-dev'
| __author__ = 'OpenSlides Team <support@openslides.org>'
__description__ = 'Presentation and assembly system'
__version__ = '2.0b5'
| mit | Python |
3a878c528929fa7090f0dd16f97e1ae1b0d73be1 | Remove feature selection | davidgasquez/kaggle-airbnb | scripts/generate_submission.py | scripts/generate_submission.py | #!/usr/bin/env python
import pandas as pd
import numpy as np
import datetime
from sklearn.preprocessing import LabelEncoder
from xgboost.sklearn import XGBClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.feature_selection import SelectFromModel
def generate_submission(y_pred, test_users_ids, label_encoder):
"""Create a valid submission file given the predictions."""
ids = []
cts = []
for i in range(len(test_users_ids)):
idx = test_users_ids[i]
ids += [idx] * 5
sorted_countries = np.argsort(y_pred[i])[::-1]
cts += label_encoder.inverse_transform(sorted_countries)[:5].tolist()
id_stacks = np.column_stack((ids, cts))
submission = pd.DataFrame(id_stacks, columns=['id', 'country'])
date = datetime.datetime.now().strftime("%m-%d-%H:%M:%S")
name = __file__.split('.')[0] + '_' + str(date) + '.csv'
return submission.to_csv('../data/submissions/' + name, index=False)
def main():
path = '../data/processed/'
train_users = pd.read_csv(path + 'ohe_count_processed_train_users.csv')
test_users = pd.read_csv(path + 'ohe_count_processed_test_users.csv')
y_train = train_users['country_destination']
train_users.drop(['country_destination', 'id'], axis=1, inplace=True)
train_users = train_users.fillna(-1)
x_train = train_users.values
label_encoder = LabelEncoder()
encoded_y_train = label_encoder.fit_transform(y_train)
test_users_ids = test_users['id']
test_users.drop('id', axis=1, inplace=True)
test_users = test_users.fillna(-1)
x_test = test_users.values
clf = XGBClassifier(
max_depth=7,
learning_rate=0.18,
n_estimators=80,
gamma=0,
min_child_weight=1,
max_delta_step=0,
subsample=1,
colsample_bytree=1,
colsample_bylevel=1,
reg_alpha=0,
reg_lambda=1,
scale_pos_weight=1,
base_score=0.5,
missing=None,
silent=True,
nthread=-1,
seed=42
)
clf.fit(x_train, encoded_y_train)
y_pred = clf.predict_proba(x_test)
generate_submission(y_pred, test_users_ids, label_encoder)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import pandas as pd
import numpy as np
import datetime
from sklearn.preprocessing import LabelEncoder
from xgboost.sklearn import XGBClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.feature_selection import SelectFromModel
def generate_submission(y_pred, test_users_ids, label_encoder):
"""Create a valid submission file given the predictions."""
ids = []
cts = []
for i in range(len(test_users_ids)):
idx = test_users_ids[i]
ids += [idx] * 5
sorted_countries = np.argsort(y_pred[i])[::-1]
cts += label_encoder.inverse_transform(sorted_countries)[:5].tolist()
id_stacks = np.column_stack((ids, cts))
submission = pd.DataFrame(id_stacks, columns=['id', 'country'])
date = datetime.datetime.now().strftime("%m-%d-%H:%M:%S")
name = __file__.split('.')[0] + '_' + str(date) + '.csv'
return submission.to_csv('../data/submissions/' + name, index=False)
def main():
path = '../data/processed/'
train_users = pd.read_csv(path + '_encoded_train_users.csv')
test_users = pd.read_csv(path + '_encoded_test_users.csv')
y_train = train_users['country_destination']
train_users.drop(['country_destination', 'id'], axis=1, inplace=True)
train_users = train_users.fillna(-1)
x_train = train_users.values
label_encoder = LabelEncoder()
encoded_y_train = label_encoder.fit_transform(y_train)
test_users_ids = test_users['id']
test_users.drop('id', axis=1, inplace=True)
test_users = test_users.fillna(-1)
x_test = test_users.values
clf = XGBClassifier(
max_depth=7,
learning_rate=0.18,
n_estimators=80,
gamma=0,
min_child_weight=1,
max_delta_step=0,
subsample=1,
colsample_bytree=1,
colsample_bylevel=1,
reg_alpha=0,
reg_lambda=1,
scale_pos_weight=1,
base_score=0.5,
missing=None,
silent=True,
nthread=-1,
seed=42
)
model = SelectFromModel(ExtraTreesClassifier())
model.fit(x_train, encoded_y_train)
x_train = model.transform(x_train)
clf.fit(x_train, encoded_y_train)
y_pred = clf.predict_proba(x_test)
generate_submission(y_pred, test_users_ids, label_encoder)
if __name__ == '__main__':
main()
| mit | Python |
35119551542a2e403e6d66b371d9913d6d1ed440 | Add unicode treatment on JSONField rendering | opps/opps,jeanmask/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,YACOWS/opps | opps/fields/widgets.py | opps/fields/widgets.py | #!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
# value sometimes come as unicode and we need to treat it
if type(values) == unicode:
values = json.loads(values)
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
element_attr['obj_value'] = values.get(obj.slug, '')
if obj.type in ["checkbox", "radio"]:
fo = FieldOption.objects.filter(field=obj)
if obj.type == "checkbox":
obj_value = []
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
del element_attr['obj_value']
else:
element_attr['list'] = fo
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
| #!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
element_attr['obj_value'] = values.get(obj.slug, '')
if obj.type in ["checkbox", "radio"]:
fo = FieldOption.objects.filter(field=obj)
if obj.type == "checkbox":
obj_value = []
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
del element_attr['obj_value']
else:
element_attr['list'] = fo
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
| mit | Python |
40080380c194c8d8a46250f5e75fa210600f8005 | update crawler | vitorfs/woid,vitorfs/woid,vitorfs/woid | scripts/hackernews_services.py | scripts/hackernews_services.py | from unipath import Path
import sys
import os
PROJECT_DIR = Path(os.path.abspath(__file__)).parent.parent
sys.path.append(PROJECT_DIR)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'woid.settings')
import django
django.setup()
import time
import threading
from django.utils import timezone
from woid.crawler.crawler import HackerNewsCrawler
from woid.apps.services.models import Service, Story
FIVE_MINUTES = 5 * 60
TWELVE_HOURS = 12 * 60 * 60
HACKER_NEWS_SLUG = 'hackernews'
class HackerNewsService(threading.Thread):
def __init__(self):
super(HackerNewsService, self).__init__()
self.crawler = HackerNewsCrawler()
self.service = Service.objects.get(slug=HACKER_NEWS_SLUG)
class HNSUpdateTopStories(HackerNewsService):
def run(self):
run_count = 0
while True:
run_count = run_count + 1
self.crawler.update_top_stories()
time.sleep(FIVE_MINUTES)
class HNSUpdateTodayStoriesData(HackerNewsService):
def run(self):
run_count = 0
while True:
run_count = run_count + 1
today = timezone.now()
today_stories = self.service.stories \
.filter(date__year=today.year, date__month=today.month, date__day=today.day) \
.values_list('code', flat=True)
for story_code in today_stories:
self.crawler.update_story(story_code)
time.sleep(FIVE_MINUTES)
class HNSIndexAllStories(HackerNewsService):
def __init__(self, start_id=1, offset=1):
super(HNSIndexAllStories, self).__init__()
self.start_id = start_id
self.offset = offset
def run(self):
self.crawler.index_all_stories(self.start_id, self.offset)
def main():
HNSUpdateTopStories().start()
HNSUpdateTodayStoriesData().start()
HNSIndexAllStories(start_id=101949, offset=2).start()
HNSIndexAllStories(start_id=101950, offset=2).start()
if __name__ == '__main__':
main()
| from unipath import Path
import sys
import os
PROJECT_DIR = Path(os.path.abspath(__file__)).parent.parent
sys.path.append(PROJECT_DIR)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'woid.settings')
import django
django.setup()
import time
import threading
from django.utils import timezone
from woid.crawler.crawler import HackerNewsCrawler
from woid.apps.services.models import Service, Story
FIVE_MINUTES = 5 * 60
TWELVE_HOURS = 12 * 60 * 60
HACKER_NEWS_SLUG = 'hackernews'
class HackerNewsService(threading.Thread):
def __init__(self):
super(HackerNewsService, self).__init__()
self.crawler = HackerNewsCrawler()
self.service = Service.objects.get(slug=HACKER_NEWS_SLUG)
class HNSUpdateTopStories(HackerNewsService):
def run(self):
run_count = 0
while True:
run_count = run_count + 1
self.crawler.update_top_stories()
time.sleep(FIVE_MINUTES)
class HNSUpdateTodayStoriesData(HackerNewsService):
def run(self):
run_count = 0
while True:
run_count = run_count + 1
today = timezone.now()
today_stories = self.service.stories \
.filter(date__year=today.year, date__month=today.month, date__day=today.day) \
.values_list('code', flat=True)
for story_code in today_stories:
self.crawler.update_story(story_code)
time.sleep(FIVE_MINUTES)
class HNSIndexAllStories(HackerNewsService):
def __init__(self, start_id=1, offset=1):
super(HNSIndexAllStories, self).__init__()
self.start_id = start_id
self.offset = offset
def run(self):
self.crawler.index_all_stories(self.start_id, self.offset)
def main():
HNSUpdateTopStories().start()
HNSUpdateTodayStoriesData().start()
HNSIndexAllStories(start_id=29289, offset=2).start()
HNSIndexAllStories(start_id=29290, offset=2).start()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
ab17649005ce87277a8af11994d3f1fc7de2d120 | Update file_system_storage.py | ArabellaTech/django-image-diet | image_diet/file_system_storage.py | image_diet/file_system_storage.py | import os
from image_diet import settings
from django.conf import settings as main_settings
from django.contrib.staticfiles.storage import StaticFilesStorage
class ImageDietFileSystemStorage(StaticFilesStorage):
def post_process(self, files, *args, **kwargs):
results = []
print 'test'
print settings.DIET_COMPRESS_STATIC_IMAGES
die
if settings.DIET_COMPRESS_STATIC_IMAGES:
if 'image_diet' not in main_settings.INSTALLED_APPS:
raise NotImplementedError("You need to install django_image_diet to use DIET_COMPRESS_STATIC_IMAGES")
from image_diet.diet import squeeze
for f in files:
processed_file = squeeze(os.path.join(main_settings.STATIC_ROOT, f))
results.append([f, processed_file, True if processed_file is not None else False])
return results
| import os
from image_diet import settings
from django.conf import settings as main_settings
from django.contrib.staticfiles.storage import StaticFilesStorage
class ImageDietFileSystemStorage(StaticFilesStorage):
def post_process(self, files, *args, **kwargs):
results = []
print 'test'
print settings.DIET_COMPRESS_STATIC_IMAGES:
die
if settings.DIET_COMPRESS_STATIC_IMAGES:
if 'image_diet' not in main_settings.INSTALLED_APPS:
raise NotImplementedError("You need to install django_image_diet to use DIET_COMPRESS_STATIC_IMAGES")
from image_diet.diet import squeeze
for f in files:
processed_file = squeeze(os.path.join(main_settings.STATIC_ROOT, f))
results.append([f, processed_file, True if processed_file is not None else False])
return results
| mit | Python |
bca06e7888af9d47e93eff6500c71343525c3f24 | Make sure instance_class is present. Default num cache nodes to 1 | yaybu/touchdown,mitchellrj/touchdown | touchdown/aws/elasticache/cache.py | touchdown/aws/elasticache/cache.py | # Copyright 2014 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.core.resource import Resource
from touchdown.core.plan import Plan, Present
from touchdown.core import argument
from ..account import Account
from ..common import SimpleDescribe, SimpleApply, SimpleDestroy
from ..vpc import SecurityGroup
from .subnet_group import SubnetGroup
class BaseCacheCluster(Resource):
instance_class = argument.String(field="CacheNodeType")
engine = argument.String(field='Engine', aws_update=False)
engine_version = argument.String(field='EngineVersion')
port = argument.Integer(min=1, max=32768, field='Port', aws_update=False)
security_groups = argument.ResourceList(SecurityGroup, field='SecurityGroupIds')
availability_zone = argument.String(field='PreferredAvailabilityZone')
multi_az = argument.Boolean(field='AZMode')
auto_minor_version_upgrade = argument.Boolean(field='AutoMinorVersionUpgrade')
num_cache_nodes = argument.Integer(default=1, min=1, field='NumCacheNodes')
subnet_group = argument.Resource(SubnetGroup, field='CacheSubnetGroupName')
# parameter_group = argument.Resource(ParamaterGroup, field='CacheParameterGroupName')
apply_immediately = argument.Boolean(field="ApplyImmediately", aws_create=False)
# tags = argument.Dict()
account = argument.Resource(Account)
class CacheCluster(BaseCacheCluster):
resource_name = "cache_cluster"
name = argument.String(min=1, max=20, regex=r"^[a-z1-9\-]*$", field="CacheClusterId")
# replication_group = argument.Resource("touchdown.aws.elasticache.replication_group.ReplicationGroup", field='ReplicationGroupId')
class Describe(SimpleDescribe, Plan):
resource = CacheCluster
service_name = 'elasticache'
describe_action = "describe_cache_clusters"
describe_notfound_exception = "CacheClusterNotFound"
describe_envelope = "CacheClusters"
key = 'CacheClusterId'
class Apply(SimpleApply, Describe):
create_action = "create_cache_cluster"
# update_action = "modify_cache_cluster"
waiter = "cache_cluster_available"
signature = (
Present("name"),
Present("instance_class"),
)
class Destroy(SimpleDestroy, Describe):
destroy_action = "delete_cache_cluster"
waiter = "cache_cluster_deleted"
| # Copyright 2014 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.core.resource import Resource
from touchdown.core.plan import Plan
from touchdown.core import argument
from ..account import Account
from ..common import SimpleDescribe, SimpleApply, SimpleDestroy
from ..vpc import SecurityGroup
from .subnet_group import SubnetGroup
class BaseCacheCluster(Resource):
instance_class = argument.String(field="CacheNodeType")
engine = argument.String(field='Engine', aws_update=False)
engine_version = argument.String(field='EngineVersion')
port = argument.Integer(min=1, max=32768, field='Port', aws_update=False)
security_groups = argument.ResourceList(SecurityGroup, field='SecurityGroupIds')
availability_zone = argument.String(field='PreferredAvailabilityZone')
multi_az = argument.Boolean(field='AZMode')
auto_minor_version_upgrade = argument.Boolean(field='AutoMinorVersionUpgrade')
num_cache_nodes = argument.Integer(field='NumCacheNodes')
subnet_group = argument.Resource(SubnetGroup, field='CacheSubnetGroupName')
# parameter_group = argument.Resource(ParamaterGroup, field='CacheParameterGroupName')
apply_immediately = argument.Boolean(field="ApplyImmediately", aws_create=False)
# tags = argument.Dict()
account = argument.Resource(Account)
class CacheCluster(BaseCacheCluster):
resource_name = "cache_cluster"
name = argument.String(min=1, max=20, regex=r"^[a-z1-9\-]*$", field="CacheClusterId")
# replication_group = argument.Resource("touchdown.aws.elasticache.replication_group.ReplicationGroup", field='ReplicationGroupId')
class Describe(SimpleDescribe, Plan):
resource = CacheCluster
service_name = 'elasticache'
describe_action = "describe_cache_clusters"
describe_notfound_exception = "CacheClusterNotFound"
describe_envelope = "CacheClusters"
key = 'CacheClusterId'
class Apply(SimpleApply, Describe):
create_action = "create_cache_cluster"
# update_action = "modify_cache_cluster"
waiter = "cache_cluster_available"
class Destroy(SimpleDestroy, Describe):
destroy_action = "delete_cache_cluster"
waiter = "cache_cluster_deleted"
| apache-2.0 | Python |
ea9b40f4b69ebdce7ac060a467151c8e80c5513e | Fix up whoisaccount with new metadata key | Heufneutje/txircd | txircd/modules/core/whoisaccount.py | txircd/modules/core/whoisaccount.py | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
irc.RPL_WHOISACCOUNT = "330"
class WhoisAccount(ModuleData):
implements(IPlugin, IModuleData)
name = "WhoisAccount"
core = True
def actions(self):
return [ ("extrawhois", 1, self.whoisAccountName) ]
def whoisAccountName(self, user, targetUser):
if targetUser.metadataKeyExists("account"):
user.sendMessage(irc.RPL_WHOISACCOUNT, targetUser.nick, targetUser.metadataValue("account"), "is logged in as")
whoisAccount = WhoisAccount() | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
irc.RPL_WHOISACCOUNT = "330"
class WhoisAccount(ModuleData):
implements(IPlugin, IModuleData)
name = "WhoisAccount"
core = True
def actions(self):
return [ ("extrawhois", 1, self.whoisAccountName) ]
def whoisAccountName(self, user, targetUser):
if targetUser.metadataKeyExists("accountname"):
user.sendMessage(irc.RPL_WHOISACCOUNT, targetUser.nick, targetUser.metadataValue("accountname"), "is logged in as")
whoisAccount = WhoisAccount() | bsd-3-clause | Python |
b226a96229a75a0cdcfc3acca3ec84beba2c1613 | refresh camera after changing it does not work FIX | madhuni/AstroBox,madhuni/AstroBox,AstroPrint/AstroBox,abinashk-inf/AstroBox,abinashk-inf/AstroBox,AstroPrint/AstroBox,madhuni/AstroBox,AstroPrint/AstroBox,abinashk-inf/AstroBox,madhuni/AstroBox,abinashk-inf/AstroBox | src/astroprint/api/camera.py | src/astroprint/api/camera.py | # coding=utf-8
__author__ = "Daniel Arroyo <daniel@astroprint.com>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import octoprint.util as util
from flask import jsonify, request, abort
from octoprint.server import restricted_access, SUCCESS
from octoprint.server.api import api
from octoprint.settings import settings
from astroprint.camera import cameraManager
from astroprint.webrtc import webRtcManager
@api.route("/camera/refresh-plugged", methods=["POST"])
@restricted_access
def refreshPluggedCamera():
cm = cameraManager()
if settings().get(['camera', 'manager']) == 'gstreamer':
return jsonify({"isCameraPlugged": cm.init_gstreamer()})
else:
return jsonify({"isCameraPlugged": cm.open_camera()})
@api.route("/camera/has-properties", methods=["GET"])
@restricted_access
def hasCameraProperties():
cm = cameraManager()
return jsonify({"hasCameraProperties": cm.hasCameraProperties()})
@api.route("/camera/is-resolution-supported", methods=["GET"])
@restricted_access
def isResolutionSupported():
cm = cameraManager()
size = request.values['size']
return jsonify({"isResolutionSupported": cm.isResolutionSupported(size)})
@api.route("/camera/connected", methods=["GET"])
@restricted_access
def isCameraConnected():
cm = cameraManager()
return jsonify({"isCameraConnected": cm.isCameraConnected(), "cameraName": cm.cameraName})
@api.route("/camera/timelapse", methods=["POST"])
@restricted_access
def update_timelapse():
freq = request.values.get('freq')
if freq:
cm = cameraManager()
if cm.timelapseInfo:
if cm.update_timelapse(freq):
return jsonify(SUCCESS)
else:
if cm.start_timelapse(freq):
return jsonify(SUCCESS)
else:
abort(400)
abort(500)
@api.route("/camera/init-janus", methods=["POST"])
@restricted_access
def init_janus():
#Start session in Janus
if webRtcManager().ensureJanusRunning():
return jsonify(SUCCESS)
abort(500)
@api.route("/camera/peer-session", methods=["POST", "DELETE"])
@restricted_access
def peer_session():
data = request.json
if data and 'sessionId' in data:
sessionId = data['sessionId']
if request.method == 'POST':
#Initialize the peer session
if cameraManager().startLocalVideoSession(sessionId):
return jsonify(SUCCESS)
abort(500)
elif request.method == 'DELETE':
#Close peer session
if cameraManager().closeLocalVideoSession(sessionId):
return jsonify(SUCCESS)
else:
abort(500)
else:
abort(400)
@api.route("/camera/start-streaming",methods=["POST"])
@restricted_access
def start_streaming():
#open_camera
webRtcManager().startGStreamer()
return jsonify(SUCCESS)
| # coding=utf-8
__author__ = "Daniel Arroyo <daniel@astroprint.com>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import octoprint.util as util
from flask import jsonify, request, abort
from octoprint.server import restricted_access, SUCCESS
from octoprint.server.api import api
from astroprint.camera import cameraManager
from astroprint.webrtc import webRtcManager
@api.route("/camera/refresh-plugged", methods=["POST"])
@restricted_access
def refreshPluggedCamera():
cm = cameraManager()
opened = cm.open_camera()
return jsonify({"isCameraPlugged": opened})
@api.route("/camera/has-properties", methods=["GET"])
@restricted_access
def hasCameraProperties():
cm = cameraManager()
return jsonify({"hasCameraProperties": cm.hasCameraProperties()})
@api.route("/camera/is-resolution-supported", methods=["GET"])
@restricted_access
def isResolutionSupported():
cm = cameraManager()
size = request.values['size']
return jsonify({"isResolutionSupported": cm.isResolutionSupported(size)})
@api.route("/camera/connected", methods=["GET"])
@restricted_access
def isCameraConnected():
cm = cameraManager()
return jsonify({"isCameraConnected": cm.isCameraConnected(), "cameraName": cm.cameraName})
@api.route("/camera/timelapse", methods=["POST"])
@restricted_access
def update_timelapse():
freq = request.values.get('freq')
if freq:
cm = cameraManager()
if cm.timelapseInfo:
if cm.update_timelapse(freq):
return jsonify(SUCCESS)
else:
if cm.start_timelapse(freq):
return jsonify(SUCCESS)
else:
abort(400)
abort(500)
@api.route("/camera/init-janus", methods=["POST"])
@restricted_access
def init_janus():
#Start session in Janus
if webRtcManager().ensureJanusRunning():
return jsonify(SUCCESS)
abort(500)
@api.route("/camera/peer-session", methods=["POST", "DELETE"])
@restricted_access
def peer_session():
data = request.json
if data and 'sessionId' in data:
sessionId = data['sessionId']
if request.method == 'POST':
#Initialize the peer session
if cameraManager().startLocalVideoSession(sessionId):
return jsonify(SUCCESS)
abort(500)
elif request.method == 'DELETE':
#Close peer session
if cameraManager().closeLocalVideoSession(sessionId):
return jsonify(SUCCESS)
else:
abort(500)
else:
abort(400)
@api.route("/camera/start-streaming",methods=["POST"])
@restricted_access
def start_streaming():
#open_camera
webRtcManager().startGStreamer()
return jsonify(SUCCESS)
| agpl-3.0 | Python |
004a804e1d4bbf2caf588bb98127f6bbb8357a72 | Update projectfiles_unchanged.py version 4 | kullo/smartsqlite,kullo/smartsqlite,kullo/smartsqlite | projectfiles_unchanged.py | projectfiles_unchanged.py | #!/usr/bin/env python3
#
# This script is used on Linux, OS X and Windows.
# Python 3 required.
# Returns 0 if project files are unchanged and 1 else.
#
# Script version: 4
import os
import glob
import hashlib
import sys
matches = []
tmp_file = "projectfiles.md5.tmp"
exlude_dirs = set(['.git', 'docs'])
def get_subdirs(path):
return set([name for name in os.listdir(path)
if os.path.isdir(os.path.join(path, name))])
def find_in(path):
# print(path)
out = []
out += glob.glob(path + "/*.pro")
out += glob.glob(path + "/*.pri")
out += glob.glob(path + "/CMakeLists.txt")
out += glob.glob(path + "/Info.plist")
subs = get_subdirs(path) - exlude_dirs
for s in subs:
out += find_in(os.path.join(path, s))
out.sort()
return out
pros = find_in(".")
# print(pros)
hasher = hashlib.md5()
for pro in pros:
with open(pro) as f: s = f.read()
hasher.update(s.encode('utf8'))
current = hasher.hexdigest()
if os.path.isfile(tmp_file):
with open(tmp_file) as f: old = f.read()
else:
old = ""
if current.strip() == old.strip():
sys.exit(0)
else:
with open(tmp_file, "w") as f: print(current, file=f)
sys.exit(1)
| #!/usr/bin/env python3
#
# This script is used on Linux, OS X and Windows.
# Python 3 required.
# Returns 0 if project files are unchanged and 1 else.
#
# Script version: 3
import os
import glob
import hashlib
import sys
matches = []
tmp_file = "projectfiles.md5.tmp"
exlude_dirs = set(['.git', 'docs'])
def get_subdirs(path):
return set([name for name in os.listdir(path)
if os.path.isdir(os.path.join(path, name))])
def find_in(path):
# print(path)
out = []
out += glob.glob(path + "/*.pro")
out += glob.glob(path + "/CMakeLists.txt")
out += glob.glob(path + "/Info.plist")
subs = get_subdirs(path) - exlude_dirs
for s in subs:
out += find_in(os.path.join(path, s))
out.sort()
return out
pros = find_in(".")
# print(pros)
hasher = hashlib.md5()
for pro in pros:
with open(pro) as f: s = f.read()
hasher.update(s.encode('utf8'))
current = hasher.hexdigest()
if os.path.isfile(tmp_file):
with open(tmp_file) as f: old = f.read()
else:
old = ""
if current.strip() == old.strip():
sys.exit(0)
else:
with open(tmp_file, "w") as f: print(current, file=f)
sys.exit(1)
| bsd-3-clause | Python |
ad06f149015fc78365df3a20f7b66d6b00de3da7 | Rename related state to joined to match interface update | juju-solutions/layer-apache-flume-hdfs,juju-solutions/layer-apache-flume-hdfs | reactive/flume_hdfs.py | reactive/flume_hdfs.py | from charms.reactive import when, when_not
from charms.reactive import set_state, remove_state, is_state
from charmhelpers.core import hookenv
from charms.layer.apache_flume_base import Flume
from charms.reactive.helpers import any_file_changed
@when_not('hadoop.joined')
def report_unconnected():
hookenv.status_set('blocked', 'Waiting for relation to Hadoop Plugin')
@when('hadoop.joined')
@when_not('hadoop.hdfs.ready')
def report_waiting(hadoop): # pylint: disable=unused-argument
hookenv.status_set('waiting', 'Waiting for HDFS')
@when('flume-source.joined')
def sending_connection_info_to_agent(source):
config = hookenv.config()
source.send_configuration(config['source_port'], config['protocol'])
@when('flume-base.installed', 'hadoop.hdfs.ready')
def configure_flume(hdfs): # pylint: disable=unused-argument
hookenv.status_set('maintenance', 'Configuring Flume')
flume = Flume()
flume.configure_flume()
if not is_state('flume-hdfs.hdfs.inited'):
flume.init_hdfs()
set_state('flume-hdfs.hdfs.inited')
if any_file_changed([flume.config_file]):
flume.restart()
set_state('flume-hdfs.started')
hookenv.status_set('active', 'Ready')
@when('flume-hdfs.started')
@when_not('hadoop.ready')
def stop_flume():
flume = Flume()
flume.stop()
remove_state('flume-hdfs.started')
| from charms.reactive import when, when_not
from charms.reactive import set_state, remove_state, is_state
from charmhelpers.core import hookenv
from charms.layer.apache_flume_base import Flume
from charms.reactive.helpers import any_file_changed
@when_not('hadoop.related')
def report_unconnected():
hookenv.status_set('blocked', 'Waiting for relation to Hadoop Plugin')
@when('hadoop.related')
@when_not('hadoop.hdfs.ready')
def report_waiting(hadoop): # pylint: disable=unused-argument
hookenv.status_set('waiting', 'Waiting for HDFS')
@when('flume-source.joined')
def sending_connection_info_to_agent(source):
config = hookenv.config()
source.send_configuration(config['source_port'], config['protocol'])
@when('flume-base.installed', 'hadoop.hdfs.ready')
def configure_flume(hdfs): # pylint: disable=unused-argument
hookenv.status_set('maintenance', 'Configuring Flume')
flume = Flume()
flume.configure_flume()
if not is_state('flume-hdfs.hdfs.inited'):
flume.init_hdfs()
set_state('flume-hdfs.hdfs.inited')
if any_file_changed([flume.config_file]):
flume.restart()
set_state('flume-hdfs.started')
hookenv.status_set('active', 'Ready')
@when('flume-hdfs.started')
@when_not('hadoop.ready')
def stop_flume():
flume = Flume()
flume.stop()
remove_state('flume-hdfs.started')
| apache-2.0 | Python |
657a22dc282452b9ffeb408721623c3029cf5ad2 | save allvisit filename on paths object | adrn/thejoker,adrn/thejoker | scripts/make-troup-allVisit.py | scripts/make-troup-allVisit.py | """
Create a subset of APOGEE's allVisit file that contains only the stars in Troup's sample.
"""
# Standard library
import os
# Third-party
from astropy.io import fits
import h5py
import numpy as np
# Project
from thejoker import Paths
paths = Paths(__file__)
def main():
allVisit_path = os.path.join(paths.root, "data", "allVisit-l30e.2.fits")
troup_csv_path = os.path.join(paths.root, "data", "troup16-dr12.csv")
if not os.path.exists(allVisit_path):
download_cmd = ("wget https://data.sdss.org/sas/dr13/apogee/spectro/redux/r6/allVisit-l30e.2.fits -O {}"
.format(allVisit_path))
raise IOError("Path to main APOGEE DR13 allVisit file does not exist: {}\n"
"\t Download file with: {}"
.format(allVisit_path, download_cmd))
troup = np.genfromtxt(troup_csv_path, delimiter=",", names=True, dtype=None)
allVisit = fits.getdata(allVisit_path, 1)
with h5py.File(paths.troup_allVisit, 'w') as f:
for apogee_id in troup['APOGEE_ID'].astype(str):
idx = allVisit['APOGEE_ID'].astype(str) == apogee_id
f.create_dataset(apogee_id, data=allVisit[idx])
if __name__ == '__main__':
main()
| """
Create a subset of APOGEE's allVisit file that contains only the stars in Troup's sample.
"""
# Standard library
import os
# Third-party
from astropy.io import fits
import h5py
import numpy as np
# Project
from thejoker import Paths
paths = Paths(__file__)
def main():
allVisit_path = os.path.join(paths.root, "data", "allVisit-l30e.2.fits")
troup_csv_path = os.path.join(paths.root, "data", "troup16-dr12.csv")
output_path = os.path.join(paths.root, "data", "troup-allVisit.h5")
if not os.path.exists(allVisit_path):
download_cmd = ("wget https://data.sdss.org/sas/dr13/apogee/spectro/redux/r6/allVisit-l30e.2.fits -O {}"
.format(allVisit_path))
raise IOError("Path to main APOGEE DR13 allVisit file does not exist: {}\n"
"\t Download file with: {}"
.format(allVisit_path, download_cmd))
troup = np.genfromtxt(troup_csv_path, delimiter=",", names=True, dtype=None)
allVisit = fits.getdata(allVisit_path, 1)
with h5py.File(output_path, 'w') as f:
for apogee_id in troup['APOGEE_ID'].astype(str):
idx = allVisit['APOGEE_ID'].astype(str) == apogee_id
f.create_dataset(apogee_id, data=allVisit[idx])
if __name__ == '__main__':
main()
| mit | Python |
075bb05e57e63bb3e4f6cf70ce2c7db883d920e2 | correct curve radius formula | FlorianGraef/adv-lane-lines-vehicle-detection | lane_line.py | lane_line.py | import numpy as np
from collections import deque
class LaneLine():
def __init__(self):
# was the line detected in the last iteration?
self.detected = False
# x values of the last n fits of the line
self.recent_xfitted = deque([])
self.recent_yfitted = None
# average x values of the fitted line over the last n iterations
self.bestx = None
# polynomial coefficients averaged over the last n iterations
self.best_fit = None
# polynomial coefficients for the most recent fit
self.last_fits = deque()
# radius of curvature of the line in some units
self.radius_of_curvature = None
# distance in meters of vehicle center from the line
self.line_base_pos = None
# difference in fit coefficients between last and new fits
self.diffs = np.array([0, 0, 0], dtype='float')
# x values for detected line pixels
self.allx = None
# y values for detected line pixels
self.ally = None
self.ym_perpix = None
self.xm_perpix = None
# curvature of the lane
self.curv = None
def clear(self):
self.allx = None
self.ally = None
self.recent_yfitted = None
def calc_curv_rad(self, ym_perpix, xm_perpix):
curv_fit = np.polyfit(np.array(self.recent_yfitted)*ym_perpix, np.array(self.recent_xfitted[-1])*xm_perpix, 2)
y_eval = np.min(self.recent_yfitted)
curv_rad = ((1 + (2 * curv_fit[0] * y_eval * ym_perpix + curv_fit[1]) ** 2) ** 1.5) / np.absolute(
2 * curv_fit[0])
self.radius_of_curvature = curv_rad
return curv_rad
def calc_best_fit(self):
# average last 5 fits
last5poly = list(self.last_fits)[-5: ]
self.best_fit = np.average(last5poly, axis=0)
#self.best_fit = np.average( np.array([ i for i in last5poly]), axis=0)
self.bestx = self.best_fit[0] * self.recent_yfitted ** 2 + self.best_fit[1] * self.recent_yfitted + self.best_fit[2] | import numpy as np
from collections import deque
class LaneLine():
def __init__(self):
# was the line detected in the last iteration?
self.detected = False
# x values of the last n fits of the line
self.recent_xfitted = deque([])
self.recent_yfitted = None
# average x values of the fitted line over the last n iterations
self.bestx = None
# polynomial coefficients averaged over the last n iterations
self.best_fit = None
# polynomial coefficients for the most recent fit
self.last_fits = deque()
# radius of curvature of the line in some units
self.radius_of_curvature = None
# distance in meters of vehicle center from the line
self.line_base_pos = None
# difference in fit coefficients between last and new fits
self.diffs = np.array([0, 0, 0], dtype='float')
# x values for detected line pixels
self.allx = None
# y values for detected line pixels
self.ally = None
self.ym_perpix = None
self.xm_perpix = None
# curvature of the lane
self.curv = None
def clear(self):
self.allx = None
self.ally = None
self.recent_yfitted = None
def calc_curv_rad(self, ym_perpix, xm_perpix):
curv_fit = np.polyfit(np.array(self.recent_yfitted)*ym_perpix, np.array(self.recent_xfitted[-1])*xm_perpix, 2)
y_eval = np.min(self.recent_yfitted)
curv_rad = ((1+ (2 * curv_fit[0]*y_eval*ym_perpix + curv_fit[1])**2)/np.absolute(2*curv_fit[0]))
self.radius_of_curvature = curv_rad
return curv_rad
def calc_best_fit(self):
# average last 5 fits
last5poly = list(self.last_fits)[-5: ]
self.best_fit = np.average(last5poly, axis=0)
#self.best_fit = np.average( np.array([ i for i in last5poly]), axis=0)
self.bestx = self.best_fit[0] * self.recent_yfitted ** 2 + self.best_fit[1] * self.recent_yfitted + self.best_fit[2] | mit | Python |
0c47016840b9b9b0feaeab6076a48da0b41d3520 | Edit notifier.notify() | openstack/osprofiler,stackforge/osprofiler,openstack/osprofiler,stackforge/osprofiler,openstack/osprofiler,stackforge/osprofiler | osprofiler/notifier.py | osprofiler/notifier.py | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def noop_notifier(info):
"""Do nothing on notification."""
pass
# NOTE(boris-42): By default we are using noop notifier.
__notifier = noop_notifier
def notify(info):
"""Passes the profiling info to the notifier callable.
:param info: dictionary with profiling information
"""
__notifier(info)
def get_notifier():
"""Returns notifier callable."""
return __notifier
def set_notifier(notifier):
"""Service that are going to use profiler should set callable notifier.
Callable notifier is instance of callable object, that accept exactly
one argument "info". "info" - is dictionary of values that contains
profiling information.
"""
global __notifier
__notifier = notifier
| # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def noop_notifier(info):
"""Do nothing on notification."""
pass
# NOTE(boris-42): By default we are using noop notifier.
__notifier = noop_notifier
def notify(info):
global __notifier
__notifier(info)
def get_notifier():
"""Returns notifier callable."""
return __notifier
def set_notifier(notifier):
"""Service that are going to use profiler should set callable notifier.
Callable notifier is instance of callable object, that accept exactly
one argument "info". "info" - is dictionary of values that contains
profiling information.
"""
global __notifier
__notifier = notifier
| apache-2.0 | Python |
0aa22c1a24fb649a9186d36e44a5a7f1a53e66df | Add items() method to odict() as suggested by @streeto. Closes #49. | atpy/atpy | atpy/odict.py | atpy/odict.py | from __future__ import print_function, division
import numpy as np
class odict(object):
def __init__(self):
self.keys = []
self.values = []
def __setitem__(self, key, value):
if type(key) == int:
if key > len(self.keys) - 1:
raise Exception("Element %i does not exist" % key)
else:
self.values[key] = value
elif type(key) in [str, np.string_, unicode]:
if key in self.keys:
index = self.keys.index(key)
self.values[index] = value
else:
self.keys.append(key)
self.values.append(value)
else:
raise Exception("Wrong type for key: %s" % type(key))
def __getitem__(self, key):
if type(key) == int:
return self.values[key]
elif type(key) in [str, np.string_]:
index = self.keys.index(key)
return self.values[index]
else:
raise Exception("Wrong type for key: %s" % type(key))
def __repr__(self):
string = "{"
for i, key in enumerate(self.keys):
if i > 0:
string += ", "
string += "\n%s : %s" % (key, self.values[i])
string += "\n}"
return string
def __contains__(self, key):
return key in self.keys
def pop(self, key):
index = self.keys.index(key)
self.keys.pop(index)
self.values.pop(index)
def __len__(self):
return len(self.keys)
def rename(self, oldkey, newkey):
index = self.keys.index(oldkey)
self.keys[index] = newkey
return
def insert(self, position, key, value):
self.keys.insert(position, key)
self.values.insert(position, value)
return
def __iter__(self):
return iter(self.keys)
def items(self):
return zip(self.keys, self.values)
| from __future__ import print_function, division
import numpy as np
class odict(object):
def __init__(self):
self.keys = []
self.values = []
def __setitem__(self, key, value):
if type(key) == int:
if key > len(self.keys) - 1:
raise Exception("Element %i does not exist" % key)
else:
self.values[key] = value
elif type(key) in [str, np.string_, unicode]:
if key in self.keys:
index = self.keys.index(key)
self.values[index] = value
else:
self.keys.append(key)
self.values.append(value)
else:
raise Exception("Wrong type for key: %s" % type(key))
def __getitem__(self, key):
if type(key) == int:
return self.values[key]
elif type(key) in [str, np.string_]:
index = self.keys.index(key)
return self.values[index]
else:
raise Exception("Wrong type for key: %s" % type(key))
def __repr__(self):
string = "{"
for i, key in enumerate(self.keys):
if i > 0:
string += ", "
string += "\n%s : %s" % (key, self.values[i])
string += "\n}"
return string
def __contains__(self, key):
return key in self.keys
def pop(self, key):
index = self.keys.index(key)
self.keys.pop(index)
self.values.pop(index)
def __len__(self):
return len(self.keys)
def rename(self, oldkey, newkey):
index = self.keys.index(oldkey)
self.keys[index] = newkey
return
def insert(self, position, key, value):
self.keys.insert(position, key)
self.values.insert(position, value)
return
def __iter__(self):
return iter(self.keys)
| mit | Python |
7bfa9d24f7af811746bbb0336b5e75a592cff186 | Fix KeyError: 'version' due to 403 Forbidden error | jpdoria/aws_eis | aws_eis/lib/checks.py | aws_eis/lib/checks.py | import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
| import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
| mit | Python |
5e71f486a06165d499334e9616e5f29ec411940f | make all arguments optional for get_demos_path (#138) | mila-iqia/babyai | babyai/utils/demos.py | babyai/utils/demos.py | import os
import pickle
from .. import utils
def get_demos_path(demos=None, env=None, origin=None, valid=False):
valid_suff = '_valid' if valid else ''
demos_path = (demos + valid_suff
if demos
else env + "_" + origin + valid_suff) + '.pkl'
return os.path.join(utils.storage_dir(), 'demos', demos_path)
def load_demos(path, raise_not_found=True):
try:
return pickle.load(open(path, "rb"))
except FileNotFoundError:
if raise_not_found:
raise FileNotFoundError("No demos found at {}".format(path))
else:
return []
def save_demos(demos, path):
utils.create_folders_if_necessary(path)
pickle.dump(demos, open(path, "wb"))
def synthesize_demos(demos):
print('{} demonstrations saved'.format(len(demos)))
num_frames_per_episode = [len(demo[0]) for demo in demos]
if len(demos) > 0:
print('Demo num frames: {}'.format(num_frames_per_episode))
| import os
import pickle
from .. import utils
def get_demos_path(demos, env, origin, valid):
valid_suff = '_valid' if valid else ''
demos_path = (demos + valid_suff
if demos
else env + "_" + origin + valid_suff) + '.pkl'
return os.path.join(utils.storage_dir(), 'demos', demos_path)
def load_demos(path, raise_not_found=True):
try:
return pickle.load(open(path, "rb"))
except FileNotFoundError:
if raise_not_found:
raise FileNotFoundError("No demos found at {}".format(path))
else:
return []
def save_demos(demos, path):
utils.create_folders_if_necessary(path)
pickle.dump(demos, open(path, "wb"))
def synthesize_demos(demos):
print('{} demonstrations saved'.format(len(demos)))
num_frames_per_episode = [len(demo[0]) for demo in demos]
if len(demos) > 0:
print('Demo num frames: {}'.format(num_frames_per_episode))
| bsd-3-clause | Python |
cd006f8d3885005e867255e63819fc8a5c7430bf | Add getter for text widget | BrickText/BrickText | redactor/TextEditor.py | redactor/TextEditor.py | from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_widget(self):
return self.editor
def get_text_panel(self):
return self.text_panel
| from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_panel(self):
return self.text_panel
| mit | Python |
61108f794b53048aae3fc9840abf34a4150bb83a | Combine tests into one | CubicComet/exercism-python-solutions | leap/leap.py | leap/leap.py | def is_leap_year(y):
return (y % 4 == 0) and (y % 100 != 0 or y % 400 == 0)
| def is_leap_year(y):
if y % 400 == 0:
return True
elif y % 100 == 0:
return False
elif y % 4 == 0:
return True
else:
return False
| agpl-3.0 | Python |
7c02a79a5eb2dd6b9b49b2eefbdde1064a73de17 | Fix exception handling in TagField | RedPanal/redpanal,RedPanal/redpanal,RedPanal/redpanal | redpanal/core/forms.py | redpanal/core/forms.py | from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(str(e))
| from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(e.msg)
| agpl-3.0 | Python |
f2c0c1ab83dbc2344612e4cbf66d39acefa2fd4f | define encoding | jjangsangy/py-translate,jjangsangy/py-translate | translate/tests/test_translator.py | translate/tests/test_translator.py | # -*- coding: utf-8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
from nose.tools import *
from translate import translator
class TestTranslator(unittest.TestCase):
def typeassert(self):
instance = translator('en', 'en', str())
self.assertIsInstance(instance, dict)
self.assertIsInstance(instance['sentences'], list)
self.assertIsInstance(instance['sentences'][0], dict)
self.assertIsInstance(instance['sentences'][0]['trans'], str)
def test_love(self):
love = translator('en', 'zh-TW', 'I love you')
self.assertEqual(love['sentences'][0]['trans'], '我愛你')
if __name__ == '__main__':
unittest.main()
| try:
import unittest2 as unittest
except ImportError:
import unittest
from nose.tools import *
from translate import translator
class TestTranslator(unittest.TestCase):
def typeassert(self):
instance = translator('en', 'en', str())
self.assertIsInstance(instance, dict)
self.assertIsInstance(instance['sentences'], list)
self.assertIsInstance(instance['sentences'][0], dict)
self.assertIsInstance(instance['sentences'][0]['trans'], str)
def test_love(self):
love = translator('en', 'zh-TW', 'I love you')
self.assertEqual(love['sentences'][0]['trans'].decode('utf-8'), '我愛你')
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
ae11251f7669e4ddde6f0491ff1fe0afdfd54a7a | Change 'language' to 'syntax', that is more precise terminology. | SublimeLinter/SublimeLinter-jsl | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
syntax = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
language = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
| mit | Python |
ce5d02adc02b421ee2a16e988fa9fa122fc872a8 | Update run_init_temp.py | cornell-zhang/datuner,cornell-zhang/datuner,cornell-zhang/datuner,cornell-zhang/datuner,cornell-zhang/datuner,cornell-zhang/datuner | scripts/tests/run_init_temp.py | scripts/tests/run_init_temp.py | import os
import sys
pwd =os.getcwd()
tpath = "vtr_flow_holder"
design = "diffeq1"
wrksp = "workspace_holder"
proc_num = 1
datpath = sys.path[0]
srcFile = datpath+"/run_DATuner.py"
rep_cmd = "sed -e \"s:TOOL_PATH:"+tpath+":g\" -e \"s:DESIGN_NAME:"+design+":g\" -e \"s:WORKSPACE:"+wrksp+":g\" -e \"s:DATuner_PATH:"+datpath+":g\" -e \"s:PROC_NUM:"+str(proc_num)+":g\" "+srcFile+" > "+datpath+"/datuner"+"_new.py"
os.system(rep_cmd)
run_cmd = "python "+ pwd + "/releases/Linux_x86_64/scripts/datuner_new.py -f vtr -t 20 2 0 0.2197 -p 2 -b 1000"
print "call: python ./datuner_new.py -f vtr -t 20 2 0 0.2197 -p 2 -b 1000"
os.system(run_cmd)
| import os
import sys
pwd =os.getcwd()
tpath = vtr_flow_holder
design = diffeq1
wrksp = workspace_holder
proc_num = 1
datpath = sys.path[0]
srcFile = datpath+"/run_DATuner.py"
rep_cmd = "sed -e \"s:TOOL_PATH:"+tpath+":g\" -e \"s:DESIGN_NAME:"+design+":g\" -e \"s:WORKSPACE:"+wrksp+":g\" -e \"s:DATuner_PATH:"+datpath+":g\" -e \"s:PROC_NUM:"+str(proc_num)+":g\" "+srcFile+" > "+datpath+"/datuner"+"_new.py"
os.system(rep_cmd)
run_cmd = "python "+ pwd + "/releases/Linux_x86_64/scripts/datuner_new.py -f vtr -t 20 2 0 0.2197 -p 2 -b 1000"
print "call: python ./datuner_new.py -f vtr -t 20 2 0 0.2197 -p 2 -b 1000"
os.system(run_cmd)
| bsd-3-clause | Python |
4a298d076546df563239861689dba46606d4f6f0 | bump version: 0.2.8 | BenevolentAI/guacamol | guacamol/__init__.py | guacamol/__init__.py | __version__ = "0.2.8"
| __version__ = "0.2.7"
| mit | Python |
be5772a6d64a0ceeb1c5748364ca10f376d2fe51 | Add customizable SHOP_CHARGE_CURRENCY setting | BCGamer/cartridge-stripe,BCGamer/cartridge-stripe | cartridge_stripe/__init__.py | cartridge_stripe/__init__.py | import logging
import cartridge
import stripe
from django.utils.translation import ugettext as _
from django.conf import settings
logger = logging.getLogger(__name__)
CURRENCY = getattr(settings, 'SHOP_CHARGE_CURRENCY', 'usd')
class CheckoutError(Exception):
"""
Should be raised in billing/shipping and payment handlers for
cases such as an invalid shipping address or an unsuccessful
payment.
"""
pass
def billship_handler(request, order_form):
from mezzanine.conf import settings
from cartridge.shop.utils import set_shipping, sign
settings.use_editable()
set_shipping(request, _("Flat rate shipping"),
settings.SHOP_DEFAULT_SHIPPING_VALUE)
def payment_handler(request, order_form, order):
tok = order_form.cleaned_data['stripe_token']
total = order.total
try:
charge = stripe.Charge.create(amount=int(total) * 100,
currency=CURRENCY,
card=tok,
description=order)
return charge.id
except stripe.CardError as e:
raise cartridge.shop.checkout.CheckoutError(e)
def order_handler(request, order_form, order):
pass
| import logging
import cartridge
import stripe
from django.utils.translation import ugettext as _
logger = logging.getLogger(__name__)
class CheckoutError(Exception):
"""
Should be raised in billing/shipping and payment handlers for
cases such as an invalid shipping address or an unsuccessful
payment.
"""
pass
def billship_handler(request, order_form):
from mezzanine.conf import settings
from cartridge.shop.utils import set_shipping, sign
settings.use_editable()
set_shipping(request, _("Flat rate shipping"),
settings.SHOP_DEFAULT_SHIPPING_VALUE)
def payment_handler(request, order_form, order):
tok = order_form.cleaned_data['stripe_token']
total = order.total
try:
charge = stripe.Charge.create(amount=int(total) * 100,
currency="usd",
card=tok,
description=order)
return charge.id
except stripe.CardError as e:
raise cartridge.shop.checkout.CheckoutError(e)
def order_handler(request, order_form, order):
pass
| mit | Python |
8b889c10abf043f6612409973458e8a0f0ed952e | Initialize the log counter to 0xFF | japesinator/Bad-Crypto,japesinator/Bad-Crypto | bonus_level.py | bonus_level.py | #!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
log_counter = 0xFF
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
| #!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
| mit | Python |
64c8fd3fa18dd6644a67cbd9e9aa5f20eb5e85a7 | Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries. | EmreAtes/spack,krafczyk/spack,matthiasdiener/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,lgarren/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,krafczyk/spack,matthiasdiener/spack,skosukhin/spack,skosukhin/spack,mfherbst/spack,EmreAtes/spack,TheTimmy/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,tmerrick1/spack,TheTimmy/spack,lgarren/spack,LLNL/spack,iulian787/spack,krafczyk/spack,skosukhin/spack,LLNL/spack,TheTimmy/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack,skosukhin/spack,iulian787/spack,TheTimmy/spack,mfherbst/spack,lgarren/spack,skosukhin/spack,tmerrick1/spack,iulian787/spack,iulian787/spack | var/spack/packages/mrnet/package.py | var/spack/packages/mrnet/package.py | from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
variant('krelloptions', default=False, description="Also build the MRNet LW threadsafe libraries")
parallel = False
depends_on("boost")
def install(self, spec, prefix):
# Build the MRNet LW thread safe libraries when the krelloptions variant is present
if '+krelloptions' in spec:
configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
else:
configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
| from spack import *
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
parallel = False
depends_on("boost")
def install(self, spec, prefix):
configure("--prefix=%s" %prefix, "--enable-shared")
make()
make("install")
| lgpl-2.1 | Python |
1842cac64940a43a43399e6e942c59265b03c1a7 | Add `viewable` field to Concept preserialize template | chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano | serrano/resources/templates.py | serrano/resources/templates.py | Category = {
'fields': [':pk', 'name', 'order', 'parent_id'],
'allow_missing': True,
}
BriefField = {
'fields': [':pk', 'name', 'description'],
'allow_missing': True,
}
Field = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'app_name', 'model_name', 'field_name',
'modified', 'published', 'operators',
'simple_type', 'internal_type', 'data_modified', 'enumerable',
'searchable', 'unit', 'plural_unit', 'nullable'
],
'aliases': {
'plural_name': 'get_plural_name',
'plural_unit': 'get_plural_unit',
},
'allow_missing': True,
}
BriefConcept = {
'fields': [':pk', 'name', 'description'],
'allow_missing': True,
}
Concept = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'category_id', 'order', 'modified', 'published',
'formatter_name', 'queryable', 'sortable', 'viewable'
],
'aliases': {
'plural_name': 'get_plural_name',
},
'allow_missing': True,
}
ConceptField = {
'fields': ['alt_name', 'alt_plural_name'],
'aliases': {
'alt_name': 'name',
'alt_plural_name': 'get_plural_name',
},
'allow_missing': True,
}
Context = {
'exclude': ['user', 'session_key'],
'allow_missing': True,
}
View = {
'exclude': ['user', 'session_key'],
'allow_missing': True,
}
User = {
'fields': [':pk', 'name', 'username', 'email'],
'aliases': {
'name': 'get_full_name',
}
}
BriefQuery = {
'exclude': ['session_key', 'context_json', 'view_json'],
'allow_missing': True,
'related': {
'user': User,
}
}
ForkedQuery = {
'fields': [':pk', 'parent'],
'allow_missing': True,
}
Query = {
'fields': [':pk', 'accessed', 'name', 'description', 'user',
'shared_users', 'context_json', 'view_json'],
'related': {
'user': User,
'shared_users': User,
}
}
Revision = {
'exclude': ['user', 'session_key', 'data'],
'allow_missing': True,
}
| Category = {
'fields': [':pk', 'name', 'order', 'parent_id'],
'allow_missing': True,
}
BriefField = {
'fields': [':pk', 'name', 'description'],
'allow_missing': True,
}
Field = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'app_name', 'model_name', 'field_name',
'modified', 'published', 'operators',
'simple_type', 'internal_type', 'data_modified', 'enumerable',
'searchable', 'unit', 'plural_unit', 'nullable'
],
'aliases': {
'plural_name': 'get_plural_name',
'plural_unit': 'get_plural_unit',
},
'allow_missing': True,
}
BriefConcept = {
'fields': [':pk', 'name', 'description'],
'allow_missing': True,
}
Concept = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'category_id', 'order', 'modified', 'published',
'formatter_name', 'queryable', 'sortable'
],
'aliases': {
'plural_name': 'get_plural_name',
},
'allow_missing': True,
}
ConceptField = {
'fields': ['alt_name', 'alt_plural_name'],
'aliases': {
'alt_name': 'name',
'alt_plural_name': 'get_plural_name',
},
'allow_missing': True,
}
Context = {
'exclude': ['user', 'session_key'],
'allow_missing': True,
}
View = {
'exclude': ['user', 'session_key'],
'allow_missing': True,
}
User = {
'fields': [':pk', 'name', 'username', 'email'],
'aliases': {
'name': 'get_full_name',
}
}
BriefQuery = {
'exclude': ['session_key', 'context_json', 'view_json'],
'allow_missing': True,
'related': {
'user': User,
}
}
ForkedQuery = {
'fields': [':pk', 'parent'],
'allow_missing': True,
}
Query = {
'fields': [':pk', 'accessed', 'name', 'description', 'user',
'shared_users', 'context_json', 'view_json'],
'related': {
'user': User,
'shared_users': User,
}
}
Revision = {
'exclude': ['user', 'session_key', 'data'],
'allow_missing': True,
}
| bsd-2-clause | Python |
bbd0c68669ffa0fd2d01ac8f86302673ed7b710e | Add default configuration for mongo | Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,mmalter/dlstats,mmalter/dlstats,MichelJuillard/dlstats,Widukind/dlstats,MichelJuillard/dlstats | dlstats/configuration.py | dlstats/configuration.py | import configobj
import validate
import os
def _get_filename():
"""Return the configuration file path."""
appname = 'dlstats'
if os.name == 'posix':
if "HOME" in os.environ:
if os.path.isfile(os.environ["HOME"]+'/.'+appname+'/main.conf'):
return os.environ["HOME"]+'/.'+appname+'main.conf'
if os.path.isfile('/etc/'+appname+'/main.conf'):
return '/etc/'+appname+'/main.conf'
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
return ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname+'/main.conf'))
elif os.name == 'nt':
return ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname+'/main.conf'))
else:
raise UnsupportedOSError(os.name)
_configspec = """
[General]
logging_directory = string()
socket_directory = string()
[MongoDB]
host = ip_addr()
port = integer()
max_pool_size = integer()
socketTimeoutMS = integer()
connectTimeoutMS = integer()
waitQueueTimeout = integer()
waitQueueMultiple = integer()
auto_start_request = boolean()
use_greenlets = boolean()
[ElasticSearch]
host = integer()
port = integer()
[Fetchers]
[[Eurostat]]
url_table_of_contents = string()"""
try:
configuration_filename = _get_filename()
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
validator = validate.Validator()
configuration.validate(validator)
except FileNotFoundError:
configuration = configobj.ConfigObj()
configuration['General'] = {'logging_directory': os.environ["HOME"], 'socket_directory': os.environ["HOME"]}
configuration['Fetchers'] = {'Eurostat':{'url_table_of_contents':'http://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?sort=1&file=table_of_contents.xml'}}
configuration['MongoDB'] = {'host':'127.0.0.1', 'port':9200}
configuration = configuration.dict()
| import configobj
import validate
import os
def _get_filename():
"""Return the configuration file path."""
appname = 'dlstats'
if os.name == 'posix':
if "HOME" in os.environ:
if os.path.isfile(os.environ["HOME"]+'/.'+appname+'/main.conf'):
return os.environ["HOME"]+'/.'+appname+'main.conf'
if os.path.isfile('/etc/'+appname+'/main.conf'):
return '/etc/'+appname+'/main.conf'
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
return ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname+'/main.conf'))
elif os.name == 'nt':
return ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname+'/main.conf'))
else:
raise UnsupportedOSError(os.name)
_configspec = """
[General]
logging_directory = string()
socket_directory = string()
[MongoDB]
host = ip_addr()
port = integer()
max_pool_size = integer()
socketTimeoutMS = integer()
connectTimeoutMS = integer()
waitQueueTimeout = integer()
waitQueueMultiple = integer()
auto_start_request = boolean()
use_greenlets = boolean()
[ElasticSearch]
host = integer()
port = integer()
[Fetchers]
[[Eurostat]]
url_table_of_contents = string()"""
try:
configuration_filename = _get_filename()
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
validator = validate.Validator()
configuration.validate(validator)
except FileNotFoundError:
configuration = configobj.ConfigObj()
configuration['General'] = {'logging_directory': os.environ["HOME"], 'socket_directory': os.environ["HOME"]}
configuration['Fetchers'] = {'Eurostat':{'url_table_of_contents':'http://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?sort=1&file=table_of_contents.xml'}}
configuration = configuration.dict()
| agpl-3.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.