commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
5e67c16d06786e5ed5e74e40a2c29131ec011748 | rename app to doc | qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/domainsync/management/commands/copy_doc.py | corehq/apps/domainsync/management/commands/copy_doc.py | from couchdbkit import Database
from dimagi.utils.couch.database import get_db
from django.core.management.base import LabelCommand, CommandError
from corehq.apps.domainsync.config import DocumentTransform, save
class Command(LabelCommand):
    """Management command: copy a single couch document from a source
    CouchDB database into the default local database, optionally
    rewriting its ``domain`` field on the way.
    """
    help = "Copy any couch doc"
    args = '<sourcedb> <doc_id> (<domain>)'
    label = ""

    def handle(self, *args, **options):
        # Exactly two or three positional arguments are accepted.
        if len(args) < 2 or len(args) > 3:
            raise CommandError('Usage is copy_doc %s' % self.args)
        sourcedb = Database(args[0])  # URL of the CouchDB to copy from
        doc_id = args[1]
        # Optional third argument overrides the document's domain field.
        domain = args[2] if len(args) == 3 else None
        doc_json = sourcedb.get(doc_id)
        if domain:
            doc_json['domain'] = domain
        # DocumentTransform/save come from domainsync.config; presumably
        # they also carry attachments across -- confirm in that module.
        dt = DocumentTransform(doc_json, sourcedb)
        save(dt, get_db())
| from couchdbkit import Database
from dimagi.utils.couch.database import get_db
from django.core.management.base import LabelCommand, CommandError
from corehq.apps.domainsync.config import DocumentTransform, save
class Command(LabelCommand):
    """Management command: copy a single couch document between couch
    databases (pre-rename version: locals still use the app_* naming
    even though any doc id is accepted -- see the commit subject).
    """
    help = "Copy any couch doc"
    args = '<sourcedb> <doc_id> (<domain>)'
    label = ""

    def handle(self, *args, **options):
        # Exactly two or three positional arguments are accepted.
        if len(args) < 2 or len(args) > 3:
            raise CommandError('Usage is copy_doc %s' % self.args)
        sourcedb = Database(args[0])  # URL of the CouchDB to copy from
        app_id = args[1]
        # Optional third argument overrides the document's domain field.
        domain = args[2] if len(args) == 3 else None
        app_json = sourcedb.get(app_id)
        if domain:
            app_json['domain'] = domain
        dt = DocumentTransform(app_json, sourcedb)
        save(dt, get_db())
| bsd-3-clause | Python |
bd00e5ae48c81ee96d843675d76520f9e8bcab4c | Add COAP ping script | thejdeep/CoAPthon,Tanganelli/CoAPthon,thejdeep/CoAPthon,Cereal84/CoAPthon,mcfreis/CoAPthon,Gnomjolnir/CoAPthon,Gnomjolnir/CoAPthon,Tanganelli/CoAPthon | coapping.py | coapping.py | #!/usr/bin/env python2
# COAP ping implementation
# 0x4000 0001 <--> 0x7000 0001
# 0x4000 0002 <--> 0x7000 0002
# 0x4000 0003 <--> 0x7000 0003
import socket
import struct
import sys
from time import sleep, time
from optparse import OptionParser
# Parse Options
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("-n", "--hostname",
action="append",
dest="host_name",
help="Define COAP host name")
parser.add_option("-p", "--port",
action="append",
dest="host_port",
default=5683,
help="Define COAP host port (default: 5683)")
parser.add_option("-l", "--loops",
type="int",
dest="ping_loops",
default=0,
help="Number of ping loops (default: 0 - forever)")
parser.add_option("-t", "--sleep",
type="float",
dest="sleep_sec",
default=1,
help="Time in seconds between two pings (default: 1 sec)")
(options, args) = parser.parse_args()
# COAP ping parameters setup
host = options.host_name[0]
port = options.host_port
sleep_sec = options.sleep_sec
ping_loops = options.ping_loops
ping_no = 1 # ping payload counter
ping_cnt = 0 # global ping cnt
print 'COAP ping script'
print 'COAP ping to: %s:%s...' % (host, port)
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except socket.error:
print 'Error: Failed to create socket'
sys.exit()
while(1):
loop_time = time()
msg = '' #[0x40, 0x00, 0x00, 0x00]
msg += struct.pack("B", 0x40)
msg += struct.pack("B", 0x00)
msg += struct.pack("B", 0x00)
msg += struct.pack("B", ping_no)
try :
print '[0x%08X] Send ping:' % (ping_cnt), [hex(ord(c)) for c in msg]
#Set the whole string
s.sendto(msg, (host, port))
s.settimeout(2 + sleep_sec)
# receive data from client (data, addr)
d = s.recvfrom(4)
reply = d[0]
addr = d[1]
# We need to check if ping peyload counter is the same in reply
status = bytes(msg)[3] == bytes(reply)[3]
print '[0x%08X] Recv ping:' % (ping_cnt), [hex(ord(c)) for c in reply], 'ok' if status else 'fail'
except socket.error as e:
print 'Error: socket.error: ', str(e)
#print 'Error Code : ' + str(msg[0]) + ' Message ' + msg[1]
sleep(3) # Waiting to recover ;)
except socket.timeout:
print("Error: closing socket")
s.close()
if ping_no >= 0xFF:
ping_no = 1
else:
ping_no += 1
sleep(sleep_sec - (time() - loop_time))
print 'In %.2f sec' % (time() - loop_time)
if ping_loops > 0:
if ping_loops == 1:
break
ping_loops -= 1
ping_cnt += 1
| mit | Python | |
e56a0ca2d788bc3b865f6d18ad42e5feadb47566 | 添加新的数据格式,增加uuid实现 | SilverBlogTeam/SilverBlog,SilverBlogTeam/SilverBlog | upgrade/upgrade_from_3.py | upgrade/upgrade_from_3.py | import hashlib
import json
import shutil
import uuid
from common import file
def add_id(list_item):
    """Attach a freshly generated UUID4 (as a string) under the "uuid"
    key of *list_item* and return the same, mutated mapping."""
    fresh_id = uuid.uuid4()
    list_item["uuid"] = str(fresh_id)
    return list_item
def main():
    """One-shot migration: give every page entry a UUID and move the API
    password hash out of system.json into a new control.json.

    Every file that is rewritten is first copied to a ``.bak`` backup,
    so the order of operations below matters.
    """
    shutil.copyfile("./config/page.json", "./config/page.json.bak")
    page_list = json.loads(file.read_file("./config/page.json"))
    # Stamp each page entry with a new random identifier.
    page_list = list(map(add_id, page_list))
    file.write_file("./config/page.json", file.json_format_dump(page_list))
    system_config = json.loads(file.read_file("./config/system.json"))
    control_config = dict()
    try:
        # API_Password holds a JSON blob; only hash_password is migrated.
        old_password_hash = json.loads(system_config["API_Password"])["hash_password"]
    except (ValueError, KeyError, TypeError):
        # Nothing to migrate (field missing or malformed): leave the
        # system/control files untouched.
        return
    # Re-hash with the fixed "SiLvErBlOg" suffix acting as a static salt.
    control_config["password"] = hashlib.sha512(str(old_password_hash + "SiLvErBlOg").encode('utf-8')).hexdigest()
    del system_config["API_Password"]
    shutil.copyfile("./config/system.json", "./config/system.json.bak")
    file.write_file("./config/system.json", file.json_format_dump(system_config))
    shutil.copyfile("./config/control.json", "./config/control.json.bak")
    file.write_file("./config/control.json", file.json_format_dump(control_config))
| bsd-3-clause | Python | |
91b8239d858d60bbcd70e17870648a87b2d6da02 | add wip installer | codyopel/dotfiles,codyopel/dotfiles,codyopel/dotfiles | local/bin/dotfiles.py | local/bin/dotfiles.py | #!/usr/bin/env python3
import os
import sys
def exec_hook(hook):
    """Execute the Python hook file at path *hook* in this module's
    globals/locals.

    Bug fix: compile() previously referenced an undefined name
    ``config_file`` as the filename argument, raising NameError on every
    call; it now uses the *hook* path itself.
    """
    with open(hook) as f:
        exec(compile(f.read(), hook, 'exec'), globals(), locals())
#def generate_hook(dotfile):
def install_hook(dotfile, dotfilesdir):
    """Symlink *dotfile* (a path relative to *dotfilesdir*) into the
    user's home directory as a leading-dot file, replacing any existing
    file at that location and creating parent directories as needed.
    """
    # Fix relpath output
    # NOTE(review): basename drops any subdirectory part of a './a/b'
    # path, so such a file would be linked as ~/.b -- confirm intended.
    if dotfile.startswith('./'):
        dotfile = os.path.basename(dotfile)
    installloc = os.path.join(os.getenv('HOME'), '.' + dotfile)
    if not os.path.exists(installloc):
        print('Installing {} to {}'.format(dotfile, installloc))
    else:
        print('Updating {}'.format(dotfile))
    # Remove any pre-existing file/symlink before re-linking.
    if os.path.exists(installloc):
        os.remove(installloc)
    installdir = os.path.dirname(installloc)
    if not os.path.exists(installdir):
        os.makedirs(installdir)
    os.symlink(os.path.join(dotfilesdir, dotfile), installloc)
    #print('Installing {} to {}'.format(os.path.join(dotfilesdir, dotfile), installloc))
def main():
    """Install every dotfile found under the directory named by argv[1].

    Hidden files/dirs, hook scripts (*.install, *.install-pre, ...) and
    names listed in the directory's .kratosignore file are skipped.
    """
    # TODO: Parse commandline/read config files
    dotfilesdir = sys.argv[1]
    if not os.path.exists(dotfilesdir):
        # Bug fix: raising a plain string is a TypeError in Python 3.
        raise ValueError("Invalid directory")
    # Build ignore list.
    # Bug fix: ignore_list was only bound when .kratosignore existed,
    # which made the filter below crash with NameError otherwise.
    ignore_list = []
    dotfilesignorefile = os.path.join(dotfilesdir, '.kratosignore')
    if os.path.exists(dotfilesignorefile):
        with open(dotfilesignorefile) as f:
            ignore_list = [x.strip() for x in f.readlines()]
    hookexts = (
        'install-pre',
        'install',
        'install-post',
        'generate-pre',
        'generate-post',
        'uninstall-pre',
        'uninstall',
        'uninstall-post'
    )
    # Find dotfiles
    dotfiles = []
    for root, dirs, files in os.walk(dotfilesdir):
        # Exclude hidden files and directories
        dirs[:] = [d for d in dirs if not d.startswith('.')]
        files[:] = [f for f in files if not f.startswith('.')]
        # Exclude hooks (str.endswith accepts a tuple of suffixes)
        files[:] = [f for f in files if not f.endswith(hookexts)]
        # Ignore list
        files[:] = [f for f in files if f not in ignore_list]
        for file in files:
            dotfiles.append(os.path.join(os.path.relpath(root, dotfilesdir), file))
    # Install dotfiles
    # NOTE(review): the hook-existence checks below use paths relative to
    # the current working directory, not dotfilesdir -- confirm intended.
    for dotfile in dotfiles:
        # PRE-Install
        if os.path.exists(dotfile + '.install-pre'):
            print(os.path.exists(dotfile + '.install-pre'))
        # Install
        if dotfile.endswith('.generate'):
            print(dotfile)
        else:
            if os.path.exists(dotfile + '.install'):
                print(os.path.exists(dotfile + '.install'))
            else:
                install_hook(dotfile, dotfilesdir)
        # POST-Install
        if os.path.exists(dotfile + '.install-post'):
            print(os.path.exists(dotfile + '.install-post'))


if __name__ == "__main__":
    main()
| bsd-3-clause | Python | |
83041a8b132ce61910fdd0b6d9c24d020e857a04 | add test for compute_disparity_map timeout | carlodef/s2p,carlodef/s2p,mnhrdt/s2p,mnhrdt/s2p | tests/block_matching_test.py | tests/block_matching_test.py | import os
import pytest
import s2p
from tests_utils import data_path
def test_compute_disparity_map_timeout(timeout=1):
    """
    Run a long call to compute_disparity_map to check that the timeout kills it.
    """
    # Matching an image against itself over a wide disparity range is
    # presumably slow enough that the 1 s timeout always fires first --
    # that premise is what makes this test deterministic.
    img = data_path(os.path.join("input_pair", "img_01.tif"))
    disp = data_path(os.path.join("testoutput", "d.tif"))
    mask = data_path(os.path.join("testoutput", "m.tif"))
    # The killed subprocess must surface as s2p.common.RunFailure.
    with pytest.raises(s2p.common.RunFailure):
        s2p.block_matching.compute_disparity_map(img, img, disp, mask,
                                                 "mgm_multi", -100, 100,
                                                 timeout)
| agpl-3.0 | Python | |
e8b09ed22bfe19c355b3dc315f0e831ac43f0c0d | Update code.py | mapto/sprks,mapto/sprks,mapto/sprks,mapto/sprks | code.py | code.py | import web
import json
# web.py URL map: every request to / is handled by the `index` class.
urls = (
    '/', 'index'
)


class index:
    """Handler for GET /: dumps the first pw_policy row as JSON."""

    def GET(self):
        # NOTE(review): database credentials are hard-coded here; move
        # them to configuration/environment before deploying.
        db = web.database(dbn='mysql', user='user', pw='password', db='test')
        table = db.select('pw_policy')
        # Assumes the query returns at least one row -- confirm; an empty
        # table would raise here.
        return json.dumps(table[0]) + " hello world2"


if __name__ == "__main__":
    app = web.application(urls, globals())
    app.run()
| mit | Python | |
e8bcdebaa9af0affa152a91ad489447d1cc4ba8f | Create main.py | AgneethMazumdar/powerpoint-maker | main.py | main.py | # Released under MIT License
# Created By Agneeth Mazumdar
from PIL import Image
from pptx import Presentation
from pptx.util import Inches
import urllib
import os
import csv
# Single module-level presentation that make_slides() appends to.
prs = Presentation()


def read_csv():
    """Read (name, image-url) pairs from the hard-coded CSV file.

    Returns the names list, the urls list and a name->url dict built by
    zipping the two (a duplicate name would overwrite its earlier entry).
    """
    names = []
    urls = []
    # NOTE(review): opening in 'rb' for csv.reader is the Python 2 idiom;
    # on Python 3 this should be 'r' with newline='' -- confirm target.
    with open('your_csv_file_here.csv', 'rb') as names_images_data:
        csv_names_images_data = csv.reader(names_images_data)
        for row in csv_names_images_data:
            names.append(row[0])
            urls.append(row[1])
    names_plus_images = dict(zip(names, urls))
    return names, urls, names_plus_images
def add_jpg_to_image_names(names_list):
    """Normalise every entry of *names_list* in place: remove all spaces
    and append a ".jpg" extension.

    The same list object is returned, so callers that kept a reference
    to the argument observe the new values as well (main relies on this).
    """
    for position, name in enumerate(names_list):
        names_list[position] = name.replace(' ', '') + '.jpg'
    return names_list
def download_images(names_list, urls_list):
    """Download every URL to the matching local file name and shrink the
    image to at most 400x380 in place.  Returns the names list unchanged.
    """
    names = names_list
    urls = urls_list
    for index, val in enumerate(names):
        urllib.urlretrieve(urls[index], names[index])
        im = Image.open(names[index])
        # NOTE(review): Image.thumbnail() resizes in place and returns
        # None, so `out` is always None and is never used.
        out = im.thumbnail((400, 380), Image.ANTIALIAS)
        im.save(names[index], "JPEG")
    return names
def original_names(dictionary):
    """Return the keys of *dictionary* as a list, preserving its
    iteration order."""
    return [key for key in dictionary]
def make_slides(names_list, original_names, counter):
    """Append one title-only slide (layout 5) to the module-level
    presentation and save the deck.

    *counter* indexes both names_list (image file names on disk) and
    original_names (human-readable titles), which must be parallel lists.
    """
    names = names_list
    old_names = original_names
    index = counter
    title_only_slide_layout = prs.slide_layouts[5]
    slide = prs.slides.add_slide(title_only_slide_layout)
    shapes = slide.shapes
    shapes.title.text = old_names[index]
    image_name = names[index]
    # Fixed placement; the image was already thumbnailed to fit.
    left = Inches(2.5)
    top = Inches(1.7)
    pic = slide.shapes.add_picture(image_name, left, top)
    # Saved after every slide -- presumably so partial progress survives
    # a failure on a later image; confirm before moving this to main().
    prs.save('insert_desired_name_here.pptx')
def main():
    """Drive the pipeline: read the CSV, rename + download the images,
    then emit one slide per entry.

    NOTE(review): add_jpg_to_image_names mutates `names` in place, which
    is why the CSV is read a second time to recover the original display
    names -- confirm before simplifying.
    """
    names, urls, names_plus_images = read_csv()
    new_names = add_jpg_to_image_names(names)
    download_images(new_names, urls)
    # `old_names` is computed but the re-read `retrieved_names` is what
    # actually feeds the slide titles below.
    old_names = original_names(names_plus_images)
    retrieved_names, urls, names_plus_images = read_csv()
    for index, val in enumerate(names):
        make_slides(names, retrieved_names, index)
| mit | Python | |
e2955477f8d3dde879ecdd8f8f75f438a8905661 | Add dots.py | joseph346/dots,jaredmichaelsmith/dots-1,nullx002/dots | dots.py | dots.py | import sys
# 3-bit opcode table for the brainfuck-style instruction set decoded by
# compile_file() and dispatched in run().
RIGHT = 0
# Bug fix: LEFT was defined as 0, colliding with RIGHT, so the
# pointer-left instruction could never execute (opcode 1 fell through to
# the interpreter's no-op branch).
LEFT = 1
INC = 2
DEC = 3
LOOP_START = 4
LOOP_END = 5
GETC = 6
PUTC = 7
def compile_file(source_file):
    """Read a dots source file and decode it into a list of 3-bit opcodes.

    The program is encoded purely as the NUMBER of '.' characters in the
    file: that count, in binary, is a leading sentinel 1 bit followed by
    one 3-bit group per instruction.  Exits the process on I/O errors or
    an invalid dot count.
    """
    program_n = 0
    try:
        with open(source_file, "r") as f:
            eof = False
            while not eof:
                c = f.read(1)
                if len(c) == 0:
                    eof = True
                elif c == '.':
                    program_n += 1
    except IOError:
        print "Error: could not open file."
        sys.exit(1)
    # bin() yields '0b1xxx...'; the [3:] slice strips both the '0b'
    # prefix and the sentinel bit.
    binary = bin(program_n)[3:]
    if len(binary) % 3 != 0:
        print "Error: either you have too many or two few dots."
        sys.exit(1)
    program = []
    # Consume the bit string three bits (one opcode) at a time.
    for i in xrange(0, len(binary), 3):
        b = binary[i:i+3]
        inst = int(b, 2)
        program.append(inst)
    return program
def match_loop(text, start):
    """Return the index of the bracket matching text[start].

    *text* is the opcode list; *start* must point at a LOOP_START (scan
    forward for its LOOP_END) or a LOOP_END (scan backward).  Returns -1
    when no matching bracket exists; raises ValueError if start does not
    point at a loop opcode at all.
    """
    if text[start] == LOOP_START:
        stop = len(text)
        dir = 1
        search = LOOP_END
    elif text[start] == LOOP_END:
        stop = -1
        dir = -1
        search = LOOP_START
    else:
        # Bug fix: a `return -1` used to follow this raise; it was
        # unreachable dead code and has been removed.
        raise ValueError('Incorrect number of dots.')
    # Track nesting depth; the match is the first `search` opcode seen
    # at depth zero.
    depth = 0
    for i in range(start, stop, dir):
        c = text[i]
        if c == LOOP_START:
            depth += 1
        elif c == LOOP_END:
            depth -= 1
        if c == search and depth == 0:
            return i
    return -1
def run(program):
    """Interpret a decoded opcode list on a 30000-cell byte tape.

    Cell values wrap mod 256 and the data pointer wraps around the tape;
    I/O goes through stdout/stdin one character at a time.  Aborts the
    loop (with a message) on an unmatched loop bracket.
    """
    pc = 0
    mem = [0 for i in xrange(30000)]
    pointer = 0
    while pc < len(program):
        c = program[pc]
        old_pc = pc  # NOTE(review): never read afterwards -- dead store
        if c == RIGHT:
            pointer += 1
            pointer %= len(mem)
        elif c == LEFT:
            # Reachable only if LEFT is distinct from RIGHT in the opcode
            # table -- verify the constant definitions.
            pointer -= 1
            pointer %= len(mem)
        elif c == INC:
            mem[pointer] += 1
            mem[pointer] %= 256
        elif c == DEC:
            mem[pointer] -= 1
            mem[pointer] %= 256
        elif c == PUTC:
            sys.stdout.write(chr(mem[pointer]))
        elif c == GETC:
            mem[pointer] = ord(sys.stdin.read(1))
        elif c == LOOP_START:
            # Jump forward past the matching LOOP_END when the cell is 0.
            if mem[pointer] == 0:
                pc = match_loop(program, pc)
                if pc < 0:
                    print "Error: either you have too many or two few dots."
                    break
        elif c == LOOP_END:
            # Jump back to the matching LOOP_START when the cell is non-0.
            if mem[pointer] != 0:
                pc = match_loop(program, pc)
                if pc < 0:
                    print "Error: either you have too many or two few dots."
                    break
        else:
            # Unknown opcode: treated as a no-op.
            pass
        pc += 1
def main():
    """CLI entry point: compile the file named by argv[1] and run it."""
    if len(sys.argv) <= 1:
        print "usage: %s sourcefile" % sys.argv[0]
        sys.exit(1)
    source_file = sys.argv[1]
    program = compile_file(source_file)
    run(program)


if __name__ == "__main__":
    main()
| unlicense | Python | |
72f43fc9c8aecc9fd8f240cfc37500cad4bc7858 | Test for math.assert_close() | tum-pbs/PhiFlow,tum-pbs/PhiFlow | tests/commit/math/test__functions.py | tests/commit/math/test__functions.py | from unittest import TestCase
from phi import math
def assert_not_close(*tensors, rel_tolerance, abs_tolerance):
    """Assert that math.assert_close fails for *tensors* under the given
    tolerances.

    Bug fix: the tolerances were previously forwarded positionally after
    *tensors (``math.assert_close(*tensors, rel_tolerance,
    abs_tolerance)``), so phi treated them as two extra scalar tensors
    to compare and silently used its default tolerances.  They are now
    passed as keyword arguments, matching how the tests call
    assert_close directly.
    """
    try:
        math.assert_close(*tensors,
                          rel_tolerance=rel_tolerance,
                          abs_tolerance=abs_tolerance)
        # assert_close unexpectedly succeeded: raise something the
        # except clause below will not swallow.
        raise BaseException(AssertionError('1 != 0'))
    except AssertionError:
        pass
class TestMathFunctions(TestCase):
    """Unit tests for phi.math tensor comparison helpers."""

    def test_assert_close(self):
        # Identical zero tensors compare equal under zero tolerance, even
        # across differently named dimensions.
        math.assert_close(math.zeros(a=10), math.zeros(a=10), math.zeros(b=10), rel_tolerance=0, abs_tolerance=0)
        assert_not_close(math.zeros(a=10), math.ones(a=10), rel_tolerance=0, abs_tolerance=0)
        # A constant offset is absorbed by either tolerance alone ...
        for scale in (1, 0.1, 10):
            math.assert_close(math.zeros(a=10), math.ones(a=10) * scale, rel_tolerance=0, abs_tolerance=scale)
            math.assert_close(math.zeros(a=10), math.ones(a=10) * scale, rel_tolerance=1, abs_tolerance=0)
            # ... but not by a slightly-too-small one.
            assert_not_close(math.zeros(a=10), math.ones(a=10) * scale, rel_tolerance=0.9, abs_tolerance=0)
            assert_not_close(math.zeros(a=10), math.ones(a=10) * scale, rel_tolerance=0, abs_tolerance=0.9 * scale)
        # NOTE(review): set_precision mutates global state and is not
        # restored afterwards -- may leak into later tests.
        math.set_precision(64)
        assert_not_close(math.zeros(a=10), math.ones(a=10) * 1e-100, rel_tolerance=0, abs_tolerance=0)
        math.assert_close(math.zeros(a=10), math.ones(a=10) * 1e-100, rel_tolerance=0, abs_tolerance=1e-15)
| mit | Python | |
27f187d3cc5725b6ed912e15ecafb38a44cc4992 | Add unit tests for new service util | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/unit/utils/test_win_service.py | tests/unit/utils/test_win_service.py | # Import Python Libs
import os
# Import Salt Libs
import salt.utils.platform
# Import Salt Testing Libs
from tests.support.mock import patch, MagicMock
from tests.support.unit import TestCase, skipIf
try:
import salt.utils.win_service as win_service
from salt.exceptions import CommandExecutionError
except Exception as exc: # pylint: disable=broad-except
win_service = exc
# Import 3rd Party Libs
try:
import pywintypes
import win32service
WINAPI = True
except ImportError:
WINAPI = False
class WinServiceImportTestCase(TestCase):
    """Guards against platform-dependent import failures: win_service is
    bound to the raised exception when the import fails (see the module
    top), and this test surfaces that."""

    def test_import(self):
        """
        Simply importing should not raise an error, especially on Linux
        """
        if isinstance(win_service, Exception):
            # Bug fix: the failure message previously named "win_system"
            # instead of the module actually being imported.
            raise Exception(
                "Importing win_service caused traceback: {0}".format(win_service)
            )
@skipIf(not salt.utils.platform.is_windows(), "Only test on Windows systems")
@skipIf(not WINAPI, "Missing PyWin32 libraries")
class WinServiceTestCase(TestCase):
    """
    Test cases for salt.utils.win_service
    """

    def test_info(self):
        """
        Test service.info
        """
        # Get info about the spooler service -- presumably present on
        # every supported Windows installation (confirm for stripped-down
        # images).
        info = win_service.info("spooler")

        # Make sure it returns these fields
        field_names = [
            "BinaryPath",
            "ControlsAccepted",
            "Dependencies",
            "Description",
            "DisplayName",
            "ErrorControl",
            "LoadOrderGroup",
            "ServiceAccount",
            "ServiceType",
            "StartType",
            "StartTypeDelayed",
            "Status",
            "Status_CheckPoint",
            "Status_ExitCode",
            "Status_ServiceCode",
            "Status_WaitHint",
            "TagID",
            "sid"
        ]
        for field_name in field_names:
            self.assertIn(field_name, info)

        # Make sure it returns a valid Display Name
        self.assertEqual(info["DisplayName"], "Print Spooler")
| apache-2.0 | Python | |
60b9041f76a88dddaf627458d98a357974a6a302 | Add __init__.py | xuwenyihust/Shutterfly-Customer-Lifetime-Value,xuwenyihust/Shutterfly-Customer-Lifetime-Value | __init__.py | __init__.py | mit | Python | ||
482218b20ea6281c49be7edd66370c778b301c7f | Create __init__.py | majikpig/ubtech | __init__.py | __init__.py | mit | Python | ||
fa5ffd2f2f51607703912209b5876cb8f951df88 | Add simple testing driver | dramborleg/text-poker | test.py | test.py | import parser
# Smoke-test driver: feed a few command strings through the project's
# Parser and print whatever it returns for manual inspection.
p = parser.Parser()

# NOTE: `input` shadows the builtin input(); harmless in this throwaway
# script but avoid copying the name elsewhere.
input = ['-c mango', '--create mango', 'c mango',
         '--create kiwi c guava lemon', '--create']
for i in input:
    ret = p.parse(i)
    print('--------')
    print(i)
    print(ret)
    print('--------')
| bsd-2-clause | Python | |
775edf9fec8bbe32dceef3efc1e1cffc642ae61c | Create __init__.py | piomonti/pySINGLE | __init__.py | __init__.py | __all__ = ["SINGLE"]
import SINGLE
#from SINGLE import SINGLE
#from choose_h import *
#from fitSINGLE import *
| mit | Python | |
9065b9f5baedfc1895c612a7995f15878144d3e7 | Create test.py | crap0101/fup,crap0101/fup,crap0101/fup | test.py | test.py | #coding: utf-8
import operator
import re
import sys
import time
import urlparse
import fhp.api.five_hundred_px as _fh
import fhp.helpers.authentication as _a
from fhp.models.user import User
# Extracts the six numeric components of timestamps shaped like
# "YYYY-MM-DD ... HH:MM:SS".
_TREG = re.compile('^(\d+)-(\d+)-(\d+).*?(\d+):(\d+):(\d+).*')
_URL = 'http://500px.com/'
_HTML_BEGIN = '''<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<HTML>
<HEAD>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<TITLE>following last updates</TITLE>
</HEAD>
<BODY>
'''
_HTML_END = '''</BODY>
</HTML>'''
# Output format selectors accepted by print_info().
FMT_TEXT = 'txt'
FMT_HTML = 'html'


def get_time (s):
    """Parse a 500px timestamp string into a time.struct_time."""
    return time.strptime(' '.join(_TREG.match(s).groups()),
                         '%Y %m %d %H %M %S')


def get_last_upload_photo (user):
    """Return the user's most recently created photo (by created_at)."""
    return sorted(user.photos,
                  reverse=True, key=lambda p: get_time(p.created_at))[0]


def get_sorted_data (user):
    """Return (friend, last-upload timestamp) pairs, newest first.

    Relies on photos[0] being each friend's most recent photo --
    presumably the API's ordering; confirm (see _get_sorted_data).
    """
    return sorted([(u, u.photos[0].created_at) for u in user.friends],
                  reverse=True, key=operator.itemgetter(1))


def _get_sorted_data (user): # like get_sorted_data but slower :-D
    # Same result as get_sorted_data, but re-sorts every friend's photos.
    return sorted([(u, get_last_upload_photo(u).created_at) for u in user.friends],
                  reverse=True, key=operator.itemgetter(1))


def format_info_html (data):
    """Yield the lines of a minimal HTML page linking each friend's
    profile, annotated with their last-upload timestamp."""
    yield _HTML_BEGIN
    for user, date in data:
        yield '<a href="%s">%s</a> (%s)<p>' % (
            urlparse.urljoin(_URL, user.username), user.fullname.strip(), date)
        #time.strftime('%Y-%m-%d %H:%M:%S', get_time(date))) # last for debug only
    yield _HTML_END


def format_info_txt (data):
    """Yield one plain-text line per (friend, timestamp) pair."""
    for user, date in data:
        yield '%s (%s, %s)' % (
            urlparse.urljoin(_URL, user.username), user.fullname.strip(), date)
        #time.strftime('%Y-%m-%d %H:%M:%S', get_time(date))) # last for debug only


def print_info(data, fmt=FMT_HTML):
    """Print *data* in the chosen format; raises ValueError for an
    unknown *fmt*.  Falls back to utf-8 re-encoding when the terminal
    cannot represent a line (Python 2 behaviour)."""
    if fmt == FMT_HTML:
        func = format_info_html
    elif fmt == FMT_TEXT:
        func = format_info_txt
    else:
        raise ValueError("unknown format <%s>" % fmt)
    for out in func(data):
        try:
            print out
        except UnicodeEncodeError:
            print out.encode('utf-8', 'replace')
if __name__ == '__main__':
    # Authenticate against the 500px API, then print each friend's
    # profile link sorted by most recent upload.
    _f = _fh.FiveHundredPx(_a.get_consumer_key(),
                           _a.get_consumer_secret(),
                           _a.get_verify_url())
    username = sys.argv[1].decode('utf-8')  # Python 2 bytes -> unicode
    me = User(username=username)
    sorted_uploads = get_sorted_data(me)
    print_info(sorted_uploads)
    ##############################################
    # Everything below sys.exit() is unreachable scratch/debug code kept
    # by the author; its exact original nesting is uncertain (whitespace
    # was lost) but it never executes.  Note `sleep` is not imported, so
    # the `if 0:` block would raise NameError if ever enabled.
    sys.exit()
    if 0:
        __t = []
        #class
        for i in range(10):
            __t.append(time.localtime())
            sleep(1)
    me.friends = 8
    """
print type(me), dir(me), me.id
print "------"
print type(f), dir(f)
for i in me.friends:
print i.fullname, i.username, i.id, i.domain, dir(i)
break
if 0:
for p in sorted((x.created_at for x in i.photos), reverse=True, key=lambda s:get_time(s)):
print p
break
break
for p in i.photos:
print p, p.created_at, p.id
break
print list(f.get_user_friends(username))
print type(me), len(me)
print dir(f)
"""
| mit | Python | |
a0705902dcf335cadeee717fbbdcbb247bc14645 | Move wsgi.py to project directory | mhotwagner/backstage,mhotwagner/backstage,mhotwagner/backstage | wsgi.py | wsgi.py | """
WSGI config for backstage project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the application.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backstage.settings")

# The WSGI callable that application servers (gunicorn, mod_wsgi, ...)
# import from this module.
application = get_wsgi_application()
| mit | Python | |
b9283871a8be5ee0f289cb6181023a8366f530bd | modify response example | SpectoLabs/hoverfly,tjcunliffe/hoverfly,SpectoLabs/hoverfly,tjcunliffe/hoverfly,tjcunliffe/hoverfly,tjcunliffe/hoverfly,tjcunliffe/hoverfly,tjcunliffe/hoverfly | examples/middleware/modify_response/modify_response.py | examples/middleware/modify_response/modify_response.py | #!/usr/bin/env python
import sys
import logging
import json
# Middleware debug output goes to a local file, keeping stdout free for
# the payload protocol below.
logging.basicConfig(filename='middleware.log', level=logging.DEBUG)
logging.debug('Middleware is called')


def main():
    """Hoverfly middleware hook: read the request/response payload from
    stdin, force the response to status 201 with a fixed body, and print
    the modified payload back on stdout.
    """
    data = sys.stdin.readlines()
    # this is a json string in one line so we are interested in that one line
    payload = data[0]
    logging.debug(payload)
    payload_dict = json.loads(payload)
    payload_dict['response']['status'] = 201
    payload_dict['response']['body'] = "body was replaced by middleware"
    # returning new payload
    print(json.dumps(payload_dict))


if __name__ == "__main__":
    main()
| apache-2.0 | Python | |
4e18da98f3398e1cc3b4c8f76bc8f81529baff3c | Add doc modification script | Flat/serenity,Roughsketch/serenity,eLunate/serenity,zeyla/serenity,acdenisSK/serenity,Lakelezz/serenity,zeyla/serenity.rs | docs.py | docs.py | import glob
# Post-process generated rustdoc output: inject a banner image at the
# top of the sidebar of every serenity doc page.
for filename in glob.glob("target/doc/serenity/**/*.html"):
    print('Parsing {}'.format(filename))
    with open(filename) as f:
        content = f.read()
    # Only the first sidebar <nav> per file is modified (count=1).
    new_content = content.replace('<nav class="sidebar">\n', '<nav class="sidebar"><img src="https://docs.austinhellyer.me/serenity.rs/docs_header.png">\n', 1)
    # Rewrite the file only when something actually changed (also makes
    # re-running the script a no-op).
    if new_content != content:
        with open(filename, 'w') as f:
            f.write(new_content)
| isc | Python | |
94cfd557b604947a1e2ce23fc67bf82c508439ff | change request.base_payout from float to numeric (decimal) | eskwire/evesrp,paxswill/evesrp,paxswill/evesrp,eskwire/evesrp,paxswill/evesrp,eskwire/evesrp,eskwire/evesrp | evesrp/migrate/versions/4198a248c8a_.py | evesrp/migrate/versions/4198a248c8a_.py | """Move from using floats for ISK to numeric types.
Revision ID: 4198a248c8a
Revises: 45024170cf6
Create Date: 2014-06-18 14:34:25.967159
"""
# revision identifiers, used by Alembic.
revision = '4198a248c8a'
down_revision = '45024170cf6'
from decimal import Decimal
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
def upgrade():
    """Convert request.base_payout from Float to Numeric(15, 2).

    The data is copied through a temporary 'numeric_base_payout' column
    which is then renamed into place.  Values are multiplied by 1,000,000
    on the way -- the old float column apparently stored millions of ISK;
    confirm against the application code.
    """
    op.add_column('request',
            sa.Column('numeric_base_payout', sa.Numeric(precision=15, scale=2),
                default=0.0)
    )
    # Lightweight table construct so UPDATEs can run without importing
    # the application's models.
    request = table('request',
            column('id', sa.Integer),
            column('base_payout', sa.Float),
            column('numeric_base_payout', sa.Numeric(precision=15, scale=2)),
    )
    conn = op.get_bind()
    requests_sel = select([request.c.id, request.c.base_payout])
    requests = conn.execute(requests_sel)
    # Row-by-row copy: each float becomes an exact Decimal, then scaled.
    for request_id, float_payout in requests:
        decimal_payout = Decimal.from_float(float_payout)
        decimal_payout *= 1000000
        update_stmt = update(request)\
                .where(request.c.id == request_id)\
                .values({
                    'numeric_base_payout': decimal_payout,
                })
        conn.execute(update_stmt)
    requests.close()
    op.drop_column('request', 'base_payout')
    op.alter_column('request',
            column_name='numeric_base_payout',
            new_column_name='base_payout',
            existing_type=sa.Numeric(precision=15, scale=2),
            existing_server_default=0.0)
def downgrade():
    """Revert request.base_payout from Numeric(15, 2) back to Float,
    dividing values by 1,000,000 (the inverse of upgrade())."""
    op.add_column('request',
            sa.Column('float_base_payout', sa.Float, default=0.0)
    )
    # Lightweight table construct so UPDATEs can run without importing
    # the application's models.
    request = table('request',
            column('id', sa.Integer),
            column('base_payout', sa.Numeric(precision=15, scale=2)),
            column('float_base_payout', sa.Float),
    )
    conn = op.get_bind()
    requests_sel = select([request.c.id, request.c.base_payout])
    requests = conn.execute(requests_sel)
    for request_id, decimal_payout in requests:
        decimal_payout = decimal_payout / 1000000
        float_payout = float(decimal_payout)
        update_stmt = update(request)\
                .where(request.c.id == request_id)\
                .values({
                    'float_base_payout': float_payout,
                })
        conn.execute(update_stmt)
    requests.close()
    op.drop_column('request', 'base_payout')
    # Bug fix: this previously tried to rename 'numeric_base_payout',
    # which does not exist in downgrade() -- the temporary column here is
    # 'float_base_payout' -- so the migration would always fail.
    op.alter_column('request',
            column_name='float_base_payout',
            new_column_name='base_payout',
            existing_type=sa.Float,
            existing_server_default=0.0)
| bsd-2-clause | Python | |
314b195160d539101dd3c3fa53e6f870fd2ee083 | add beautiful-triplets | EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank | contest/world-codesprint-april/beautiful-triplets/beautiful-triplets.py | contest/world-codesprint-april/beautiful-triplets/beautiful-triplets.py | # -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date: 2016-04-30 19:35:03
# @Last Modified by: Zeyuan Shang
# @Last Modified time: 2016-04-30 19:38:26
if __name__ == "__main__":
    # Count values x in the sequence such that x+d and x+2*d also occur:
    # each such x anchors one beautiful triplet.
    # NOTE(review): the set loses multiplicity, so this assumes elements
    # are distinct (the problem guarantees strictly increasing input) --
    # duplicates would be miscounted.
    n, d = map(int, raw_input().split())
    a = map(int, raw_input().split())
    ele = set()
    for x in a:
        ele.add(x)
    ans = 0
    for x in a:
        x1, x2 = x + d, x + 2 * d
        if x1 in ele and x2 in ele:
            ans += 1
    print ans
69f323bba974ea73963c8da63a3c3b8326fffc6e | Create RevLinkedList_002.py | cc13ny/algo,cc13ny/algo,cc13ny/Allin,Chasego/codi,Chasego/cod,Chasego/cod,cc13ny/Allin,cc13ny/algo,cc13ny/Allin,Chasego/codi,Chasego/codirit,cc13ny/Allin,Chasego/cod,Chasego/codirit,Chasego/codi,Chasego/codi,Chasego/codirit,Chasego/cod,cc13ny/algo,Chasego/codi,Chasego/codirit,Chasego/cod,cc13ny/algo,cc13ny/Allin,Chasego/codirit | leetcode/206-Reverse-Linked-List/RevLinkedList_002.py | leetcode/206-Reverse-Linked-List/RevLinkedList_002.py | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    """LeetCode 206 -- Reverse Linked List."""

    def reverseList(self, head):
        """
        :type head: ListNode
        :rtype: ListNode

        Iteratively re-point each node at its predecessor and return the
        new head (the old tail).  O(n) time, O(1) extra space, and no
        auxiliary dummy node is allocated.
        """
        previous = None
        current = head
        while current:
            following = current.next
            current.next = previous
            previous = current
            current = following
        return previous
| mit | Python | |
ced07a1f5d0d5c3c460bda1de29381ba7aff0c87 | add knn | starsriver/ML | 4/KNN.py | 4/KNN.py | # -*- coding: utf-8 -*-
import csv
import random
import math
import operator
import os
def loadDataSet(fileName, split, trainingSet=[], testSet=[]):
    """Load the iris CSV and randomly partition its rows into
    trainingSet / testSet, with probability *split* of a row going to
    training.  The first four columns are converted to floats; the last
    column stays as the class label.

    NOTE(review): the mutable default arguments are shared between
    calls; main() always passes its own lists, so this is latent rather
    than active -- still worth fixing.
    """
    # 'rb' + csv.reader is the Python 2 idiom (this module also uses
    # iteritems elsewhere); on Python 3 use 'r' with newline=''.
    with open(fileName, "rb") as csvFile:
        lines = csv.reader(csvFile)
        dataSet = list(lines)
        # The last row (len-1) is skipped -- presumably a trailing blank
        # line in the data file; confirm.
        for x in range(len(dataSet) - 1):
            for y in range(4):
                dataSet[x][y] = float(dataSet[x][y])
            if random.random() < split:
                trainingSet.append(dataSet[x])
            else:
                testSet.append(dataSet[x])
def euclideanDistance(instance1, instance2, length):
    """Euclidean distance over the first *length* features of the two
    instances (callers exclude the trailing label column)."""
    squared_sum = sum(pow(instance1[i] - instance2[i], 2)
                      for i in range(length))
    return math.sqrt(squared_sum)
def getNeighbors(trainingSet, testInstance, k):
    """Return the k training instances closest to *testInstance*.

    Distance is computed over all attributes except the final label
    column; ties keep the original training-set order (stable sort).
    Raises IndexError when k exceeds the training-set size, like the
    direct indexing it performs.
    """
    feature_count = len(testInstance) - 1
    ranked = sorted(
        trainingSet,
        key=lambda candidate: euclideanDistance(testInstance, candidate,
                                                feature_count))
    return [ranked[position] for position in range(k)]
def getResponse(neighbors):
    """Majority vote over the label (last element) of each neighbor and
    return the winning label.

    Fix: uses dict.items() instead of the Python-2-only iteritems(), so
    this function now runs unchanged on both Python 2 and 3.
    """
    classVotes = {}
    for neighbor in neighbors:
        response = neighbor[-1]
        # .get with a default replaces the original if/else vote logic.
        classVotes[response] = classVotes.get(response, 0) + 1
    sortedVotes = sorted(
        classVotes.items(), key=operator.itemgetter(1), reverse=True)
    return sortedVotes[0][0]
def getAccuracy(testSet, predictions):
    """Percentage of test instances whose true label (last element)
    equals the corresponding entry of *predictions*."""
    hits = sum(1 for x in range(len(testSet))
               if testSet[x][-1] == predictions[x])
    return (hits / float(len(testSet))) * 100.0
def main():
    """Train/test split of the iris data, classify each test instance
    with 3-NN and print per-instance predictions plus overall accuracy."""
    trainingSet = []
    testSet = []
    split = 0.67  # ~2/3 of rows go to training
    loadDataSet(r"../DataSet/iris.data.txt", split, trainingSet, testSet)
    print("Train Set: " + repr(len(trainingSet)))
    print("Test Set: " + repr(len(testSet)))
    predictions = []
    k = 3
    for x in range(len(testSet)):
        neighbors = getNeighbors(trainingSet, testSet[x], k)
        result = getResponse(neighbors)
        predictions.append(result)
        print(
            "> predicted= " + repr(result) + ", actual= " +
            repr(testSet[x][-1])
        )
    accuracy = getAccuracy(testSet, predictions)
    print("Accuracy: " + repr(accuracy) + " %")


if __name__ == '__main__':
    # Printed so the relative data-file path above can be sanity-checked.
    print(os.getcwd())
    main()
| bsd-2-clause | Python | |
f1268d95f224b0bb3df00f3a76c92074f0db037a | Update utils.py | rishubhjain/commons,r0h4n/commons,Tendrl/commons | tendrl/commons/central_store/utils.py | tendrl/commons/central_store/utils.py | from tendrl.commons.etcdobj import fields
def to_etcdobj(cls_etcd, obj):
    """Copy the public attributes of *obj* onto *cls_etcd*, wrapped as
    etcdobj fields, and return *cls_etcd*.

    Skipped: None values, "_"-prefixed names, and a fixed list of
    framework bookkeeping attributes.
    """
    for attr, value in vars(obj).iteritems():  # Python 2 idiom
        if value is None:
            continue
        if attr.startswith("_"):
            continue
        if attr in ["attrs", "enabled", "obj_list", "obj_value", "atoms",
                    "flows", "value", "list"]:
            continue
        setattr(cls_etcd, attr, to_etcd_field(attr, value))
    return cls_etcd


def to_etcd_field(name, value):
    """Wrap *value* in the etcdobj field type matching its Python type.

    Raises KeyError for unsupported types.  Note that bool and unicode
    both map to StrField (there is no dedicated bool field in the map).
    """
    type_to_etcd_fields_map = {dict: fields.DictField,
                               str: fields.StrField,
                               int: fields.IntField,
                               bool: fields.StrField,
                               unicode: fields.StrField}
    return type_to_etcd_fields_map[type(value)](name, value)
| from tendrl.commons.etcdobj import fields
def to_etcdobj(cls_etcd, obj):
    """Copy the public attributes of *obj* onto *cls_etcd*, wrapped as
    etcdobj fields, and return *cls_etcd*.

    Pre-change version: unlike the updated copy, None values are NOT
    skipped here and will be wrapped like any other value.
    """
    for attr, value in vars(obj).iteritems():  # Python 2 idiom
        if attr.startswith("_"):
            continue
        if attr in ["attrs", "enabled", "obj_list", "obj_value", "atoms",
                    "flows", "value", "list"]:
            continue
        setattr(cls_etcd, attr, to_etcd_field(attr, value))
    return cls_etcd


def to_etcd_field(name, value):
    """Wrap *value* in the etcdobj field type matching its Python type;
    raises KeyError for unsupported types."""
    type_to_etcd_fields_map = {dict: fields.DictField,
                               str: fields.StrField,
                               int: fields.IntField,
                               bool: fields.StrField,
                               unicode: fields.StrField}
    return type_to_etcd_fields_map[type(value)](name, value)
| lgpl-2.1 | Python |
71bbd57214cd8be6ac8583884eb1fc2e5b270eb8 | Add conf file for Emmaus Ideasbox in France | ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube | ideascube/conf/idb_fra_emmaus.py | ideascube/conf/idb_fra_emmaus.py | # -*- coding: utf-8 -*-
"""Ideaxbox for Emmaus, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Emmaus"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
'languages': ['fr', 'ar', 'fa']
},
{
'id': 'gutenberg',
'lang': 'fr',
},
{
'id': 'khanacademy',
},
{
'id': 'cest-pas-sorcier',
},
{
'id': 'wikisource',
'languages': ['fr']
},
{
'id': 'wikibooks',
'languages': ['fr']
},
{
'id': 'wikivoyage',
'languages': ['fr']
},
{
'id': 'wiktionary',
'languages': ['fr']
},
{
'id': 'wikiversity',
'languages': ['fr']
},
{
'id': 'ted',
'sessions': [
('tedxgeneva2014.fr', 'Geneva 2014'),
('tedxlausanne2012.fr', 'Lausanne 2012'),
('tedxlausanne2013.fr', 'Lausanne 2013'),
('tedxlausanne2014.fr', 'Lausanne 2014'),
]
},
]
| agpl-3.0 | Python | |
f7f76bc7eb217e4c7b81e58afec41726f0dd2848 | Add another dip example | matthewearl/strippy | examples/dip2.py | examples/dip2.py | #!/usr/bin/env python3
# Copyright (c) 2015 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Trivial example, placing a 8-pin DIP package.
"""
import component
import cli
ic = component.DualInlinePackage("IC1", 4, row_spacing=2)
r1 = component.Resistor("R1", 1)
r2 = component.Resistor("R2", 1)
components = (ic, r1, r2)
board = component.StripBoard((5, 2))
nets = (
(ic.terminals[0], r1.terminals[0]),
(ic.terminals[1], r1.terminals[1]),
(ic.terminals[2], r2.terminals[0]),
(ic.terminals[3], r2.terminals[1]),
)
cli.main(board, components, nets)
| mit | Python | |
60a6b98d0bc3f8e55414dd1f6461ad863bcfd12a | Create matching_ending_items.py | JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking | hacker_rank/regex/repetitions/matching_ending_items.py | hacker_rank/regex/repetitions/matching_ending_items.py | Regex_Pattern = r'^[a-zA-Z]*[s]$' # Do not delete 'r'.
| mit | Python | |
4822b3c55478bd76b66d5afbfabf0c9ec51a9c8e | Add an example ('move.py'). | jeremiedecock/pyax12,jeremiedecock/pyax12 | examples/move.py | examples/move.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pydynamixel.packet as pk
import pydynamixel.connection
import pydynamixel.instruction_packet as ip
import time
def main():
serial_connection = pydynamixel.connection.Connection()
# Goto to 180°
instruction_packet = ip.InstructionPacket(_id=pk.BROADCAST_ID, _instruction=ip.WRITE_DATA, _parameters=(pk.GOAL_POSITION, 0x00, 0x02, 0x00, 0x02))
print('> ', instruction_packet.to_printable_string())
status_packet = serial_connection.send(instruction_packet)
if status_packet is not None:
print('< ', status_packet.to_printable_string())
# Wait 2 seconds
time.sleep(1)
# Go back to 0
instruction_packet = ip.InstructionPacket(_id=pk.BROADCAST_ID, _instruction=ip.WRITE_DATA, _parameters=(pk.GOAL_POSITION, 0x00, 0x00, 0x00, 0x02))
print('> ', instruction_packet.to_printable_string())
status_packet = serial_connection.send(instruction_packet)
if status_packet is not None:
print('< ', status_packet.to_printable_string())
if __name__ == '__main__':
main()
| mit | Python | |
1e9b1c2270d8dfc722f92b8b046581ce6172016a | update divergence | clinicalml/theanomodels,clinicalml/theanomodels | utils/divergences.py | utils/divergences.py | """
Author: Rahul G. Krishnan
File containing divergences used between probability measures
"""
import theano.tensor as T
def KL(mu_1,cov_1,mu_2,cov_2):
"""
Estimate the KL divergence between two gaussians with diagonal covariance
KL(q||p) 0.5*(log|Sigma_2| - log |Sigma_1|
"""
diff = mu_2-mu_1
return 0.5*(T.log(cov_2)-T.log(cov_1)-1+cov_1/cov_2+(diff**2)/cov_2).sum()
def Bhattacharrya(mu_1,cov_1,mu_2,cov_2):
"""
Estimate the Bhattacharyya distance between two gaussians with diagonal covariance
D_B(q||p)
See: http://like.silk.to/studymemo/PropertiesOfMultivariateGaussianFunction.pdf
"""
diff = mu_1-mu_2
P = (cov_2+cov_1)/2
D_B = (1/8.)*(diff**2)/P + (1/2.)*T.log(P/T.sqrt(cov_2*cov_1))
return D_B.sum()
| mit | Python | |
ee633ae9576ee1d2c0edd55551319879a9c32864 | Add initial version of fit_sota_model | sot/aca_stats,sot/aca_stats,sot/aca_stats | fit_sota_model.py | fit_sota_model.py | #!/usr/bin/env python
from __future__ import division
import numpy as np
SOTA2013_FIT = [0.18, 0.99, -0.49, # Scale
-1.49, 0.89, 0.28] # Offset
if 'data' not in globals():
import cPickle as pickle
with open('data/mini_acq_table.pkl', 'r') as fh:
data = pickle.load(fh)
data['mag10'] = data['mag'] - 10.0
data = data[data['year'] > 2007]
data = data.group_by('quarter')
data_mean = data.groups.aggregate(np.mean)
def scaled_warm_frac(pars, x):
scl0, scl1, scl2 = pars[0:3]
off0, off1, off2 = pars[3:6]
m = data['mag10']
scale = 10**(scl0 + scl1 * m + scl2 * m**2)
offset = 10**(off0 + off1 * m + off2 * m**2)
model_y = offset + scale * data['warm_pix']
return model_y
def fit_sota_model():
from sherpa import ui
data_id = 1
ui.set_method('simplex')
ui.set_stat('cash')
ui.load_user_model(scaled_warm_frac, 'model')
ui.add_user_pars('model', ['scl0', 'scl1', 'scl2', 'off0', 'off1', 'off2'])
ui.set_model(data_id, 'model')
ui.load_arrays(data_id, data['year'], data['fail'].astype(np.float))
# Initial fit values from SOTA 2013 presentation (modulo typo there)
start_vals = iter(SOTA2013_FIT) # Offset
fmod = ui.get_model_component('model')
for name in ('scl', 'off'):
for num in (0, 1, 2):
comp_name = name + str(num)
setattr(fmod, comp_name, start_vals.next())
comp = getattr(fmod, comp_name)
comp.min = -5
comp.max = 5
ui.fit(data_id)
# conf = ui.get_confidence_results()
return ui.get_fit_results()
| bsd-3-clause | Python | |
4ea892fe28b3045ab265cfd9fd5aa6a2b7c1ee52 | add report related to the fix contaminant | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | sequana/report_fix.py | sequana/report_fix.py | import easydev
import os
from .report_main import BaseReport
# a utility from external reports package
from reports import HTMLTable
import pandas as pd
def _get_template_path(name):
# Is it a local directory ?
if os.path.exists(name):
return name
else:
template_path = easydev.get_shared_directory_path("sequana")
template_path += os.sep + "templates" + os.sep + name
return template_path
class FixReport(BaseReport):
"""
"""
def __init__(self, jinja_template="fix_contaminant",
output_filename="fix.html", directory="report",
overwrite=False, **kargs):
"""
:param jinja_template: name of a directory (either local) or
from sequana/share/templates where JINJA files are available. A file
named index.html is required but may be renamed (with
**output_filename** parameter).
:param output_filename: name of the final HTML file.
:param directory: name of the output directory (defaults to report)
Parameters accepted by :class:`reports.Report` are also accepted.
"""
super(FixReport, self).__init__(jinja_template, output_filename,
directory, **kargs)
self.title = "Fix Report Summary"
self.jinja['title'] = "Fix Report Summary"
self.jinja['mode'] = "Paired-end"
self.mode = "pe"
self.input_filename = "fix_stats.json"
def parse(self):
import json
data = json.load(open(self.input_filename, "r"))
for key, value in data.items():
self.jinja[key] = value
x = data['R1_mapped']
y = data['R1_unmapped']
contamination = x / float(x+y) * 100
from easydev import precision
self.jinja['contamination'] = precision(contamination, 3)
import pandas as pd
df = pd.DataFrame({
'R1': [data['R1_mapped'], data['R1_unmapped']],
'R2': [data['R2_mapped'], data['R2_unmapped']]})
df.index = ['mapped', 'unmapped']
h = HTMLTable(df)
html = h.to_html(index=True)
self.jinja['stats'] = html
| bsd-3-clause | Python | |
9c67d72fbbdec53e2adc5ff2718bae8b3493b219 | add a simple test for celery config | therewillbecode/ichnaea,therewillbecode/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea | ichnaea/tests/test_worker.py | ichnaea/tests/test_worker.py | from unittest2 import TestCase
class TestWorkerConfig(TestCase):
def _get_target(self):
from ichnaea.worker import celery
return celery
def test_config(self):
celery = self._get_target()
self.assertTrue(celery.conf['CELERY_ALWAYS_EAGER'])
self.assertEqual(celery.conf['CELERY_RESULT_BACKEND'], 'database')
| apache-2.0 | Python | |
a990292ec2d3e2ebc74dd548cfc9ee55427bf5f0 | Create news_url.py | wolfdale/Scraper | news_url.py | news_url.py | """ PROJECT SCRAPER """
'''GET URL FROM BBC NEWS'''
from bs4 import BeautifulSoup
import urllib2
url='http://www.bbc.com/news'
web=urllib2.urlopen(url)
soup=BeautifulSoup(web,'html.parser')
with open('news_url.txt','w') as file:
for tag in soup.find_all('a',{'class':'title-link'}):
url=tag.get('href')
file.write(tag.text.encode('utf-8'))
file.write('www.bbc.com'+url + '\n')
| mit | Python | |
4d747b0ff0f700e41ff31b028618163d180301fa | Add Utils | coala-analyzer/coala-sublime | Utils.py | Utils.py | """
Holds various common functions and variables which will be useful in general
by the other classes.
"""
def log(*args, **kwargs):
print(" COALA -", *args, **kwargs)
| agpl-3.0 | Python | |
dbc64554694f117dfe2def082acaeb60117a4f1e | add template for classifier test | austinlostinboston/mitsWebApp,austinlostinboston/mitsWebApp,austinlostinboston/mitsWebApp,austinlostinboston/mitsWebApp | weiss/tests/test_dialogue.py | weiss/tests/test_dialogue.py | from django.test import TestCase
class StateTestCase(TestCase):
def setUp(self):
pass
def test_classifier(self):
pass
| apache-2.0 | Python | |
c193b4718a707f16b436d62f6b6a26882b742251 | test for branching and shas | OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api | ws-tests/test_integration.py | ws-tests/test_integration.py | #!/usr/bin/env python
from opentreetesting import test_http_json_method, config
import datetime
import codecs
import json
import sys
import os
study_id = '9'
DOMAIN = config('host', 'apihost')
#A full integration test, with GET, PUT, POST, MERGE and a merge conflict,
#test get and save sha
data = {'output_nexml2json':'1.2'}
SUBMIT_URI = DOMAIN + '/v1/study/{s}'.format(s=study_id)
r = test_http_json_method(SUBMIT_URI, 'GET', data=data, expected_status=200, return_bool_data=True)
assert(r[0]==True)
get_sha = r[1]["sha"]
n=r[1]["data"]
assert(get_sha)
#assert(get_sha=="5c845c47fc8a0b7b37b02bde9f0f59a399a3c434")
#PUT edits to a study
starting_commit_SHA = get_sha
SUBMIT_URI = DOMAIN + '/v1/study/{s}'.format(s=study_id)
fn = '/home/ejmctavish/projects/otapi/peyotl/peyotl/test/data/mini_par/mini_phyl/study/{s}/{s}.json'.format(s=study_id)
inpf = codecs.open(fn, 'rU', encoding='utf-8')
# refresh a timestamp so that the test generates a commit
n['nexml'][u'bogus_timestamp']=unicode(datetime.datetime.utcnow().isoformat())
data = { 'nexson' : n,
'auth_token': os.environ.get('GITHUB_OAUTH_TOKEN', 'bogus_token'),
'starting_commit_SHA': starting_commit_SHA,
}
r2 = test_http_json_method(SUBMIT_URI,
'PUT',
data=data,
expected_status=200,
return_bool_data=True)
assert(r2[0]==True)
assert(r2[1]['resource_id']==study_id)
assert(r2[1]['merge_needed']==False)
#print(r2[1].keys())
'''
resource_id
starting_commit_SHA
SUBMIT_URI = DOMAIN + '/merge/v1/{}/{}'.format(resource_id,starting_commit_SHA)
data = {
'auth_token': 'bogus'
}
if test_http_json_method(SUBMIT_URI,
'PUT',
data=data,
expected_status=400):
sys.exit(0)
sys.exit(1)
''' | bsd-2-clause | Python | |
ffe374bd1fc40aab3dcdbc37141a068af81465de | Create onmodify.timetrack.py | jhmartin/taskwarrior-effort-tracker | onmodify.timetrack.py | onmodify.timetrack.py | #!/usr/bin/env python
#
# Writes task effort log to LEDGERFILE. Format is:
# 2015/03/22,e28087e9-525e-403c-9c4b-1aed53809092,9,no project,test3
# Date,UID,Seconds effort, Project name (or 'no project'), task description
#
# You need to adjust LEDGERFILE, or set the TIMELOG environment variable.
# Based on https://gist.github.com/wbsch/d977b0ac29aa1dfa4437
#
import calendar
import json
import os
import re
import sys
from datetime import datetime
from datetime import timedelta
LEDGERFILE = '/Users/jhmartin/.task/timetrack.ledger'
if 'TIMELOG' in os.environ:
LEDGERFILE = os.environ['TIMELOG']
def adjust_date(d, adjust_by):
if not isinstance(d, datetime):
d = tw_to_dt(d)
d -= timedelta(minutes=int(adjust_by))
return d
def tw_to_dt(s):
""" Taskwarrior JSON date ---> datetime object. """
return datetime.strptime(s, "%Y%m%dT%H%M%SZ")
def dt_to_tw(d):
""" datetime object ---> Taskwarrior JSON date. """
return d.strftime("%Y%m%dT%H%M%SZ")
old = json.loads(sys.stdin.readline())
new = json.loads(sys.stdin.readline())
annotation_added = ('annotations' in new and not 'annotations' in old) \
or \
('annotations' in new and 'annotations' in old and \
len(new['annotations']) > len(old['annotations']))
# task started
if ('start' in new and not 'start' in old) and annotation_added:
new['annotations'].sort(key=lambda anno: anno['entry'])
m = re.match('^[0-9]+$', new['annotations'][-1]['description'])
if m:
new['start'] = dt_to_tw(adjust_date(new['start'], int(m.group(0))))
new['annotations'] = new['annotations'][:-1]
if not new['annotations']:
del new['annotations']
print("Timelog: Started task %s minutes ago." % m.group(0))
if tw_to_dt(new['start']) < tw_to_dt(new['entry']):
new['entry'] = new['start']
# task stopped
if 'start' in old and not 'start' in new:
started_utc = tw_to_dt(old['start'])
started_ts = calendar.timegm(started_utc.timetuple())
started = datetime.fromtimestamp(started_ts)
stopped = datetime.now()
delta = (stopped - started).total_seconds()
if annotation_added:
new['annotations'].sort(key=lambda anno: anno['entry'])
m = re.match('^[0-9]+$', new['annotations'][-1]['description'])
if m:
new['annotations'] = new['annotations'][:-1]
if not new['annotations']:
del new['annotations']
stopped = adjust_date(stopped, m.group(0))
if stopped < started:
print("ERROR: Stop date -%s minutes would be before the start date!" % m.group(0))
sys.exit(1)
print("Timelog: Stopped task %s minutes ago." % m.group(0))
newentry = started.strftime("%Y/%m/%d") + ","
newentry += new['uuid'] + ","
newentry += str(int(delta)) + ","
projectlabel= new['project'].replace('.', ':') if 'project' in new else "no project"
newentry += projectlabel
newentry += ","
newentry += new['description'] + "\n"
print(json.dumps(new)
| apache-2.0 | Python | |
4641195df10da114b896fbe16c89324954833d22 | Create main.py | DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit | web/src/main.py | web/src/main.py | from flask import Flask
# Flask application object for this module.
app = Flask(__name__)
@app.route("/")
def hello():
    """Handle GET / by returning a plain-text greeting."""
    return "Hello World!"
# Start the Flask development server when run as a script.
if __name__ == "__main__":
    app.run()
| agpl-3.0 | Python | |
a4af8609686386d3371289ea24e019a897ca13bd | introduce a new kind of exception: RedirectWarning (warning with an additional redirection button) | Antiun/odoo,pplatek/odoo,xzYue/odoo,Grirrane/odoo,javierTerry/odoo,hmen89/odoo,kirca/OpenUpgrade,hmen89/odoo,gavin-feng/odoo,pplatek/odoo,Nowheresly/odoo,sinbazhou/odoo,ramadhane/odoo,ubic135/odoo-design,joshuajan/odoo,stephen144/odoo,odoousers2014/odoo,Danisan/odoo-1,lightcn/odoo,erkrishna9/odoo,eino-makitalo/odoo,MarcosCommunity/odoo,brijeshkesariya/odoo,apocalypsebg/odoo,OpenUpgrade-dev/OpenUpgrade,doomsterinc/odoo,hanicker/odoo,KontorConsulting/odoo,Drooids/odoo,kirca/OpenUpgrade,Endika/OpenUpgrade,brijeshkesariya/odoo,hoatle/odoo,CatsAndDogsbvba/odoo,chiragjogi/odoo,rowemoore/odoo,Antiun/odoo,oliverhr/odoo,abdellatifkarroum/odoo,hifly/OpenUpgrade,Danisan/odoo-1,AuyaJackie/odoo,diagramsoftware/odoo,tarzan0820/odoo,odootr/odoo,savoirfairelinux/OpenUpgrade,CopeX/odoo,fgesora/odoo,mszewczy/odoo,nagyistoce/odoo-dev-odoo,rubencabrera/odoo,tinkerthaler/odoo,cysnake4713/odoo,AuyaJackie/odoo,Endika/OpenUpgrade,jaxkodex/odoo,fgesora/odoo,lombritz/odoo,fjbatresv/odoo,ApuliaSoftware/odoo,wangjun/odoo,cedk/odoo,collex100/odoo,fevxie/odoo,fuselock/odoo,blaggacao/OpenUpgrade,shaufi10/odoo,leoliujie/odoo,ramadhane/odoo,tinkhaven-organization/odoo,hip-odoo/odoo,jeasoft/odoo,jiangzhixiao/odoo,Ernesto99/odoo,BT-astauder/odoo,slevenhagen/odoo-npg,stephen144/odoo,savoirfairelinux/odoo,slevenhagen/odoo,guewen/OpenUpgrade,papouso/odoo,datenbetrieb/odoo,kittiu/odoo,optima-ict/odoo,poljeff/odoo,tvibliani/odoo,tarzan0820/odoo,addition-it-solutions/project-all,agrista/odoo-saas,naousse/odoo,PongPi/isl-odoo,draugiskisprendimai/odoo,fdvarela/odoo8,cedk/odoo,salaria/odoo,Endika/odoo,fjbatresv/odoo,ccomb/OpenUpgrade,gavin-feng/odoo,sve-odoo/odoo,NeovaHealth/odoo,ehirt/odoo,realsaiko/odoo,colinnewell/odoo,oasiswork/odoo,fjbatresv/odoo,slevenhagen/odoo-npg,aviciimaxwell/odoo,hbrunn/OpenUpgrade,bobisme/odoo,acshan/odoo,abstract-open-solutions/OCB,OpenUp
grade-dev/OpenUpgrade,KontorConsulting/odoo,FlorianLudwig/odoo,waytai/odoo,provaleks/o8,FlorianLudwig/odoo,alhashash/odoo,minhtuancn/odoo,apanju/GMIO_Odoo,dllsf/odootest,dalegregory/odoo,luistorresm/odoo,simongoffin/website_version,jfpla/odoo,makinacorpus/odoo,ihsanudin/odoo,mkieszek/odoo,gorjuce/odoo,jpshort/odoo,apanju/odoo,bkirui/odoo,ovnicraft/odoo,BT-fgarbely/odoo,alexcuellar/odoo,AuyaJackie/odoo,windedge/odoo,oasiswork/odoo,demon-ru/iml-crm,Endika/OpenUpgrade,funkring/fdoo,klunwebale/odoo,fgesora/odoo,savoirfairelinux/OpenUpgrade,Gitlab11/odoo,juanalfonsopr/odoo,bealdav/OpenUpgrade,Ernesto99/odoo,Eric-Zhong/odoo,gavin-feng/odoo,ccomb/OpenUpgrade,virgree/odoo,RafaelTorrealba/odoo,CatsAndDogsbvba/odoo,cloud9UG/odoo,nhomar/odoo,microcom/odoo,dalegregory/odoo,dezynetechnologies/odoo,SAM-IT-SA/odoo,shaufi10/odoo,incaser/odoo-odoo,ramadhane/odoo,JCA-Developpement/Odoo,csrocha/OpenUpgrade,tvtsoft/odoo8,lightcn/odoo,mustafat/odoo-1,MarcosCommunity/odoo,Antiun/odoo,ecosoft-odoo/odoo,simongoffin/website_version,draugiskisprendimai/odoo,Codefans-fan/odoo,AuyaJackie/odoo,ThinkOpen-Solutions/odoo,ygol/odoo,apanju/odoo,tinkhaven-organization/odoo,BT-fgarbely/odoo,shaufi/odoo,gavin-feng/odoo,addition-it-solutions/project-all,janocat/odoo,rgeleta/odoo,sadleader/odoo,CopeX/odoo,virgree/odoo,ChanduERP/odoo,cpyou/odoo,abstract-open-solutions/OCB,havt/odoo,Daniel-CA/odoo,dllsf/odootest,Ernesto99/odoo,Elico-Corp/odoo_OCB,mmbtba/odoo,nhomar/odoo-mirror,ihsanudin/odoo,ygol/odoo,Ernesto99/odoo,shivam1111/odoo,avoinsystems/odoo,BT-rmartin/odoo,storm-computers/odoo,SerpentCS/odoo,OpenUpgrade/OpenUpgrade,grap/OpenUpgrade,andreparames/odoo,kifcaliph/odoo,datenbetrieb/odoo,simongoffin/website_version,prospwro/odoo,Kilhog/odoo,nhomar/odoo-mirror,hopeall/odoo,sebalix/OpenUpgrade,apocalypsebg/odoo,brijeshkesariya/odoo,juanalfonsopr/odoo,blaggacao/OpenUpgrade,Elico-Corp/odoo_OCB,collex100/odoo,bkirui/odoo,numerigraphe/odoo,alhashash/odoo,nhomar/odoo,blaggacao/OpenUpgrade,sadleader/odoo,ThinkO
pen-Solutions/odoo,makinacorpus/odoo,JonathanStein/odoo,Gitlab11/odoo,hassoon3/odoo,datenbetrieb/odoo,guerrerocarlos/odoo,pedrobaeza/OpenUpgrade,bplancher/odoo,numerigraphe/odoo,hubsaysnuaa/odoo,hip-odoo/odoo,microcom/odoo,mlaitinen/odoo,jeasoft/odoo,cpyou/odoo,ovnicraft/odoo,diagramsoftware/odoo,jiachenning/odoo,Gitlab11/odoo,avoinsystems/odoo,shaufi/odoo,0k/OpenUpgrade,oihane/odoo,Nowheresly/odoo,sv-dev1/odoo,diagramsoftware/odoo,Noviat/odoo,lombritz/odoo,MarcosCommunity/odoo,jpshort/odoo,abstract-open-solutions/OCB,Noviat/odoo,acshan/odoo,thanhacun/odoo,Daniel-CA/odoo,savoirfairelinux/OpenUpgrade,janocat/odoo,ThinkOpen-Solutions/odoo,chiragjogi/odoo,Danisan/odoo-1,ecosoft-odoo/odoo,slevenhagen/odoo,numerigraphe/odoo,BT-rmartin/odoo,bplancher/odoo,Grirrane/odoo,JonathanStein/odoo,Noviat/odoo,odootr/odoo,Drooids/odoo,dkubiak789/odoo,mlaitinen/odoo,ecosoft-odoo/odoo,savoirfairelinux/OpenUpgrade,dllsf/odootest,windedge/odoo,fuhongliang/odoo,Maspear/odoo,factorlibre/OCB,thanhacun/odoo,makinacorpus/odoo,inspyration/odoo,kirca/OpenUpgrade,sysadminmatmoz/OCB,hbrunn/OpenUpgrade,CopeX/odoo,bguillot/OpenUpgrade,tinkerthaler/odoo,BT-ojossen/odoo,Nowheresly/odoo,thanhacun/odoo,dfang/odoo,mkieszek/odoo,FlorianLudwig/odoo,erkrishna9/odoo,leorochael/odoo,MarcosCommunity/odoo,hip-odoo/odoo,grap/OpenUpgrade,apocalypsebg/odoo,goliveirab/odoo,ehirt/odoo,hassoon3/odoo,draugiskisprendimai/odoo,poljeff/odoo,JonathanStein/odoo,jpshort/odoo,0k/odoo,oihane/odoo,demon-ru/iml-crm,ChanduERP/odoo,nuuuboo/odoo,luiseduardohdbackup/odoo,dfang/odoo,lightcn/odoo,florentx/OpenUpgrade,Codefans-fan/odoo,mszewczy/odoo,x111ong/odoo,jusdng/odoo,florian-dacosta/OpenUpgrade,tarzan0820/odoo,srsman/odoo,slevenhagen/odoo-npg,syci/OCB,jpshort/odoo,fuselock/odoo,datenbetrieb/odoo,CubicERP/odoo,jiachenning/odoo,alqfahad/odoo,mmbtba/odoo,poljeff/odoo,takis/odoo,stephen144/odoo,agrista/odoo-saas,chiragjogi/odoo,BT-astauder/odoo,ujjwalwahi/odoo,realsaiko/odoo,optima-ict/odoo,xzYue/odoo,ChanduERP/odoo,zchking/odoo,
salaria/odoo,cysnake4713/odoo,dsfsdgsbngfggb/odoo,apanju/odoo,kybriainfotech/iSocioCRM,rowemoore/odoo,odoo-turkiye/odoo,microcom/odoo,nagyistoce/odoo-dev-odoo,glovebx/odoo,nitinitprof/odoo,Drooids/odoo,SAM-IT-SA/odoo,hmen89/odoo,oliverhr/odoo,SerpentCS/odoo,collex100/odoo,naousse/odoo,rahuldhote/odoo,wangjun/odoo,CopeX/odoo,nitinitprof/odoo,nhomar/odoo,OpenUpgrade-dev/OpenUpgrade,dgzurita/odoo,kybriainfotech/iSocioCRM,csrocha/OpenUpgrade,sebalix/OpenUpgrade,ThinkOpen-Solutions/odoo,OpusVL/odoo,dariemp/odoo,RafaelTorrealba/odoo,JonathanStein/odoo,minhtuancn/odoo,QianBIG/odoo,bakhtout/odoo-educ,markeTIC/OCB,dfang/odoo,takis/odoo,salaria/odoo,odoousers2014/odoo,Adel-Magebinary/odoo,bkirui/odoo,tinkerthaler/odoo,Nick-OpusVL/odoo,diagramsoftware/odoo,bobisme/odoo,waytai/odoo,fuhongliang/odoo,fuhongliang/odoo,idncom/odoo,CubicERP/odoo,lombritz/odoo,Nick-OpusVL/odoo,vnsofthe/odoo,shaufi10/odoo,mvaled/OpenUpgrade,BT-ojossen/odoo,fdvarela/odoo8,RafaelTorrealba/odoo,GauravSahu/odoo,gorjuce/odoo,fuselock/odoo,osvalr/odoo,incaser/odoo-odoo,cedk/odoo,Nick-OpusVL/odoo,leorochael/odoo,cedk/odoo,rubencabrera/odoo,ccomb/OpenUpgrade,ihsanudin/odoo,tarzan0820/odoo,n0m4dz/odoo,lightcn/odoo,nexiles/odoo,hoatle/odoo,fjbatresv/odoo,idncom/odoo,eino-makitalo/odoo,klunwebale/odoo,zchking/odoo,laslabs/odoo,slevenhagen/odoo,ehirt/odoo,Drooids/odoo,apanju/GMIO_Odoo,lgscofield/odoo,hanicker/odoo,minhtuancn/odoo,papouso/odoo,sve-odoo/odoo,deKupini/erp,dalegregory/odoo,markeTIC/OCB,idncom/odoo,OpenUpgrade/OpenUpgrade,ingadhoc/odoo,GauravSahu/odoo,zchking/odoo,nagyistoce/odoo-dev-odoo,colinnewell/odoo,arthru/OpenUpgrade,JGarcia-Panach/odoo,kirca/OpenUpgrade,savoirfairelinux/odoo,odootr/odoo,pedrobaeza/odoo,leorochael/odoo,laslabs/odoo,wangjun/odoo,gorjuce/odoo,Gitlab11/odoo,TRESCLOUD/odoopub,windedge/odoo,jaxkodex/odoo,JCA-Developpement/Odoo,JGarcia-Panach/odoo,shingonoide/odoo,Elico-Corp/odoo_OCB,mszewczy/odoo,demon-ru/iml-crm,joariasl/odoo,colinnewell/odoo,Grirrane/odoo,Drooids/odoo,sebalix/Open
Upgrade,Elico-Corp/odoo_OCB,kittiu/odoo,ClearCorp-dev/odoo,blaggacao/OpenUpgrade,alexcuellar/odoo,mvaled/OpenUpgrade,sve-odoo/odoo,jiachenning/odoo,rubencabrera/odoo,TRESCLOUD/odoopub,ClearCorp-dev/odoo,sv-dev1/odoo,blaggacao/OpenUpgrade,virgree/odoo,VielSoft/odoo,shaufi/odoo,leorochael/odoo,Ichag/odoo,BT-rmartin/odoo,apanju/odoo,thanhacun/odoo,alexcuellar/odoo,makinacorpus/odoo,ihsanudin/odoo,odoo-turkiye/odoo,luistorresm/odoo,omprakasha/odoo,Nick-OpusVL/odoo,syci/OCB,mvaled/OpenUpgrade,bwrsandman/OpenUpgrade,abstract-open-solutions/OCB,storm-computers/odoo,rdeheele/odoo,bakhtout/odoo-educ,bakhtout/odoo-educ,NeovaHealth/odoo,dalegregory/odoo,shaufi/odoo,glovebx/odoo,ApuliaSoftware/odoo,ShineFan/odoo,alexteodor/odoo,jfpla/odoo,inspyration/odoo,ovnicraft/odoo,Daniel-CA/odoo,shingonoide/odoo,minhtuancn/odoo,lgscofield/odoo,highco-groupe/odoo,gorjuce/odoo,rahuldhote/odoo,grap/OpenUpgrade,Grirrane/odoo,VielSoft/odoo,oliverhr/odoo,avoinsystems/odoo,QianBIG/odoo,alexteodor/odoo,gvb/odoo,eino-makitalo/odoo,leorochael/odoo,damdam-s/OpenUpgrade,ygol/odoo,bobisme/odoo,tinkerthaler/odoo,dgzurita/odoo,alqfahad/odoo,sysadminmatmoz/OCB,jusdng/odoo,sysadminmatmoz/OCB,tinkhaven-organization/odoo,omprakasha/odoo,zchking/odoo,alqfahad/odoo,jesramirez/odoo,odooindia/odoo,fgesora/odoo,pedrobaeza/OpenUpgrade,slevenhagen/odoo,0k/OpenUpgrade,shivam1111/odoo,provaleks/o8,leoliujie/odoo,pedrobaeza/OpenUpgrade,srimai/odoo,feroda/odoo,lsinfo/odoo,Maspear/odoo,arthru/OpenUpgrade,BT-fgarbely/odoo,xzYue/odoo,BT-ojossen/odoo,charbeljc/OCB,jiangzhixiao/odoo,massot/odoo,avoinsystems/odoo,jaxkodex/odoo,n0m4dz/odoo,Maspear/odoo,rgeleta/odoo,luistorresm/odoo,fgesora/odoo,jolevq/odoopub,bwrsandman/OpenUpgrade,stonegithubs/odoo,charbeljc/OCB,VielSoft/odoo,CubicERP/odoo,deKupini/erp,glovebx/odoo,KontorConsulting/odoo,abenzbiria/clients_odoo,alexcuellar/odoo,rgeleta/odoo,ujjwalwahi/odoo,factorlibre/OCB,QianBIG/odoo,prospwro/odoo,sergio-incaser/odoo,Danisan/odoo-1,abdellatifkarroum/odoo,Bachaco-ve/odoo,ste
edos/odoo,andreparames/odoo,cedk/odoo,Nick-OpusVL/odoo,tarzan0820/odoo,Noviat/odoo,x111ong/odoo,jesramirez/odoo,hoatle/odoo,bguillot/OpenUpgrade,hoatle/odoo,funkring/fdoo,rdeheele/odoo,inspyration/odoo,florentx/OpenUpgrade,n0m4dz/odoo,lgscofield/odoo,vnsofthe/odoo,tvtsoft/odoo8,MarcosCommunity/odoo,ojengwa/odoo,joshuajan/odoo,cpyou/odoo,mlaitinen/odoo,Eric-Zhong/odoo,javierTerry/odoo,markeTIC/OCB,ubic135/odoo-design,leoliujie/odoo,ecosoft-odoo/odoo,GauravSahu/odoo,synconics/odoo,erkrishna9/odoo,ThinkOpen-Solutions/odoo,kybriainfotech/iSocioCRM,kybriainfotech/iSocioCRM,gavin-feng/odoo,gsmartway/odoo,ChanduERP/odoo,javierTerry/odoo,shaufi10/odoo,dalegregory/odoo,GauravSahu/odoo,dariemp/odoo,patmcb/odoo,hopeall/odoo,nagyistoce/odoo-dev-odoo,Adel-Magebinary/odoo,dalegregory/odoo,synconics/odoo,fjbatresv/odoo,inspyration/odoo,ihsanudin/odoo,MarcosCommunity/odoo,cedk/odoo,damdam-s/OpenUpgrade,ThinkOpen-Solutions/odoo,provaleks/o8,bealdav/OpenUpgrade,pedrobaeza/OpenUpgrade,nagyistoce/odoo-dev-odoo,markeTIC/OCB,mszewczy/odoo,klunwebale/odoo,fgesora/odoo,joariasl/odoo,MarcosCommunity/odoo,SAM-IT-SA/odoo,highco-groupe/odoo,ApuliaSoftware/odoo,steedos/odoo,feroda/odoo,rgeleta/odoo,cysnake4713/odoo,optima-ict/odoo,highco-groupe/odoo,elmerdpadilla/iv,makinacorpus/odoo,tinkhaven-organization/odoo,FlorianLudwig/odoo,sergio-incaser/odoo,bwrsandman/OpenUpgrade,cpyou/odoo,shingonoide/odoo,stephen144/odoo,x111ong/odoo,charbeljc/OCB,Daniel-CA/odoo,arthru/OpenUpgrade,dfang/odoo,aviciimaxwell/odoo,sv-dev1/odoo,storm-computers/odoo,minhtuancn/odoo,cysnake4713/odoo,nexiles/odoo,slevenhagen/odoo,highco-groupe/odoo,ingadhoc/odoo,odooindia/odoo,kybriainfotech/iSocioCRM,RafaelTorrealba/odoo,rowemoore/odoo,stonegithubs/odoo,realsaiko/odoo,brijeshkesariya/odoo,bobisme/odoo,Gitlab11/odoo,osvalr/odoo,savoirfairelinux/odoo,mkieszek/odoo,Ichag/odoo,ojengwa/odoo,elmerdpadilla/iv,hmen89/odoo,fevxie/odoo,Nowheresly/odoo,Kilhog/odoo,rahuldhote/odoo,alexcuellar/odoo,draugiskisprendimai/odoo,oasiswork/odo
o,leorochael/odoo,ujjwalwahi/odoo,hifly/OpenUpgrade,nhomar/odoo,lgscofield/odoo,kittiu/odoo,stonegithubs/odoo,syci/OCB,ramitalat/odoo,OpenUpgrade-dev/OpenUpgrade,ojengwa/odoo,NeovaHealth/odoo,fjbatresv/odoo,Kilhog/odoo,grap/OpenUpgrade,oliverhr/odoo,factorlibre/OCB,0k/OpenUpgrade,juanalfonsopr/odoo,havt/odoo,bakhtout/odoo-educ,fevxie/odoo,rowemoore/odoo,mmbtba/odoo,CopeX/odoo,jusdng/odoo,tinkerthaler/odoo,andreparames/odoo,optima-ict/odoo,TRESCLOUD/odoopub,csrocha/OpenUpgrade,javierTerry/odoo,patmcb/odoo,bguillot/OpenUpgrade,joariasl/odoo,doomsterinc/odoo,bakhtout/odoo-educ,cdrooom/odoo,brijeshkesariya/odoo,sinbazhou/odoo,zchking/odoo,cysnake4713/odoo,jfpla/odoo,vnsofthe/odoo,Danisan/odoo-1,bwrsandman/OpenUpgrade,SerpentCS/odoo,JGarcia-Panach/odoo,Endika/odoo,fdvarela/odoo8,demon-ru/iml-crm,Adel-Magebinary/odoo,JonathanStein/odoo,Antiun/odoo,savoirfairelinux/odoo,ramitalat/odoo,dllsf/odootest,tinkhaven-organization/odoo,fgesora/odoo,bobisme/odoo,christophlsa/odoo,OpusVL/odoo,havt/odoo,waytai/odoo,wangjun/odoo,tinkhaven-organization/odoo,pedrobaeza/odoo,hassoon3/odoo,stonegithubs/odoo,srsman/odoo,storm-computers/odoo,Bachaco-ve/odoo,bguillot/OpenUpgrade,Danisan/odoo-1,pedrobaeza/odoo,fuselock/odoo,bkirui/odoo,demon-ru/iml-crm,steedos/odoo,Adel-Magebinary/odoo,tangyiyong/odoo,mustafat/odoo-1,Maspear/odoo,glovebx/odoo,hanicker/odoo,gvb/odoo,colinnewell/odoo,FlorianLudwig/odoo,dgzurita/odoo,havt/odoo,matrixise/odoo,fevxie/odoo,0k/odoo,funkring/fdoo,Elico-Corp/odoo_OCB,guerrerocarlos/odoo,hoatle/odoo,makinacorpus/odoo,avoinsystems/odoo,jaxkodex/odoo,andreparames/odoo,numerigraphe/odoo,jesramirez/odoo,savoirfairelinux/OpenUpgrade,joariasl/odoo,ujjwalwahi/odoo,datenbetrieb/odoo,Endika/odoo,gavin-feng/odoo,luiseduardohdbackup/odoo,steedos/odoo,ramadhane/odoo,nexiles/odoo,waytai/odoo,alhashash/odoo,minhtuancn/odoo,Endika/OpenUpgrade,kittiu/odoo,Noviat/odoo,microcom/odoo,fossoult/odoo,hifly/OpenUpgrade,xujb/odoo,hubsaysnuaa/odoo,oasiswork/odoo,klunwebale/odoo,jesramirez/odoo,
jeasoft/odoo,n0m4dz/odoo,glovebx/odoo,slevenhagen/odoo-npg,christophlsa/odoo,dezynetechnologies/odoo,gvb/odoo,steedos/odoo,mvaled/OpenUpgrade,mvaled/OpenUpgrade,shivam1111/odoo,gvb/odoo,funkring/fdoo,prospwro/odoo,acshan/odoo,srimai/odoo,luiseduardohdbackup/odoo,oasiswork/odoo,osvalr/odoo,christophlsa/odoo,florian-dacosta/OpenUpgrade,tangyiyong/odoo,factorlibre/OCB,fossoult/odoo,apanju/odoo,xujb/odoo,chiragjogi/odoo,CatsAndDogsbvba/odoo,joshuajan/odoo,arthru/OpenUpgrade,SerpentCS/odoo,SerpentCS/odoo,Nick-OpusVL/odoo,addition-it-solutions/project-all,ehirt/odoo,NeovaHealth/odoo,Codefans-fan/odoo,janocat/odoo,steedos/odoo,sergio-incaser/odoo,0k/OpenUpgrade,hifly/OpenUpgrade,BT-ojossen/odoo,VielSoft/odoo,nuuuboo/odoo,colinnewell/odoo,vnsofthe/odoo,stonegithubs/odoo,syci/OCB,ramadhane/odoo,grap/OpenUpgrade,dgzurita/odoo,lsinfo/odoo,glovebx/odoo,CubicERP/odoo,Ichag/odoo,oliverhr/odoo,nexiles/odoo,klunwebale/odoo,Ernesto99/odoo,Daniel-CA/odoo,gorjuce/odoo,jpshort/odoo,dgzurita/odoo,synconics/odoo,omprakasha/odoo,dariemp/odoo,Nick-OpusVL/odoo,n0m4dz/odoo,hubsaysnuaa/odoo,shaufi/odoo,nuuuboo/odoo,Codefans-fan/odoo,kifcaliph/odoo,leoliujie/odoo,srsman/odoo,pplatek/odoo,fossoult/odoo,ubic135/odoo-design,gvb/odoo,Endika/odoo,waytai/odoo,dkubiak789/odoo,hubsaysnuaa/odoo,lombritz/odoo,shingonoide/odoo,gavin-feng/odoo,stephen144/odoo,waytai/odoo,alexcuellar/odoo,naousse/odoo,thanhacun/odoo,dkubiak789/odoo,ingadhoc/odoo,jpshort/odoo,nitinitprof/odoo,doomsterinc/odoo,AuyaJackie/odoo,Elico-Corp/odoo_OCB,ApuliaSoftware/odoo,guerrerocarlos/odoo,rahuldhote/odoo,Gitlab11/odoo,tvtsoft/odoo8,omprakasha/odoo,odoo-turkiye/odoo,jiangzhixiao/odoo,hip-odoo/odoo,nagyistoce/odoo-dev-odoo,JGarcia-Panach/odoo,fossoult/odoo,ehirt/odoo,jiangzhixiao/odoo,eino-makitalo/odoo,prospwro/odoo,sinbazhou/odoo,tvibliani/odoo,KontorConsulting/odoo,dsfsdgsbngfggb/odoo,PongPi/isl-odoo,savoirfairelinux/odoo,sysadminmatmoz/OCB,BT-astauder/odoo,Daniel-CA/odoo,dsfsdgsbngfggb/odoo,spadae22/odoo,nexiles/odoo,lsinfo/od
oo,oliverhr/odoo,Drooids/odoo,SAM-IT-SA/odoo,odoo-turkiye/odoo,ovnicraft/odoo,addition-it-solutions/project-all,mvaled/OpenUpgrade,BT-fgarbely/odoo,rgeleta/odoo,fuhongliang/odoo,salaria/odoo,CopeX/odoo,diagramsoftware/odoo,damdam-s/OpenUpgrade,cloud9UG/odoo,Codefans-fan/odoo,wangjun/odoo,tangyiyong/odoo,prospwro/odoo,tvtsoft/odoo8,mkieszek/odoo,ccomb/OpenUpgrade,Eric-Zhong/odoo,cloud9UG/odoo,matrixise/odoo,factorlibre/OCB,JCA-Developpement/Odoo,jpshort/odoo,luistorresm/odoo,dezynetechnologies/odoo,sinbazhou/odoo,NL66278/OCB,ShineFan/odoo,Eric-Zhong/odoo,sebalix/OpenUpgrade,jusdng/odoo,jaxkodex/odoo,addition-it-solutions/project-all,SerpentCS/odoo,fossoult/odoo,incaser/odoo-odoo,dezynetechnologies/odoo,dsfsdgsbngfggb/odoo,xzYue/odoo,ujjwalwahi/odoo,Ichag/odoo,srsman/odoo,hifly/OpenUpgrade,virgree/odoo,ecosoft-odoo/odoo,lightcn/odoo,rahuldhote/odoo,srimai/odoo,cdrooom/odoo,hanicker/odoo,PongPi/isl-odoo,stephen144/odoo,xujb/odoo,guewen/OpenUpgrade,doomsterinc/odoo,Nowheresly/odoo,laslabs/odoo,hopeall/odoo,grap/OpenUpgrade,guerrerocarlos/odoo,makinacorpus/odoo,shaufi/odoo,tinkhaven-organization/odoo,sergio-incaser/odoo,JGarcia-Panach/odoo,ovnicraft/odoo,dsfsdgsbngfggb/odoo,bealdav/OpenUpgrade,ehirt/odoo,ccomb/OpenUpgrade,dariemp/odoo,shaufi10/odoo,hanicker/odoo,wangjun/odoo,florentx/OpenUpgrade,agrista/odoo-saas,SAM-IT-SA/odoo,lsinfo/odoo,shingonoide/odoo,BT-fgarbely/odoo,laslabs/odoo,papouso/odoo,feroda/odoo,JGarcia-Panach/odoo,oihane/odoo,kittiu/odoo,BT-ojossen/odoo,janocat/odoo,microcom/odoo,hbrunn/OpenUpgrade,ygol/odoo,nuncjo/odoo,mvaled/OpenUpgrade,storm-computers/odoo,tarzan0820/odoo,dariemp/odoo,mustafat/odoo-1,Bachaco-ve/odoo,dkubiak789/odoo,collex100/odoo,patmcb/odoo,aviciimaxwell/odoo,gorjuce/odoo,funkring/fdoo,sebalix/OpenUpgrade,diagramsoftware/odoo,idncom/odoo,shaufi/odoo,gsmartway/odoo,omprakasha/odoo,acshan/odoo,pedrobaeza/odoo,windedge/odoo,damdam-s/OpenUpgrade,florian-dacosta/OpenUpgrade,BT-ojossen/odoo,jusdng/odoo,JGarcia-Panach/odoo,andreparames/odoo,
virgree/odoo,fevxie/odoo,PongPi/isl-odoo,cdrooom/odoo,BT-ojossen/odoo,oihane/odoo,Grirrane/odoo,NL66278/OCB,massot/odoo,Kilhog/odoo,optima-ict/odoo,idncom/odoo,pplatek/odoo,klunwebale/odoo,nhomar/odoo,omprakasha/odoo,aviciimaxwell/odoo,x111ong/odoo,JonathanStein/odoo,guewen/OpenUpgrade,matrixise/odoo,leoliujie/odoo,abdellatifkarroum/odoo,stonegithubs/odoo,KontorConsulting/odoo,guerrerocarlos/odoo,Endika/odoo,rahuldhote/odoo,storm-computers/odoo,ingadhoc/odoo,luistorresm/odoo,bwrsandman/OpenUpgrade,janocat/odoo,rgeleta/odoo,joariasl/odoo,nuuuboo/odoo,ShineFan/odoo,tvtsoft/odoo8,Endika/OpenUpgrade,jolevq/odoopub,ChanduERP/odoo,dalegregory/odoo,OpenUpgrade/OpenUpgrade,goliveirab/odoo,stonegithubs/odoo,csrocha/OpenUpgrade,collex100/odoo,damdam-s/OpenUpgrade,glovebx/odoo,Endika/OpenUpgrade,bealdav/OpenUpgrade,BT-rmartin/odoo,RafaelTorrealba/odoo,dezynetechnologies/odoo,OpenUpgrade/OpenUpgrade,ApuliaSoftware/odoo,sinbazhou/odoo,rahuldhote/odoo,realsaiko/odoo,syci/OCB,FlorianLudwig/odoo,osvalr/odoo,zchking/odoo,shingonoide/odoo,nitinitprof/odoo,massot/odoo,eino-makitalo/odoo,abdellatifkarroum/odoo,ecosoft-odoo/odoo,idncom/odoo,shivam1111/odoo,juanalfonsopr/odoo,dezynetechnologies/odoo,tangyiyong/odoo,collex100/odoo,vnsofthe/odoo,brijeshkesariya/odoo,Ernesto99/odoo,QianBIG/odoo,sv-dev1/odoo,srsman/odoo,ramitalat/odoo,OpusVL/odoo,jiachenning/odoo,fuhongliang/odoo,ChanduERP/odoo,sve-odoo/odoo,odoousers2014/odoo,Endika/OpenUpgrade,papouso/odoo,x111ong/odoo,slevenhagen/odoo,takis/odoo,ShineFan/odoo,pedrobaeza/odoo,dgzurita/odoo,Codefans-fan/odoo,Ichag/odoo,sv-dev1/odoo,xujb/odoo,spadae22/odoo,synconics/odoo,sadleader/odoo,jesramirez/odoo,fuhongliang/odoo,microcom/odoo,PongPi/isl-odoo,srsman/odoo,Codefans-fan/odoo,lombritz/odoo,guerrerocarlos/odoo,xujb/odoo,leoliujie/odoo,provaleks/o8,hbrunn/OpenUpgrade,doomsterinc/odoo,hubsaysnuaa/odoo,apanju/GMIO_Odoo,provaleks/o8,Gitlab11/odoo,mszewczy/odoo,luistorresm/odoo,oasiswork/odoo,windedge/odoo,naousse/odoo,hip-odoo/odoo,bplancher/odo
o,ingadhoc/odoo,rubencabrera/odoo,takis/odoo,klunwebale/odoo,tinkerthaler/odoo,dllsf/odootest,shaufi10/odoo,pedrobaeza/OpenUpgrade,shaufi10/odoo,sv-dev1/odoo,ShineFan/odoo,oihane/odoo,lsinfo/odoo,lightcn/odoo,abenzbiria/clients_odoo,VielSoft/odoo,andreparames/odoo,gsmartway/odoo,Kilhog/odoo,Ernesto99/odoo,BT-fgarbely/odoo,ChanduERP/odoo,prospwro/odoo,cloud9UG/odoo,abstract-open-solutions/OCB,alhashash/odoo,odoo-turkiye/odoo,fdvarela/odoo8,feroda/odoo,naousse/odoo,NeovaHealth/odoo,goliveirab/odoo,datenbetrieb/odoo,nhomar/odoo-mirror,synconics/odoo,mmbtba/odoo,rowemoore/odoo,kifcaliph/odoo,tvibliani/odoo,guewen/OpenUpgrade,addition-it-solutions/project-all,mmbtba/odoo,juanalfonsopr/odoo,alhashash/odoo,apanju/GMIO_Odoo,fevxie/odoo,brijeshkesariya/odoo,numerigraphe/odoo,nuncjo/odoo,Noviat/odoo,KontorConsulting/odoo,savoirfairelinux/OpenUpgrade,bakhtout/odoo-educ,ygol/odoo,Noviat/odoo,abenzbiria/clients_odoo,jiangzhixiao/odoo,guewen/OpenUpgrade,papouso/odoo,GauravSahu/odoo,ApuliaSoftware/odoo,Ichag/odoo,jiangzhixiao/odoo,srimai/odoo,VielSoft/odoo,bealdav/OpenUpgrade,goliveirab/odoo,charbeljc/OCB,avoinsystems/odoo,jaxkodex/odoo,nuncjo/odoo,apocalypsebg/odoo,cloud9UG/odoo,hassoon3/odoo,mustafat/odoo-1,incaser/odoo-odoo,xujb/odoo,luiseduardohdbackup/odoo,rgeleta/odoo,arthru/OpenUpgrade,dsfsdgsbngfggb/odoo,slevenhagen/odoo-npg,CatsAndDogsbvba/odoo,KontorConsulting/odoo,massot/odoo,PongPi/isl-odoo,mlaitinen/odoo,cpyou/odoo,Grirrane/odoo,aviciimaxwell/odoo,chiragjogi/odoo,nuncjo/odoo,erkrishna9/odoo,Bachaco-ve/odoo,x111ong/odoo,nuncjo/odoo,AuyaJackie/odoo,doomsterinc/odoo,jiangzhixiao/odoo,VielSoft/odoo,bkirui/odoo,odoo-turkiye/odoo,QianBIG/odoo,rdeheele/odoo,savoirfairelinux/odoo,laslabs/odoo,juanalfonsopr/odoo,mustafat/odoo-1,elmerdpadilla/iv,feroda/odoo,elmerdpadilla/iv,csrocha/OpenUpgrade,kybriainfotech/iSocioCRM,tarzan0820/odoo,sve-odoo/odoo,sinbazhou/odoo,Bachaco-ve/odoo,kifcaliph/odoo,BT-rmartin/odoo,tinkerthaler/odoo,Antiun/odoo,mustafat/odoo-1,jfpla/odoo,odootr/odoo,B
T-astauder/odoo,odootr/odoo,apanju/odoo,hopeall/odoo,JonathanStein/odoo,kirca/OpenUpgrade,christophlsa/odoo,xzYue/odoo,joshuajan/odoo,funkring/fdoo,TRESCLOUD/odoopub,pplatek/odoo,chiragjogi/odoo,Maspear/odoo,ovnicraft/odoo,hassoon3/odoo,gvb/odoo,nitinitprof/odoo,avoinsystems/odoo,christophlsa/odoo,nuncjo/odoo,JCA-Developpement/Odoo,datenbetrieb/odoo,ujjwalwahi/odoo,apocalypsebg/odoo,jeasoft/odoo,lgscofield/odoo,Adel-Magebinary/odoo,spadae22/odoo,elmerdpadilla/iv,incaser/odoo-odoo,mustafat/odoo-1,luiseduardohdbackup/odoo,joariasl/odoo,MarcosCommunity/odoo,odooindia/odoo,oasiswork/odoo,agrista/odoo-saas,xujb/odoo,Nowheresly/odoo,OpenUpgrade/OpenUpgrade,ApuliaSoftware/odoo,bplancher/odoo,dkubiak789/odoo,jfpla/odoo,janocat/odoo,numerigraphe/odoo,kybriainfotech/iSocioCRM,jeasoft/odoo,doomsterinc/odoo,diagramsoftware/odoo,alexteodor/odoo,gsmartway/odoo,alqfahad/odoo,gsmartway/odoo,ShineFan/odoo,deKupini/erp,joshuajan/odoo,vnsofthe/odoo,javierTerry/odoo,apocalypsebg/odoo,leorochael/odoo,OpenUpgrade/OpenUpgrade,Daniel-CA/odoo,guewen/OpenUpgrade,guewen/OpenUpgrade,goliveirab/odoo,Eric-Zhong/odoo,colinnewell/odoo,optima-ict/odoo,feroda/odoo,mlaitinen/odoo,naousse/odoo,janocat/odoo,oliverhr/odoo,odootr/odoo,Adel-Magebinary/odoo,havt/odoo,realsaiko/odoo,takis/odoo,kifcaliph/odoo,CubicERP/odoo,tvibliani/odoo,nitinitprof/odoo,BT-rmartin/odoo,rubencabrera/odoo,PongPi/isl-odoo,gvb/odoo,hoatle/odoo,joariasl/odoo,0k/odoo,mszewczy/odoo,provaleks/o8,charbeljc/OCB,erkrishna9/odoo,virgree/odoo,bguillot/OpenUpgrade,ingadhoc/odoo,mmbtba/odoo,minhtuancn/odoo,Bachaco-ve/odoo,sysadminmatmoz/OCB,dariemp/odoo,salaria/odoo,GauravSahu/odoo,jiachenning/odoo,Maspear/odoo,fossoult/odoo,goliveirab/odoo,patmcb/odoo,hbrunn/OpenUpgrade,cedk/odoo,deKupini/erp,acshan/odoo,spadae22/odoo,lgscofield/odoo,bplancher/odoo,thanhacun/odoo,dfang/odoo,abdellatifkarroum/odoo,spadae22/odoo,osvalr/odoo,alexcuellar/odoo,joshuajan/odoo,jeasoft/odoo,massot/odoo,hoatle/odoo,jusdng/odoo,alexteodor/odoo,RafaelTorrealba/odoo
,jolevq/odoopub,csrocha/OpenUpgrade,collex100/odoo,slevenhagen/odoo,luistorresm/odoo,gsmartway/odoo,Eric-Zhong/odoo,dariemp/odoo,charbeljc/OCB,gsmartway/odoo,aviciimaxwell/odoo,abenzbiria/clients_odoo,highco-groupe/odoo,RafaelTorrealba/odoo,javierTerry/odoo,jolevq/odoopub,apanju/GMIO_Odoo,sadleader/odoo,andreparames/odoo,naousse/odoo,srsman/odoo,odootr/odoo,havt/odoo,vnsofthe/odoo,Kilhog/odoo,apanju/GMIO_Odoo,bplancher/odoo,idncom/odoo,thanhacun/odoo,Eric-Zhong/odoo,florian-dacosta/OpenUpgrade,SAM-IT-SA/odoo,Endika/odoo,x111ong/odoo,NL66278/OCB,rowemoore/odoo,alexteodor/odoo,NeovaHealth/odoo,draugiskisprendimai/odoo,ClearCorp-dev/odoo,hassoon3/odoo,florentx/OpenUpgrade,abdellatifkarroum/odoo,ygol/odoo,Adel-Magebinary/odoo,srimai/odoo,odoo-turkiye/odoo,BT-fgarbely/odoo,laslabs/odoo,nagyistoce/odoo-dev-odoo,nuuuboo/odoo,slevenhagen/odoo-npg,abstract-open-solutions/OCB,0k/OpenUpgrade,ojengwa/odoo,ecosoft-odoo/odoo,nexiles/odoo,takis/odoo,jeasoft/odoo,simongoffin/website_version,srimai/odoo,papouso/odoo,kittiu/odoo,hmen89/odoo,pplatek/odoo,abstract-open-solutions/OCB,hifly/OpenUpgrade,poljeff/odoo,hanicker/odoo,markeTIC/OCB,ojengwa/odoo,xzYue/odoo,bakhtout/odoo-educ,florentx/OpenUpgrade,CatsAndDogsbvba/odoo,sebalix/OpenUpgrade,dfang/odoo,prospwro/odoo,jfpla/odoo,markeTIC/OCB,hubsaysnuaa/odoo,jolevq/odoopub,rubencabrera/odoo,matrixise/odoo,synconics/odoo,ramitalat/odoo,tvibliani/odoo,simongoffin/website_version,salaria/odoo,bguillot/OpenUpgrade,shivam1111/odoo,dgzurita/odoo,salaria/odoo,arthru/OpenUpgrade,gorjuce/odoo,alqfahad/odoo,Drooids/odoo,mlaitinen/odoo,tangyiyong/odoo,JCA-Developpement/Odoo,jeasoft/odoo,nhomar/odoo-mirror,BT-astauder/odoo,CatsAndDogsbvba/odoo,ThinkOpen-Solutions/odoo,oihane/odoo,blaggacao/OpenUpgrade,apanju/GMIO_Odoo,BT-rmartin/odoo,factorlibre/OCB,damdam-s/OpenUpgrade,ccomb/OpenUpgrade,nitinitprof/odoo,OpenUpgrade-dev/OpenUpgrade,ubic135/odoo-design,sysadminmatmoz/OCB,dkubiak789/odoo,wangjun/odoo,dsfsdgsbngfggb/odoo,eino-makitalo/odoo,fuselock/od
oo,lgscofield/odoo,luiseduardohdbackup/odoo,ehirt/odoo,damdam-s/OpenUpgrade,bobisme/odoo,hopeall/odoo,spadae22/odoo,bwrsandman/OpenUpgrade,colinnewell/odoo,lsinfo/odoo,agrista/odoo-saas,mszewczy/odoo,incaser/odoo-odoo,Nowheresly/odoo,lightcn/odoo,feroda/odoo,ihsanudin/odoo,jiachenning/odoo,0k/OpenUpgrade,SerpentCS/odoo,Endika/odoo,nuuuboo/odoo,zchking/odoo,numerigraphe/odoo,ClearCorp-dev/odoo,Antiun/odoo,goliveirab/odoo,bealdav/OpenUpgrade,n0m4dz/odoo,csrocha/OpenUpgrade,patmcb/odoo,Bachaco-ve/odoo,poljeff/odoo,rdeheele/odoo,dezynetechnologies/odoo,odooindia/odoo,aviciimaxwell/odoo,TRESCLOUD/odoopub,cloud9UG/odoo,acshan/odoo,pplatek/odoo,florian-dacosta/OpenUpgrade,patmcb/odoo,Kilhog/odoo,hifly/OpenUpgrade,CubicERP/odoo,bobisme/odoo,blaggacao/OpenUpgrade,cdrooom/odoo,hopeall/odoo,NL66278/OCB,syci/OCB,matrixise/odoo,provaleks/o8,rowemoore/odoo,waytai/odoo,nexiles/odoo,hbrunn/OpenUpgrade,acshan/odoo,alhashash/odoo,shingonoide/odoo,kirca/OpenUpgrade,ihsanudin/odoo,patmcb/odoo,NeovaHealth/odoo,0k/odoo,OpenUpgrade/OpenUpgrade,AuyaJackie/odoo,poljeff/odoo,Danisan/odoo-1,OpenUpgrade-dev/OpenUpgrade,mlaitinen/odoo,omprakasha/odoo,tvtsoft/odoo8,sinbazhou/odoo,nhomar/odoo-mirror,alqfahad/odoo,abenzbiria/clients_odoo,n0m4dz/odoo,osvalr/odoo,javierTerry/odoo,tangyiyong/odoo,poljeff/odoo,factorlibre/OCB,bguillot/OpenUpgrade,apocalypsebg/odoo,takis/odoo,pedrobaeza/OpenUpgrade,ramitalat/odoo,lsinfo/odoo,lombritz/odoo,bwrsandman/OpenUpgrade,mkieszek/odoo,lombritz/odoo,mkieszek/odoo,sergio-incaser/odoo,florian-dacosta/OpenUpgrade,hanicker/odoo,synconics/odoo,havt/odoo,pedrobaeza/OpenUpgrade,kittiu/odoo,fevxie/odoo,grap/OpenUpgrade,alqfahad/odoo,fjbatresv/odoo,osvalr/odoo,OpusVL/odoo,shivam1111/odoo,papouso/odoo,funkring/fdoo,rdeheele/odoo,odoousers2014/odoo,steedos/odoo,windedge/odoo,sv-dev1/odoo,ubic135/odoo-design,kirca/OpenUpgrade,fossoult/odoo,slevenhagen/odoo-npg,shivam1111/odoo,ujjwalwahi/odoo,Antiun/odoo,ramadhane/odoo,NL66278/OCB,fuselock/odoo,hip-odoo/odoo,QianBIG/odoo,rami
talat/odoo,rubencabrera/odoo,ojengwa/odoo,guerrerocarlos/odoo,Ichag/odoo,oihane/odoo,bkirui/odoo,ingadhoc/odoo,sysadminmatmoz/OCB,bkirui/odoo,leoliujie/odoo,fuhongliang/odoo,ShineFan/odoo,dkubiak789/odoo,CubicERP/odoo,abdellatifkarroum/odoo,fdvarela/odoo8,deKupini/erp,odooindia/odoo,sadleader/odoo,virgree/odoo,spadae22/odoo,ClearCorp-dev/odoo,xzYue/odoo,FlorianLudwig/odoo,0k/odoo,ramadhane/odoo,nhomar/odoo,chiragjogi/odoo,odoousers2014/odoo,apanju/odoo,cloud9UG/odoo,tvibliani/odoo,jusdng/odoo,sergio-incaser/odoo,CopeX/odoo,GauravSahu/odoo,christophlsa/odoo,draugiskisprendimai/odoo,charbeljc/OCB,pedrobaeza/odoo,sebalix/OpenUpgrade,ojengwa/odoo,windedge/odoo,ovnicraft/odoo,ccomb/OpenUpgrade,hubsaysnuaa/odoo,juanalfonsopr/odoo,draugiskisprendimai/odoo,Maspear/odoo,ygol/odoo,jaxkodex/odoo,SAM-IT-SA/odoo,tvibliani/odoo,florentx/OpenUpgrade,nuuuboo/odoo,fuselock/odoo,mmbtba/odoo,CatsAndDogsbvba/odoo,luiseduardohdbackup/odoo,christophlsa/odoo,markeTIC/OCB,incaser/odoo-odoo,nuncjo/odoo,hopeall/odoo,eino-makitalo/odoo,tangyiyong/odoo,odoousers2014/odoo,jfpla/odoo,srimai/odoo | openerp/exceptions.py | openerp/exceptions.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" OpenERP core exceptions.
This module defines a few exception types. Those types are understood by the
RPC layer. Any other exception type bubbling until the RPC layer will be
treated as a 'Server error'.
"""
class Warning(Exception):
    """ Generic user-facing warning, understood by the RPC layer (see the
    module docstring) rather than reported as a server error. """
    pass
class RedirectWarning(Exception):
    """ Warning that offers the user a redirection button in addition to
    the usual dismissal of the warning message.
    """
    def __init__(self, msg, action_id, button_text):
        """
        :param string msg: warning message shown to the user
        :param int action_id: id of the action to execute when the user
                              follows the redirection
        :param string button_text: label of the button triggering the
                                   redirection
        """
        # Store all three pieces in Exception.args so the RPC layer can
        # recover them.
        Exception.__init__(self, msg, action_id, button_text)
class AccessDenied(Exception):
    """ Login/password error. No message, no traceback. """
    def __init__(self):
        Exception.__init__(self, 'Access denied.')
        # Blank out the traceback triple so nothing is leaked to clients.
        self.traceback = ('', '', '')
class AccessError(Exception):
    """ Access rights error. """
    # Raised when a record-level or model-level access check fails;
    # understood by the RPC layer (see the module docstring).
class DeferredException(Exception):
    """ Carries an exception across asynchronous RPC reporting.

    Some RPC calls (database creation and report generation) are served
    by an initial request followed by multiple polling requests; an
    exception raised while serving the first request is stored in an
    instance of this class and delivered to a later polling request.

    Despite its name, the ``traceback`` attribute really holds a full
    ``exc_info()`` triple, not just a traceback object.
    """
    def __init__(self, msg, tb):
        # Keep both the message and the exc_info triple for later use.
        self.traceback = tb
        self.message = msg
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" OpenERP core exceptions.
This module defines a few exception types. Those types are understood by the
RPC layer. Any other exception type bubbling until the RPC layer will be
treated as a 'Server error'.
"""
# Generic user-facing warning surfaced through the RPC layer.
# NOTE: deliberately shadows the builtin `Warning` within this module.
class Warning(Exception):
    pass
class AccessDenied(Exception):
    """Login/password failure carrying no detail.

    The message is fixed and the traceback triple is blanked out, per the
    original contract: "No message, no traceback".
    """
    def __init__(self):
        # Empty exc_info-style triple — nothing to reveal to the caller.
        self.traceback = ('', '', '')
        super(AccessDenied, self).__init__('Access denied.')
# Unlike AccessDenied (bad credentials), this signals insufficient access
# rights on an operation — presumably raised by record/ACL checks; confirm
# at call sites.
class AccessError(Exception):
    """ Access rights error. """
class DeferredException(Exception):
    """Holder for an exception raised in one request, reported in another.

    Polled RPC calls (database creation, report generation) may fail in the
    thread serving the initial request; the failure is stored here and then
    delivered to a later polling request.  Despite the attribute name,
    ``traceback`` is really a full ``sys.exc_info()`` triple.
    """
    def __init__(self, msg, tb):
        self.traceback = tb  # (exc_type, exc_value, traceback) triple
        self.message = msg
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
7e7f0585971f472c25fda3b6370e37eb4d8d0ea5 | test import of ssh.tunnel | Mustard-Systems-Ltd/pyzmq,swn1/pyzmq,caidongyun/pyzmq,caidongyun/pyzmq,yyt030/pyzmq,dash-dash/pyzmq,yyt030/pyzmq,dash-dash/pyzmq,Mustard-Systems-Ltd/pyzmq,yyt030/pyzmq,swn1/pyzmq,swn1/pyzmq,Mustard-Systems-Ltd/pyzmq,ArvinPan/pyzmq,caidongyun/pyzmq,ArvinPan/pyzmq,dash-dash/pyzmq,ArvinPan/pyzmq | zmq/tests/test_imports.py | zmq/tests/test_imports.py | # Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import sys
from unittest import TestCase
class TestImports(TestCase):
    """Test Imports - the quickest test to ensure that we haven't
    introduced version-incompatible syntax errors."""
    # Each test method merely imports a slice of pyzmq's public API: a syntax
    # error or missing module anywhere on that surface raises at import time
    # and fails the suite immediately.  The imports are intentionally inside
    # the methods so each area fails independently.

    def test_toplevel(self):
        """test toplevel import"""
        import zmq

    def test_core(self):
        """test core imports"""
        from zmq import Context
        from zmq import Socket
        from zmq import Poller
        from zmq import Frame
        from zmq import constants
        from zmq import device, proxy
        from zmq import Stopwatch
        from zmq import (
            zmq_version,
            zmq_version_info,
            pyzmq_version,
            pyzmq_version_info,
        )

    def test_devices(self):
        """test device imports"""
        import zmq.devices
        from zmq.devices import basedevice
        from zmq.devices import monitoredqueue
        from zmq.devices import monitoredqueuedevice

    def test_log(self):
        """test log imports"""
        import zmq.log
        from zmq.log import handlers

    def test_eventloop(self):
        """test eventloop imports"""
        import zmq.eventloop
        from zmq.eventloop import ioloop
        from zmq.eventloop import zmqstream
        from zmq.eventloop.minitornado.platform import auto
        from zmq.eventloop.minitornado import ioloop

    def test_utils(self):
        """test util imports"""
        import zmq.utils
        from zmq.utils import strtypes
        from zmq.utils import jsonapi

    def test_ssh(self):
        """test ssh imports"""
        from zmq.ssh import tunnel
| # Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import sys
from unittest import TestCase
class TestImports(TestCase):
    """Test Imports - the quickest test to ensure that we haven't
    introduced version-incompatible syntax errors."""
    # Smoke tests: each method only imports part of pyzmq's public API, so a
    # syntax or packaging error fails fast at import time.

    def test_toplevel(self):
        """test toplevel import"""
        import zmq

    def test_core(self):
        """test core imports"""
        from zmq import Context
        from zmq import Socket
        from zmq import Poller
        from zmq import Frame
        from zmq import constants
        from zmq import device, proxy
        from zmq import Stopwatch
        from zmq import (
            zmq_version,
            zmq_version_info,
            pyzmq_version,
            pyzmq_version_info,
        )

    def test_devices(self):
        """test device imports"""
        import zmq.devices
        from zmq.devices import basedevice
        from zmq.devices import monitoredqueue
        from zmq.devices import monitoredqueuedevice

    def test_log(self):
        """test log imports"""
        import zmq.log
        from zmq.log import handlers

    def test_eventloop(self):
        """test eventloop imports"""
        import zmq.eventloop
        from zmq.eventloop import ioloop
        from zmq.eventloop import zmqstream
        from zmq.eventloop.minitornado.platform import auto
        from zmq.eventloop.minitornado import ioloop

    def test_utils(self):
        """test util imports"""
        import zmq.utils
        from zmq.utils import strtypes
        from zmq.utils import jsonapi
| bsd-3-clause | Python |
4e8da16d761c507f9cb2a2ad2635903f90390c5c | Add python implementation for problem 38. | daithiocrualaoich/euler | python/038.py | python/038.py | '''
Pandigital Multiples
===================
Take the number 192 and multiply it by each of 1, 2, and 3:
192 × 1 = 192
192 × 2 = 384
192 × 3 = 576
By concatenating each product we get the 1 to 9 pandigital, 192384576. We
will call 192384576 the concatenated product of 192 and (1,2,3)
The same can be achieved by starting with 9 and multiplying by 1, 2, 3, 4,
and 5, giving the pandigital, 918273645, which is the concatenated product
of 9 and (1,2,3,4,5).
What is the largest 1 to 9 pandigital 9-digit number that can be formed as
the concatenated product of an integer with (1,2, ... , n) where n > 1?
'''
# A one digit number will admit a 9-digit concatenated product with n = 9 at
# most, e.g. 1 concatenated with (1, 2, ..., 9) is 123,456,789.
#
# A two digit number will admit a 9-digit concatenated product with n = 4 at
# most, e.g. 10 concatenated with (1, 2, ... 4) is 10,203,040 but 10
# concatenated with (1, 2, ..., 5) is 1,020,304,050 is larger than 9 digits.
#
# A three digit number will admit a 9-digit concatenated product with n = 3 at
# most, e.g. 100 concatenated with (1, 2, 3) is 100,200,300 but n > 3 will
# result in too many digits.
#
# A four digit number will admit a 9-digit concatenated product with n = 2 at
# most, e.g. 1000 concatenated with (1, 2) is 10,002,000 but n > 2 will
# result in too many digits.
#
# Since n must be > 1, no more 9-digit concatenations are possible.
#
# i.e. take numbers of length d where d <= 4 and use n = floor(9/d) as the
# maximum n (9, 4, 3 and 2 respectively, per the analysis above).
from itertools import chain
from number_theory import digit_expansion
def flatten(list_of_lists):
    '''
    Lazily yield every element of every sub-iterable, flattening one level
    of nesting (equivalent to itertools.chain.from_iterable).
    '''
    for sub_iterable in list_of_lists:
        for element in sub_iterable:
            yield element
def is_pandigital(n):
    '''
    Return True if n is a 9-digit number using each of the digits 1..9
    exactly once (no zeros, no repeats).
    '''
    ds = digit_expansion(n)
    if len(ds) != 9:
        return False
    # Nine digits, all distinct, none of them zero => a 1-9 pandigital.
    return 0 not in ds and len(set(ds)) == 9
def concatenate(d, n):
    '''
    Return the integer formed by writing d*1, d*2, ..., d*n side by side.
    '''
    pieces = [str(d * k) for k in range(1, n + 1)]
    return int(''.join(pieces))
def pandigital_concatenations(d):
    '''
    Returns an iterable of the 9-digit pandigital concatenated products
    that can be formed using d (empty when none exist).
    '''
    # Numbers with more than four digits cannot form 9-digit concatenations.
    if d >= 10_000:
        return []

    # A d-digit base concatenated with (1, ..., n) produces at least d*n
    # digits, so n is bounded by floor(9 / d): 9 for 1-digit bases, then
    # 4, 3 and 2 — matching the analysis at the top of the file.  (The
    # previous bound, int(digits / 2), silently skipped candidates such as
    # 9 with (1, ..., 5) = 918,273,645.)  Oversized concatenations are
    # harmless: the pandigital filter below rejects them.
    max_n = 9 // len(digit_expansion(d))

    concatenations = [concatenate(d, n) for n in range(1, max_n + 1)]
    return filter(is_pandigital, concatenations)
# Only bases with at most four digits can yield a 9-digit concatenation.
candidates = range(1, 10_000)
# Bind the results to a fresh name instead of shadowing the
# pandigital_concatenations() function defined above.
all_pandigitals = flatten(map(pandigital_concatenations, candidates))
answer = max(all_pandigitals)  # = 932,718,654
print(answer)
| apache-2.0 | Python | |
df9fc9f64b5450851abef90b50804e56e0d152bf | add fragment reaction class | KEHANG/AutoFragmentModeling | afm/reaction.py | afm/reaction.py |
class FragmentReaction(object):
    """A reaction between fragment species.

    Plain data container: reactant/product lists, kinetics, a reversibility
    flag, reactant-product pairs and the generating reaction family.
    """

    def __init__(self,
                 index=-1,
                 reactants=None,
                 products=None,
                 kinetics=None,
                 reversible=False,
                 pairs=None,
                 family=None
                 ):
        self.index = index
        self.reactants = reactants
        self.products = products
        self.kinetics = kinetics
        self.reversible = reversible
        self.pairs = pairs
        self.family = family

    def __str__(self):
        """
        Return a string representation of the reaction, in the form 'A + B <=> C + D'
        (with ' => ' for irreversible reactions).
        """
        lhs = ' + '.join(str(species) for species in self.reactants)
        rhs = ' + '.join(str(species) for species in self.products)
        arrow = ' <=> ' if self.reversible else ' => '
        return lhs + arrow + rhs
| mit | Python | |
9192fe92621d6f79b0f99802f50014d27c967d26 | Add alg_knapsack.py | bowen0701/algorithms_data_structures | alg_knapsack.py | alg_knapsack.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def knapsack_01(wt_cap, wt, val):
    """Return the maximum total value packable within capacity wt_cap.

    Classic 0/1 knapsack, bottom-up DP over a 1-D table: best[c] is the best
    value achievable with capacity c using the items processed so far.  The
    capacity loop runs downwards so each item is used at most once.
    Time O(len(wt) * wt_cap), space O(wt_cap).  Handles empty item lists and
    zero capacity (both yield 0).
    """
    best = [0] * (wt_cap + 1)
    for w, v in zip(wt, val):
        for cap in range(wt_cap, w - 1, -1):
            best[cap] = max(best[cap], best[cap - w] + v)
    return best[wt_cap]


def main():
    # The original stub only defined its inputs; actually solve and report.
    wt_cap = 10
    wt = [1, 2, 4, 2, 5]
    val = [5, 3, 5, 3, 2]
    print(knapsack_01(wt_cap, wt, val))


if __name__ == '__main__':
    main()
| bsd-2-clause | Python | |
a101bd9ccb280348e6da32b3f2c0540ca6c65807 | Implement String field | polygraph-python/polygraph | polygraph/types/fields.py | polygraph/types/fields.py | from collections import OrderedDict
from graphql.type.definition import GraphQLField, GraphQLNonNull
from graphql.type.scalars import GraphQLString
from marshmallow import fields
class String(fields.String):
    """Marshmallow string field that can render itself as a GraphQL field."""

    def __init__(self, description, nullable=False, args=None,
                 deprecation_reason=None, **additional_args):
        super().__init__()
        self.description = description
        self.nullable = nullable
        self.deprecation_reason = deprecation_reason
        # Fall back to an empty ordered mapping when no args are supplied
        # (an explicitly-passed empty mapping is also replaced, as before).
        self.args = args if args else OrderedDict()

    def build_definition(self):
        """Translate this field into a GraphQLField definition."""
        # Non-nullable fields are wrapped in GraphQLNonNull per GraphQL rules.
        scalar = GraphQLString if self.nullable else GraphQLNonNull(GraphQLString)
        return GraphQLField(
            type=scalar,
            args=self.args,
            deprecation_reason=self.deprecation_reason,
            description=self.description,
        )
| mit | Python | |
f0033f87e0b4082e55dd3641282e65369e03c03e | Create natural_sort.py (#3286) | TheAlgorithms/Python | sorts/natural_sort.py | sorts/natural_sort.py | from __future__ import annotations
import re
def natural_sort(input_list: list[str]) -> list[str]:
    """
    Return a new list sorted the way a human reader would expect: runs of
    digits are compared numerically and everything else case-insensitively,
    unlike plain lexicographic sorting.

    >>> example1 = ['2 ft 7 in', '1 ft 5 in', '10 ft 2 in', '2 ft 11 in', '7 ft 6 in']
    >>> sorted(example1)
    ['1 ft 5 in', '10 ft 2 in', '2 ft 11 in', '2 ft 7 in', '7 ft 6 in']
    >>> # The natural sort algorithm sort based on meaning and not computer code point.
    >>> natural_sort(example1)
    ['1 ft 5 in', '2 ft 7 in', '2 ft 11 in', '7 ft 6 in', '10 ft 2 in']

    >>> example2 = ['Elm11', 'Elm12', 'Elm2', 'elm0', 'elm1', 'elm10', 'elm13', 'elm9']
    >>> sorted(example2)
    ['Elm11', 'Elm12', 'Elm2', 'elm0', 'elm1', 'elm10', 'elm13', 'elm9']
    >>> natural_sort(example2)
    ['elm0', 'elm1', 'Elm2', 'elm9', 'elm10', 'Elm11', 'Elm12', 'elm13']
    """

    def _human_key(text):
        # The capturing group makes re.split keep the digit runs, so the key
        # alternates non-digit and digit chunks; digits become ints.
        chunks = re.split("([0-9]+)", text)
        return [int(chunk) if chunk.isdigit() else chunk.lower() for chunk in chunks]

    return sorted(input_list, key=_human_key)
# Run the doctest examples embedded in natural_sort's docstring.
if __name__ == "__main__":
    import doctest

    doctest.testmod()
| mit | Python | |
bba04c867055715bb93e2fc2736538337b9f26ac | Add caesar cipher | xiao0720/leetcode,xliiauo/leetcode,xliiauo/leetcode,xiao0720/leetcode,xliiauo/leetcode | CaesarCipher.py | CaesarCipher.py | class CaesarCipher:
def encrypt(self, plain, n):
    """Shift every character of `plain` forward by n positions in the
    26-letter uppercase alphabet, wrapping past 'Z' back to 'A'.

    Assumes `plain` contains only uppercase ASCII letters — TODO confirm
    with callers; other characters are shifted by the same A-based rule.
    """
    base = ord('A')
    return ''.join(chr((ord(ch) - base + n) % 26 + base) for ch in plain)
def decrypt(self, encrypted, n):
rst = [None] * len(encrypted)
for i in range(len(encrypted)):
rst[i] = chr((ord(encrypted[i]) - ord('A') - n) % 26 + ord('A'))
return ''.join(rst)
if __name__ == '__main__':
    # Demo: shifting 'APPLE' by 5 prints 'FUUQJ', then decrypting recovers 'APPLE'.
    print(CaesarCipher().encrypt('APPLE', 5))
    print(CaesarCipher().decrypt('FUUQJ', 5))
4fef2ff44b2f195a9a135ba3ca5c70bb08572d39 | Add sphinx configuration | openthings/zeppelin,nkconnor/magellan,chiwanpark/incubator-zeppelin,vgmartinez/incubator-zeppelin,elbamos/Zeppelin-With-R,karuppayya/zeppelin,digitalreasoning/incubator-zeppelin,wary/zeppelin,vrlo/zeppelin,Yingmin-Li/incubator-zeppelin,r-kamath/zeppelin,astroshim/incubator-zeppelin,Solution-Global/zeppelin,ReeceRobinson/incubator-zeppelin,radicalbit/incubator-zeppelin,ravicodder/incubator-zeppelin,cfries/zeppelin,Flipkart/incubator-zeppelin,lobamba/zeppelin,opsun/incubator-zeppelin,almacro/zeppelin-derived,jongyoul/zeppelin,astroshim/incubator-zeppelin,mfelgamal/zeppelin,kabong009/incubator-zeppelin,digitalreasoning/incubator-zeppelin,felixcheung/incubator-zeppelin,jhsbeat/incubator-zeppelin,almacro/zeppelin-derived,elbamos/Zeppelin-With-R,soralee/zeppelin,jyt109/incubator-zeppelin,Leemoonsoo/zeppelin,tillrohrmann/incubator-zeppelin,ReeceRobinson/incubator-zeppelin,cquptEthan/incubator-zeppelin,r-kamath/incubator-zeppelin,hammertank/zeppelin,rajeshkp/incubator-zeppelin,jhsbeat/incubator-zeppelin,MikeTYChen/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,rerngvit/incubator-zeppelin,vrajat/incubator-zeppelin,soralee/zeppelin,lobamba/zeppelin,datalayer/zeppelin-R,leonardofoderaro/incubator-zeppelin,apache/incubator-zeppelin,tribbloid/incubator-zeppelin,suvam97/zeppelin,herval/zeppelin,caofangkun/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,rohit2b/incubator-zeppelin,ravicodder/incubator-zeppelin,RamVenkatesh/incubator-zeppelin,piyush-mukati/incubator-zeppelin,mfelgamal/zeppelin,pellmont/zeppelin,raysteam/zeppelin,zetaris/zeppelin,granturing/incubator-zeppelin,yu74n/zeppelin,leancloud/zeppelin,tillrohrmann/incubator-zeppelin,leonardofoderaro/incubator-zeppelin,swakrish/incubator-zeppelin,nkconnor/magellan,radicalbit/incubator-zeppelin,minahlee/zeppelin,Nova-Boy/zeppelin,vrajat/incubator-zeppelin,debugger87/incubator-zeppelin,cris83/incubator-zeppelin,SarunasG/zeppelin-oidc,minahlee/zeppel
in,doanduyhai/incubator-zeppelin,digitalreasoning/incubator-zeppelin,sagarkulkarni3592/incubator-zeppelin,jongyoul/zeppelin,sagarkulkarni3592/incubator-zeppelin,galleon/incubator-zeppelin,optimizely/incubator-zeppelin,huangchaosuper/incubator-zeppelin,almacro/zeppelin-derived,rlugojr/incubator-zeppelin,tribbloid/incubator-zeppelin,swakrish/incubator-zeppelin,minahlee/zeppelin,ravicodder/incubator-zeppelin,karuppayya/zeppelin,pravin-dsilva/zeppelin,piyush-mukati/incubator-zeppelin,yorek/zeppelin,dirceusemighini/incubator-zeppelin,Peaceful-learner/incubator-zeppelin,tillrohrmann/incubator-zeppelin,prabhjyotsingh/zeppelin,galleon/incubator-zeppelin,Madhuka/incubator-zeppelin,jongyoul/zeppelin,bloomer1/incubator-zeppelin,jongyoul/zeppelin,tribbloid/incubator-zeppelin,piyush-mukati/incubator-zeppelin,fazlan-nazeem/incubator-zeppelin,volumeint/zeppelin,apache/incubator-zeppelin,AlexanderShoshin/zeppelin,IceKhan13/zeppelin,onkarshedge/incubator-zeppelin,vgmartinez/incubator-zeppelin,namanmishra91/zeppelin,zetaris/zeppelin,ibtawfik/incubator-zeppelin,sctincman/zeppelin,prabhjyotsingh/zeppelin,granturing/incubator-zeppelin,TelekomAustriaGroup/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,blrunner/incubator-zeppelin,fogbeam/zeppelin_mirror,cris11/incubator-zeppelin,raysteam/zeppelin,jhsbeat/incubator-zeppelin,hkropp/incubator-zeppelin,doanduyhai/incubator-zeppelin,spacewalkman/incubator-zeppelin,suvam97/zeppelin,rajeshkp/incubator-zeppelin,nkconnor/magellan,pravin-dsilva/zeppelin,cris83/incubator-zeppelin,tinkoff-dwh/zeppelin,EasonYi/incubator-zeppelin,rookalkar/incubator-zeppelin,nikste/incubator-zeppelin,r-kamath/incubator-zeppelin,Solution-Global/zeppelin,yorek/zeppelin,rohit2b/incubator-zeppelin,vgmartinez/incubator-zeppelin,dsdinter/incubator-zeppelin,SachinJanani/zeppelin,1ambda/zeppelin,yu74n/zeppelin,datalayer/zeppelin-datalayer,Madhuka/incubator-zeppelin,chilang/zeppelin,prabhjyotsingh/zeppelin,jongyoul/incubator-zeppelin,astroshim/zeppelin,epahomov/zeppelin,H
eartSaVioR/incubator-zeppelin,dirceusemighini/incubator-zeppelin,spacewalkman/incubator-zeppelin,leancloud/zeppelin,epahomov/zeppelin,RamVenkatesh/incubator-zeppelin,soralee/zeppelin,cfries/zeppelin,zjffdu/zeppelin,rerngvit/incubator-zeppelin,mross-pivotal/incubator-zeppelin,rlugojr/incubator-zeppelin,rajeshkp/incubator-zeppelin,r-kamath/zeppelin,soralee/zeppelin,fazlan-nazeem/incubator-zeppelin,zetaris/zeppelin,volumeint/zeppelin,pravin-dsilva/zeppelin,vipul1409/zeppelin,galleon/incubator-zeppelin,sravan-s/zeppelin,anthonycorbacho/incubator-zeppelin,djoelz/incubator-zeppelin,sergeymazin/zeppelin,samuel-pt/incubator-zeppelin,IceKhan13/zeppelin,fogbeam/fogbeam_zeppelin,catap/incubator-zeppelin,Nova-Boy/zeppelin,radicalbit/incubator-zeppelin,apache/zeppelin,chilang/zeppelin,cris11/incubator-zeppelin,optimizely/incubator-zeppelin,Altiscale/incubator-zeppelin,apache/incubator-zeppelin,VipinRathor/zeppelin,Leemoonsoo/incubator-zeppelin,Altiscale/incubator-zeppelin,rconline/zeppelin,benoyantony/zeppelin,digitalreasoning/incubator-zeppelin,rozza/incubator-zeppelin,rlugojr/incubator-zeppelin,fogbeam/fogbeam_zeppelin,benoyantony/zeppelin,herval/zeppelin,YuanGunGun/zeppelin,issaclee/silkroad,Madhuka/incubator-zeppelin,issaclee/silkroad,lresende/incubator-zeppelin,prasadwagle/incubator-zeppelin,benoyantony/zeppelin,SachinJanani/zeppelin,chilang/zeppelin,dsdinter/incubator-zeppelin,zetaris/zeppelin,apache/zeppelin,hkropp/incubator-zeppelin,xuyanhui/incubator-zeppelin,BabbleGrabble/incubator-zeppelin-oauth2,omegapointresearch/incubator-zeppelin,igorborojevic/incubator-zeppelin,xuyanhui/incubator-zeppelin,jhshin9/incubator-zeppelin,bloomer1/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,wary/zeppelin,vrajat/incubator-zeppelin,tribbloid/incubator-zeppelin,radicalbit/incubator-zeppelin,eranwitkon/incubator-zeppelin,tinkoff-dwh/zeppelin,mahantheshhv/incubator-zeppelin,vrlo/zeppelin,caofangkun/incubator-zeppelin,Nova-Boy/zeppelin,yorek/zeppelin,Peaceful-learner/incubator-zeppel
in,datalayer/zeppelin,jongyoul/zeppelin,cjmatta/incubator-zeppelin,DataScienceX/incubator-zeppelin,suvam97/zeppelin,rajeshkp/incubator-zeppelin,nikste/incubator-zeppelin,AntoineAugusti/incubator-zeppelin,tillrohrmann/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,Altiscale/incubator-zeppelin,mross-pivotal/incubator-zeppelin,rozza/incubator-zeppelin,rlugojr/incubator-zeppelin,IceKhan13/zeppelin,vrlo/zeppelin,sravan-s/zeppelin,piyush-mukati/incubator-zeppelin,cfries/zeppelin,astroshim/zeppelin,hammertank/zeppelin,djoelz/incubator-zeppelin,vrlo/zeppelin,SarunasG/zeppelin-oidc,xuyanhui/incubator-zeppelin,openthings/zeppelin,sergeymazin/zeppelin,kabong009/incubator-zeppelin,wakamori/incubator-zeppelin,rozza/incubator-zeppelin,cjmatta/incubator-zeppelin,Nova-Boy/zeppelin,RamVenkatesh/incubator-zeppelin,rerngvit/incubator-zeppelin,Flipkart/incubator-zeppelin,sravan-s/zeppelin,mfelgamal/zeppelin,1ambda/zeppelin,rconline/zeppelin,samuel-pt/incubator-zeppelin,kidaa/incubator-zeppelin,sergeymazin/zeppelin,karuppayya/zeppelin,cris11/incubator-zeppelin,openthings/zeppelin,fazlan-nazeem/incubator-zeppelin,zjffdu/zeppelin,namanmishra91/zeppelin,lresende/incubator-zeppelin,cquptEthan/incubator-zeppelin,hkropp/incubator-zeppelin,r-kamath/zeppelin,jyt109/incubator-zeppelin,datalayer/zeppelin-datalayer,epahomov/zeppelin,MikeTYChen/incubator-zeppelin,namanmishra91/zeppelin,rlugojr/incubator-zeppelin,djoelz/incubator-zeppelin,granturing/incubator-zeppelin,jhshin9/incubator-zeppelin,soralee/zeppelin,VipinRathor/zeppelin,cjmatta/incubator-zeppelin,jlagarden/zeppelin,PeinYu/incubator-zeppelin,lobamba/zeppelin,openthings/zeppelin,rookalkar/incubator-zeppelin,rozza/incubator-zeppelin,ibtawfik/incubator-zeppelin,pravin-dsilva/zeppelin,mwpenny/zeppelin-esri,namanmishra91/zeppelin,Madhuka/incubator-zeppelin,kidaa/incubator-zeppelin,tillrohrmann/incubator-zeppelin,optimizely/incubator-zeppelin,tinkoff-dwh/zeppelin,vipul1409/zeppelin,herval/zeppelin,mahantheshhv/incubator-zeppelin,fazlan-na
zeem/incubator-zeppelin,prasadwagle/incubator-zeppelin,mross-pivotal/incubator-zeppelin,yu74n/zeppelin,chiwanpark/incubator-zeppelin,astroshim/incubator-zeppelin,openthings/zeppelin,datalayer/zeppelin,sohaibiftikhar/zeppelin,volumeint/zeppelin,mwpenny/zeppelin-esri,jlagarden/zeppelin,igorborojevic/incubator-zeppelin,RPCMoritz/incubator-zeppelin,RPCMoritz/incubator-zeppelin,granturing/incubator-zeppelin,cfries/zeppelin,lobamba/zeppelin,mross-pivotal/incubator-zeppelin,lresende/incubator-zeppelin,radicalbit/incubator-zeppelin,rerngvit/incubator-zeppelin,kcompher/incubator-zeppelin,samuel-pt/incubator-zeppelin,mfelgamal/zeppelin,karuppayya/zeppelin,apache/incubator-zeppelin,SarunasG/zeppelin-oidc,jlagarden/zeppelin,soralee/zeppelin,Leemoonsoo/zeppelin,Flipkart/incubator-zeppelin,PeinYu/incubator-zeppelin,caofangkun/incubator-zeppelin,felixcheung/incubator-zeppelin,swakrish/incubator-zeppelin,EasonYi/incubator-zeppelin,tillrohrmann/incubator-zeppelin,jongyoul/incubator-zeppelin,fogbeam/zeppelin_mirror,leonardofoderaro/incubator-zeppelin,bloomer1/incubator-zeppelin,catap/incubator-zeppelin,mwkang/zeppelin,brigade/incubator-zeppelin,herval/zeppelin,Solution-Global/zeppelin,prabhjyotsingh/zeppelin,fogbeam/zeppelin_mirror,PeinYu/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,suvam97/zeppelin,vipul1409/zeppelin,wary/zeppelin,rookalkar/incubator-zeppelin,rookalkar/incubator-zeppelin,r-kamath/incubator-zeppelin,astroshim/zeppelin,fogbeam/fogbeam_zeppelin,DataScienceX/incubator-zeppelin,cris83/incubator-zeppelin,mfelgamal/zeppelin,huangchaosuper/incubator-zeppelin,Peaceful-learner/incubator-zeppelin,igorborojevic/incubator-zeppelin,rconline/zeppelin,optimizely/incubator-zeppelin,datalayer/zeppelin,1ambda/zeppelin,prabhjyotsingh/incubator-zeppelin,YuanGunGun/zeppelin,issaclee/silkroad,mfelgamal/zeppelin,piyush-mukati/incubator-zeppelin,mwkang/zeppelin,MikeTYChen/incubator-zeppelin,nikste/incubator-zeppelin,SachinJanani/zeppelin,hboutemy/incubator-zeppelin,minahlee/zeppelin
,datalayer/zeppelin-R,ReeceRobinson/incubator-zeppelin,jongyoul/incubator-zeppelin,r-kamath/incubator-zeppelin,tzolov/incubator-zeppelin,radicalbit/incubator-zeppelin,ravicodder/incubator-zeppelin,Nova-Boy/zeppelin,onkarshedge/incubator-zeppelin,astroshim/zeppelin,elbamos/Zeppelin-With-R,swakrish/incubator-zeppelin,BabbleGrabble/incubator-zeppelin-oauth2,Leemoonsoo/zeppelin,SachinJanani/zeppelin,sohaibiftikhar/zeppelin,kabong009/incubator-zeppelin,r-kamath/zeppelin,vardancse/incubator-zeppelin,raysteam/zeppelin,samuel-pt/incubator-zeppelin,datalayer/zeppelin-datalayer,sctincman/zeppelin,tillrohrmann/incubator-zeppelin,YuanGunGun/zeppelin,doanduyhai/incubator-zeppelin,nkconnor/magellan,kcompher/incubator-zeppelin,MikeTYChen/incubator-zeppelin,nikste/incubator-zeppelin,joroKr21/incubator-zeppelin,cris11/incubator-zeppelin,kcompher/incubator-zeppelin,xuyanhui/incubator-zeppelin,r-kamath/zeppelin,jongyoul/incubator-zeppelin,swakrish/incubator-zeppelin,herval/zeppelin,DataScienceX/incubator-zeppelin,pellmont/zeppelin,rookalkar/incubator-zeppelin,raysteam/zeppelin,ibtawfik/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,herval/zeppelin,jhsbeat/incubator-zeppelin,yu74n/zeppelin,catap/incubator-zeppelin,apache/incubator-zeppelin,wakamori/incubator-zeppelin,jlagarden/zeppelin,AlexanderShoshin/zeppelin,mwkang/zeppelin,AlexanderShoshin/zeppelin,benoyantony/zeppelin,fazlan-nazeem/incubator-zeppelin,EasonYi/incubator-zeppelin,sergeymazin/zeppelin,myrtleTree33/zeppelin,Solution-Global/zeppelin,sravan-s/zeppelin,vardancse/incubator-zeppelin,cris11/incubator-zeppelin,vrlo/zeppelin,leancloud/zeppelin,jlagarden/zeppelin,astroshim/incubator-zeppelin,wary/zeppelin,chilang/zeppelin,nikste/incubator-zeppelin,djoelz/incubator-zeppelin,sagarkulkarni3592/incubator-zeppelin,hammertank/zeppelin,cfries/zeppelin,DataScienceX/incubator-zeppelin,cquptEthan/incubator-zeppelin,SarunasG/zeppelin-oidc,jongyoul/incubator-zeppelin,VipinRathor/zeppelin,sctincman/zeppelin,cris83/incubator-zeppelin,i
gorborojevic/incubator-zeppelin,RamVenkatesh/incubator-zeppelin,onkarshedge/incubator-zeppelin,RPCMoritz/incubator-zeppelin,mwkang/zeppelin,vgmartinez/incubator-zeppelin,zjffdu/zeppelin,SachinJanani/zeppelin,datalayer/zeppelin-R,omegapointresearch/incubator-zeppelin,myrtleTree33/zeppelin,jhshin9/incubator-zeppelin,IceKhan13/zeppelin,mross-pivotal/incubator-zeppelin,AlexanderShoshin/zeppelin,Madhuka/incubator-zeppelin,caofangkun/incubator-zeppelin,leonardofoderaro/incubator-zeppelin,datalayer/zeppelin-datalayer,joroKr21/incubator-zeppelin,raysteam/zeppelin,prabhjyotsingh/zeppelin,wary/zeppelin,openthings/zeppelin,jyt109/incubator-zeppelin,cfries/zeppelin,RPCMoritz/incubator-zeppelin,Altiscale/incubator-zeppelin,dsdinter/incubator-zeppelin,mfelgamal/zeppelin,eranwitkon/incubator-zeppelin,astroshim/incubator-zeppelin,rconline/zeppelin,sagarkulkarni3592/incubator-zeppelin,ReeceRobinson/incubator-zeppelin,dirceusemighini/incubator-zeppelin,digitalreasoning/incubator-zeppelin,hboutemy/incubator-zeppelin,brigade/incubator-zeppelin,sravan-s/zeppelin,dsdinter/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,debugger87/incubator-zeppelin,tribbloid/incubator-zeppelin,astroshim/zeppelin,dsdinter/incubator-zeppelin,cquptEthan/incubator-zeppelin,datalayer/zeppelin-R,vipul1409/zeppelin,tinkoff-dwh/zeppelin,fogbeam/zeppelin_mirror,soralee/zeppelin,datalayer/zeppelin,RamVenkatesh/incubator-zeppelin,joroKr21/incubator-zeppelin,prabhjyotsingh/zeppelin,yorek/zeppelin,VipinRathor/zeppelin,vipul1409/zeppelin,joroKr21/incubator-zeppelin,sagarkulkarni3592/incubator-zeppelin,datalayer/zeppelin,SachinJanani/zeppelin,suvam97/zeppelin,hboutemy/incubator-zeppelin,RPCMoritz/incubator-zeppelin,catap/incubator-zeppelin,swakrish/incubator-zeppelin,jhshin9/incubator-zeppelin,pellmont/zeppelin,tzolov/incubator-zeppelin,Yingmin-Li/incubator-zeppelin,sravan-s/zeppelin,datalayer/zeppelin-R,astroshim/zeppelin,Leemoonsoo/zeppelin,rconline/zeppelin,vipul1409/zeppelin,prasadwagle/incubator-zeppelin,apac
he/zeppelin,fazlan-nazeem/incubator-zeppelin,benoyantony/zeppelin,kabong009/incubator-zeppelin,namanmishra91/zeppelin,prabhjyotsingh/zeppelin,SachinJanani/zeppelin,igorborojevic/incubator-zeppelin,myrtleTree33/zeppelin,kcompher/incubator-zeppelin,AlexanderShoshin/zeppelin,vgmartinez/incubator-zeppelin,wakamori/incubator-zeppelin,samuel-pt/incubator-zeppelin,wakamori/incubator-zeppelin,doanduyhai/incubator-zeppelin,anthonycorbacho/incubator-zeppelin,jongyoul/zeppelin,astroshim/zeppelin,onkarshedge/incubator-zeppelin,minahlee/incubator-zeppelin,Altiscale/incubator-zeppelin,elbamos/Zeppelin-With-R,zjffdu/zeppelin,chilang/zeppelin,Solution-Global/zeppelin,keedio/incubator-zeppelin,omegapointresearch/incubator-zeppelin,mwkang/zeppelin,mwpenny/zeppelin-esri,spacewalkman/incubator-zeppelin,jhshin9/incubator-zeppelin,spacewalkman/incubator-zeppelin,Leemoonsoo/incubator-zeppelin,rerngvit/incubator-zeppelin,doanduyhai/incubator-zeppelin,SarunasG/zeppelin-oidc,HeartSaVioR/incubator-zeppelin,wakamori/incubator-zeppelin,hammertank/zeppelin,apache/zeppelin,benoyantony/zeppelin,IceKhan13/zeppelin,cris83/incubator-zeppelin,opsun/incubator-zeppelin,Solution-Global/zeppelin,Leemoonsoo/zeppelin,blrunner/incubator-zeppelin,myrtleTree33/zeppelin,BabbleGrabble/incubator-zeppelin-oauth2,huangchaosuper/incubator-zeppelin,PeinYu/incubator-zeppelin,pellmont/zeppelin,opsun/incubator-zeppelin,mwpenny/zeppelin-esri,mahantheshhv/incubator-zeppelin,keedio/incubator-zeppelin,IceKhan13/zeppelin,joroKr21/incubator-zeppelin,kabong009/incubator-zeppelin,jyt109/incubator-zeppelin,opsun/incubator-zeppelin,pravin-dsilva/zeppelin,YuanGunGun/zeppelin,minahlee/incubator-zeppelin,astroshim/incubator-zeppelin,yu74n/zeppelin,herval/zeppelin,joroKr21/incubator-zeppelin,1ambda/zeppelin,karuppayya/zeppelin,apache/zeppelin,issaclee/silkroad,volumeint/zeppelin,Flipkart/incubator-zeppelin,prasadwagle/incubator-zeppelin,zetaris/zeppelin,MikeTYChen/incubator-zeppelin,yorek/zeppelin,issaclee/silkroad,minahlee/incubator
-zeppelin,lresende/incubator-zeppelin,sohaibiftikhar/zeppelin,Nova-Boy/zeppelin,minahlee/zeppelin,blrunner/incubator-zeppelin,wary/zeppelin,cfries/zeppelin,doanduyhai/incubator-zeppelin,fogbeam/zeppelin_mirror,vipul1409/zeppelin,suvam97/zeppelin,vardancse/incubator-zeppelin,Leemoonsoo/incubator-zeppelin,RamVenkatesh/incubator-zeppelin,almacro/zeppelin-derived,vgmartinez/incubator-zeppelin,cquptEthan/incubator-zeppelin,yorek/zeppelin,tzolov/incubator-zeppelin,datalayer/zeppelin,mwpenny/zeppelin-esri,nikste/incubator-zeppelin,caofangkun/incubator-zeppelin,doanduyhai/incubator-zeppelin,AntoineAugusti/incubator-zeppelin,mwpenny/zeppelin-esri,lobamba/zeppelin,AntoineAugusti/incubator-zeppelin,minahlee/incubator-zeppelin,optimizely/incubator-zeppelin,rconline/zeppelin,rohit2b/incubator-zeppelin,prasadwagle/incubator-zeppelin,TelekomAustriaGroup/incubator-zeppelin,prasadwagle/incubator-zeppelin,TelekomAustriaGroup/incubator-zeppelin,cquptEthan/incubator-zeppelin,anthonycorbacho/incubator-zeppelin,fogbeam/fogbeam_zeppelin,fogbeam/fogbeam_zeppelin,sctincman/zeppelin,vardancse/incubator-zeppelin,almacro/zeppelin-derived,felixcheung/incubator-zeppelin,samuel-pt/incubator-zeppelin,keedio/incubator-zeppelin,r-kamath/incubator-zeppelin,yu74n/zeppelin,eranwitkon/incubator-zeppelin,chiwanpark/incubator-zeppelin,Leemoonsoo/incubator-zeppelin,brigade/incubator-zeppelin,karuppayya/zeppelin,epahomov/zeppelin,r-kamath/zeppelin,digitalreasoning/incubator-zeppelin,SarunasG/zeppelin-oidc,HeartSaVioR/incubator-zeppelin,sohaibiftikhar/zeppelin,onkarshedge/incubator-zeppelin,tzolov/incubator-zeppelin,optimizely/incubator-zeppelin,ravicodder/incubator-zeppelin,HeartSaVioR/incubator-zeppelin,vardancse/incubator-zeppelin,mwkang/zeppelin,Flipkart/incubator-zeppelin,Yingmin-Li/incubator-zeppelin,r-kamath/zeppelin,r-kamath/incubator-zeppelin,brigade/incubator-zeppelin,mwpenny/zeppelin-esri,joroKr21/incubator-zeppelin,onkarshedge/incubator-zeppelin,pellmont/zeppelin,apache/incubator-zeppelin,mahanth
eshhv/incubator-zeppelin,sergeymazin/zeppelin,optimizely/incubator-zeppelin,sctincman/zeppelin,nkconnor/magellan,jongyoul/zeppelin,vrlo/zeppelin,cjmatta/incubator-zeppelin,Madhuka/incubator-zeppelin,wary/zeppelin,HeartSaVioR/incubator-zeppelin,jongyoul/incubator-zeppelin,1ambda/zeppelin,PeinYu/incubator-zeppelin,djoelz/incubator-zeppelin,Peaceful-learner/incubator-zeppelin,hkropp/incubator-zeppelin,ReeceRobinson/incubator-zeppelin,rerngvit/incubator-zeppelin,Leemoonsoo/zeppelin,EasonYi/incubator-zeppelin,elbamos/Zeppelin-With-R,vrlo/zeppelin,anthonycorbacho/incubator-zeppelin,rconline/zeppelin,namanmishra91/zeppelin,Yingmin-Li/incubator-zeppelin,tinkoff-dwh/zeppelin,Leemoonsoo/incubator-zeppelin,sohaibiftikhar/zeppelin,galleon/incubator-zeppelin,tzolov/incubator-zeppelin,Yingmin-Li/incubator-zeppelin,brigade/incubator-zeppelin,rerngvit/incubator-zeppelin,granturing/incubator-zeppelin,vrajat/incubator-zeppelin,dirceusemighini/incubator-zeppelin,hboutemy/incubator-zeppelin,igorborojevic/incubator-zeppelin,kcompher/incubator-zeppelin,yorek/zeppelin,cris83/incubator-zeppelin,ravicodder/incubator-zeppelin,HeartSaVioR/incubator-zeppelin,vrajat/incubator-zeppelin,zjffdu/zeppelin,ibtawfik/incubator-zeppelin,pellmont/zeppelin,zjffdu/zeppelin,volumeint/zeppelin,issaclee/silkroad,hammertank/zeppelin,granturing/incubator-zeppelin,bloomer1/incubator-zeppelin,hkropp/incubator-zeppelin,Nova-Boy/zeppelin,kidaa/incubator-zeppelin,1ambda/zeppelin,Altiscale/incubator-zeppelin,rohit2b/incubator-zeppelin,MikeTYChen/incubator-zeppelin,fogbeam/zeppelin_mirror,VipinRathor/zeppelin,sravan-s/zeppelin,brigade/incubator-zeppelin,debugger87/incubator-zeppelin,lresende/incubator-zeppelin,omegapointresearch/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,elbamos/Zeppelin-With-R,volumeint/zeppelin,debugger87/incubator-zeppelin,minahlee/zeppelin,EasonYi/incubator-zeppelin,kidaa/incubator-zeppelin,sctincman/zeppelin,blrunner/incubator-zeppelin,rlugojr/incubator-zeppelin,jyt109/incubator-zeppeli
n,sagarkulkarni3592/incubator-zeppelin,lresende/incubator-zeppelin,blrunner/incubator-zeppelin,suvam97/zeppelin,tzolov/incubator-zeppelin,keedio/incubator-zeppelin,sergeymazin/zeppelin,lobamba/zeppelin,lresende/incubator-zeppelin,lobamba/zeppelin,minahlee/zeppelin,karuppayya/zeppelin,BabbleGrabble/incubator-zeppelin-oauth2,Leemoonsoo/zeppelin,raysteam/zeppelin,bloomer1/incubator-zeppelin,sctincman/zeppelin,apache/zeppelin,pellmont/zeppelin,Leemoonsoo/incubator-zeppelin,anthonycorbacho/incubator-zeppelin,sohaibiftikhar/zeppelin,IceKhan13/zeppelin,joroKr21/incubator-zeppelin,sergeymazin/zeppelin,yu74n/zeppelin,VipinRathor/zeppelin,chiwanpark/incubator-zeppelin,leancloud/zeppelin,minahlee/incubator-zeppelin,AntoineAugusti/incubator-zeppelin,prasadwagle/incubator-zeppelin,1ambda/zeppelin,rajeshkp/incubator-zeppelin,felixcheung/incubator-zeppelin,digitalreasoning/incubator-zeppelin,anthonycorbacho/incubator-zeppelin,galleon/incubator-zeppelin,huangchaosuper/incubator-zeppelin,almacro/zeppelin-derived,spacewalkman/incubator-zeppelin,rozza/incubator-zeppelin,zetaris/zeppelin,onkarshedge/incubator-zeppelin,kidaa/incubator-zeppelin,Madhuka/incubator-zeppelin,datalayer/zeppelin,debugger87/incubator-zeppelin,chiwanpark/incubator-zeppelin,leonardofoderaro/incubator-zeppelin,volumeint/zeppelin,jlagarden/zeppelin,AntoineAugusti/incubator-zeppelin,spacewalkman/incubator-zeppelin,wakamori/incubator-zeppelin,hammertank/zeppelin,BabbleGrabble/incubator-zeppelin-oauth2,dirceusemighini/incubator-zeppelin,namanmishra91/zeppelin,TelekomAustriaGroup/incubator-zeppelin,tinkoff-dwh/zeppelin,hkropp/incubator-zeppelin,eranwitkon/incubator-zeppelin,openthings/zeppelin,AlexanderShoshin/zeppelin,prabhjyotsingh/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,TelekomAustriaGroup/incubator-zeppelin,myrtleTree33/zeppelin,pravin-dsilva/zeppelin,hboutemy/incubator-zeppelin,ankurmitujjain/incubator-zeppelin,rohit2b/incubator-zeppelin,anthonycorbacho/incubator-zeppelin,benoyantony/zeppelin,jhsbeat/
incubator-zeppelin,hammertank/zeppelin,felixcheung/incubator-zeppelin,SarunasG/zeppelin-oidc,cjmatta/incubator-zeppelin,Leemoonsoo/zeppelin,blrunner/incubator-zeppelin,opsun/incubator-zeppelin,AlexanderShoshin/zeppelin,chilang/zeppelin,ravicodder/incubator-zeppelin,catap/incubator-zeppelin,cris11/incubator-zeppelin,mahantheshhv/incubator-zeppelin,r-kamath/incubator-zeppelin,spacewalkman/incubator-zeppelin,granturing/incubator-zeppelin,elbamos/Zeppelin-With-R,raysteam/zeppelin,xuyanhui/incubator-zeppelin,jlagarden/zeppelin,brigade/incubator-zeppelin,sohaibiftikhar/zeppelin,huangchaosuper/incubator-zeppelin,chilang/zeppelin,apache/zeppelin,zjffdu/zeppelin,radicalbit/incubator-zeppelin,vrajat/incubator-zeppelin,YuanGunGun/zeppelin,Solution-Global/zeppelin,keedio/incubator-zeppelin,fogbeam/zeppelin_mirror,tinkoff-dwh/zeppelin,TelekomAustriaGroup/incubator-zeppelin,sagarkulkarni3592/incubator-zeppelin,omegapointresearch/incubator-zeppelin,ibtawfik/incubator-zeppelin,datalayer/zeppelin-R,VipinRathor/zeppelin,cjmatta/incubator-zeppelin,DataScienceX/incubator-zeppelin,YuanGunGun/zeppelin,pravin-dsilva/zeppelin,eranwitkon/incubator-zeppelin,mwkang/zeppelin,Peaceful-learner/incubator-zeppelin,cquptEthan/incubator-zeppelin,YuanGunGun/zeppelin,zetaris/zeppelin | zeppelin-docs/src/main/spinx/conf.py | zeppelin-docs/src/main/spinx/conf.py | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Zeppelin documentation build configuration file
#
# This file is execfile()d with the current directory set to its containing dir.
#
import os
import sys
import xml.dom.minidom
# Best-effort: keep doc builds from littering the tree with .pyc files.
try:
    sys.dont_write_bytecode = True
except Exception:
    # Purely an optimization; never let it abort the build.
    # (Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass through.)
    pass
# Make the local Sphinx extension package ('ext/') importable.
sys.path.insert(0, os.path.abspath('ext'))
def child_node(node, name):
    """Return the first child element of ``node`` with tag ``name``.

    Non-element children (text nodes, comments) are ignored.  Returns
    ``None`` when no matching element child exists.
    """
    for child in node.childNodes:
        if child.nodeType != child.ELEMENT_NODE:
            continue
        if child.tagName == name:
            return child
    return None
def node_text(node):
    """Return the text content of ``node``'s first child.

    Assumes the first child is a text node (e.g. ``<version>1.0</version>``).
    """
    first_child = node.childNodes[0]
    return first_child.data
def maven_version(pom):
    """Extract the project version from a Maven POM file.

    Reads the project's own <version> element when present; otherwise falls
    back to the <parent> element's version (standard Maven inheritance).
    """
    project = xml.dom.minidom.parse(pom).childNodes[0]
    own_version = child_node(project, 'version')
    if own_version:
        return node_text(own_version)
    # No direct <version>: the version is inherited from <parent>.
    parent = child_node(project, 'parent')
    return node_text(child_node(parent, 'version'))
def get_version():
    """Return the documentation version string.

    A non-empty ZEPPELIN_VERSION environment variable (whitespace-stripped)
    takes precedence; otherwise the version is read from the top-level POM.
    """
    env_version = os.environ.get('ZEPPELIN_VERSION', '').strip()
    if env_version:
        return env_version
    return maven_version('../../../pom.xml')
# -- General configuration -----------------------------------------------------
# Minimum Sphinx version required to build these docs.
needs_sphinx = '1.0'
# 'download' is the local extension made importable via the 'ext' dir above.
extensions = ['download']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Zeppelin'
copyright = u'2013, NFlabs'
# Version and release both come from the environment / Maven POM.
version = get_version()
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# Code blocks without an explicit language are highlighted as SQL.
highlight_language = 'sql'
# -- Options for HTML output ---------------------------------------------------
# Use the bundled custom 'zeppelin' theme.
html_theme_path = ['./themes']
html_theme = 'zeppelin'
html_title = '%s %s Documentation' % (project, release)
html_add_permalinks = None
a5aadd892df181a8e4a48ceebedff48211d5d22c | Add paver file. | cournape/audiolab,cournape/audiolab,cournape/audiolab | pavement.py | pavement.py | import os
import subprocess
import sphinx
import setuptools
import numpy.distutils
import paver
import paver.doctools
import common
from setup import configuration
# Paver build options: setuptools packaging metadata plus the Sphinx
# source/build directory layout used by the doc tasks in this file.
# NOTE(review): `options` and `Bunch` are not imported in this file; they are
# presumably injected into the pavement namespace by paver -- confirm.
options(
    setup=Bunch(
        name=common.DISTNAME,
        namespace_packages=['scikits'],
        packages=setuptools.find_packages(),
        install_requires=common.INSTALL_REQUIRE,
        version=common.VERSION,
        include_package_data=True,
    ),
    sphinx=Bunch(
        builddir="build",
        sourcedir="src"
    ),
)
#options.setup.package_data =
#    setuputils.find_package_data("scikits/audiolab",
#        package="scikits/audiolab",
#        only_in_packages=False)
if paver.doctools.has_sphinx:
    def _latex_paths():
        """look up the options that determine where all of the files are.

        Returns a Bunch exposing docroot/builddir/srcdir/latexdir by name.
        Bunch(locals()) captures every local variable, so the names below
        are part of the return contract -- do not rename them casually.
        """
        opts = options
        docroot = path(opts.get('docroot', 'docs'))
        if not docroot.exists():
            raise BuildFailure("Sphinx documentation root (%s) does not exist."
                               % docroot)
        # Prepare the build output directory under the doc root.
        builddir = docroot / opts.get("builddir", ".build")
        builddir.mkdir()
        srcdir = docroot / opts.get("sourcedir", "")
        if not srcdir.exists():
            raise BuildFailure("Sphinx source file dir (%s) does not exist"
                               % srcdir)
        # LaTeX output goes into a dedicated subdirectory of the build dir.
        latexdir = builddir / "latex"
        latexdir.mkdir()
        return Bunch(locals())
    @task
    def latex():
        """Build Audiolab's documentation and install it into
        scikits/audiolab/docs"""
        paths = _latex_paths()
        # Leading '' stands in for argv[0] when sphinx.main parses the options.
        sphinxopts = ['', '-b', 'latex', paths.srcdir, paths.latexdir]
        # dry() logs the described command and runs it unless paver is in
        # dry-run mode.
        dry("sphinx-build %s" % (" ".join(sphinxopts),), sphinx.main, sphinxopts)
        def build_latex():
            # Use the Sphinx-generated Makefile to turn .tex output into a PDF.
            subprocess.call(["make", "all-pdf"], cwd=paths.latexdir)
        dry("Build pdf doc", build_latex)
        # Install the freshly built PDF into the package tree, replacing any
        # previous copy wholesale.
        destdir = path("scikits") / "audiolab" / "docs" / "pdf"
        destdir.rmtree()
        destdir.makedirs()
        pdf = paths.latexdir / "audiolab.pdf"
        pdf.move(destdir)
@task
@needs(['paver.doctools.html'])
def html():
"""Build Audiolab's documentation and install it into
scikits/audiolab/docs"""
builtdocs = path("docs") / options.sphinx.builddir / "html"
destdir = path("scikits") / "audiolab" / "docs" / "html"
destdir.rmtree()
builtdocs.move(destdir)
    @task
    @needs(['html', 'latex'])
    def doc():
        # Aggregate task: building 'doc' just triggers the html and latex tasks.
        pass
    @task
    @needs(['doc', 'setuptools.command.sdist'])
    def sdist():
        """Build doc + tarball."""
        # The @needs chain does all the work: docs first, then setuptools sdist.
        pass
| lgpl-2.1 | Python | |
91d83745d94ba0eeb06d6d12eb32d5950963ad2a | move backend definition | rthill/django-ldapdb | ldapdb/backends/ldap/base.py | ldapdb/backends/ldap/base.py | # -*- coding: utf-8 -*-
#
# django-ldapdb
# Copyright (c) 2009-2010, Bolloré telecom
# All rights reserved.
#
# See AUTHORS file for a full list of contributors.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Bolloré telecom nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import ldap
from django.db.backends import BaseDatabaseFeatures, BaseDatabaseOperations, BaseDatabaseWrapper
class DatabaseCursor(object):
    """Minimal cursor stand-in that just carries the raw python-ldap
    connection, for code paths that expect a cursor-like object."""
    def __init__(self, ldap_connection):
        # The live python-ldap connection object.
        self.connection = ldap_connection
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags for the LDAP backend; inherits all defaults from
    Django's base class and only records its owning connection."""
    def __init__(self, connection):
        self.connection = connection
class DatabaseOperations(BaseDatabaseOperations):
    """Backend-specific operations for LDAP."""
    def quote_name(self, name):
        # LDAP attribute names need no quoting, unlike SQL identifiers.
        return name
class DatabaseWrapper(BaseDatabaseWrapper):
    """Django database wrapper speaking LDAP via python-ldap.

    The connection is opened lazily on first use and bound with the
    credentials from the connection settings.  DN and filter strings cross
    the python-ldap boundary encoded with ``self.charset``; DNs coming back
    from searches are decoded with the same charset.
    """

    def __init__(self, settings_dict, alias='ldap'):
        super(DatabaseWrapper, self).__init__(settings_dict, alias=alias)
        self.charset = "utf-8"
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations()

    def close(self):
        # Intentionally a no-op: the LDAP connection is reused, not closed.
        pass

    def _cursor(self):
        """Return a cursor, lazily opening and binding the LDAP connection."""
        if self.connection is None:
            self.connection = ldap.initialize(self.settings_dict['NAME'])
            self.connection.simple_bind_s(
                self.settings_dict['USER'],
                self.settings_dict['PASSWORD'])
        return DatabaseCursor(self.connection)

    def add_s(self, dn, modlist):
        """Add a new entry at ``dn`` described by ``modlist``."""
        conn = self._cursor().connection
        return conn.add_s(dn.encode(self.charset), modlist)

    def delete_s(self, dn):
        """Delete the entry at ``dn``."""
        conn = self._cursor().connection
        return conn.delete_s(dn.encode(self.charset))

    def modify_s(self, dn, modlist):
        """Apply the modification list to the entry at ``dn``."""
        conn = self._cursor().connection
        return conn.modify_s(dn.encode(self.charset), modlist)

    def rename_s(self, dn, newrdn):
        """Rename the entry at ``dn`` to the new relative DN."""
        conn = self._cursor().connection
        return conn.rename_s(dn.encode(self.charset),
                             newrdn.encode(self.charset))

    def search_s(self, base, scope, filterstr, attrlist):
        """Search below ``base`` and decode each returned DN to unicode."""
        conn = self._cursor().connection
        results = conn.search_s(base, scope,
                                filterstr.encode(self.charset), attrlist)
        return [(dn.decode(self.charset), attrs) for dn, attrs in results]
| bsd-3-clause | Python | |
f3eb1c8efbcd3695dba0037faa4f90328625f547 | Add script for creating numeric passwordlists using permutation & combination. | JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology | permcomb.py | permcomb.py | #!/usr/bin/python
import itertools
import sys
def combination(elements,items):
    """Print every length-*items* digit string over 0..elements-1 (Python 2).

    Despite the name, this enumerates the full Cartesian product with
    repetition, i.e. elements**items lines on stdout.
    """
    # Note: the loop variable shadows the function name; harmless here.
    for combination in itertools.product(xrange(elements), repeat=items):
        print ''.join(map(str, combination))
# CLI: permcomb.py <elements> <items> — generate a numeric password list.
if len(sys.argv) == 3:
    allSet = int(sys.argv[1])
    setItems = int(sys.argv[2])
    # Only generate when the digit range is at least as large as the length.
    if allSet >= setItems:
        combination(allSet,setItems)
    else:
        print "[-] Set Items Should be greater than the Elements."
        print " Example : permcomb.py 10 4"
else:
    print "[-] Please Supply Two Arguments."
    print " Example : percomb.py 10 5"
| cc0-1.0 | Python | |
b01076381ebc91f20c527f1632c7b3f2aa82d39a | Add a very simple performance testing tool. | ajmirsky/couchdb-python | perftest.py | perftest.py | """
Simple peformance tests.
"""
import sys
import time
import couchdb
def main():
    """Run each registered benchmark, optionally filtered by CLI args."""
    print 'sys.version : %r' % (sys.version,)
    print 'sys.platform : %r' % (sys.platform,)
    tests = [create_doc, create_bulk_docs]
    if len(sys.argv) > 1:
        # Positional arguments select benchmarks by function name.
        tests = [test for test in tests if test.__name__ in sys.argv[1:]]
    server = couchdb.Server()
    for test in tests:
        _run(server, test)
def _run(server, func):
    """Run a test in a clean db and log its execution time."""
    sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
    sys.stdout.flush()
    db_name = 'couchdb-python/perftest'
    db = server.create(db_name)
    try:
        start = time.time()
        func(db)
        stop = time.time()
        sys.stdout.write("%0.2fs\n" % (stop-start,))
        sys.stdout.flush()
    except Exception, e:
        # Python 2 except syntax; report the failure but keep the suite going.
        sys.stdout.write("FAILED - %r\n" % (unicode(e),))
        sys.stdout.flush()
    finally:
        # Always drop the scratch database, even on failure.
        server.delete(db_name)
def create_doc(db):
    """Create lots of docs, one at a time"""
    # One HTTP round-trip per document — the slow baseline.
    for i in range(1000):
        db.save({'_id': unicode(i)})
def create_bulk_docs(db):
    """Create lots of docs, lots at a time"""
    # Batched _bulk_docs updates: 1000 batches of 100 documents each.
    batch_size = 100
    num_batches = 1000
    for i in range(num_batches):
        db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])


if __name__ == '__main__':
    main()
| bsd-3-clause | Python | |
29c59fcbe8b15e37e96fec43e613d6f537727ea2 | Create Base64_Demo.py | danjia/EncryptDemo | Base64_Demo/Base64_Demo.py | Base64_Demo/Base64_Demo.py | # -*- coding: utf8 -*-
'''
@brief 用base64加密,解密(必须为ascii)
'''
import base64
def encrypt_base64(content):
    """Base64-encode *content* and return the encoded bytes.

    Accepts bytes directly. Text input (the original docstring required
    ASCII; the demo passes a unicode string) is UTF-8 encoded first —
    this generalization keeps byte callers unchanged while letting the
    ``main`` demo below work on Python 3, where ``b64encode`` rejects str.
    """
    if not isinstance(content, bytes):
        # Works on both Python 2 (unicode) and Python 3 (str).
        content = content.encode('utf-8')
    return base64.b64encode(content)
def decrypt_base64(secretContent):
    """Decode Base64-encoded *secretContent* and return the raw bytes."""
    decoded = base64.b64decode(secretContent)
    return decoded
if "__main__" == __name__:
#要加密的内容
content = u"123456789ABCDEFGabcefgd"
#加密
secretContent = encrypt_base64(content)
print(secretContent)
#解密
print(decrypt_base64(secretContent))
| mit | Python | |
ff7292352b7d4b1609f077c3650d94a3c83051fc | Add property.py. | ueno/ibus,j717273419/ibus,j717273419/ibus,luoxsbupt/ibus,ibus/ibus,luoxsbupt/ibus,Keruspe/ibus,phuang/ibus,fujiwarat/ibus,fujiwarat/ibus,Keruspe/ibus,ueno/ibus,luoxsbupt/ibus,luoxsbupt/ibus,fujiwarat/ibus,ibus/ibus,Keruspe/ibus,ibus/ibus-cros,fujiwarat/ibus,ueno/ibus,ibus/ibus-cros,j717273419/ibus,phuang/ibus,ibus/ibus,phuang/ibus,ibus/ibus,ueno/ibus,phuang/ibus,Keruspe/ibus,j717273419/ibus,ibus/ibus-cros,luoxsbupt/ibus,ibus/ibus-cros,ueno/ibus | ibus/property.py | ibus/property.py | import dbus
# Property type codes (serialized as dbus.Int32 in Property.to_dbus_value).
PROP_TYPE_NORMAL = 0
PROP_TYPE_TOGGLE = 1
PROP_TYPE_RADIO = 2
PROP_TYPE_SEPARATOR = 3

# Property check-state codes for toggle/radio properties.
PROP_STATE_UNCHECKED = 0
PROP_STATE_CHECKED = 1
PROP_STATE_INCONSISTENT = 2
class Property:
    """A single property, serializable to/from a D-Bus struct.

    The field order used by to_dbus_value()/from_dbus_value() is part of
    the wire format and must not change.
    """

    def __init__ (self, name,
                  type = PROP_TYPE_NORMAL,
                  label = "",
                  icon = "",
                  tip = "",
                  sensitive = True,
                  visible = True,
                  state = PROP_STATE_CHECKED):
        self._name = name
        self._type = type
        self._label = label
        self._icon = icon
        self._tip = tip
        self._sensitive = sensitive
        self._visible = visible
        self._state = state
        # Nested child properties; serialized recursively.
        self._sub_props = PropList ()

    def set_sub_props (self, props):
        self._sub_props = props

    def get_sub_props (self):
        return self._sub_props

    def to_dbus_value (self):
        """Serialize this property (and its children) into a dbus.Struct."""
        sub_props = self._sub_props.to_dbus_value ()
        values = (dbus.String (self._name),
                  dbus.Int32 (self._type),
                  dbus.String (self._label),
                  dbus.String (self._icon),
                  dbus.String (self._tip),
                  dbus.Boolean (self._sensitive),
                  dbus.Boolean (self._visible),
                  dbus.Int32 (self._state),
                  sub_props)
        return dbus.Struct (values)

    def from_dbus_value (self, value):
        """Populate this instance from a dbus struct (inverse of to_dbus_value)."""
        self._name, \
        self._type, \
        self._label, \
        self._icon, \
        self._tip, \
        self._sensitive, \
        self._visible, \
        self._state, \
        props = value
        self._sub_props = prop_list_from_dbus_value (props)
def property_from_dbus_value (value):
    """Deserialize a D-Bus struct into a new Property instance."""
    prop = Property ("")
    prop.from_dbus_value (value)
    return prop
class PropList:
    """Ordered collection of Property objects, D-Bus (de)serializable."""

    def __init__ (self):
        self._props = []

    def append (self, prop):
        self._props.append (prop)

    def prepand (self, prop):
        # NOTE: name is a typo of "prepend"; kept as-is for API compatibility.
        self._props.insert (0, prop)

    def insert (self, index, prop):
        self._props.insert (index, prop)

    def get_properties (self):
        # Return a shallow copy so callers cannot mutate our list.
        return self._props[:]

    def to_dbus_value (self):
        """Serialize all contained properties into a dbus.Array of variants."""
        props = map (lambda p: p.to_dbus_value (), self._props)
        return dbus.Array (props, signature = "v")

    def from_dbus_value (self, value):
        """Replace our contents with properties deserialized from *value*."""
        props = []
        for p in value:
            props.append (property_from_dbus_value (p))
        self._props = props
def prop_list_from_dbus_value (value):
    """Deserialize a D-Bus array into a new PropList."""
    plist = PropList ()
    plist.from_dbus_value (value)
    return plist
def test ():
    """Ad-hoc round-trip check: serialize then deserialize property lists."""
    props = PropList ()
    props.append (Property ("a"))
    props.append (Property ("b"))
    props.append (Property ("c"))
    props.append (Property ("d"))
    value = props.to_dbus_value ()
    print prop_list_from_dbus_value (value)

    # Nested case: one property carrying the list above as its children.
    p = Property ("z")
    p.set_sub_props (props)
    props = PropList ()
    props.append (p)
    value = props.to_dbus_value ()
    print prop_list_from_dbus_value (value)

if __name__ == "__main__":
    test ()
| lgpl-2.1 | Python | |
55ee2e14a173ea68f3ed02edbd525a6538dd0c0c | add deploy.py script | andyfischer/circa,andyfischer/circa,andyfischer/circa,andyfischer/circa | improv/deploy.py | improv/deploy.py |
import os, shutil, zipfile
AppName = 'ImprovAlpha4.app'
def mkdir(path):
    """Create *path* as a directory; no-op if anything already exists there."""
    if not os.path.exists(path):
        os.mkdir(path)
def rmdir(path):
    """Remove *path* — a whole directory tree or a single file; no-op if absent."""
    if os.path.exists(path):
        remover = shutil.rmtree if os.path.isdir(path) else os.remove
        remover(path)
def copy(path, dest):
    """Copy *path* into *dest*.

    A directory is copied whole (replacing any previous copy of the same
    name under *dest*); a single file is copied into *dest* (created if
    needed) with metadata preserved.
    """
    if os.path.isdir(path):
        # Copy the whole folder, not just its contents
        folderName = os.path.split(path)[1]
        dest = os.path.join(dest, folderName)
        rmdir(dest)
        shutil.copytree(path, dest)
    else:
        mkdir(dest)
        # BUG FIX: os.copy2 does not exist; the intended call is shutil.copy2,
        # which copies the file along with its metadata.
        shutil.copy2(path, dest)
# Setup common dirs
#copy(AppName, macTempDir)
#copy('ca', macTempDir)
#copy('demos', macTempDir)

# NOTE(review): the archive is never explicitly closed in this excerpt;
# it relies on interpreter shutdown to flush — confirm that is intended.
macZip = zipfile.ZipFile('ImprovAlpha4.zip', 'w')

def add(dir):
    """Recursively add every file under *dir* to the module-level macZip."""
    for root, dirnames, filenames in os.walk(dir):
        for file in filenames:
            fullpath = os.path.join(root,file)
            # Store under its original relative path, deflate-compressed.
            macZip.write(fullpath, fullpath, zipfile.ZIP_DEFLATED)

add('improv.app')
add('ca')
add('demos')
| mit | Python | |
fbf91352da4cf16be8462f57c71aa9f86f21746f | Add class balance checking code | googleinterns/amaranth,googleinterns/amaranth | amaranth/data_analysis/class_balance.py | amaranth/data_analysis/class_balance.py | # Lint as: python3
"""This script checks the balance of classes in the FDC dataset.
Classes are split based on LOW_CALORIE_THRESHOLD and
HIGH_CALORIE_THRESHOLD in the amaranth module.
"""
import os
import pandas as pd
import amaranth
from amaranth.ml import lib
FDC_DATA_DIR = '../../data/fdc/'
def main():
    """Report the low/average/high calorie class balance of the FDC dataset."""
    here = os.path.dirname(__file__)
    abs_fdc_data_dir = os.path.join(here, FDC_DATA_DIR)

    # Load the three FDC tables and join them on their shared keys.
    food = pd.read_csv(os.path.join(abs_fdc_data_dir, 'food.csv'))
    nutrient = pd.read_csv(os.path.join(
        abs_fdc_data_dir, 'nutrient.csv')).rename(columns={'id': 'nutrient_id'})
    food_nutrient = pd.read_csv(
        os.path.join(abs_fdc_data_dir, 'food_nutrient.csv'))

    merged = lib.combine_dataframes('fdc_id', food, food_nutrient)
    merged = lib.combine_dataframes('nutrient_id', merged, nutrient)

    calorie_data = lib.get_calorie_data(merged, 'kcal')
    # Keep only the columns the report needs.
    calorie_data = calorie_data[[
        'description', 'data_type', 'name', 'amount', 'unit_name'
    ]]
    calorie_data = lib.clean_data(calorie_data)

    # Tally every row into its calorie class.
    low_cal_cnt = 0
    avg_cal_cnt = 0
    hi_cal_cnt = 0
    for cal in calorie_data['amount']:
        if cal < amaranth.LOW_CALORIE_THRESHOLD:
            low_cal_cnt += 1
        elif cal < amaranth.HIGH_CALORIE_THRESHOLD:
            avg_cal_cnt += 1
        else:
            hi_cal_cnt += 1

    print('Class balance in FDC Dataset:')
    print(f'Low calorie: {low_cal_cnt/len(calorie_data)}')
    print(f'Average calorie: {avg_cal_cnt/len(calorie_data)}')
    print(f'High calorie: {hi_cal_cnt/len(calorie_data)}')


if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
85bd9515a92b3e603c2113919230d37729a0bd44 | add Python3LexerBase.py | antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4 | python/python3/Python/Python3LexerBase.py | python/python3/Python/Python3LexerBase.py | from typing import TextIO
from antlr4 import *
from antlr4.Token import CommonToken
from .Python3Parser import Python3Parser
import sys
from typing import TextIO
import re
class Python3LexerBase(Lexer):
    """Shared base class for the ANTLR-generated Python 3 lexer.

    Implements Python's significant whitespace: injects NEWLINE, INDENT
    and DEDENT tokens and suppresses newlines inside open bracket pairs,
    mirroring the CPython tokenizer's behaviour.
    """

    # Matches everything EXCEPT newline chars, so sub('') leaves only the
    # \r\n\f portion of the matched text.
    NEW_LINE_PATTERN = re.compile('[^\r\n\f]+')
    # Matches only newline chars, so sub('') leaves just the indentation.
    SPACES_PATTERN = re.compile('[\r\n\f]+')

    def __init__(self, input: InputStream, output: TextIO = sys.stdout):
        super().__init__(input, output)
        # Pending tokens queued ahead of ANTLR's own stream.
        self.tokens = []
        # Stack of indentation widths of the currently open blocks.
        self.indents = []
        # Count of unbalanced (, [ and { — newlines are insignificant
        # while this is positive.
        self.opened = 0

    def reset(self):
        self.tokens = []
        self.indents = []
        self.opened = 0
        super().reset()

    def emitToken(self, token):
        # Record as the current token AND queue it for nextToken().
        self._token = token
        self.tokens.append(token)

    def nextToken(self):
        # Check if the end-of-file is ahead and there are still some DEDENTS expected.
        if self._input.LA(1) == Python3Parser.EOF and len(self.indents) != 0:
            # Remove any trailing EOF tokens from our buffer.
            self.tokens = [token for token in self.tokens if token.type != Python3Parser.EOF]
            # First emit an extra line break that serves as the end of the statement.
            self.emitToken(self.commonToken(Python3Parser.NEWLINE, '\n'))
            # Now emit as much DEDENT tokens as needed.
            while len(self.indents) != 0:
                self.emitToken(self.createDedent())
                self.indents.pop()
            # Put the EOF back on the token stream.
            self.emitToken(self.commonToken(Python3Parser.EOF, '<EOF>'))
        next_ = super().nextToken()
        # Drain the queue before handing out ANTLR's own tokens.
        return next_ if len(self.tokens) == 0 else self.tokens.pop(0)

    def createDedent(self):
        return self.commonToken(Python3Parser.DEDENT, '')

    def commonToken(self, type_: int, text: str):
        # Span the token over the characters just consumed (empty text
        # yields a zero-width token at the current position).
        stop = self.getCharIndex() - 1
        start = stop if text == '' else stop - len(text) + 1
        return CommonToken(self._tokenFactorySourcePair, type_, Lexer.DEFAULT_TOKEN_CHANNEL, start, stop)

    def getIndentationCount(self, whitespace: str):
        """Compute the indent width, expanding tabs to 8-column stops."""
        count = 0
        for c in whitespace:
            if c == '\t':
                count += 8 - count % 8
            else:
                count += 1
        return count

    def atStartOfInput(self):
        return self.getCharIndex() == 0

    def openBrace(self):
        self.opened += 1

    def closeBrace(self):
        self.opened -= 1

    def onNewLine(self):
        # new_line = just the newline chars; spaces = just the indentation.
        new_line = self.NEW_LINE_PATTERN.sub('', self.text)
        spaces = self.SPACES_PATTERN.sub('', self.text)
        # Strip newlines inside open clauses except if we are near EOF. We keep NEWLINEs near EOF to
        # satisfy the final newline needed by the single_put rule used by the REPL.
        next_ = self._input.LA(1)
        next_next = self._input.LA(2)
        # 10/13/35 are the code points of '\n', '\r' and '#': a following
        # blank or comment line also suppresses the NEWLINE token.
        if self.opened > 0 or (next_next != -1 and next_ in (10, 13, 35)):
            self.skip()
        else:
            self.emitToken(self.commonToken(Python3Parser.NEWLINE, new_line))
            indent = self.getIndentationCount(spaces)
            previous = 0 if len(self.indents) == 0 else self.indents[-1]
            if indent == previous:
                self.skip()
            elif indent > previous:
                # Deeper indent: open a block.
                self.indents.append(indent)
                self.emitToken(self.commonToken(Python3Parser.INDENT, spaces))
            else:
                # Shallower indent: close blocks until levels match.
                while len(self.indents) > 0 and self.indents[-1] > indent:
                    self.emitToken(self.createDedent())
                    self.indents.pop()
7432e7fb9ad5199ef3f55e7c85e542eaef4237da | Add pelicanconf_sample.py | lord63/pelican-scribble-hex,lord63/pelican-scribble-hex,lord63/pelican-scribble-hex | pelicanconf_sample.py | pelicanconf_sample.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# The followings are recommanded to set them up.
AUTHOR = ''
SITENAME = ''
SITEURL = ''
SITE_DESCRIPTION = ''
SELF_INTRO = 'A brief introduction about yourself.'
PATH = 'content'
DEFAULT_LANG = ''
THEME = ''
TIMEZONE = ''
LINKS = (('About', '#'),
('Github', '#'),
('Twitter', '#'),
('Your links', '#'),)
DEFAULT_DATE_FORMAT = '%Y-%m-%d'
# Let them be or edit as you want.
DISQUS_SITENAME = ''
GOOGLE_ANALYTICS = ''
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
| mit | Python | |
7eab5ef84db52800912b8cfcc9d631655c002a3f | read pg_hba.conf and resolve DNS names to addresses | aiven/aiven-tools | pg/pg_hba_resolver.py | pg/pg_hba_resolver.py | #!/usr/bin/python
"""
pg_hba_resolver - read pg_hba.conf and resolve DNS names to addresses
Copyright (C) 2016, https://aiven.io/
This file is under the Apache License, Version 2.0.
See http://www.apache.org/licenses/LICENSE-2.0 for details.
Read pg_hba.conf and look for comment lines ending with a '# RESOLVE' tag:
Hostnames on such lines are looked up and pg_hba.conf is updated with
uncommented versions of such lines with the hostname replaced by the
resolved address.
Existing entries for the same names are removed if the name no longer
resolves to them. Postgres is reloaded by sending SIGHUP to the postmaster
if there are any changes.
"""
import os
import re
import signal
import socket
import sys
def update_hba(hba_filename, pid_filename=None):
    """Expand '# ... # RESOLVE' template lines in a pg_hba.conf file.

    Drops previously generated '# RESOLVED: <host>' entries, re-resolves
    each templated hostname to IPv4 addresses, rewrites the file when the
    result differs from the current content, and SIGHUPs the postmaster
    (found via *pid_filename*, defaulting to postmaster.pid next to the
    hba file) so the new rules take effect.
    """
    with open(hba_filename, "r") as fp:
        orig_hba = fp.read().splitlines()

    # Commented-out host lines tagged '# RESOLVE': group 1 is the whole
    # entry, group 2 the hostname field to be replaced by addresses.
    template_re = re.compile(r"^\s*#\s*(host[nosl]*\s+\S+\s+\S+\s+(\S+)\s+.*?)\s*#\s*RESOLVE\s*$")
    last_host_comment = None
    addrmap = {}
    new_hba = []
    for line in orig_hba:
        if last_host_comment and line.endswith(last_host_comment):
            continue  # Drop all previous entries for this host
        # All other lines are included as-is in the config
        new_hba.append(line)
        # If the line matches our template for resolving, look up the host and add all addresses
        match = template_re.match(line)
        if not match:
            continue
        entry = match.group(1)
        hostname = match.group(2)
        if hostname not in addrmap:
            try:
                # Port 5432 is only a placeholder for the lookup; restrict
                # results to IPv4 over TCP. Sorted for stable output.
                addrs = socket.getaddrinfo(hostname, 5432, socket.AF_INET, 0, socket.IPPROTO_TCP)
                addrmap[hostname] = sorted(res[4][0] for res in addrs)
            except socket.gaierror:
                print("Unable to resolve {!r}".format(hostname))
                addrmap[hostname] = []
        last_host_comment = " # RESOLVED: {}".format(hostname)
        for addr in addrmap[hostname]:
            new_hba.append(entry.replace(hostname, addr + "/32") + last_host_comment)

    if not addrmap:
        name_list = "no names"
    else:
        name_list = ", ".join("{!r}".format(name) for name in sorted(addrmap))
    if new_hba == orig_hba:
        print("Looked up {}: no changes to {!r} required".format(name_list, hba_filename))
        return

    # Log a minimal removed/added diff before rewriting the file.
    print("Looked up {}: updating {}".format(name_list, hba_filename))
    for line in sorted(set(orig_hba) - set(new_hba)):
        print("-{}".format(line))
    for line in sorted(set(new_hba) - set(orig_hba)):
        print("+{}".format(line))
    with open(hba_filename, "w") as fp:
        fp.write("\n".join(new_hba))
        fp.write("\n")

    # Reload postgres by signalling the postmaster, if it is running.
    if not pid_filename:
        pid_filename = os.path.join(os.path.dirname(hba_filename), "postmaster.pid")
    if not os.path.exists(pid_filename):
        print("No {!r} found, not reloading postmaster".format(pid_filename))
        return
    with open(pid_filename, "r") as fp:
        # First line of postmaster.pid is the postmaster's PID.
        pid = int(fp.read().splitlines()[0])
    print("Sending SIGHUP to postmaster process {!r}".format(pid))
    os.kill(pid, signal.SIGHUP)
def main(args):
    """CLI entry point: args = [hba_conf_path, optional postmaster_pid_path]."""
    hba_path = args[0]
    pid_path = args[1] if len(args) > 1 else None
    update_hba(hba_filename=hba_path, pid_filename=pid_path)


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
| apache-2.0 | Python | |
93b2d93098c395d866f18e51b6ac42a9ba81a9b5 | Test if C changes with more examples. | charanpald/APGL | exp/modelselect/RealDataSVMExp.py | exp/modelselect/RealDataSVMExp.py | """
Observe if C varies when we use more examples
"""
import logging
import numpy
import sys
import multiprocessing
from apgl.util.PathDefaults import PathDefaults
from apgl.predictors.AbstractPredictor import computeTestError
from exp.modelselect.ModelSelectUtils import ModelSelectUtils
from apgl.util.Sampling import Sampling
from apgl.predictors.LibSVM import LibSVM
import matplotlib.pyplot as plt
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
numpy.seterr(all="raise")
# Fixed seed so the random training subsets are reproducible.
numpy.random.seed(45)

dataDir = PathDefaults.getDataDir()
dataDir += "modelPenalisation/regression/"
outputDir = PathDefaults.getOutputDir() + "modelPenalisation/regression/CART/"

figInd = 0

loadMethod = ModelSelectUtils.loadRegressDataset
datasets = ModelSelectUtils.getRegressionDatasets(True)
# Use a single fixed dataset (index 9) for this experiment.
datasetName = datasets[9][0]

#sampleSizes = numpy.array([50, 100, 200])
sampleSizes = numpy.array([50, 100, 200])
foldsSet = numpy.arange(2, 13, 1)
alpha = 1.0

# Epsilon-SVR hyper-parameter grid: log2-spaced C and gamma, fixed epsilon.
paramDict = {}
paramDict["setC"] = 2.0**numpy.arange(-10, 14, 2, dtype=numpy.float)
paramDict["setGamma"] = 2.0**numpy.arange(-10, 4, 2, dtype=numpy.float)
paramDict["setEpsilon"] = numpy.array([2**-2])

sampleMethod = Sampling.crossValidation
numProcesses = multiprocessing.cpu_count()

# Load a single train/test realisation (j = 0) of the dataset.
j = 0
trainX, trainY, testX, testY = ModelSelectUtils.loadRegressDataset(dataDir, datasetName, j)

learner = LibSVM(kernel='gaussian', type="Epsilon_SVR", processes=numProcesses)

for sampleSize in sampleSizes:
    print("Sample size " +str(sampleSize))
    # Draw a random training subset of the requested size.
    trainInds = numpy.random.permutation(trainX.shape[0])[0:sampleSize]
    validX = trainX[trainInds,:]
    validY = trainY[trainInds]

    folds = 5
    idx = sampleMethod(folds, sampleSize)

    # Test error over the whole (gamma, C) grid for this sample size.
    meanErrors = learner.parallelPenaltyGrid(validX, validY, testX, testY, paramDict, computeTestError)
    meanErrors = numpy.squeeze(meanErrors)

    # One figure per gamma: error vs C, one curve per sample size, so we
    # can see whether the best C shifts as more examples are used.
    for i in range(paramDict["setGamma"].shape[0]):
        plt.figure(i)
        plt.plot(numpy.arange(paramDict["setC"].shape[0]), meanErrors[i, :], label=str(sampleSize))
        plt.legend(loc="upper left")
        plt.xlabel("C")
        plt.ylabel("Error")

plt.show()
d8e0104c92d9457ba60285cb856d8f435e0e08bd | Add initial setup script | Mstrodl/jose,lnmds/jose,Mstrodl/jose | initial-setup.py | initial-setup.py | import pickle
from pathlib import Path
import joseconfig as jcfg
# touch files
Path(jcfg.jcoin_path).touch()
Path('db/jose-data.txt').touch()
def initialize_db(path):
with open(path, 'wb') as f:
pickle.dump({}, f)
# initialize databases
initialize_db(jcfg.jcoin_path)
initialize_db('ext/josememes.db')
| mit | Python | |
40d8cf13bd91b2da43c5cecedcabc8e794f7febd | Add __init__.py to suite/wrappers directory. | deepmind/dm_control | dm_control/suite/wrappers/__init__.py | dm_control/suite/wrappers/__init__.py | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Environment wrappers used to extend or modify environment behaviour."""
| apache-2.0 | Python | |
7515f9de3e1aab11eb6ffae93cbff7290557c6af | Make functions visible | markbrough/maedi-projects,markbrough/maedi-projects,markbrough/maedi-projects | maediprojects/views/codelists.py | maediprojects/views/codelists.py | from flask import Flask, render_template, flash, request, Markup, \
session, redirect, url_for, escape, Response, abort, send_file, jsonify
from flask.ext.login import login_required, current_user
from maediprojects import app, db, models
from maediprojects.query import activity as qactivity
from maediprojects.query import location as qlocation
from maediprojects.lib import codelists
import json
@app.route("/codelists/")
@login_required
def codelists_management():
return render_template("codelists.html",
loggedinuser=current_user,
codelist_codes = codelists.get_db_codelists(),
codelist_names = codelists.get_db_codelist_names()
)
| agpl-3.0 | Python | |
50e4d81b034c930784df2cab36ba3f7ff726d6d8 | Add ETCD implementation for NB API | no2key/dragonflow,openstack/dragonflow,FrankDuan/df_code,FrankDuan/df_code,neoareslinux/dragonflow,openstack/dragonflow,neoareslinux/dragonflow,no2key/dragonflow,FrankDuan/df_code,openstack/dragonflow | dragonflow/db/drivers/etcd_nb_impl.py | dragonflow/db/drivers/etcd_nb_impl.py | # Copyright (c) 2015 OpenStack Foundation.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import etcd
from dragonflow.db import api_nb
class EtcdNbApi(api_nb.NbApi):
    """Northbound DB API backed by an etcd key/value store.

    Layout: chassis records live under /chassis/<name> and port bindings
    under /binding/<lport_id>, each stored as a comma-separated string.
    """

    def __init__(self, db_ip='127.0.0.1', db_port=4001):
        super(EtcdNbApi, self).__init__()
        self.client = None
        self.ip = db_ip
        self.port = db_port

    def initialize(self):
        # Client creation is deferred here so construction has no side effects.
        self.client = etcd.Client(host=self.ip, port=self.port)

    def sync(self):
        pass

    def get_chassis(self, name):
        """Return the chassis record for *name*, or None if unavailable."""
        try:
            chassis_value = self.client.read('/chassis/' + name).value
            return EtcdChassis(chassis_value)
        except Exception:
            # A missing key (or any etcd error) is reported as "no chassis".
            return None

    def get_all_chassis(self):
        res = []
        directory = self.client.get("/chassis")
        for result in directory.children:
            res.append(EtcdChassis(result.value))
        return res

    def add_chassis(self, name, ip, tunnel_type):
        # Stored in the same <name, ip, tunnel_type> CSV layout EtcdChassis parses.
        chassis_value = name + ',' + ip + ',' + tunnel_type
        self.client.write('/chassis/' + name, chassis_value)

    def register_local_ports(self, chassis_name, local_ports_ids):
        """Claim bindings for ports local to this chassis; release the rest.

        Ports in *local_ports_ids* are bound to *chassis_name*; ports this
        chassis holds that are no longer local are marked unbound ('None').
        """
        directory = self.client.get("/binding")
        for binding in directory.children:
            lport = EtcdLogicalPort(binding.value)
            if lport.get_id() in local_ports_ids:
                if lport.get_chassis() == chassis_name:
                    continue  # already bound to us; nothing to write
                lport.set_chassis(chassis_name)
                self.client.write('/binding/' + lport.get_id(),
                                  lport.parse_value())
            elif lport.get_chassis() == chassis_name:
                # Port moved away from this chassis: mark it unbound.
                lport.set_chassis('None')
                self.client.write('/binding/' + lport.get_id(),
                                  lport.parse_value())

    def get_all_logical_ports(self):
        """Return all port bindings that are bound to some chassis."""
        res = []
        directory = self.client.get("/binding")
        for binding in directory.children:
            lport = EtcdLogicalPort(binding.value)
            if lport.get_chassis() is None:
                continue
            res.append(lport)
        return res
class EtcdChassis(api_nb.Chassis):
    """Chassis record parsed from its comma-separated etcd value string."""

    def __init__(self, value):
        # Entry <chassis_name, chassis_ip, chassis_tunnel_type>
        self.values = value.split(',')

    def get_name(self):
        return self.values[0]

    def get_ip(self):
        return self.values[1]

    def get_encap_type(self):
        return self.values[2]
class EtcdLogicalPort(api_nb.LogicalPort):
    """Logical port binding parsed from its comma-separated etcd value."""

    def __init__(self, value):
        # Entry <chassis_name, network, lport, mac, tunnel_key>
        self.values = value.split(',')
        # Transient per-process annotations; never persisted to etcd.
        self.external_dict = {}

    def parse_value(self):
        """Re-serialize the record into the CSV form stored in etcd."""
        return (self.values[0] + ',' + self.values[1] + ','
                + self.values[2] + ','
                + self.values[3] + ',' + self.values[4])

    def set_chassis(self, chassis):
        self.values[0] = chassis

    def get_id(self):
        return self.values[2]

    def get_mac(self):
        return self.values[3]

    def get_chassis(self):
        # The literal string 'None' is the stored sentinel for "unbound".
        chassis = self.values[0]
        if chassis == 'None':
            return None
        return chassis

    def get_network_id(self):
        return self.values[1]

    def get_tunnel_key(self):
        return int(self.values[4])

    def set_external_value(self, key, value):
        self.external_dict[key] = value

    def get_external_value(self, key):
        return self.external_dict.get(key)
476ccea37c0509f55be1bbeb90fdd999e3b3f3b4 | Create bip-0070-payment-protocol.py | petertodd/dust-b-gone | examples/bip-0070-payment-protocol.py | examples/bip-0070-payment-protocol.py | #!/usr/bin/python2.7
#
# bip-0070-payment-protocol.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bip-0070-related functionality
Handles incoming serialized string data in the form of a http request
and returns an appropriate response using googles protocol buffers.
"""
import payments_pb2
o = payments_pb2
import bitcoin
#bitcoin.SelectParams('testnet')
from bitcoin.wallet import CBitcoinAddress
from bitcoin.core.script import CScript
from bitcoin.rpc import Proxy
from time import time
def payment_request(request):
    """Generates a PaymentRequest object"""
    # NOTE(review): HttpResponse is not imported anywhere in this file as
    # shown — presumably django.http.HttpResponse; confirm before use.
    bc = Proxy()
    # Fresh receiving address from the local bitcoind wallet.
    btc = bc.getnewaddress()
    # Setting the 'amount' field to 0 (zero) should prompt the user to enter
    # the amount for us but a bug in bitcoin core qt version 0.9.1 (at time of
    # writing) wrongly informs us that the value is too small and aborts.
    # https://github.com/bitcoin/bitcoin/issues/3095
    # Also there can be no leading 0's (zeros).
    btc_amount = 100000
    serialized_pubkey = btc.to_scriptPubKey()

    # Build the protobuf PaymentDetails and wrap it in a PaymentRequest.
    pdo = o.PaymentDetails()
    pdo.outputs.add(amount = btc_amount,script = serialized_pubkey)
    pdo.time = int(time())
    pdo.memo = 'String shown to user before confirming payment'
    pdo.payment_url = 'http://payment_ack.url'

    pro = o.PaymentRequest()
    pro.serialized_payment_details = pdo.SerializeToString()

    return HttpResponse(pro.SerializeToString(), content_type="application/bitcoin-paymentrequest")
def payment_ack(request):
    """Generates a PaymentACK object, captures client refund address and returns a message"""
    pao = o.PaymentACK()
    # The request body is a serialized Payment message from the wallet.
    pao.payment.ParseFromString(request.body)
    pao.memo = 'String shown to user after payment confirmation'

    # NOTE(review): refund_address is decoded but never used or stored in
    # this excerpt — confirm whether it should be persisted.
    refund_address = CBitcoinAddress.from_scriptPubKey(CScript(pao.payment.refund_to[0].script))

    return HttpResponse(pao.SerializeToString(), content_type="application/bitcoin-paymentack")
| mit | Python | |
5a471d778a8affea5552923a8fbd74a61bcc81f1 | add radiometric_normalization.py - need to test on remote server | planetlabs/radiometric_normalization,planetlabs/radiometric_normalization | radiometric_normalization/radiometric_normalization.py | radiometric_normalization/radiometric_normalization.py | import numpy
from radiometric_normalization.time_stack import time_stack
from radiometric_normalization.pif import pif
from radiometric_normalization.transformation import transformation
from radiometric_normalization.validation import validation
from radiometric_normalization import gimage
def generate_luts(candidate_path, reference_paths, config=None):
    """Build per-band LUTs normalizing a candidate image to the references.

    Pipeline: time stack of *reference_paths* -> pseudo-invariant features
    against *candidate_path* -> per-band transformations -> lookup tables.
    *config* selects the method for each stage; defaults apply when None.
    """
    if config is None:
        config = {'time_stack_method': 'identity',
                  'pif_method': 'identity',
                  'transformation_method': 'linear_relationship'}

    time_stack_image = time_stack.generate(reference_paths,
                                           method=config['time_stack_method'])
    pifs = pif.generate(candidate_path,
                        time_stack_path=time_stack_image,
                        method=config['pif_method'])
    transformations = transformation.generate(
        pifs, method=config['transformation_method'])
    luts = transformation.transformations_to_luts(transformations)
    return luts
def apply_luts(input_path, luts, output_path):
    """Apply one LUT per band of the image at *input_path*, saving the result.

    *luts* must supply one lookup table per band, each matching that
    band's dtype; raises Exception on a dtype mismatch.
    """
    def apply_lut(band, lut):
        'Changes band intensity values based on intensity look up table (lut)'
        if lut.dtype != band.dtype:
            # BUG FIX: previously .format() was called on the Exception
            # instance (outside the constructor), which raised
            # AttributeError instead of the intended message.
            raise Exception(
                "Band ({}) and lut ({}) must be the same data type.".format(
                    band.dtype, lut.dtype))
        # Out-of-range intensities are clipped to the LUT's ends.
        return numpy.take(lut, band, mode='clip')

    img = gimage.load(input_path)
    for i in range(len(img.bands)):
        img.bands[i] = apply_lut(img.bands[i], luts[i])
    img.save(output_path)
def validate_luts(input_path, reference_path, config=None):
    """Score the normalized image against the reference; returns the score."""
    if config is None:
        config = {'validation_method': 'identity'}
    return validation.get_score(input_path,
                                reference_path,
                                method=config['validation_method'])
| apache-2.0 | Python | |
08493dd851a5023057c6f5b3439d3e965b256bf8 | add aux script | robertaboukhalil/ginkgo,robertaboukhalil/ginkgo,robertaboukhalil/ginkgo,robertaboukhalil/ginkgo,robertaboukhalil/ginkgo,robertaboukhalil/ginkgo,robertaboukhalil/ginkgo,robertaboukhalil/ginkgo | genomes/scripts/other/fix_hg19_exons.py | genomes/scripts/other/fix_hg19_exons.py | #!/usr/bin/env python
import numpy as np
import sys
import re
fin = open("genes.hg19.out", 'r')
fout = open("genes.hg19.exons.temp", 'w')
line=fin.readline()
for line in fin:
fields = line.strip().split()
chr = fields[2]
strand = fields[3]
gene = fields[1]
exon_start = fields[9].split(',')[:-1]
exon_end = fields[10].split(',')[:-1]
cnt = len(exon_start)
for i in range(cnt):
fout.write(chr + '\t' + exon_start[i] + '\t' + exon_end[i] + '\t' + gene + '\t' + fields[12] + '\t' + strand + '\n')
| bsd-2-clause | Python | |
7997d02e52172b8ad0e96a845f953f90a6e739b7 | Add VSYNC GPIO output example. | iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,openmv/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,iabdalkader/openmv | scripts/examples/02-Board-Control/vsync_gpio_output.py | scripts/examples/02-Board-Control/vsync_gpio_output.py | # VSYNC GPIO output example.
#
# This example shows how to toggle the IR LED pin on VSYNC interrupt.
import sensor, image, time
from pyb import Pin
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.RGB565) # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.QVGA) # Set frame size to QVGA (320x240)
sensor.skip_frames(time = 2000) # Wait for settings take effect.
# IR LED pin object
ir_led_pin = Pin('LED_IR', Pin.OUT_PP, Pin.PULL_NONE)
# This pin will be toggled on/off on VSYNC (start of frame) interrupt.
sensor.set_vsync_output(ir_led_pin)
clock = time.clock() # Create a clock object to track the FPS.
while(True):
clock.tick() # Update the FPS clock.
img = sensor.snapshot() # Take a picture and return the image.
# Turn off the IR LED after snapshot.
ir_led_pin.off()
print(clock.fps()) # Note: OpenMV Cam runs about half as fast when connected
# to the IDE. The FPS should increase once disconnected.
| mit | Python | |
7d88a914cba0141a0f1b0b35a84e2d82aa7b080e | Create code_3.py | jnimish77/Cloud-Computing-and-Programming-using-various-tools,jnimish77/Cloud-Computing-and-Programming-using-various-tools,jnimish77/Cloud-Computing-and-Programming-using-various-tools | MPI_Practice_Examples/code_3.py | MPI_Practice_Examples/code_3.py | import numpy
import sys
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
randNum = numpy.zeros(1)
# NOTE(review): the MPI rank obtained above is immediately overwritten,
# so every process runs the identical loop below — confirm this is the
# intended "practice" behaviour rather than a bug.
rank = 1
a=input("number of processes ?? \n")
# Each iteration: draw a random number, send it to rank 0, then block
# waiting to receive one back from rank 0.
while(rank<a):
    randNum = numpy.random.random_sample(1)
    print "Process", rank, "draw the number", randNum[0]
    comm.Send(randNum, dest=0)
    comm.Recv(randNum, source=0)
    print "Process", rank+1, "received the number", randNum[0]
    rank = rank +1
    # Final round: the last "process" reports back to process 1.
    if(rank==a):
        randNum = numpy.random.random_sample(1)
        print "Process", rank, "draw the number", randNum[0]
        comm.Send(randNum, dest=0)
        comm.Recv(randNum, source=0)
        print "Process", "1", "received the number", randNum[0]
4d4a862aa81218b788e961916115667a212f42e0 | Add conftest.py to allow skipping slow test. | dask/dask,chrisbarber/dask,ssanderson/dask,PhE/dask,freeman-lab/dask,mikegraham/dask,wiso/dask,wiso/dask,PhE/dask,mraspaud/dask,cowlicks/dask,freeman-lab/dask,vikhyat/dask,jcrist/dask,blaze/dask,jcrist/dask,pombredanne/dask,jayhetee/dask,mraspaud/dask,pombredanne/dask,ContinuumIO/dask,dask/dask,clarkfitzg/dask,simudream/dask,simudream/dask,gameduell/dask,jakirkham/dask,blaze/dask,cpcloud/dask,ContinuumIO/dask,vikhyat/dask,jayhetee/dask,mrocklin/dask,ssanderson/dask,clarkfitzg/dask,jakirkham/dask,mrocklin/dask | conftest.py | conftest.py | import pytest
def pytest_addoption(parser):
    """Register the --runslow command-line flag with pytest."""
    parser.addoption(
        "--runslow",
        action="store_true",
        help="run slow tests",
    )
def pytest_runtest_setup(item):
    """Skip any test marked 'slow' unless --runslow was supplied."""
    run_slow = item.config.getoption("--runslow")
    if 'slow' in item.keywords and not run_slow:
        pytest.skip("need --runslow option to run")
| bsd-3-clause | Python | |
4d0e0a4c7fb70838427212180bb213061f0b67ea | Create config11.py | davidfergusonaz/davidtest,davidfergusonaz/davidtest | config11.py | config11.py | provider "aws" {
access_key = "AKIAJCJUB35JFIDR5XWW"
secret_key = "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"
region = "${var.region}
}
| mit | Python | |
1e79c1580055d2279b1a8523a2b382d98fe6cad3 | Configure minimal django settings | altaurog/django-caspy,altaurog/django-caspy,altaurog/django-caspy | conftest.py | conftest.py | from django.conf import settings
def pytest_configure():
    """Configure a minimal in-memory Django environment for the test run."""
    config = dict(
        INSTALLED_APPS=(
            'caspy',
            'rest_framework',
        ),
        ROOT_URLCONF='caspy.urls',
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            },
        },
        USE_TZ=True,
    )
    settings.configure(**config)
| bsd-3-clause | Python | |
7d29d96385ec9a3274f5e6409e04635e89f8c8c9 | Create find-anagram-mappings.py | tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015 | Python/find-anagram-mappings.py | Python/find-anagram-mappings.py | # Time: O(n)
# Space: O(n)
class Solution(object):
    def anagramMappings(self, A, B):
        """Return an index mapping P with A[i] == B[P[i]] for every i.

        Duplicate values in A consume distinct indices of B in
        first-occurrence order.  Runs in O(n) time and space.

        :type A: List[int]
        :type B: List[int]
        :rtype: List[int]
        """
        # Local import: the original relied on `collections` being imported
        # elsewhere, which this file never does.
        import collections
        # Map each value of B to a queue of the positions where it occurs.
        lookup = collections.defaultdict(collections.deque)
        for i, n in enumerate(B):
            lookup[n].append(i)
        return [lookup[n].popleft() for n in A]
| mit | Python | |
da9ed4dacbeaf7f8ec3873b658547a215f9d6920 | Create __init__.py | coryjog/anemoi,coryjog/anemoi,coryjog/anemoi | anemoi/io/__init__.py | anemoi/io/__init__.py | mit | Python | ||
a228f5874d6d419a6333b91abceaf2c50843f92e | Create multObjShapeUpdate.py | aaronfang/personal_scripts | af_scripts/tmp/multObjShapeUpdate.py | af_scripts/tmp/multObjShapeUpdate.py | import maya.cmds as cmds
def multObjShapeImport():
files_to_import = cmds.fileDialog2(fileFilter = '*.obj', dialogStyle = 2, caption = 'import multiple object files', fileMode = 4,okc="Import")
for file_to_import in files_to_import:
object_name = file_to_import.split('/')[-1].split('.obj')[0]
returnedNodes = cmds.file('%s' % file_to_import, i = True, type = "OBJ", rnn=True, ignoreVersion = True, options = "mo=0", loadReferenceDepth = "all" )
cmds.delete(cmds.ls(returnedNodes,type="objectSet"))
geo = cmds.listRelatives(cmds.ls(returnedNodes,g=1)[0],p=1)
cmds.rename( geo, object_name)
def multObjShapeUpdate():
sel_objs = cmds.ls(sl=True,fl=True)
if len(sel_objs)>0:
files_to_import = cmds.fileDialog2(fileFilter = '*.obj', dialogStyle = 2, caption = 'import multiple object files', fileMode = 4,okc="Import")
if len(files_to_import) == len(sel_objs):
object_names = [file_to_import.split('/')[-1].split('.obj')[0] for file_to_import in files_to_import]
if len(sel_objs) == len([x for x in object_names if x in sel_objs]):
for file_to_import in files_to_import:
object_name = file_to_import.split('/')[-1].split('.obj')[0]
returnedNodes = cmds.file('%s' % file_to_import, i = True, type = "OBJ", rnn=True, ignoreVersion = True, options = "mo=0", loadReferenceDepth = "all" )
cmds.delete(cmds.ls(returnedNodes,type="objectSet"))
geo = cmds.listRelatives(cmds.ls(returnedNodes,g=1)[0],p=1)
cmds.rename( geo, "newShape_{0}".format(object_name))
new_shapes = [s for s in cmds.listRelatives(cmds.ls(g=1),p=1) if "newShape_" in s]
cur_shapes = sel_objs
for new in new_shapes:
for cur in cur_shapes:
if new.split("newShape_")[1] == cur:
blendshapeNd = cmds.blendShape(new,cur)[0]
cmds.setAttr("{0}.{1}".format(blendshapeNd,new),1)
cmds.delete(cur_shapes,ch=True)
cmds.delete(new_shapes)
cmds.confirmDialog(m="---===All Shapes Updated!===---")
else:
cmds.confirmDialog(m="--==Not Matching The Name!==--")
else:
cmds.confirmDialog(m="--==Please Select The Same Number Of Objects!==--")
else:
cmds.confirmDialog(m="--==Please Select Something!==--")
multObjShapeUpdate()
| mit | Python | |
8056aa34ac52a09952e3588605ce0e1a8642e29a | Create Final-Project.py | phstearns/Final-Project | Final-Project.py | Final-Project.py | mit | Python | ||
4a1644452b7ddf8e18a57e6520bf7be8b060b7f7 | Add Sorting Comparator solution | denisrmp/hacker-rank | algorithms/sorting/sorting_comparator.py | algorithms/sorting/sorting_comparator.py | # https://www.hackerrank.com/challenges/ctci-comparator-sorting/problem
from functools import cmp_to_key
class Player:
def __init__(self, name, score):
self.name = name
self.score = score
def comparator(a, b):
if a.score == b.score:
if a.name >= b.name:
return 1
else:
return -1
elif a.score > b.score:
return -1
else:
return 1
n = int(input())
data = []
for i in range(n):
name, score = input().split()
score = int(score)
player = Player(name, score)
data.append(player)
data = sorted(data, key=cmp_to_key(Player.comparator))
for i in data:
print(i.name, i.score)
| mit | Python | |
15d21e8ce24e8058db26035e192ee2ba240c7184 | Update clusters tasks | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | api/clusters/tasks.py | api/clusters/tasks.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import logging
from polyaxon_k8s.manager import K8SManager
from api.settings import CeleryTasks, CeleryRoutedTasks
from api.celery_api import app as celery_app
logger = logging.getLogger('polyaxon.tasks.clusters')
@celery_app.task(name=CeleryTasks.CLUSTERS_UPDATE_SYSTEM_INFO, time_limit=150)
def update_system_info():
    """Refresh the cluster record's stored Kubernetes API version if it changed."""
    from clusters.utils import get_cluster

    reported_version = K8SManager(in_cluster=True).get_version()
    cluster = get_cluster()
    if cluster.version_api != reported_version:
        cluster.version_api = reported_version
        cluster.save()
@celery_app.task(name=CeleryTasks.CLUSTERS_UPDATE_SYSTEM_NODES, time_limit=150)
def update_system_nodes():
    """Sync the ClusterNode rows with the nodes reported by Kubernetes.

    Nodes no longer reported are marked not current, newly reported nodes are
    created, and nodes present on both sides have their fields refreshed.
    """
    from clusters.models import ClusterNode
    from clusters.utils import get_cluster

    k8s_manager = K8SManager(in_cluster=True)
    incoming = {item.metadata.name: item for item in k8s_manager.list_nodes()}
    cluster = get_cluster()

    to_update = []
    stale = []
    for db_node in cluster.nodes.all():
        if db_node.name in incoming:
            to_update.append((db_node, incoming.pop(db_node.name)))
        else:
            stale.append(db_node)

    # Phase 1: flag database nodes that Kubernetes no longer reports.
    for db_node in stale:
        db_node.is_current = False
        db_node.save()

    # Phase 2: create rows for nodes seen for the first time.
    for item in incoming.values():
        fields = ClusterNode.from_node_item(item)
        fields['cluster'] = cluster
        ClusterNode.objects.create(**fields)

    # Phase 3: refresh the fields of nodes present on both sides.
    for db_node, item in to_update:
        for attr, value in ClusterNode.from_node_item(item).items():
            setattr(db_node, attr, value)
        db_node.save()
@celery_app.task(name=CeleryTasks.CLUSTERS_UPDATE_SYSTEM_NODES_GPUS, time_limit=150)
def update_system_node_gpus():
    # Placeholder task: collecting per-node GPU information is not
    # implemented yet; the task is registered so the route exists.
    pass
| apache-2.0 | Python | |
ffed6fb5b853f621af0e242abf00d97defc2a4a8 | add audio setup routine | swb1701/AlexaPi | audio.py | audio.py | #
# Set Up Audio Pairing with Alexa from a Pi
#
import os
import sys
import secrets as s
from pulsectl import Pulse
def run(cmd):
    """Echo a shell command to stdout, then execute it via os.system."""
    message = "Executing:" + cmd
    print(message)
    os.system(cmd)
def btctl(cmd):
    """Pipe a single command (followed by 'quit') into bluetoothctl."""
    print("Sending "+cmd+" to bluetoothctl...")
    script = 'echo -e "'+cmd+'\nquit\n"|bluetoothctl'
    os.system(script)
# Ensure a PulseAudio daemon is reachable; if not, try to start one and
# reconnect the Echo over bluetooth before giving up.
try:
    p=Pulse()
except:
    print("Couldn't find pulseaudio running ... trying to fix that...")
    run("pulseaudio --start &")
    try:
        p=Pulse()
        btctl("connect "+s.ECHO_BT_ADDRESS)
    except:
        print("Sorry, couldn't get pulseaudio running")
        sys.exit(0)
print("We've verified pulse is running")
# NOTE(review): 'cards' is assigned but never used -- the loop below
# re-queries card_list(); consider removing this line.
cards=p.card_list()
# Switch every bluetooth audio card to the a2dp profile.
for card in p.card_list():
    if card.name.startswith("bluez_card"):
        idx=card.index
        print("Found a bluetooth audio source as card #"+str(idx))
        run("pactl set-card-profile "+str(idx)+" a2dp")
print("I've ensured that the bluetooth card is using a2dp profile")
# Make the first sink whose description starts with "Echo" the default output.
for sink in p.sink_list():
    if sink.description.startswith("Echo"):
        print("Found "+sink.description+" as sink #"+str(sink.index))
        run("pactl set-default-sink "+str(sink.index))
print("I've set the Echo is the default sink")
| mit | Python | |
b2a4709eae73786b40b4d0f58b1e02075ce023b3 | Create blink.py | brice-morin/resin-blink-python | blink.py | blink.py | import RPi.GPIO as GPIO
import time
# blinking function
def blink(pin):
    """Pulse the given board pin: one second high, then one second low."""
    for level in (GPIO.HIGH, GPIO.LOW):
        GPIO.output(pin, level)
        time.sleep(1)
# to use Raspberry Pi board pin numbers
GPIO.setmode(GPIO.BOARD)
# set up GPIO output channel
GPIO.setup(11, GPIO.OUT)
# blink board pin 11 (GPIO17 in BCM numbering) 50 times, 1s on / 1s off
for i in range(0,50):
    blink(11)
# release the GPIO resources so the pins return to their default state
GPIO.cleanup()
| mit | Python | |
bae50ccc70a077944c92738faf2009df28ae75a7 | Add Python boilerplate | foxscotch/advent-of-code,foxscotch/advent-of-code | bp/bp.py | bp/bp.py | # Python 3.6.1
# Read the puzzle input and split it on whitespace into a list of tokens.
with open('input.txt', 'r') as f:
    puzzle_input = f.read().split()
# Code here
| mit | Python | |
2585b44484b175bb116c228496069cc4269440c0 | Add python tests for cosine squared angles | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | hoomd/md/test-py/test_angle_cosinesq.py | hoomd/md/test-py/test_angle_cosinesq.py | # -*- coding: iso-8859-1 -*-
# Maintainer: joaander
from hoomd import *
from hoomd import md
context.initialize()
import unittest
import os
import numpy
# tests md.angle.cosinesq
class angle_cosinesq_tests (unittest.TestCase):
    """Unit tests for the cosine-squared angle potential (md.angle.cosinesq)."""

    def setUp(self):
        """Build a 40-particle system containing 10 three-particle angles."""
        print
        snap = data.make_snapshot(N=40,
                                  box=data.boxdim(L=100),
                                  particle_types = ['A'],
                                  bond_types = [],
                                  angle_types = ['angleA'],
                                  dihedral_types = [],
                                  improper_types = [])
        if comm.get_rank() == 0:
            snap.angles.resize(10);
            # place 4 particles per group along x with small random offsets
            for i in range(10):
                x = numpy.array([i, 0, 0], dtype=numpy.float32)
                snap.particles.position[4*i+0,:] = x;
                x += numpy.random.random(3)
                snap.particles.position[4*i+1,:] = x;
                x += numpy.random.random(3)
                snap.particles.position[4*i+2,:] = x;
                x += numpy.random.random(3)
                snap.particles.position[4*i+3,:] = x;
                snap.angles.group[i,:] = [4*i+0, 4*i+1, 4*i+2];
        init.read_snapshot(snap)
        context.current.sorter.set_params(grid=8)

    # test to see that we can create an md.angle.cosinesq
    def test_create(self):
        md.angle.cosinesq();

    # test setting coefficients
    def test_set_coeff(self):
        cosinesq = md.angle.cosinesq();
        cosinesq.angle_coeff.set('angleA', k=1.0, t0=0.78125)
        all = group.all();
        md.integrate.mode_standard(dt=0.005);
        md.integrate.nve(all);
        run(100);

    # test coefficient not set checking
    def test_set_coeff_fail(self):
        # Fixed: this previously instantiated md.angle.harmonic() (copy-paste
        # from the harmonic test), so the missing-coefficient check was
        # exercised against the wrong potential.
        cosinesq = md.angle.cosinesq();
        all = group.all();
        md.integrate.mode_standard(dt=0.005);
        md.integrate.nve(all);
        self.assertRaises(RuntimeError, run, 100);

    def tearDown(self):
        context.initialize();
# Allow running this test module directly, outside the hoomd test harness.
if __name__ == '__main__':
    unittest.main(argv = ['test.py', '-v'])
| bsd-3-clause | Python | |
cbbe6f4709763c44ca0185f7e9127a0737525aff | add test_iterator_example.py | alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl | tests/test_iterator_example.py | tests/test_iterator_example.py | #!/usr/bin/env python
import alphatwirl
import unittest
##____________________________________________________________________________||
def genFunc():
    """Yield the three sample values 101, 102 and 103, in that order."""
    for value in (101, 102, 103):
        yield value
##____________________________________________________________________________||
class IteClass(object):
    """An iterable that mutates its own `val` and yields itself three times."""

    def __init__(self):
        self.val = 100

    def __iter__(self):
        # each step updates val before handing the same instance back out
        for value in (101, 102, 103):
            self.val = value
            yield self
##____________________________________________________________________________||
class TestExampleGeneratorFunction(unittest.TestCase):
    """Demonstrates how a generator function behaves under iteration."""

    def test_genFunc_Iteration(self):
        self.assertEqual([101, 102, 103], list(genFunc()))

    def test_genFunc_ManualIteration(self):
        gen = genFunc()
        # a generator is its own iterator
        self.assertIs(iter(gen), gen)
        for expected in (101, 102, 103):
            self.assertEqual(expected, next(gen))
        self.assertRaises(StopIteration, next, gen)
##____________________________________________________________________________||
class TestExampleIterableObject(unittest.TestCase):
    """Demonstrates an iterable object that mutates itself while iterating."""

    def test_iteObj_Iteration(self):
        self.assertEqual([101, 102, 103], [obj.val for obj in IteClass()])

    def test_iteObj_ManualIteration(self):
        ite_obj = IteClass()
        self.assertEqual(100, ite_obj.val)
        it = iter(ite_obj)
        # every next() yields the same instance with an updated val
        for expected in (101, 102, 103):
            yielded = next(it)
            self.assertIs(ite_obj, yielded)
            self.assertEqual(expected, yielded.val)
        self.assertRaises(StopIteration, next, it)
##____________________________________________________________________________||
| bsd-3-clause | Python | |
ea4e0742317b2b26dc8fa9ddca79b1179f301329 | Add protocol module | dotoscat/Polytank-ASIR | protocol.py | protocol.py | #Copyright (C) 2017 Oscar Triano 'dotoscat' <dotoscat (at) gmail (dot) com>
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as
#published by the Free Software Foundation, either version 3 of the
#License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#You should have received a copy of the GNU Affero General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
class Client:
    """Builds binary messages sent from the client to the server.

    Each message begins with a one-byte message-type identifier.
    """

    CONNECT = 0
    MOVE = 1

    def __init__(self):
        # One unsigned byte in network byte order for the message type.
        # Fixed: the original format "!c" requires a length-1 bytes object,
        # so packing the integer CONNECT always raised struct.error.
        self._connect = struct.Struct("!B")

    def connect(self):
        """Return the packed CONNECT message."""
        return self._connect.pack(Client.CONNECT)
class Server:
    # Message-type identifier for server-originated messages.
    CREATE_TANK = 0
# Module-level Client instance shared by importers for packing messages.
client = Client()
| agpl-3.0 | Python | |
9912b5a8fd981bae9ab003eb8386643661918cde | fix name OUtsideBrightness Sensor | k-team/KHome,k-team/KHome,k-team/KHome | all_module/OutsideBrightnessSensor.py | all_module/OutsideBrightnessSensor.py | from twisted.internet import reactor
import core.module
import core.fields
import core.fields.io
import core.fields.persistant
import time
class OutsideBrightness(core.module.Base):
    # Poll/update interval for the sensor -- presumably seconds; TODO confirm
    # against core.module.Base's contract.
    update_rate = 10
    # Readable light-sensor field exposed by this module.
    class Brightness(
        core.fields.sensor.Light,
        core.fields.io.Readable,
        core.fields.Base):
        pass
| mit | Python | |
6b142bc64f5966c695907692c29f52fce808a78d | add poll demo for linux platform | ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study | network/echo-server/echo-poll/lnx_poll.py | network/echo-server/echo-poll/lnx_poll.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list ofconditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materialsprovided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import socket
import select
def main():
    # Echo server: listens on port 5555 and echoes back whatever each
    # client sends, multiplexing all sockets with poll().
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_socket.bind(('', 5555))
    server_socket.listen(5)
    poll = select.poll()
    poll.register(server_socket.fileno(), select.POLLIN)
    # file descriptor -> connected client socket
    connections = {}
    while True:
        # wait up to 10 seconds (milliseconds argument) for readiness events
        events = poll.poll(10000)
        for fileno, event in events:
            if fileno == server_socket.fileno():
                # readiness on the listening socket means a new client
                (client_socket, client_address) = server_socket.accept()
                print 'got connection from', client_address
                poll.register(client_socket.fileno(), select.POLLIN)
                connections[client_socket.fileno()] = client_socket
            elif event & select.POLLIN:
                client_socket = connections[fileno]
                data = client_socket.recv(4096)
                if data:
                    # echo the payload straight back
                    client_socket.send(data)
                else:
                    # empty read: the peer closed the connection
                    poll.unregister(fileno)
                    client_socket.close()
                    del connections[fileno]
# Start the echo server when the module is run as a script.
if __name__ == '__main__':
    main()
| bsd-2-clause | Python | |
d168256dd4b75375770b3391f716ceaba2cf722e | Add scrapper of Bureau of Labor Statistis Employment status | lexieheinle/python-productivity | cpsScrap.py | cpsScrap.py | #user/local/bin/python
#uses python3
import urllib.request
from bs4 import BeautifulSoup
url = "http://www.bls.gov/cps/cpsaat01.htm" #access the search term through website
page = urllib.request.urlopen(url).read()
# NOTE(review): no parser is passed to BeautifulSoup, so the default may vary
# between environments -- consider BeautifulSoup(page, 'html.parser').
soup = BeautifulSoup(page)
tables = soup.findAll('table') #find all tables
#print(tables)
# the main employment-status table is identified by its id attribute
mainTable = soup.find(id="cps_eeann_year")
print(mainTable)
# print each table's caption for reference
for table in tables:
    caption = table.find('caption')
    print(caption)
data = [] #create holder for results
rows = mainTable.findAll('tr')
print(rows)
# skip the header row, then collect every th/td cell's text per row
for row in rows[1:]:
    dataRow = [] #create smaller list for each row
    for th in row.findAll('th'):
        dataRow.append(th.text)
    for td in row.findAll('td'):
        dataRow.append(td.text)
    data.append(dataRow)
# drop the last row -- presumably a footer/footnote row; TODO confirm
data.pop()
print(data) | mit | Python | |
7bdfc081cee0326d54c667bed8f870427fe2eeb6 | Add new file createDB.py | HeLanDou/free-time,HeLanDou/free-time,HeLanDou/free-time,HeLanDou/free-time,HeLanDou/free-time | createDB.py | createDB.py | #!/usr/bin/python
#coding=utf-8
import MySQLdb
# Connect to the local MySQL database.  NOTE(review): credentials are
# hard-coded in source control; consider moving them to configuration.
db = MySQLdb.connect("localhost", "root", "soeasy", "free_time")
cursor = db.cursor();
# Recreate the schedule table from scratch (fails if 'lib' does not exist).
cursor.execute("drop table lib");
# One boolean column per time slot: weekdays have two morning slots, two
# afternoon slots and one evening slot; weekend days have one of each.
cursor.execute('''
create table lib (
id char(8) not null,
name varchar(10) not null,
Mon_m1 bool,
Mon_m2 bool,
Mon_a1 bool,
Mon_a2 bool,
Mon_e bool,
Tue_m1 bool,
Tue_m2 bool,
Tue_a1 bool,
Tue_a2 bool,
Tue_e bool,
Wed_m1 bool,
Wed_m2 bool,
Wed_a1 bool,
Wed_a2 bool,
Wed_e bool,
Thur_m1 bool,
Thur_m2 bool,
Thur_a1 bool,
Thur_a2 bool,
Thur_e bool,
Fri_m1 bool,
Fri_m2 bool,
Fri_a1 bool,
Fri_a2 bool,
Fri_e bool,
Sat_m bool,
Sat_a bool,
Sat_e bool,
Sun_m bool,
Sun_a bool,
Sun_e bool,
primary key(id) );''')
# NOTE(review): no db.close() here -- DDL autocommits in MySQL, but closing
# the connection explicitly would be cleaner.
cursor.close()
| mit | Python | |
eebe75ffbe39e7f8f91c3e3a425c7058f7bd8f01 | Write some preliminary code for undersampling component | tensorflow/tfx-addons | projects/component.py | projects/component.py | from tfx import v1 as tfx
from tfx.types import artifact_utils
from tfx.utils import io_utils
from tfx.components.util import tfxio_utils
from tfx.dsl.component.experimental.decorators import component
import apache_beam as beam
import random
@component
def UndersamplingComponent(
    examples: tfx.dsl.components.InputArtifact[tfx.types.standard_artifacts.Examples]
) -> tfx.dsl.components.OutputArtifact[tfx.types.standard_artifacts.Examples]:
    # Balance the dataset by undersampling: each key group is randomly
    # downsampled to the size of the smallest group, then written out as
    # gzipped TFRecords.
    # examples = artifact_utils.get_single_instance(examples.outputs["examples"]._artifacts)
    tfxio_factory = tfxio_utils.get_tfxio_factory_from_artifact(examples=[examples], telemetry_descriptors=[])
    split_and_tfxio = [(split, tfxio_factory(io_utils.all_files_pattern(artifact_utils.get_split_uri([examples], split))))
                        for split in artifact_utils.decode_split_names(examples.split_names)]
    def generate_elements(data):
        # Flatten a column-oriented pydict batch into one dict per example;
        # empty feature lists become "".
        for i in range(len(data[list(data.keys())[0]])):
            yield {key: data[key][i][0] if len(data[key][i]) > 0 else "" for key in data.keys()}
    def sample(key, value, side=0):
        # Randomly draw `side` items from this key's group.
        for item in random.sample(value, side):
            yield item
    # NOTE(review): only the first split is processed; remaining splits are
    # silently dropped.
    split, tfxio = split_and_tfxio[0]
    # TODO: alter this code to account for multiple files in the uri
    # NOTE(review): uri[3] positionally rewrites one path segment and the
    # grouping key "company" below is hard-coded -- both assume a specific
    # pipeline layout/schema; confirm before reuse.
    uri = examples.uri.split("/")
    uri[3] = "UndersamplingComponent"
    uri.append(f"Split-{split}")
    uri = "/".join(uri)
    with beam.Pipeline() as p:
        data = (
            # TODO: convert to list and back using a schema to save key space?
            p
            | 'TFXIORead[%s]' % split >> tfxio.BeamSource()
            | beam.Map(lambda x: x.to_pydict())
            | beam.FlatMap(generate_elements)
            | beam.Map(lambda x: (x["company"], x))
        )
        # size of the smallest key group, used as a singleton side input
        val = (
            data
            | beam.combiners.Count.PerKey()
            | beam.Values()
            | beam.CombineGlobally(lambda elements: min(elements or [-1]))
        )
        res = (
            data
            | beam.GroupByKey()
            | beam.FlatMapTuple(sample, side=beam.pvalue.AsSingleton(val))
            | beam.io.WriteToTFRecord(uri, file_name_suffix='.gz')
        )
| apache-2.0 | Python | |
d377867ea501c4d9dae1f5c3ce1efc02f0a9639b | check Python version | pympler/pympler,pympler/pympler,Khan/pympler,Khan/pympler,yunjianfei/pympler,yunjianfei/pympler,swiftstack/pympler | pympler/sizer/__init__.py | pympler/sizer/__init__.py |
# check supported Python version
import sys
if getattr(sys, 'hexversion', 0) < 0x2020000:
raise NotImplementedError('sizer requires Python 2.2 or newer')
from asizeof import *
| from asizeof import *
| apache-2.0 | Python |
39de01462baf3db60c5a0f5d8a3b529f798730ab | Add script to check the performance | studiawan/pygraphc | pygraphc/bin/Check.py | pygraphc/bin/Check.py | import csv
from os import listdir
from pygraphc.evaluation.ExternalEvaluation import ExternalEvaluation
# read result and ground truth
result_dir = '/home/hudan/Git/pygraphc/result/improved_majorclust/Kippo/per_day/'
groundtruth_dir = '/home/hudan/Git/labeled-authlog/dataset/Kippo/attack/'
result_files = listdir(result_dir)
# open evaluation file
f = open('check.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('file_name', 'tp', 'fp', 'fn', 'tn', 'specificity', 'precision', 'recall', 'accuracy')
writer.writerow(header)
for result_file in result_files:
if result_file.endswith('.anomaly.perline.txt'):
filename = result_file.split('.anomaly')[0]
print filename
groundtruth_file = groundtruth_dir + filename + '.attack'
# check confusion matrix
true_false, specificity, precision, recall, accuracy = \
ExternalEvaluation.get_confusion(groundtruth_file, result_dir + result_file)
# write evaluation result to file
row = (filename, true_false[0], true_false[1], true_false[2], true_false[3],
specificity, precision, recall, accuracy)
writer.writerow(row)
f.close()
| mit | Python | |
477d310ca2add1a5fd539159592f36ca626502f0 | add way to read geotiffs | jrising/research-common,jrising/research-common,jrising/research-common,jrising/research-common | python/geotiffgrid.py | python/geotiffgrid.py | import gdal
from spacegrid import SpatialGrid
class GeotiffGrid(SpatialGrid):
def __init__(self, filepath):
ds = gdal.Open(filepath)
x0_corner, sizex, zero1, y0_corner, zero2, sizey = ds.GetGeoTransform()
band = ds.GetRasterBand(1)
array = band.ReadAsArray()
self.array = array
super(BinaryGrid, self).__init__(x0_corner, y0_corner, sizex, sizey, array.shape[2], array.shape[1])
def getll_raw(self, latitude, longitude):
return self.array[self.rowcol(latitude, longitude)]
| mit | Python | |
8da927a0a196301ce5fb2ef2224e556b4d414729 | Add solution for counting DNA nucleotides | MichaelAquilina/rosalind-solutions | problem1.py | problem1.py | from collections import Counter
if __name__ == '__main__':
with open('data/rosalind_dna.txt', mode='r') as f:
sequence = f.read()
counts = Counter(sequence)
print '%d %d %d %d' % (counts['A'], counts['C'], counts['G'], counts['T'])
| mit | Python | |
572c8bdc1b18620857db9f61386fed5234bce957 | Create searchBook.py | frank-cq/Toys | searchBook.py | searchBook.py | # sudo apt install python-lxml,python-requests
from lxml import html
import requests
urlPrefix = 'https://book.douban.com/subject/'
candidateBookNums = []
candidateBookNums.append('3633461')
selectedBooks = {}
# i = 1
# Breadth-first crawl over Douban book pages starting from one seed id.
while candidateBookNums:
    bookNum = candidateBookNums.pop(0)
    bookUrl = urlPrefix + str(bookNum)
    # fetch the page
    page = requests.get(bookUrl)
    # parse the page into an element tree
    tree = html.fromstring(page.text)
    # book title
    bookName = tree.xpath('//title/text()')
    # average rating
    rating_num = tree.xpath('//strong[@property="v:average"]/text()')[0]
    # number of raters
    rating_people = tree.xpath('//a/span[@property="v:votes"]/text()')[0]
    # NOTE(review): rating_num and rating_people are strings here, so these
    # are string-vs-int comparisons -- convert with float()/int() first.
    if rating_num < 8 or rating_people < 800:
        continue
    stars = tree.xpath('//span[@class="rating_per"]/text()')
    # share of 5-star ratings
    stars5 = stars[0]
    # share of 4-star ratings
    stars4 = stars[1]
    # share of 3-star ratings
    stars3 = stars[2]
    # share of 2-star ratings
    stars2 = stars[3]
    # share of 1-star ratings
    stars1 = stars[4]
    # links to other books referenced on this Douban page
    links = tree.xpath('//div[@class="content clearfix"]/dl/dd/a/@href')
    # strip whitespace (newlines, spaces, indentation) from the title
    bookName = bookName[0].strip()
    # collect this book's rating information
    book = {
        'name':bookName,
        'score':rating_num,
        'rating_people':rating_people,
        'stars5':stars5,
        'stars4':stars4,
        'stars3':stars3,
        'stars2':stars2,
        'stars1':stars1,
    }
    selectedBooks[bookNum] = book
    print bookName,book
    # queue every not-yet-seen book id for crawling
    for j in links:
        bookNum = j.split('/')[-2]
        if bookNum not in selectedBooks.keys() and bookNum not in candidateBookNums:
            candidateBookNums.append(bookNum)
    # i += 1
    # if i > 100:
    #     break
print selectedBooks
| apache-2.0 | Python | |
a2f6a399c643b89c73aca2335b938f584cd572d5 | create Page object to better organize Links | rivergillis/crawler | page.py | page.py | from bs4 import BeautifulSoup, SoupStrainer
from link import Link
import requests
class Page(object):
    """A fetched web page plus the set of Link objects it points to."""
    def __init__(self, full_hyperlink, links=None):
        self.full_hyperlink = full_hyperlink
        self.links = links
        # This doesn't feel great, maybe pull root_url creation method out of Link?
        self.domain = Link("#null", self.full_hyperlink).root_url
    def get_full_hyperlink(self):
        return self.full_hyperlink
    def get_links(self):
        # lazily populate the link set on first access
        if not self.links:
            self.create_links()
        return self.links
    def create_links(self):
        """
        this method creates a set of links by downloading the html and searching for link tags
        :return: a set of Link objects
        """
        try:
            response = requests.get(self.full_hyperlink)
        # Note: This will be catching an SSL Error
        except IOError:
            # Attempt to visit the http instead of https site
            # NOTE(review): slicing [5:] assumes the url starts with "https" -- confirm
            response = requests.get("http" + self.full_hyperlink[5:])
        soup = BeautifulSoup(response.content, "html.parser", parse_only=SoupStrainer('a'))
        links = []
        for link in soup:
            if link.has_attr('href'):
                # skip in-page fragment links
                if not (link['href'].startswith("#")):
                    links.append(str(link['href']))
        self.links = {Link(link_str, self.full_hyperlink) for link_str in links}
| mit | Python | |
5178b104993401f47b1c4d8e3c796bef379e389e | Add migration for `communities` app. | letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click | letsmeet/communities/migrations/0011_auto_20160318_2240.py | letsmeet/communities/migrations/0011_auto_20160318_2240.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-18 21:40
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated: AlterModelManagers records the Community model's
    # manager state in migration history (an empty managers list here).
    dependencies = [
        ('communities', '0010_auto_20160108_1618'),
    ]
    operations = [
        migrations.AlterModelManagers(
            name='community',
            managers=[
            ],
        ),
    ]
| mit | Python | |
bb04b6771ccc39d86ea099f595d64c64ee2974f8 | Add a cookbook recipe for DipoleMagDir class | cmeessen/fatiando,cmeessen/fatiando,rafaelmds/fatiando,mtb-za/fatiando,drandykass/fatiando,rafaelmds/fatiando,mtb-za/fatiando,eusoubrasileiro/fatiando,santis19/fatiando,santis19/fatiando,eusoubrasileiro/fatiando,victortxa/fatiando,eusoubrasileiro/fatiando_seismic,fatiando/fatiando,drandykass/fatiando,eusoubrasileiro/fatiando_seismic,victortxa/fatiando,eusoubrasileiro/fatiando,fatiando/fatiando | cookbook/gravmag_magdir_dipolemagdir.py | cookbook/gravmag_magdir_dipolemagdir.py | """
GravMag: Use the DipoleMagDir class to estimate the magnetization direction
of dipoles with known centers
"""
import numpy
from fatiando import mesher, gridder
from fatiando.utils import ang2vec, contaminate
from fatiando.gravmag import sphere
from fatiando.vis import mpl
from fatiando.gravmag.magdir import DipoleMagDir
# Make noise-corrupted synthetic data
inc, dec = -10.0, -15.0 # inclination and declination of the Geomagnetic Field
# two spherical sources with known centers and distinct magnetization vectors
model = [mesher.Sphere(3000, 3000, 1000, 1000,
                       {'magnetization': ang2vec(6.0, -20.0, -10.0)}),
         mesher.Sphere(7000, 7000, 1000, 1000,
                       {'magnetization': ang2vec(10.0, 3.0, -67.0)})]
area = (0, 10000, 0, 10000)
x, y, z = gridder.scatter(area, 1000, z=-150, seed=0)
# total-field anomaly of the model, contaminated with noise (level 5.0)
tf = contaminate(sphere.tf(x, y, z, model, inc, dec), 5.0)
# Give the centers of the dipoles
centers = [[3000, 3000, 1000], [7000, 7000, 1000]]
# Estimate the magnetization vectors
solver = DipoleMagDir(x, y, z, tf, inc, dec, centers).fit()
# Print the estimated dipole monents, inclinations and declinations
print '[dipole moment, inclination, declination]'
for i in range(len(centers)):
    print solver.estimate_[i]
# Plot the fit and the normalized histogram of the residuals
mpl.close("all")
mpl.figure(figsize=(14, 5))
mpl.subplot(1, 2, 1)
mpl.title("Total Field Anomaly (nT)", fontsize = 14)
mpl.axis('scaled')
# observed (red, solid) vs predicted (blue, dashed) anomaly contours
nlevels = mpl.contour(y, x, tf, (50,50), 15, interp=True, color='r', label='Observed',
                      clabel=True, style='solid', linewidth=2.0)
mpl.contour(y, x, solver.predicted(), (50,50), nlevels, interp=True, color='b',
            label='Predicted', clabel=True, style='dashed', linewidth=2.0)
mpl.legend(loc='upper left', shadow=True, prop={'size':13})
mpl.xlabel('East y (m)', fontsize = 14)
mpl.ylabel('North x (m)', fontsize = 14)
mpl.subplot(1, 2, 2)
residuals_mean = numpy.mean(solver.residuals())
residuals_std = numpy.std(solver.residuals())
# Each residual is subtracted from the mean and the resulting
# difference is divided by the standard deviation
s = (solver.residuals() - residuals_mean)/residuals_std
mpl.hist(s, bins=21, range=None, normed=True, weights=None,
         cumulative=False, bottom=None, histtype='bar', align='mid',
         orientation='vertical', rwidth=None, log=False,
         color=None, label=None)
mpl.xlim(-4, 4)
mpl.title("mean = %.3f std = %.3f" % (residuals_mean, residuals_std),
          fontsize = 14)
mpl.ylabel("P(z)", fontsize = 14)
mpl.xlabel("z", fontsize = 14)
mpl.show()
| bsd-3-clause | Python | |
5c0b2d662b08f49b5de1393c7db9826203e842e8 | add tool for count words | jasonwbw/jason_putils | putils/tools/words_length.py | putils/tools/words_length.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# count how much word in one line, get the statistics result
#
# @Author : Jasonwbw@yahoo.com
import sys
des = '<input file name> [-p parts] [-c threshold]' + \
'\n\tcompute average word count for each line, default will print out int average length, standard variance, max and min value, total line. ' + \
'\n\tAnd divide the line into [parts] by word count, parts must large than 1 and odd, default is 2.' + \
'\n\tAnd you can give threshold to count how much line\'s word count less than it.'
def check_params(argv):
    """Return True when at least the input-file argument was supplied;
    otherwise print the usage description (`des`) and return False."""
    if len(argv) < 1:
        print 'leak params, please use like:', des
        return False
    return True
def handle(argv, opts):
    """Compute and print per-line word-count statistics for argv[0].

    opts may carry -p (number of histogram parts) and -c (threshold for
    counting lines with fewer words than it).
    """
    # check the params is right or not
    if not check_params(argv):
        return
    # params
    filename = argv[0]
    parts = 2
    threshold = -1
    if opts != None:
        for opt, arg in opts:
            if opt == '-p':
                parts = int(arg)
                if parts % 2 != 0:
                    print '[Error] parts should be odd.'
                    return
            elif opt == '-c':
                threshold = int(arg)
    # var for count
    total_words, min_words, max_words = 0, sys.float_info.max, 0.0
    min_s, max_s = '', ''
    # first pass: total, minimum and maximum word counts per line
    with open(argv[0], 'r') as fp:
        line_num = 0
        for line in fp:
            sentence = line.strip()
            count = sentence.count(' ') + 1
            if count < min_words:
                min_words, min_s = count, sentence
            if count > max_words:
                max_words, max_s = count, sentence
            total_words += count
            line_num += 1
    # statistics var
    avg, variance = float(total_words) / line_num, 0
    # bucket widths below/above the average (half of `parts` on each side)
    small_one_part, large_one_part = int((avg - min_words) / (parts / 2)) + 1, int((max_words - avg) / (parts / 2)) + 1,
    part_count = [0] * parts
    threshold_count = 0
    # second pass: variance, per-bucket counts and the threshold count
    with open(argv[0], 'r') as fp:
        for line in fp:
            count = line.strip().count(' ') + 1
            variance += (count - avg) ** 2
            if count > avg:
                part_count[parts / 2 + int(count - avg) / large_one_part] += 1
            else:
                part_count[int(avg - count) / small_one_part] += 1
            if threshold != -1 and count < threshold:
                threshold_count += 1
    # print out result
    print 'The file is total', line_num, 'line.'
    print 'average word count for one line is' , avg
    print 'min word count', min_words
    print 'max word count', max_words
    print 'standard variance is', (variance / float(line_num)) ** 0.5
    print '\ntake the line into', parts, 'parts'
    print_table(min_words, small_one_part, max_words, large_one_part, part_count)
    if threshold != -1:
        print '\nthere are', threshold_count, 'line\'s word count less than', threshold
def print_table(min_words, small_one_part, max_words, large_one_part, part_count):
for i in xrange(len(part_count) / 2):
print 'smaller ' + str(min_words + small_one_part * i) + '-' + str(min_words + small_one_part * (i + 1)) + '\t' + str(part_count[i])
for i in xrange(len(part_count) / 2, 0, -1):
print 'larger ' + str(max_words - large_one_part * i) + '-' + str(max_words - large_one_part * (i - 1)) + '\t' + str(part_count[len(part_count) - i])
if __name__ == '__main__':
    import getopt
    # Python 2 trick: reload(sys) restores sys.setdefaultencoding, which
    # site.py deletes at interpreter start-up.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    try:
        # -p <parts>: number of histogram buckets; -c <threshold>: count
        # lines with fewer than <threshold> words.  Both options take an
        # argument (the trailing ':' in the optstring).
        opts, args = getopt.getopt(sys.argv[1:], 'c:p:', [])
        handle(args, opts)
    except getopt.GetoptError:
        sys.exit(2)
eee85e5157d69cee515c01fa0f638b064de74a6e | Add a script to graph problem reports over time by transport mode | mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport | script/graph-reports-by-transport-mode.py | script/graph-reports-by-transport-mode.py | #!/usr/bin/python
# A script to draw graphs showing the number of reports by transport
# type each month. This script expects to find a file called
# 'problems.csv' in the current directory which should be generated
# by:
# DIR=`pwd` rake data:create_problem_spreadsheet
import csv
import datetime
from collections import defaultdict
import matplotlib.pyplot as plt
import itertools
# Transport modes we break down by; each gets its own (year, month) -> count
# map, with missing months implicitly zero thanks to defaultdict.
transport_types = 'Bus', 'Train', 'Tram', 'Ferry'
counts = dict((t, defaultdict(int)) for t in transport_types)
# Track the range of months seen; start both bounds at the current month.
today = datetime.date.today()
latest_month = earliest_month = (today.year, today.month)
maximum_count = -1
# First pass over the exported spreadsheet: tally reports per month and mode.
with open('problems.csv') as fp:
    for row in csv.DictReader(fp, delimiter=',', quotechar='"'):
        # The 'Created' column looks like "14:05 03 Jan 2012".
        created = datetime.datetime.strptime(row['Created'], '%H:%M %d %b %Y')
        ym = (created.year, created.month)
        earliest_month = min(earliest_month, ym)
        # One report can mention several modes; count it once per mode.
        modes = row['Transport mode']
        for transport_type in transport_types:
            if transport_type in modes:
                counts[transport_type][ym] += 1
                maximum_count = max(maximum_count, counts[transport_type][ym])
def months_between(earlier, later):
    """Yield successive (year, month) tuples from earlier to later.

    The first month (`earlier`) is always yielded, even if it already
    exceeds `later`; iteration stops once the next month would pass
    `later`.
    """
    year, month = earlier
    while True:
        yield (year, month)
        month += 1
        if month > 12:
            year += 1
            month = 1
        if (year, month) > later:
            return
# Months to chart, oldest first.  The most recent month is dropped because
# its data would be incomplete.
all_months = list(months_between(earliest_month, latest_month))[:-1]
months = len(all_months)
# Touch every (mode, month) pair so months with no reports plot as zero
# (adding 0 inserts the key into the defaultdict without changing counts).
for mode_counts in counts.values():
    for ym in all_months:
        mode_counts[ym] += 0
# One bar chart per transport mode, written to <mode>.png.  All charts
# share the same y-axis limit so they are visually comparable.
for transport_type in transport_types:
    plt.figure()
    month_counts = counts[transport_type]
    y = [month_counts[ym] for ym in all_months]
    x_labels = ["%d-%d" % ym for ym in all_months]
    plt.bar(range(months), y)
    plt.xticks(range(months), x_labels, size='small', rotation=60)
    plt.xlim(0, months)
    plt.ylim(0, maximum_count)
    plt.title(transport_type + ' issue report counts per month on FixMyTransport')
    plt.ylabel('Number of problems or campaigns')
    plt.savefig(transport_type.lower() + ".png", dpi=100)
| agpl-3.0 | Python | |
bdbf5f538ea15360004a4efe769b629c3032b4bb | add admin view for eventschedule | dimagi/rapidsms,dimagi/rapidsms | lib/rapidsms/contrib/scheduler/admin.py | lib/rapidsms/contrib/scheduler/admin.py | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from django.contrib import admin
from .models import EventSchedule
class EventScheduleAdmin(admin.ModelAdmin):
    """Django admin options for the EventSchedule model (defaults only)."""
    model = EventSchedule
# Expose EventSchedule in the Django admin site using the options above.
admin.site.register(EventSchedule, EventScheduleAdmin)
| bsd-3-clause | Python | |
7bfd677d1f4fce45b657e201ea5dfc639974cd16 | add script to generate color space matrices | Ryp/Reaper,Ryp/Reaper,Ryp/Reaper,Ryp/Reaper | tools/convert-rgb-space-xyz.py | tools/convert-rgb-space-xyz.py | #!/usr/bin/env python
#
# This program allows to generate matrices to convert between RGB spaces and XYZ
# All hardcoded values are directly taken from the ITU-R documents
#
# NOTE: When trying to convert from one space to another, make sure the whitepoint is the same,
# otherwise math gets more complicated (see Bradford transform).
#
# See also:
# http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html
# https://ninedegreesbelow.com/photography/xyz-rgb.html
import numpy
def xy_to_XYZ(xy):
    """Convert a CIE xy chromaticity pair to XYZ with Y normalised to 1."""
    x, y = xy
    return [x / y, 1.0, (1.0 - x - y) / y]
def compute_rgb_to_zyx_matrix(whitepoint_XYZ, R, G, B):
    """Derive the 3x3 matrix converting linear RGB to XYZ.

    whitepoint_XYZ -- XYZ tristimulus of the white point (Y normalised to 1)
    R, G, B        -- xy chromaticities of the red, green and blue primaries

    The columns of the result are the XYZ coordinates of the primaries,
    scaled so that RGB = (1, 1, 1) maps exactly onto the white point.
    """
    primaries = numpy.array([R, G, B], dtype=float)
    x, y = primaries[:, 0], primaries[:, 1]
    # XYZ of each primary with Y normalised to 1; one primary per column
    # (rows are the X, Y and Z components respectively).
    m = numpy.array([x / y, numpy.ones(3), (1.0 - x - y) / y])
    # Per-primary scale factors S such that m @ S == whitepoint_XYZ, i.e.
    # RGB(1,1,1) lands on the white point.  linalg.solve is numerically
    # preferable to forming the explicit inverse and multiplying.
    S = numpy.linalg.solve(m, whitepoint_XYZ)
    # Scaling column j of m by S[j] yields the final RGB -> XYZ matrix.
    return m * S
def d65_XYZ():
    """Return the XYZ tristimulus (Y = 1) of the CIE D65 white point."""
    return xy_to_XYZ([0.3127, 0.3290])
def compute_srgb_to_xyz():
    """RGB -> XYZ matrix for sRGB (BT.709 primaries, D65 white point)."""
    return compute_rgb_to_zyx_matrix(
        d65_XYZ(),
        [0.640, 0.330],   # red primary
        [0.300, 0.600],   # green primary
        [0.150, 0.060])   # blue primary
def compute_rec709_rgb_to_xyz():
    """RGB -> XYZ matrix for ITU-R BT.709 (same primaries as sRGB), D65."""
    return compute_rgb_to_zyx_matrix(
        d65_XYZ(),
        [0.640, 0.330],   # red primary
        [0.300, 0.600],   # green primary
        [0.150, 0.060])   # blue primary
def compute_rec2020_rgb_to_xyz():
    """RGB -> XYZ matrix for ITU-R BT.2020 wide-gamut primaries, D65."""
    return compute_rgb_to_zyx_matrix(
        d65_XYZ(),
        [0.708, 0.292],   # red primary
        [0.170, 0.797],   # green primary
        [0.131, 0.046])   # blue primary
def compute_display_p3_rgb_to_xyz():
    """RGB -> XYZ matrix for Display P3 (DCI-P3 primaries with D65 white)."""
    return compute_rgb_to_zyx_matrix(
        d65_XYZ(),
        [0.680, 0.320],   # red primary
        [0.265, 0.690],   # green primary
        [0.150, 0.060])   # blue primary
def compute_full_transform(A_to_XYZ, B_to_XYZ):
    """Print the matrix converting colour space A to B (via XYZ) and its inverse.

    Both spaces must share the same white point for the simple chain
    XYZ_to_B @ A_to_XYZ to be valid (no chromatic adaptation is applied).
    """
    forward = numpy.linalg.inv(B_to_XYZ).dot(A_to_XYZ)
    print(f'M\n{forward}')
    backward = numpy.linalg.inv(forward)
    print(f'M-1\n{backward}')
if __name__ == '__main__':
    # Fixed-point output without scientific notation, for easy copy/paste
    # into shader or engine source.
    numpy.set_printoptions(precision = 10, suppress = True, floatmode = 'fixed')
    # Example: the matrices converting between Rec.709 and Rec.2020
    # (both use the D65 white point, so no adaptation step is needed).
    compute_full_transform(compute_rec709_rgb_to_xyz(), compute_rec2020_rgb_to_xyz())
| mit | Python | |
26d56afb094db4b471ec6bd6d5e496e0f9b547d0 | check all dataset shapes | dswah/pyGAM | pygam/tests/test_datasets.py | pygam/tests/test_datasets.py | # -*- coding: utf-8 -*-
import numpy as np
import pytest
from pygam.datasets import cake
from pygam.datasets import coal
from pygam.datasets import default
from pygam.datasets import faithful
from pygam.datasets import hepatitis
from pygam.datasets import mcycle
from pygam.datasets import trees
from pygam.datasets import wage
from pygam.datasets import __all__ as DATASETS
def _test_dataset(dataset_loader, n_rows, n_columns_X, n_columns_df, n_rows_X=None):
"""check the length of the dataset is the same regardless of the transformation
check the columns of the dataset are correct in X_y and as a DataFrame
check the transformation is correct
check dtype is float for X_y
check ndim for X is 2
Parameters
----------
dataset_loader : function, returns a dataframe or a tuple of arrays
n_rows : int, expected number of rows in dataset
n_columns_X : int, expected number of columns in the transformed
dataset independent variables
n_columns_df : int, expected number of columns in the original
dataset dataframe
n_rows_X : None, or int, expected number of rows in the transformed
dataset independent variables if different from the original.
This is usually necessary for datasets that use histogram transforms
Returns
-------
None
"""
if n_rows_X is None:
n_rows_X = n_rows
df = dataset_loader(return_X_y=False)
X_y = dataset_loader(return_X_y=True)
# number of rows never changes
assert df.shape[0] == n_rows
assert X_y[0].shape[0] == X_y[1].shape[0] == n_rows_X
# check columns
assert df.shape[1] == n_columns_df
assert X_y[0].shape[1] == n_columns_X
# check dtype
assert X_y[0].dtype == X_y[1].dtype == 'float'
# check shape
assert X_y[0].ndim == 2
def test_cake():
    """Cake dataset: 270 rows; X has 3 columns, the DataFrame 5."""
    _test_dataset(cake, n_rows=270, n_columns_X=3, n_columns_df=5)
def test_coal():
    """Coal dataset: 191 rows, but X/y have 150 (row count changes in X)."""
    _test_dataset(coal, n_rows=191, n_columns_X=1, n_columns_df=1, n_rows_X=150)
def test_default():
    """Default dataset: 10000 rows; X has 3 columns, the DataFrame 4."""
    _test_dataset(default, n_rows=10000, n_columns_X=3, n_columns_df=4)
def test_faithful():
    """Faithful dataset: 272 rows, but X/y have 200 (row count changes in X)."""
    _test_dataset(faithful, n_rows=272, n_columns_X=1, n_columns_df=2, n_rows_X=200)
def test_hepatitis():
    """Hepatitis dataset: 86 rows, but X/y have 83 (row count changes in X)."""
    _test_dataset(hepatitis, n_rows=86, n_columns_X=1, n_columns_df=3, n_rows_X=83)
def test_mcycle():
    """Mcycle dataset: 133 rows; X has 1 column, the DataFrame 2."""
    _test_dataset(mcycle, n_rows=133, n_columns_X=1, n_columns_df=2)
def test_trees():
    """Trees dataset: 31 rows; X has 2 columns, the DataFrame 3."""
    _test_dataset(trees, n_rows=31, n_columns_X=2, n_columns_df=3)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.