commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
fb16bb12e12fd820856ca0397a2cb4a857d2c7ca | chatbot function | src/tuling.py | src/tuling.py | # coding:utf-8
import requests
def chatbot(body):
resp = requests.post("http://www.tuling123.com/openapi/api", data={
# "key": "d59c41e816154441ace453269ea08dba",
"key": "ff772ad12e0c421f98da2dd7f6a9289c",
"info": body,
"userid": "xiaopeng"
})
resp = resp.json()
return resp
| Python | 0.999999 | |
b090a41b42e82bb8cabd6fcde26ecdd8434e7757 | Add a script to setup the DB auth for a simple scenario | setup/db_auth.py | setup/db_auth.py | # This script sets up authentication with the following setup
# - one ADMIN_USER
# - one RW_USER
# - one RO_USER with createIndex privileges
import sys
import pymongo
import argparse
import traceback
# Variables to change
DB_HOST="localhost"
ADMIN_USERNAME="test-admin"
ADMIN_PASSWORD="test-admin-pw"
RW_USERNAME="test-rw"
RW_PASSWORD="test-rw-pw"
RO_USERNAME="test-ro"
RO_PASSWORD="test-ro-pw"
class SetupDBAuth(object):
def __init__(self):
# At this point, there is no authentication
pass
def getURL(self, username, password):
return ("mongodb://%s:%s@%s/admin?authMechanism=SCRAM-SHA-1" %
(username, password, DB_HOST))
# First set up the admin user
# We will open a new connection instead of using the configured URL because
# that may change later to include a username and password
def setupAdminUser(self):
self.admin_default = pymongo.MongoClient(DB_HOST).admin
create_result = self.admin_default.command(
{
"createUser": ADMIN_USERNAME,
"pwd": ADMIN_PASSWORD,
"roles": [ { "role": "userAdminAnyDatabase", "db": "admin" } ]
}
)
self.admin_auth = pymongo.MongoClient(self.getURL(ADMIN_USERNAME, ADMIN_PASSWORD)).admin
print("Created admin user, result = %s" % create_result)
print("At current state, list of users = %s" % self.admin_auth.command({"usersInfo": 1}))
def teardownAdminUser(self):
try:
self.admin_default = pymongo.MongoClient(DB_HOST).admin
drop_result = self.admin_default.command(
{
"dropUser": ADMIN_USERNAME
}
)
self.admin_auth = None
print("Dropped admin user, result = %s" % drop_result)
print("At current state, list of users = %s" % self.admin_default.command({"usersInfo": 1}))
except Exception as e:
traceback.print_exc(limit=5, file=sys.stdout)
print("Error while dropping admin user, skipping")
def setupRWUser(self):
create_result = self.admin_auth.command(
{
"createUser": RW_USERNAME,
"pwd": RW_PASSWORD,
"roles": [ { "role": "readWrite", "db": "Stage_database" } ]
}
)
print("Created RW user, result = %s" % create_result)
print("At current state, list of users = %s" % self.admin_auth.command({"usersInfo": 1}))
def teardownRWUser(self):
try:
drop_result = self.admin_auth.command(
{
"dropUser": RW_USERNAME,
}
)
print("Dropped RW user, result = %s" % drop_result)
print("At current state, list of users = %s" % self.admin_auth.command({"usersInfo": 1}))
except Exception as e:
traceback.print_exc(limit=5, file=sys.stdout)
print("Error while dropping RW user, skipping")
def setupROUser(self):
self.stagedb_auth = pymongo.MongoClient(
self.getURL(ADMIN_USERNAME, ADMIN_PASSWORD)).Stage_database
create_role_result = self.stagedb_auth.command(
{
"createRole": "createIndex",
"privileges": [
{ "resource": { "db": "Stage_database", "collection": "" },
"actions": [ "createIndex"] }
],
"roles": []
}
)
print("Created new role, result = %s" % create_role_result)
print("At current state, list of roles = %s" %
self.stagedb_auth.command({ "rolesInfo": 1, "showBuiltinRoles": False, "showPrivileges": True}))
create_result = self.admin_auth.command(
{
"createUser": RO_USERNAME,
"pwd": RO_PASSWORD,
"roles": [ { "role": "readWrite", "db": "Stage_database" } ]
}
)
print("Created RO user, result = %s" % create_result)
print("At current state, list of users = %s" % self.admin_auth.command({"usersInfo": 1}))
def teardownROUser(self):
try:
self.stagedb_auth = pymongo.MongoClient(
self.getURL(ADMIN_USERNAME, ADMIN_PASSWORD)).Stage_database
drop_role_result = self.stagedb_auth.command(
{
"dropRole": "createIndex"
}
)
print("Dropped new role, result = %s" % drop_role_result)
print("At current state, list of roles = %s" %
self.stagedb_auth.command({ "rolesInfo": 1, "showBuiltinRoles": False, "showPrivileges": True}))
except Exception as e:
traceback.print_exc(limit=5, file=sys.stdout)
print("Error while dropping role, skipping")
try:
drop_result = self.admin_auth.command(
{
"dropUser": RO_USERNAME
}
)
print("Dropped RO user, result = %s" % drop_result)
print("At current state, list of users = %s" % self.admin_auth.command({"usersInfo": 1}))
except Exception as e:
traceback.print_exc(limit=5, file=sys.stdout)
print("Error while dropping ro user, skipping")
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog="db_auth", epilog="Run this script against a database without authentication - e.g. mongod *without* --auth")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-s", "--setup",
help="create users and roles in the database", action='store_true')
group.add_argument("-t", "--teardown",
help="remove users and roles created by this script from the database.", action='store_true')
args = parser.parse_args()
sad = SetupDBAuth()
if args.setup:
sad.setupAdminUser()
sad.setupRWUser()
sad.setupROUser()
else:
assert(args.teardown == True)
sad.admin_auth = pymongo.MongoClient(sad.getURL(ADMIN_USERNAME, ADMIN_PASSWORD)).admin
sad.teardownROUser()
sad.teardownRWUser()
sad.teardownAdminUser()
| Python | 0.999999 | |
0a079e378fcb4994a2e1fd9bc8c71e22d4342901 | Generate HTML snippet for Jekyll | kuveja-html.py | kuveja-html.py | #!/usr/bin/env python3
import json
FILE = 'kuveja.json'
PREFIX = 'http://joneskoo.kapsi.fi/kuveja/'
HEADER = """---
layout: main
title: joneskoon kuvafeedi
---"""
HTML = """<div class="kuva">
<h3>%(title)s</h3>
<img src="%(url)s" alt="%(title)s" />
</div>"""
with open(FILE) as f:
data = json.load(f)
print(HEADER)
for d in data[0:10]:
title = d['file']
url = PREFIX + d['file']
print(HTML % vars()) | Python | 0.999999 | |
7303672fe9cf98c22afd83ae6c0dd7a136f4e5c8 | Create hyperventilate.py | hyperventilate.py | hyperventilate.py | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 02 14:39:39 2015
@author: William Herrera
IMPORTANT: run as administrator
Color breathing package for G20aj series PC
"""
import light_acpi as la
from time import sleep
def make_rgb(cred, cgreen, cblue):
"""
make rgb for components
"""
redval = int(round(cred))
greenval = int(round(cgreen))
blueval = int(round(cblue))
ret = int(redval * 0x10000 + greenval * 0x100 + blueval)
if ret < 0:
ret = 0
if ret > 0x00ffffff:
ret = 0x00ffffff
return ret
def split_rgb(color):
"""
split rgb into red, green, blue
"""
red = (color & 0xff0000) / 0x10000
green = (color & 0xff00) / 0x100
blue = color & 0xff
return red, green, blue
def make_ratios(cred, cgreen, cblue):
"""
get ratios of colors
"""
maxcolor = max(cred, cgreen, cblue)
return float(cred)/float(maxcolor), \
float(cgreen)/float(maxcolor), \
float(cblue)/float(maxcolor)
def make_gamut(color):
"""
make a sequence of 256 colors
"""
sequence256 = []
cred, cgreen, cblue = split_rgb(color)
rred, rgreen, rblue = make_ratios(cred, cgreen, cblue)
for step in range(256):
tred = float(step) * rred
tgreen = float(step) * rgreen
tblue = float(step) * rblue
sequence256.append(make_rgb(tred, tgreen, tblue))
return sequence256, sequence256.index(color)
def dim_up_down_up_sequence(gamut, idex, frames):
"""
up color intensity to full
"""
# initial compiled list is size 512
cseq = gamut[idex:] + gamut[::-1] + gamut[0:idex]
# adjust size
reframed = []
ratio = 512.0 / frames
for frameidx in range(frames):
gamut_pos = int(round(frameidx * ratio))
reframed.append(cseq[gamut_pos])
return reframed
def run_color_sequence(lighting, colors, sleepinterval):
"""
run a breathing type change sequence through once
sleepinterval is in seconds or fractions of seconds
"""
for colr in colors:
lighting.set_color(colr)
sleep(sleepinterval)
def continuous_cycle(lighti, startcolor, frames=32, sleepinterval=0.1):
"""
breathe in color saturation
"""
gam, orig_idx = make_gamut(startcolor)
seq = dim_up_down_up_sequence(gam, orig_idx, frames)
while True:
run_color_sequence(lighti, seq, sleepinterval)
def run_triple_sequence(lightlist, colorlist, sleeptime):
"""
do all 3 lights given list of all 3
"""
lli = lightlist[0]
rli = lightlist[1]
bli = lightlist[2]
lcol = colorlist[0]
rcol = colorlist[1]
bcol = colorlist[2]
for idx in range(len(lcol)):
lli.set_color(lcol[idx])
rli.set_color(rcol[idx])
bli.set_color(bcol[idx])
sleep(sleeptime)
def all_cycle(scolors, frames=32, sleepinterval=0.1):
"""
all LED lighting do continuous cycle breathing
"""
# make light and color lists
lights = [la.ASUSLighting(la.DPATH, la.LEFT_VERTICAL), \
la.ASUSLighting(la.DPATH, la.RIGHT_VERTICAL), \
la.ASUSLighting(la.DPATH, la.BASE_HORIZONTAL)]
clists = []
for idx in range(len(lights)):
gam, orig_idx = make_gamut(scolors[idx])
seq = dim_up_down_up_sequence(gam, orig_idx, frames)
clists.append(seq)
while True:
run_triple_sequence(lights, clists, sleepinterval)
if __name__ == '__main__':
SCOL = 0x1111ff
LLIGHT = la.ASUSLighting(la.DPATH, la.LEFT_VERTICAL)
continuous_cycle(LLIGHT, SCOL)
| Python | 0.000254 | |
e30f1c21363e96801f75ee6bf913d2ea3366d8f2 | add browserid | settings.py | settings.py | LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'h98sp-!d$d15*u+-1^_&)!v5t)d^u(mhm-ksmi!$-raz+=wlv+'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django_browserid.context_processors.browserid_form',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sunoss.urls'
# Python dotted path to the WSGI application used by Django's runserver.
#WSGI_APPLICATION = 'sunoss.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django_browserid',
)
AUTHENTICATION_BACKENDS = (
'django_browserid.auth.BrowserIDBackend',
)
try:
from local_settings import *
except ImportError:
pass
| LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'h98sp-!d$d15*u+-1^_&)!v5t)d^u(mhm-ksmi!$-raz+=wlv+'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sunoss.urls'
# Python dotted path to the WSGI application used by Django's runserver.
#WSGI_APPLICATION = 'sunoss.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
#'django.contrib.admin',
)
try:
from local_settings import *
except ImportError:
pass
| Python | 0.000001 |
260c7942b21329fd0190ebb0d0381283afa39afb | Add Nice Module | bitbots_common/src/bitbots_common/nice.py | bitbots_common/src/bitbots_common/nice.py | #-*- coding:utf-8 -*-
"""
Nice
^^^^
Dieses Modul vereinfacht die Manipulation des Nicelevels
"""
import os
import time
import multiprocessing
import rospy
class Nice(object):
"""
Diese Klasse stellt methoden bereit, um die relevanz des aktuellen
Prozesses zu verändern.
Wenn das runtersetzen des Nicelevels (hochsetzen der Priorität) nicht
möglich ist wird aus gründen der sicherheit nichts getan.
"""
def __init__(self):
#config = get_config()
config = {}
config['nice'] = True # TODO
if config['nice']:
try:
self.nice_ok = True
self.level = self.get_nice()
self.__test_nice()
except: # behandlung um intern consistent zu bleiben
self.nice_ok = False
raise # fehler weiterwerfen
else:
rospy.logwarn("Nice is disabled in Config")
self.nice_ok = False
def __test_nice(self):
"""
Testet ob das runtersetzen des nicelevels möglich ist
"""
try:
# wenn wir hier unterbrochen werden passieren dumme dinge
try:
self.set_nice(-1, True)
self.set_nice(1, True)
self.nice_ok = 1
except OSError:
# dann der lange weg
try:
os.popen("sudo -n renice -n %d -p %d 2> /dev/null" %
(-1, multiprocessing.current_process().pid))
except OSError:
#irgentwass schiefgegangen, der rest fängt es ab...
pass
time.sleep(1)
if not (self.get_nice() == -1):
# wir dürfen nicht.... :(
self.nice_ok = False
rospy.logwarn(
"Nicelevel darf nicht reduziert werden, disable Nice")
else:
self.nice_ok = 2
self.set_normal(True)
except:
# dann gehts nicht
# wenn wir hier landen wird es hoffentlich meist ein
# KeybordInterrupt sein, wenn wir es nicht auf false setzen
# passieren unter umständen komische dinge
self.nice_ok = False
raise # weitergeben des fehlers...
def get_active(self):
"""
:return: Ob das Modul aktiv ist.
:return type: boolean
"""
return self.nice_ok
def get_nice(self):
"""
:return: Das aktuelle Nicelevel
:return type: int
"""
self.level = os.nice(0)
rospy.loginfo("niceines: %d" % self.level)
return self.level
def set_nice(self, change, silence=False):
"""
Verändert das Nicellevel um change
:param change: die Änderung des Nicelevels
:type change: int
:param silence: Supress debugmessages
:type silence: boolean
:return: True wenn erfolgt
:return type: boolean
.. hint:: Wenn :func:`get_nice` == False wird ''nichts'' getan
(außer einer debug warning)
"""
if self.nice_ok:
if self.nice_ok == 1:
self.level = os.nice(change)
else:
os.popen("sudo -n renice -n %d -p %d 2> /dev/null" %
(self.level + change,
multiprocessing.current_process().pid))
time.sleep(1)
self.get_nice()
if not silence:
rospy.loginfo("Set Nicelevel to %d" % self.level)
return True
else:
rospy.logwarn("Setzen von Nice nicht möglich")
return False
def set_realtime(self):
"""
Setzt die Priorität auf "Realtime"
"""
if self.nice_ok:
return self.set_level(-20)
else:
rospy.logwarn("Set (Soft-) Realtime Priorität nicht möglich!")
return False
def set_normal(self, silence=False):
"""
Setzt die Prioritöt auf Normal
"""
return self.set_level(0, silence)
def set_level(self, level, silence=False):
"""
Setzt das nice level auf level
:param level: das Level auf das die Priorität gesetzt werden soll
:type level: int
"""
return self.set_nice((self.level - level) * (-1), silence)
| Python | 0 | |
0ec3fd40c85f2a61eee5960031318c7f5ab06bc5 | Allow whitelisted shell calls in transforms | humfrey/update/transform/shell.py | humfrey/update/transform/shell.py | import logging
import subprocess
import tempfile
from django.conf import settings
from .base import Transform, TransformException
SHELL_TRANSFORMS = getattr(settings, 'SHELL_TRANSFORMS', {})
logger = logging.getLogger(__name__)
class Shell(Transform):
def __init__(self, name, extension, params):
self.shell = SHELL_TRANSFORMS[name]
self.extension = extension
def execute(self, transform_manager, input):
params = self.params.copy()
if 'store' not in params:
params['store'] = transform_manager.store.slug
popen_args = [input if arg is None else arg.format(params) for arg in self.shell]
with open(transform_manager(self.extension), 'w') as output:
with tempfile.TemporaryFile() as stderr:
transform_manager.start(self, [input])
returncode = subprocess.call(popen_args, stdout=output, stderr=stderr)
if stderr.tell():
stderr.seek(0)
logger.warning("Shell warnings:\n\n%s\n", stderr.read())
if returncode != 0:
logger.error("Shell transform failed with code %d", returncode)
raise TransformException
transform_manager.end([output.name])
return output.name
| Python | 0.000001 | |
eae844f96417ce0ec32fada7737a2d4ae8b03497 | Add commandline tool | twitch.py | twitch.py | import re
import argparse
import urllib
import json
import os
from twitchapi import TwitchAPI
from twitchapi.twitch import TwitchToken
TOKEN_FILE='.access_token'
AUTH_SETTINGS_FILE='.auth_settings'
args_pattern = re.compile(r'code=(?P<code>.*?)&scope=(?P<scopes>.*?)')
def get_auth_settings(auth_settings_file):
fp = open(auth_settings_file)
return json.load(fp)
def auth_url(client_id, redirect_uri, scopes):
base_url = (u'https://api.twitch.tv/kraken/oauth2/authorize'
u'?response_type=code')
return base_url + u'&' + urllib.urlencode({'client_id':client_id,
'redirect_uri':redirect_uri, 'scope':scopes}, True)
def make_token(token_file):
if os.path.isfile(token_file):
json_token = json.load(open(token_file))
return TwitchToken(json_token['access_token'],
json_token['scope'])
return None
def make_client(token_file):
return TwitchAPI(make_token(token_file))
def clear_token(token_file):
if os.path.isfile(token_file):
os.remove(token_file)
def prompt(name, default=None):
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = raw_input(prompt)
if rv:
return rv
if default is not None:
return default
def auth(twitch_client, token_file, auth_settings_file):
auth_settings = get_auth_settings(auth_settings_file)
print 'Navigate to: %s' % auth_url(auth_settings['client_id'],
auth_settings['redirect_uri'], auth_settings['scopes'])
args = {}
while not args:
args_text = urllib.unquote(
prompt('Args (copy the text after the ? in the url)'))
m = args_pattern.match(args_text)
if m:
args['code'] = m.group('code')
args['scopes'] = m.group('scopes').split()
args = {
'client_id':auth_settings['client_id'],
'client_secret':auth_settings['client_secret'],
'grant_type':'authorization_code',
'redirect_uri':auth_settings['redirect_uri'],
'code':args['code']
}
resp, con = twitch_client.post('oauth2/token', args=args)
token = json.loads(con)
clear_token(token_file)
json.dump(token, open(TOKEN_FILE, 'w'))
def check(twitch_client, token_file):
if os.path.isfile(token_file):
resp, con = twitch_client.get('/')
d = json.loads(con)
if d['token']['valid']:
print ('Authenticated! Scopes: %s' %
d['token']['authorization']['scopes'])
return
print 'Not authenticated!'
clear_token(token_file)
def update(twitch_client, channel, status, game):
resp, con = twitch_client.update_channel(channel, status, game)
if resp.status != 200:
print 'Error occurred!'
print resp, con
else:
print 'Update successful.'
def channel_info(twitch_client):
print twitch_client.my_channel()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--token-file', default=TOKEN_FILE, dest='token_file')
parser.add_argument('--auth-settings-file', default=AUTH_SETTINGS_FILE,
dest='auth_settings_file')
subparsers = parser.add_subparsers(dest='subparser_name')
auth_parser = subparsers.add_parser('auth')
check_parser = subparsers.add_parser('check')
up_parser = subparsers.add_parser('update')
up_parser.add_argument('channel', type=str)
up_parser.add_argument('--status', type=str)
up_parser.add_argument('--game', type=str)
channel_info_parser = subparsers.add_parser('channel_info')
args = parser.parse_args()
twitch_client = make_client(args.token_file)
if args.subparser_name == 'auth':
auth(twitch_client, args.token_file, args.auth_settings_file)
elif args.subparser_name == 'check':
check(twitch_client, args.token_file)
elif args.subparser_name == 'update':
if args.game or args.status:
update(twitch_client, args.channel, args.status, args.game)
elif args.subparser_name == 'channel_info':
channel_info(twitch_client)
| Python | 0.000005 | |
2a1b46740c4cf14f7db4f344431aced9bf06d1e7 | Add a little program that calls sync until is is done | scripts/sync_for_real.py | scripts/sync_for_real.py | #!/usr/bin/env python3
import subprocess
import sys
from time import time
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def main():
nr_fast = 3
while nr_fast > 0:
eprint('syncing... ', end='', flush=True)
start_t = time()
subprocess.Popen('/usr/bin/sync', stdout=None, stderr=None).wait()
time_length = time() - start_t
eprint('{0:0.3f}'.format(time_length))
if time_length < 0.10:
nr_fast = nr_fast - 1
else:
nr_fast = 3
return 0
if __name__ == '__main__':
sys.exit(main())
| Python | 0.000001 | |
5188a3b125d0a7479a95c15ba3ab3fe309d378a6 | add xz2rpm.py | xz2rpm.py | xz2rpm.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
@author: mosquito
@email: sensor.wen@gmail.com
@project: xz2rpm
@github: http://github.com/1dot75cm/repo-checker
@description: xz(PKGBUILD) tranlate to rpm(spec).
@version: 0.1
@history:
0.1 - Initial version (2016.03.15)
'''
import os
import sys
import re
import json
import time
import argparse
class Package:
def __init__(self):
self.opts = {
'input_file': '',
'author': '',
'mail': ''
}
self.helper()
self.data = self.openfile(self.opts['input_file'])
self.pkg = {
'pkgbase': '',
'pkgname': '',
'pkgver': '',
'pkgrel': '',
'epoch': '',
'pkgdesc': '',
'arch': '',
'url': '',
'license': '',
'groups': '',
'depends': '',
'optdepends': '',
'makedepends': '',
'checkdepends': '',
'provides': '',
'conflicts': '',
'replaces': '',
'backup': '',
'options': '',
'install': '',
'changelog': '',
'source': '',
'noextract': '',
'prepare': '',
'build': '',
'check': '',
'package': ''
}
self.pkg_dict = {
'"': '',
"'": '',
'$pkgname': self.pkg['pkgname'],
'$startdir': '%{_topdir}',
'$srcdir': '%{_builddir}',
'$pkgdir': '%{buildroot}'
}
def openfile(self, filename):
with open(filename, 'r') as fs:
content = fs.read()
return content
def _tojson(self, data):
# TODO
return json.loads(data, 'utf-8')
def parse(self, item):
real_item = None
patterns = ['=\((.*?)\)', '=(.*)', '\(\)\s*\{(.*)\}']
for i in patterns:
pattern = item + i
if i == '=(.*)':
val = re.compile(pattern)
else:
val = re.compile(pattern, re.S)
if not val.search(self.data):
continue
else:
self.pkg[item] = val.search(self.data).groups()[0]
real_item = item
break
return real_item
def replace_words(self, text, word_dict):
''' https://segmentfault.com/q/1010000002474308 '''
yo = re.compile('|'.join(map(re.escape, word_dict)))
def translate(match):
return word_dict[match.group(0)]
return yo.sub(translate, text)
def get_item(self):
list(map(self.parse, self.pkg))
for i in self.pkg:
self.pkg[i] = self.replace_words(self.pkg[i], self.pkg_dict)
def output(self):
self.get_item()
author = self.opts['author']
email = self.opts['mail']
date = time.strftime('%a %b %d %Y', time.localtime())
content = '''%global debug_package %nil
Name: {name}
Epoch: {epoch}
Version: {ver}
Release: {rel}%?dist
Summary: {desc}
Group: {group}
License: {license}
URL: {url}
Source0: {src}
BuildArch: {arch}
BuildRequires: {makereq}
Requires: {req}
Recommends: {optreq}
Provides: {prov}
Conflicts: {conf}
Obsoletes: {repl}
%description
{desc}
%prep
%setup -q
{prep}
%build
%configure
make %?_smp_mflags
{build}
%install
%make_install
{install}
%check
{check}
%post
/bin/touch --no-create %_datadir/icons/hicolor &>/dev/null ||:
/usr/bin/update-desktop-database -q ||:
%postun
if [ $1 -eq 0 ]; then
/bin/touch --no-create %_datadir/icons/hicolor &>/dev/null ||:
/usr/bin/gtk-update-icon-cache -f -t -q %_datadir/icons/hicolor ||:
fi
/usr/bin/update-desktop-database -q ||:
%posttrans
/usr/bin/gtk-update-icon-cache -f -t -q %_datadir/icons/hicolor ||:
%files
%defattr(-,root,root,-)
%doc README
%license LICENSE
%changelog
* {date} {author} <{email}> - {ver}-{rel}
- '''.format(
name=self.pkg['pkgname'],
epoch=self.pkg['epoch'],
ver=self.pkg['pkgver'],
rel=self.pkg['pkgrel'],
desc=self.pkg['pkgdesc'],
group=self.pkg['groups'],
license=self.pkg['license'],
url=self.pkg['url'],
src=self.pkg['source'],
arch=self.pkg['arch'],
makereq=self.pkg['makedepends'],
req=self.pkg['depends'],
optreq=self.pkg['optdepends'],
prov=self.pkg['provides'],
conf=self.pkg['conflicts'],
repl=self.pkg['replaces'],
prep=self.pkg['prepare'],
build=self.pkg['build'],
install=self.pkg['package'],
check=self.pkg['check'],
date=date,
author=author,
email=email
)
print(content)
def helper(self):
''' display help information.'''
parser = argparse.ArgumentParser(description='PKGBUILD translate to Spec.')
parser.add_argument('-f', '--file', metavar='PATH', type=str,
dest='files', action='store', default='PKGBUILD',
help='PKGBUILD file'
)
parser.add_argument('-a', '--author', metavar='NAME', type=str,
dest='author', action='store', default='Li Lei',
help='author of package'
)
parser.add_argument('-m', '--mail', metavar='MAIL', type=str,
dest='mail', action='store', default='hanmeimei@gmail.com',
help='email address of author'
)
args = parser.parse_args()
if args.files and os.path.exists(args.files):
self.opts['input_file'] = args.files
elif args.files is not None:
print("xz2rpm: cannot access '{}': No such file or directory"
.format(args.files))
sys.exit()
if args.author:
self.opts['author'] = args.author
if args.mail:
self.opts['mail'] = args.mail
if __name__ == '__main__':
item = Package()
item.output()
| Python | 0 | |
abdaa692977eb73b32a16d10936ae0254e8e48f5 | Add an example for batch normalization. | batch_normalization/__init__.py | batch_normalization/__init__.py | """Example of batch normalization tackling a difficult optimization.
Running with --no-batch-normalize, we see that the algorithm makes very
very slow progress in the same amount of time.
"""
import argparse
from theano import tensor
from blocks.algorithms import Adam, GradientDescent
from blocks.bricks import MLP, BatchNormalizedMLP, Logistic, Softmax
from blocks.bricks.cost import CategoricalCrossEntropy, MisclassificationRate
from blocks.extensions import Printing
from blocks.extensions.monitoring import DataStreamMonitoring
from blocks.extensions import FinishAfter
from blocks.graph import (ComputationGraph, apply_batch_normalization,
get_batch_normalization_updates)
from blocks.initialization import Constant, IsotropicGaussian
from blocks.main_loop import MainLoop
from fuel.datasets.toy import Spiral
from fuel.streams import DataStream
from fuel.schemes import ShuffledScheme, SequentialScheme
def main(num_epochs=50, batch_normalized=True, alpha=0.1):
    """Run the example.

    Parameters
    ----------
    num_epochs : int, optional
        Number of epochs to train for. Defaults to 50.
    batch_normalized : bool, optional
        Batch-normalize the training graph. Defaults to `True`.
    alpha : float, optional
        Weight to apply to a new sample when calculating running
        averages for population statistics (1 - alpha weight is
        given to the existing average).

    """
    if batch_normalized:
        # Add an extra keyword argument that only BatchNormalizedMLP takes,
        # in order to speed things up at the cost of a bit of extra memory.
        mlp_class = BatchNormalizedMLP
        extra_kwargs = {'conserve_memory': False}
    else:
        mlp_class = MLP
        extra_kwargs = {}
    mlp = mlp_class([Logistic(), Logistic(), Logistic(), Softmax()],
                    [2, 5, 5, 5, 3],
                    weights_init=IsotropicGaussian(0.2),
                    biases_init=Constant(0.), **extra_kwargs)
    mlp.initialize()

    # Generate a dataset with 3 spiral arms, using 8000 examples for
    # training and 2000 for testing.
    dataset = Spiral(num_examples=10000, classes=3,
                     sources=['features', 'label'],
                     noise=0.05)
    train_stream = DataStream(dataset,
                              iteration_scheme=ShuffledScheme(examples=8000,
                                                              batch_size=20))
    test_stream = DataStream(dataset,
                             iteration_scheme=SequentialScheme(
                                 examples=list(range(8000, 10000)),
                                 batch_size=2000))

    # Build a cost graph; this contains BatchNormalization bricks that will
    # by default run in inference mode.
    features = tensor.matrix('features')
    label = tensor.lvector('label')
    prediction = mlp.apply(features)
    cost = CategoricalCrossEntropy().apply(label, prediction)
    misclass = MisclassificationRate().apply(label, prediction)
    misclass.name = 'misclass'  # The default name for this is annoyingly long
    original_cg = ComputationGraph([cost, misclass])
    if batch_normalized:
        cg = apply_batch_normalization(original_cg)
        # Add updates for population parameters: exponential moving averages
        # of the per-minibatch statistics, used at inference time.
        pop_updates = get_batch_normalization_updates(cg)
        extra_updates = [(p, m * alpha + p * (1 - alpha))
                         for p, m in pop_updates]
    else:
        cg = original_cg
        extra_updates = []

    algorithm = GradientDescent(step_rule=Adam(0.001),
                                cost=cg.outputs[0],
                                parameters=cg.parameters)
    algorithm.add_updates(extra_updates)

    main_loop = MainLoop(algorithm=algorithm,
                         data_stream=train_stream,
                         # Use the original cost and misclass variables so
                         # that we monitor the (original) inference-mode graph.
                         extensions=[DataStreamMonitoring([cost, misclass],
                                                          train_stream,
                                                          prefix='train'),
                                     DataStreamMonitoring([cost, misclass],
                                                          test_stream,
                                                          prefix='test'),
                                     Printing(),
                                     # BUG FIX: honour the num_epochs argument
                                     # instead of a hard-coded 50 epochs.
                                     FinishAfter(after_n_epochs=num_epochs)])
    main_loop.run()
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Batch normalization demo.")
    # BUG FIX: without type=int, a value supplied on the command line is
    # passed on as a string instead of an integer epoch count.
    parser.add_argument('-n', '--num-epochs', type=int, default=50,
                        help='Number of epochs for which to train.')
    parser.add_argument('--no-batch-normalize', '-N', action='store_const',
                        const=True, default=False,
                        help='Turn off batch normalization, to see the '
                             'difference it makes.')
    parser.add_argument('--alpha', '-A', action='store',
                        type=float, default=0.05,
                        help='Moving average coefficient for population '
                             'statistics')
    args = parser.parse_args()
    main(args.num_epochs, not args.no_batch_normalize, args.alpha)
| Python | 0 | |
3e2c4f19d1eb5d66430ea46abe18a6a7022e13ef | Create svg_filter.py | svg_filter.py | svg_filter.py | f = open("domusdomezones.svg", "r")
# Collect the stripped lines of the SVG document (the file handle `f` is
# opened on the preceding line).
svg = []
for line in f:
    line = line.strip()
    svg.append(line)
f.close()
# Extract the raw coordinate string that follows each "<path" element.
vector_paths = []
for i in range(0, len(svg)):
    if svg[i] == "<path":# spot the paths location
        # NOTE: rebinding `i` only affects the rest of this iteration; the
        # for-range loop still visits index i+1 next, but by then svg[i+1]
        # has been replaced by a list and no longer equals "<path".
        i = i+1
        svg[i] = svg[i].replace(',', ' ')# remove the first 5 items in each path, replace spaces with commas
        svg[i] = svg[i][5:-1].split(' ')# remove the first 5 and the last item of the line of each path, split each vector into a iterable list
        vector_paths.append(svg[i])
# Convert every coordinate from string to float.  `paths` aliases the same
# inner lists as `vector_paths`, so the conversion is visible through both.
paths = []
for n in range(0, len(vector_paths)):
    paths.append(vector_paths[n])
    for m in range(0, len(vector_paths[n])):
        vector_paths[n][m] = float(vector_paths[n][m]) # float all strings of the vector_paths list
# Turn relative vectors into absolute coordinates (running sum over every
# other value), then round all values to two decimals.
for p in range(0, len(paths)):
    for o in range(2, len(paths[p])-1):# loop to sum vectors
        paths[p][o] = paths[p][o-2] + paths[p][o]# sum the vectors of each cordinate
    for o in range(0, len(paths[p])):#loop to round each cordinate
        paths[p][o] = round(paths[p][o],2) #round the floating points to a two decimal float
print paths
| Python | 0.000006 | |
353d717c425cca9941d650d715c3ed8caf0aae64 | Reset tooltip timer also when cell editor is closed | src/robotide/editor/tooltips.py | src/robotide/editor/tooltips.py | # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wx
import wx.grid
from popupwindow import Tooltip
class GridToolTips(object):
    """Shows a delayed hover tooltip and an information popup for a wx grid."""

    def __init__(self, grid):
        self._grid = grid
        # Small auto-hiding tooltip for hover hints; bigger popup for details.
        self._tooltip = Tooltip(grid, (250, 80), False, True)
        self._information_popup = Tooltip(grid, (450, 300))
        self._tooltip_timer = wx.Timer(grid.GetGridWindow())
        grid.GetGridWindow().Bind(wx.EVT_MOTION, self.OnMouseMotion)
        grid.GetGridWindow().Bind(wx.EVT_TIMER, self.OnShowToolTip)
        grid.Bind(wx.grid.EVT_GRID_EDITOR_HIDDEN, self.OnGridEditorHidden)

    def OnMouseMotion(self, event):
        # Any pointer movement hides the tooltip and restarts the delay.
        self._hide_tooltip()
        self._start_tooltip_timer()
        event.Skip()

    def _start_tooltip_timer(self):
        # One-shot timer; fires a single EVT_TIMER after half a second.
        self._tooltip_timer.Start(500, True)

    def OnShowToolTip(self, event):
        self._hide_tooltip()
        content = self._grid.get_tooltip_content()
        if not content:
            return
        self._show_tooltip_at(content, self._calculate_tooltip_position())
        self._grid.SetFocus()

    def OnGridEditorHidden(self, event):
        # Restart the tooltip delay when the editor closes on the hovered cell.
        if (event.Row, event.Col) == self._grid.cell_under_cursor:
            self._start_tooltip_timer()

    def _show_tooltip_at(self, content, position):
        # The hover tooltip stays hidden while the information popup is open.
        if self._information_popup.IsShown():
            return
        self._tooltip.set_content(content)
        self._tooltip.show_at(position)

    def _calculate_tooltip_position(self):
        # Offset slightly so the popup does not sit right under the cursor.
        mouse_x, mouse_y = wx.GetMousePosition()
        return (mouse_x + 5, mouse_y + 5)

    def _hide_tooltip(self):
        self._tooltip.hide()

    def hide_information(self):
        self._information_popup.hide()

    def hide(self):
        self._hide_tooltip()
        self.hide_information()

    def show_info_at(self, info, title, position):
        self._tooltip.hide()
        self._information_popup.set_content(info, title)
        self._information_popup.show_at(position)
| # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wx
from popupwindow import Tooltip
class GridToolTips(object):
    """Delayed hover tooltip plus a larger information popup for a wx grid."""

    def __init__(self, grid):
        # Small auto-hiding tooltip for hover hints; bigger popup for details.
        self._tooltip = Tooltip(grid, (250, 80), False, True)
        self._information_popup = Tooltip(grid, (450, 300))
        self._grid = grid
        self._tooltip_timer = wx.Timer(grid.GetGridWindow())
        grid.GetGridWindow().Bind(wx.EVT_MOTION, self.OnMouseMotion)
        grid.GetGridWindow().Bind(wx.EVT_TIMER, self.OnShowToolTip)

    def OnMouseMotion(self, event):
        # Any movement hides the tooltip and restarts the half-second delay
        # (one-shot timer; fires a single EVT_TIMER).
        self._hide_tooltip()
        self._tooltip_timer.Start(500, True)
        event.Skip()

    def OnShowToolTip(self, event):
        self._hide_tooltip()
        content = self._grid.get_tooltip_content()
        if content:
            self._show_tooltip_at(content, self._calculate_tooltip_position())
            self._grid.SetFocus()

    def _show_tooltip_at(self, content, position):
        # The hover tooltip stays hidden while the information popup is open.
        if not self._information_popup.IsShown():
            self._tooltip.set_content(content)
            self._tooltip.show_at(position)

    def _calculate_tooltip_position(self):
        # Offset slightly so the popup does not sit right under the cursor.
        x, y = wx.GetMousePosition()
        return x+5, y+5

    def _hide_tooltip(self):
        self._tooltip.hide()

    def hide_information(self):
        self._information_popup.hide()

    def hide(self):
        self._hide_tooltip()
        self.hide_information()

    def show_info_at(self, info, title, position):
        self._tooltip.hide()
        self._information_popup.set_content(info, title)
        self._information_popup.show_at(position)
| Python | 0 |
86434fb902caeea7bb740c35607dc6f9f7766d88 | Fix searching for notes in the django admin | notes/admin.py | notes/admin.py | #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from snowy.accounts.models import UserProfile
from snowy.notes.models import Note, NoteTag
from reversion.admin import VersionAdmin
from django.contrib import admin
class NoteAdmin(VersionAdmin):
    # Columns shown in the admin change list.
    list_display = ('created', 'author', 'title')
    # Fields searched by the admin search box.
    search_fields = ['content', 'title']
    # Auto-fill the slug from the title while editing.
    prepopulated_fields = {'slug': ('title',)}

# Register the models so they appear in the Django admin.
admin.site.register(Note, NoteAdmin)
admin.site.register(NoteTag)
admin.site.register(UserProfile)
| #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from snowy.accounts.models import UserProfile
from snowy.notes.models import Note, NoteTag
from reversion.admin import VersionAdmin
from django.contrib import admin
class NoteAdmin(VersionAdmin):
    # Columns shown in the admin change list.
    list_display = ('created', 'author', 'title')
    # Fields searched by the admin search box.
    search_fields = ['body', 'title']
    # Auto-fill the slug from the title while editing.
    prepopulated_fields = {'slug': ('title',)}

# Register the models so they appear in the Django admin.
admin.site.register(Note, NoteAdmin)
admin.site.register(NoteTag)
admin.site.register(UserProfile)
| Python | 0 |
75717747ffbc36f306e0f771a65ed101bd3ca9be | Create parser.py | parser.py | parser.py | from HTMLParser import HTMLParser
# create a subclass and override the handler methods
class Parser(HTMLParser):
tag = ""
doc = new Document()
def handle_starttag(self, tag, attrs):
tag = tag
print "Encountered a start tag:", tag
def handle_endtag(self, tag):
tag = ""
print "Encountered an end tag :", tag
def handle_data(self, data):
print "Encountered some data :", data
if tag == "DOCNO":
doc.setId(data)
if tag == "title":
doc.setTitle(data)
if tag == "h1":
doc.addH1(data)
if tag == "h2":
doc.addH2(data)
if tag == "h3":
doc.addH4(data)
elif tag != "":
doc.addContent(data)
class Document():
    """Accumulates the fields of a single parsed HTML document."""

    def __init__(self):
        # BUG FIX: the original __init__ lacked `self`, so all of these were
        # bound as locals and instances never received the attributes.
        self.id = ""
        self.title = ""
        self.h1 = []
        self.h2 = []
        self.h3 = []
        self.content = ""

    def setId(self, id):
        self.id = id

    def setTitle(self, title):
        self.title = title

    def addH1(self, h1):
        # BUG FIX: was self.append(h1); a Document is not a list.
        self.h1.append(h1)

    def addH2(self, h2):
        self.h2.append(h2)

    def addH3(self, h3):
        self.h3.append(h3)

    def addContent(self, content):
        # Body text fragments are accumulated with a trailing space each.
        self.content += content + " "
| Python | 0.000011 | |
ecde3b823724e612fd4e5cc575eb75f0d3652a4b | add script for running test | test/run-test.py | test/run-test.py | import imp
import json
import os
# Load the module under test straight from the source tree (no install).
mustache = imp.load_source('mustache', '../src/mustache.py')
#test_files = ['comments.json',
#'delimiters.json',
#'interpolation.json',
#'inverted.json',
#'~lambdas.json',
#'partials.json',
#'sections.json']
# Only these spec files are exercised for now; the commented-out list above
# is the full Mustache spec suite.
test_files = ['interpolation.json',
'delimiters.json']
# Run every case from each spec file and report any render mismatch.
for filename in test_files:
    with open(os.path.join('./spec/specs/', filename)) as fp:
        data = json.load(fp)['tests']
    for test in data:
        context = test['data']
        template = test['template']
        expected = test['expected']
        result = mustache.render(template, [context])
        if result != expected:
            print('>>>>>>>>> Error >>>>>>>>>>>>')
            print('template:', template)
            print('expected:', expected)
            print('result :', result)
| Python | 0.000001 | |
0feb8f3ae65fadaf600e7681349cfa537b41a8c3 | Add ParseBigCSV.py | parseBigCSV.py | parseBigCSV.py | import csv
import json
# Convert the CSV rows to a JSON array of objects keyed by the header row.
with open("evidata.csv", "r") as big_csv, open("file.json", "w") as out_file:
    rows = list(csv.DictReader(big_csv))
    json.dump(rows, out_file)
| Python | 0.000001 | |
31434ff2f5b208ae1d93b4340e1d28cfe5cb2e42 | Add IMDB Mocked Unit Test (#1579) | test/datasets/test_imdb.py | test/datasets/test_imdb.py | import os
import random
import string
import tarfile
from collections import defaultdict
from unittest.mock import patch
from parameterized import parameterized
from torchtext.datasets.imdb import IMDB
from ..common.case_utils import TempDirMixin, zip_equal
from ..common.torchtext_test_case import TorchtextTestCase
def _get_mock_dataset(root_dir):
"""
root_dir: directory to the mocked dataset
"""
base_dir = os.path.join(root_dir, "IMDB")
temp_dataset_dir = os.path.join(base_dir, "temp_dataset_dir")
os.makedirs(temp_dataset_dir, exist_ok=True)
seed = 1
mocked_data = defaultdict(list)
for split in ("train", "test"):
neg_dir = os.path.join(temp_dataset_dir, split, "neg")
pos_dir = os.path.join(temp_dataset_dir, split, "pos")
os.makedirs(neg_dir, exist_ok=True)
os.makedirs(pos_dir, exist_ok=True)
for i in range(5):
# all negative labels are read first before positive labels in the
# IMDB dataset implementation
label = "neg" if i < 2 else "pos"
cur_dir = pos_dir if label == "pos" else neg_dir
txt_file = os.path.join(cur_dir, f"{i}{i}_{i}.txt")
with open(txt_file, "w") as f:
rand_string = " ".join(
random.choice(string.ascii_letters) for i in range(seed)
)
dataset_line = (label, rand_string)
# append line to correct dataset split
mocked_data[split].append(dataset_line)
f.write(rand_string)
seed += 1
compressed_dataset_path = os.path.join(base_dir, "aclImdb_v1.tar.gz")
# create tar file from dataset folder
with tarfile.open(compressed_dataset_path, "w:gz") as tar:
tar.add(temp_dataset_dir, arcname="aclImdb_v1")
return mocked_data
class TestIMDB(TempDirMixin, TorchtextTestCase):
    """End-to-end tests for the IMDB dataset against a mocked archive."""
    root_dir = None
    samples = []

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.root_dir = cls.get_base_temp_dir()
        cls.samples = _get_mock_dataset(cls.root_dir)
        # Bypass the download hash check so the mocked archive is accepted.
        cls.patcher = patch(
            "torchdata.datapipes.iter.util.cacheholder._hash_check", return_value=True
        )
        cls.patcher.start()

    @classmethod
    def tearDownClass(cls):
        cls.patcher.stop()
        super().tearDownClass()

    @parameterized.expand(["train", "test"])
    def test_imdb(self, split):
        # The dataset output must match the mocked samples exactly, in order.
        dataset = IMDB(root=self.root_dir, split=split)
        samples = list(dataset)
        expected_samples = self.samples[split]
        for sample, expected_sample in zip_equal(samples, expected_samples):
            self.assertEqual(sample, expected_sample)

    @parameterized.expand(["train", "test"])
    def test_imdb_split_argument(self, split):
        # split="train" and split=("train",) must yield identical data.
        dataset1 = IMDB(root=self.root_dir, split=split)
        (dataset2,) = IMDB(root=self.root_dir, split=(split,))
        for d1, d2 in zip_equal(dataset1, dataset2):
            self.assertEqual(d1, d2)
| Python | 0 | |
29b09f283a2a62824a4e1f7ae05a5bb64db0a149 | Create fire_th_trollius.py | django_th/management/commands/fire_th_trollius.py | django_th/management/commands/fire_th_trollius.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import datetime
import time
import arrow
import trollius as asyncio
from trollius import From
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_th.services import default_provider
from django_th.models import TriggerService
from django.utils.log import getLogger
# create logger
logger = getLogger('django_th.trigger_happy')
q = asyncio.Queue(maxsize=0)
q2 = asyncio.Queue(maxsize=0)
def to_datetime(data):
    """Convert a feedparser 9-tuple date field into a datetime.

    Prefers ``published_parsed`` over ``updated_parsed``; returns None
    when the entry carries neither field.
    """
    if 'published_parsed' in data:
        parsed = data.published_parsed
    elif 'updated_parsed' in data:
        parsed = data.updated_parsed
    else:
        return None
    return datetime.datetime.fromtimestamp(time.mktime(parsed))
class Command(BaseCommand):
    """Management command that fires every enabled trigger once.

    For each active TriggerService, the provider coroutine pulls new data
    into queue ``q``, the consumer coroutine drains ``q`` and pushes fresh
    items to the consumer service (counting them into ``q2``), and
    update_trigger stamps the trigger with the current time when anything
    new was handled.
    """

    @asyncio.coroutine
    def update_trigger(self, service):
        """
            update the date when occurs the trigger
        """
        # Number of new items handled by my_consumer (or 1 from the dummy).
        my_count = yield From(q2.get())
        if my_count > 0:
            logger.info("user: {} - provider: {} - consumer: {} - {} = {} new data".format(
                service.user, service.provider.name.name, service.consumer.name.name, service.description, my_count))
            now = arrow.utcnow().to(settings.TIME_ZONE).format('YYYY-MM-DD HH:mm:ss')
            TriggerService.objects.filter(id=service.id).update(date_triggered=now)
        else:
            logger.info("user: {} - provider: {} - consumer: {} - {} nothing new".format(
                service.user, service.provider.name.name, service.consumer.name.name, service.description))
        asyncio.get_event_loop().stop()

    @asyncio.coroutine
    def my_dummy_provider(self):
        """
            just a dummy provider when its the first time
            the trigger is handling
        """
        # Seed q2 with a non-zero count so update_trigger records the run.
        yield From(q2.put(1))

    @asyncio.coroutine
    def my_provider(self, service_provider, token, service_id, date_triggered):
        """
            service_provider : the name of the class to trigger the service
            token : is the token of the service provider from the database
            service_id : is the service id from the database
            date_triggered : date_triggered is the data from the database
        """
        # Fetch the provider's new data and enqueue it for my_consumer.
        datas = getattr(service_provider, 'process_data')(
            token, service_id, date_triggered)
        for data in datas:
            yield From(q.put(data))

    @asyncio.coroutine
    def my_consumer(self, service_consumer, token, service_id, date_triggered):
        """
            service_consumer : the name of the consumer 'service' class
            token : is the token of the service consumer
            service_id : is the service id from the database
            date_triggered : date_triggered is the data from the database
        """
        count_new_data = 0
        while q.empty() is not True:
            data = yield From(q.get())
            consumer = getattr(service_consumer, 'save_data')
            published = ''
            which_date = ''
            # flag to know if we can push data to the consumer
            # 2) for each one
            # if in a pool of data once of them does not have
            # a date, will take the previous date for this one
            # if it's the first one, set it to 00:00:00
            # let's try to determine the date contained in the data...
            published = to_datetime(data)
            if published is not None:
                # get the published date of the provider
                published = arrow.get(str(published), 'YYYY-MM-DD HH:mm:ss').to(settings.TIME_ZONE)
                # store the date for the next loop
                # if published became 'None'
                which_date = published
            #... otherwise set it to 00:00:00 of the current date
            if which_date == '':
                # current date
                which_date = arrow.utcnow().replace(
                    hour=0, minute=0, second=0)
                published = which_date
            if published is None and which_date != '':
                published = which_date
            # 3) check if the previous trigger is older than the
            # date of the data we retreived
            # if yes , process the consumer
            # add the TIME_ZONE settings
            my_date_triggered = arrow.get(
                str(date_triggered), 'YYYY-MM-DD HH:mm:ss').to(settings.TIME_ZONE)
            # if the published date if greater or equal to the last
            # triggered event ... :
            if date_triggered is not None and \
               published is not None and \
               published >= date_triggered:
                if 'title' in data:
                    logger.info("date {} >= date triggered {} title {}".format(
                        published, date_triggered, data['title']))
                else:
                    logger.info(
                        "date {} >= date triggered {} ".format(published, my_date_triggered))
                consumer(token, service_id, **data)
                count_new_data += 1
            # otherwise do nothing
            else:
                if 'title' in data:
                    logger.debug(
                        "data outdated skiped : [{}] {}".format(published, data['title']))
                else:
                    logger.debug(
                        "data outdated skiped : [{}] ".format(published))
        # return the number of updates ( to be displayed in the log )
        yield From(q2.put(count_new_data))

    def handle(self, *args, **options):
        """
            run the main process
        """
        default_provider.load_services()
        # NOTE(review): 'consummer__name' looks misspelled -- the related
        # object is accessed as service.consumer below; verify the field
        # name used in select_related.
        trigger = TriggerService.objects.filter(status=True).select_related('consummer__name', 'provider__name')
        if trigger:
            for service in trigger:
                # provider - the service that offer datas
                service_name = str(service.provider.name.name)
                service_provider = default_provider.get_service(service_name)
                # consumer - the service which uses the data
                service_name = str(service.consumer.name.name)
                service_consumer = default_provider.get_service(service_name)
                # First run
                if service.date_triggered is None:
                    logger.debug("first run for %s => %s " % (str(
                        service.provider.name), str(service.consumer.name.name)))
                    asyncio.get_event_loop().run_until_complete(self.my_dummy_provider())
                # another run
                else:
                    asyncio.get_event_loop().run_until_complete(
                        self.my_provider(service_provider, service.provider.token, service.id, service.date_triggered))
                asyncio.get_event_loop().run_until_complete(
                    self.my_consumer(service_consumer, service.consumer.token, service.id, service.date_triggered))
                asyncio.get_event_loop().run_until_complete(self.update_trigger(service))
            asyncio.get_event_loop().run_forever()
        else:
            print("No trigger set by any user")
| Python | 0.000005 | |
3536b98a3adf5087c78b92432585654bec40d64e | add problem 045 | problem_045.py | problem_045.py | #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
'''
import math
import timeit
def is_pentagonal(n):
    """Return True when n is a pentagonal number P_k = k(3k-1)/2.

    n is pentagonal iff (1 + sqrt(1 + 24n)) is a multiple of 6.
    """
    return (1 + math.sqrt(1 + 24 * n)) % 6 == 0


def calc():
    """Return the first hexagonal number H_i with i > 143 that is also
    pentagonal (every hexagonal number is triangular already)."""
    i = 144
    while True:
        hexagonal = i * (2 * i - 1)
        if is_pentagonal(hexagonal):
            return hexagonal
        i += 1
# Print the answer and the wall-clock time of a single run (Python 2 syntax).
if __name__ == '__main__':
    print calc()
    print timeit.Timer('problem_045.calc()', 'import problem_045').timeit(1)
| Python | 0.000294 | |
ef95e8f3c9c9f12b7073b02e95c2a464ed26c8df | hard code the value of the service url | ceph_installer/cli/constants.py | ceph_installer/cli/constants.py |
# Base URL of the ceph-installer service API; deliberately hard-coded to
# localhost for the simple single-host scenario.
server_address = 'http://localhost:8181/'
| Python | 0.999698 | |
91bb7506bd20ed22b8787e7a8b9975cc07e97175 | Add owners client to depot_tools. | owners_client.py | owners_client.py | # Copyright (c) 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class OwnersClient(object):
  """Interact with OWNERS files in a repository.

  This class allows you to interact with OWNERS files in a repository both the
  Gerrit Code-Owners plugin REST API, and the owners database implemented by
  Depot Tools in owners.py:

   - List all the owners for a change.
   - Check if a change has been approved.
   - Check if the OWNERS configuration in a change is valid.

  All code should use this class to interact with OWNERS files instead of the
  owners database in owners.py
  """
  def __init__(self, host):
    # Host the concrete implementation will talk to.
    self._host = host

  def ListOwnersForFile(self, project, branch, path):
    """List all owners for a file."""
    # BUG FIX: raise NotImplementedError instead of a bare Exception; it is
    # the idiomatic signal for an abstract method and, being an Exception
    # subclass, remains compatible with existing callers.
    raise NotImplementedError()

  def IsChangeApproved(self, change_number):
    """Check if the latest patch set for a change has been approved."""
    raise NotImplementedError()

  def IsOwnerConfigurationValid(self, change_number, patch):
    """Check if the owners configuration in a change is valid."""
    raise NotImplementedError()
| Python | 0.000001 | |
beae2bdc47949f78e95e3444d248ce035766e719 | Add ascii table test | smipyping/_asciitable.py | smipyping/_asciitable.py | # (C) Copyright 2017 Inova Development Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal module with utilities to write to ascii outputs.
"""
from __future__ import print_function, absolute_import
from textwrap import wrap
import six
from terminaltables import SingleTable
def print_ascii_table(title, table_header, table_data, inner_border=False,
                      outer_border=False):
    """Print table data as an ASCII table (terminaltables SingleTable format).

    Parameters:

      title:
        list of strings defining the row titles

      table_header:
        list of strings defining the column names

      table_data:
        list of lists of strings; each inner list is the data for one row

      inner_border:
        optional flag; when True, draw borders between columns

      outer_border:
        optional flag; when True, draw a border around the whole table
    """
    rows = [table_header]
    rows.extend(table_data)
    table = SingleTable(rows, title)
    table.inner_column_border = inner_border
    table.outer_border = outer_border
    print(table.table)
    print()  # blank line after the table
def fold_cell(cell_string, max_cell_width):
    """Fold a string to fit within a table cell of limited width.

    Parameters:

      cell_string:
        the value destined for the cell; non-strings pass through unchanged

      max_cell_width:
        maximum cell width; longer strings are wrapped onto multiple lines

    Return:
      the (possibly folded) cell value
    """
    if not isinstance(cell_string, six.string_types):
        return cell_string
    if len(cell_string) <= max_cell_width:
        return cell_string
    return '\n'.join(wrap(cell_string, max_cell_width))
| Python | 0.000007 | |
672e4378421d2014644e23195706ef011934ffdb | test for fixes on #55 | category_encoders/tests/test_basen.py | category_encoders/tests/test_basen.py | import category_encoders as ce
import unittest
import pandas as pd
__author__ = 'willmcginnis'
class TestBasen(unittest.TestCase):
    """Smoke test for BaseNEncoder transforming categories unseen at fit time."""
    def test_basen(self):
        # df_1 contains a category ('d' in col1) that never appeared in the
        # frame the encoder was fitted on.
        df = pd.DataFrame({'col1': ['a', 'b', 'c'], 'col2': ['d', 'e', 'f']})
        df_1 = pd.DataFrame({'col1': ['a', 'b', 'd'], 'col2': ['d', 'e', 'f']})
        enc = ce.BaseNEncoder(verbose=1)
        enc.fit(df)
        print(enc.transform(df_1))
| Python | 0 | |
a4c8818225941b84e6958dcf839fc78c2adc5cee | Create test_pxssh.py | test_pxssh.py | test_pxssh.py | # commandsshbotnet.py
# author: @shipcod3
#
# >> used for testing the pxssh module
import pxssh
import getpass
# Interactively collect connection details, run a few commands over SSH and
# print each command's output (Python 2 syntax throughout).
try:
    s = pxssh.pxssh()
    hostname = raw_input('SET HOST: ')
    username = raw_input('SET USERNAME: ')
    password = getpass.getpass('SET PASSWORD: ')
    s.login (hostname, username, password)
    s.sendline ('uptime') # run a command
    s.prompt() # match the prompt
    print s.before # print everything before the prompt.
    s.sendline ('ls -l')
    s.prompt()
    print s.before
    s.sendline ('df')
    s.prompt()
    print s.before
    s.logout()
except pxssh.ExceptionPxssh, e:
    # Raised on login/handshake failure; report the error and fall through.
    print "pxssh failed on login."
    print str(e)
| Python | 0.000005 | |
1be4e6f97b3d062c4fa07f70b05305bf32593fd4 | Add test cases for smudge | dotbriefs/tests/test_smudge.py | dotbriefs/tests/test_smudge.py | import unittest
from dotbriefs.smudge import SmudgeTemplate
class TestCleanSecret(unittest.TestCase):
    """Tests SmudgeTemplate substitution of '$DotBriefs: key$' markers."""
    def setUp(self):
        # Two known secrets and a single template named 'name' under test.
        self.secrets = {}
        self.secrets['password'] = 's3cr3t'
        self.secrets['question'] = 'h1dd3n 4g3nd4'
        self.template = []
        self.template.append(SmudgeTemplate('name', self.secrets))

    def test_nosecret_sub(self):
        # Lines without a marker must pass through untouched.
        self.assertEqual(self.template[0].sub('password = hi # comment'),
                         'password = hi # comment')

    def test_nokey_sub(self):
        # An empty key is not substituted.
        self.assertEqual(self.template[0].sub('password = $DotBriefs: $ # comment'),
                         'password = $DotBriefs: $ # comment')

    def test_nomatch_sub(self):
        # A key absent from the secrets dict is left as-is.
        self.assertEqual(self.template[0].sub('password = $DotBriefs: notfound$ # comment'),
                         'password = $DotBriefs: notfound$ # comment')

    def test_single_sub(self):
        # One marker on the line is replaced by its secret.
        self.assertEqual(self.template[0].sub('password = $DotBriefs: password$ # comment'),
                         'password = s3cr3t # comment')

    def test_double_sub(self):
        # Multiple markers on a single line are all replaced.
        self.assertEqual(self.template[0].sub('password = $DotBriefs: password$; security question = $DotBriefs: question$ # comment'),
                         'password = s3cr3t; security question = h1dd3n 4g3nd4 # comment')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| Python | 0.000002 | |
aa2d97dfe52628e1bb7ab123890a895f7f630cda | add problem 070 | problem_070.py | problem_070.py | #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
'''
from fractions import Fraction
import itertools
import math
import timeit
primes = [2, 3, 5, 7]
def is_prime(n):
    """Return True if n is prime.

    BUG FIX: the original trial-divided by the global ``primes`` list and
    then by ``range(max(primes), sqrt(n)+1)``.  ``primes`` is appended to
    during the search in multiple_prime; once its maximum exceeds sqrt(n)
    that range is empty and composites such as 1037 (= 17 * 61) were
    reported as prime.  The entries 2, 3, 5 and 7 were also themselves
    reported non-prime.  Plain trial division avoids both defects.
    """
    if n < 2:
        return False
    for i in range(2, int(math.sqrt(n)) + 1):
        if n % i == 0:
            return False
    return True
def factorize(n, factors):
    """Add the distinct prime factors of ``n`` to ``factors`` and return it.

    ``factors`` is mutated in place; pass ``set()`` for a fresh result.
    """
    for i in range(2, int(math.sqrt(n)) + 1):
        if n % i == 0:
            factors.add(i)
            # Integer floor division: the original int(n/i) goes through
            # true division and can lose precision for very large n.
            return factorize(n // i, factors)
    factors.add(n)
    return factors


def totient(n):
    """Return Euler's totient of ``n`` as a Fraction: n * prod(1 - 1/p).

    BUG FIX: the original used the bare name ``reduce``, which is a builtin
    only in Python 2; the explicit loop below behaves identically and also
    runs on Python 3.
    """
    result = Fraction(n)
    for p in factorize(n, set()):
        result *= 1 - Fraction(1, p)
    return result
# slow
def loop(n):
    """Brute-force search over 2..n for the number whose totient is a digit
    permutation of it and whose ratio i/phi(i) is minimal.

    Returns (number, ratio), or (None, None) when nothing matches.
    """
    num, min_ratio = None, None
    for i in range(2, n + 1):
        phi = totient(i)
        if sorted(str(i)) == sorted(str(phi)):
            # BUG FIX: the quantity to minimise is i/phi (as multiple_prime
            # does with m/phi), not n/phi -- dividing the constant bound n
            # by phi merely selected the candidate with the largest phi.
            ratio = i / phi
            if min_ratio is None or ratio < min_ratio:
                num = i
                min_ratio = ratio
    return num, min_ratio
def multiple_prime(n):
    """Search products of two primes p*q <= n whose totient (p-1)(q-1) is a
    digit permutation of the product, returning the pair (best product,
    best product/phi ratio)."""
    # Extend the module-level primes list with primes near sqrt(10**7),
    # which narrows the search space for the two factors.
    for candidate in range(1000, 5000):
        if is_prime(candidate):
            primes.append(candidate)
    best_num = None
    best_ratio = None
    for p, q in itertools.combinations(primes, 2):
        product = p * q
        if product > n:
            continue
        phi = totient(product)
        if sorted(str(product)) != sorted(str(phi)):
            continue
        ratio = product / phi
        if best_ratio is None or ratio < best_ratio:
            best_num = product
            best_ratio = ratio
    return best_num, best_ratio
# Entry point: solve via the prime-pair search and time one run
# (Python 2 print syntax; the slow full scan is left commented out).
if __name__ == '__main__':
    # print loop(10**7)
    print multiple_prime(10**7)
    print timeit.Timer('problem_070.multiple_prime(10**7)', 'import problem_070').timeit(1)
| Python | 0.000962 | |
16ccb2a670461e8ceb9934fd4ba8823b866c9d8e | Create plot.py | src/plot.py | src/plot.py | import pandas as pd
from matplotlib import pyplot as plt
from abc import ABCMeta, abstractmethod
class Plot(metaclass=ABCMeta):
    """Abstract base class for plot renderers."""
    @abstractmethod
    def show(self):
        # Subclasses may call this via super() to display the current
        # matplotlib figure.
        plt.show()
class CsvPlot(Plot):
    """Plots CSV files resolved through a parent-path format string."""

    def __init__(self, parent_path):
        # Format string with one placeholder that receives the file name.
        self.parent_path = parent_path

    def show(self, is_execute=False):
        # Only display the figure when explicitly requested.
        if is_execute:
            super().show()

    def plot(self, file_name, title, is_execute=False):
        frame = pd.read_csv(self.parent_path.format(file_name))
        frame.plot(title=title)
        self.show(is_execute)

    def plots(self, file_names, titles, is_execute=False):
        for file_name, title in zip(file_names, titles):
            self.plot(file_name, title)
        self.show(is_execute)
| Python | 0.000021 | |
254239102955bb8916aab98530251b5cdd79ce50 | Add script to write base signatures | cypher/siggen.py | cypher/siggen.py | #!/usr/bin/env python
import argparse
import subprocess
import os
import shutil
import sys
from util import write_signature
# Command line: the only option is the (required) source language name.
parser = argparse.ArgumentParser()
parser.add_argument(
    "-l",
    "--language",
    help="Source code language.",
    required=True
)
# Scratch directory for the cloned repository; always start from a clean one.
TEMP_DIR = os.path.join(os.getcwd(), "cypher", "temp")
if os.path.exists(TEMP_DIR):
    shutil.rmtree(TEMP_DIR)
lang = vars(parser.parse_args())["language"]
# Choose a large representative repository and file extensions per language.
if lang == "Python":
    repo = "https://github.com/django/django.git"
    ext = [".py"]
elif lang == "Ruby":
    repo = "https://github.com/Homebrew/legacy-homebrew.git"
    ext = [".rb"]
elif lang == "C":
    repo = "https://github.com/git/git.git"
    ext = [".c", ".h"]
elif lang == "C++":
    repo = "https://github.com/apple/swift.git"
    ext = [".cpp", ".cc", ".h"]
elif lang == "R":
    repo = "https://github.com/rstudio/shiny.git"
    ext = [".R", ".r"]
else:
    print("{} not found.".format(lang))
    sys.exit(0)
os.makedirs(TEMP_DIR)
# Clone the repository into the scratch directory and wait for completion.
pro = subprocess.Popen(
    ["git", "clone", repo],
    cwd=TEMP_DIR,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE
)
(out, error) = pro.communicate()
# Clone target directory, e.g. .../temp/django for .../django/django.git.
src_dir = os.path.join(TEMP_DIR, repo.split("/")[-1].split(".")[0])
write_signature(src_dir, lang, ext)
shutil.rmtree(TEMP_DIR)
| Python | 0.000001 | |
fe8d131a3cb9484cfe3f1b96102c0333077ffe89 | Add some basic tests | tests/test_basic.py | tests/test_basic.py | from cider import Cider
from mock import MagicMock, call
import pytest
import random
@pytest.mark.randomize(formulas=[str], cask=bool, force=bool)
def test_install(tmpdir, formulas, cask, force):
    """install() forwards to brew and records formulas in the bootstrap."""
    cider = Cider(cider_dir=str(tmpdir), cask=cask)
    cider.brew = MagicMock()
    cider.install(*formulas, force=force)
    cider.brew.install.assert_called_once_with(*formulas, force=force)
    key = "casks" if cask else "formulas"
    for formula in formulas:
        assert formula in cider.read_bootstrap().get(key, [])
@pytest.mark.randomize(formulas=[str], cask=bool)
def test_rm(tmpdir, formulas, cask):
    """rm() forwards to brew and drops formulas from the bootstrap."""
    cider = Cider(cider_dir=str(tmpdir), cask=cask)
    cider.brew = MagicMock()
    cider.rm(*formulas)
    cider.brew.rm.assert_called_once_with(*formulas)
    key = "casks" if cask else "formulas"
    for formula in formulas:
        assert formula not in cider.read_bootstrap().get(key, [])
@pytest.mark.randomize(
    domain=str, key=str, values=[str, int, float], force=bool
)
def test_set_default(tmpdir, domain, key, values, force):
    """set_default() writes via defaults and persists the raw value."""
    def expected(value):
        # "true"/"false" strings are coerced to booleans on write.
        return {
            "true": True,
            "false": False
        }.get(value, value)
    cider = Cider(cider_dir=str(tmpdir))
    cider.defaults = MagicMock()
    for value in values:
        cider.set_default(domain, key, value, force=force)
        cider.defaults.write.assert_called_with(
            domain, key, expected(value), force
        )
        assert cider.read_defaults()[domain][key] == value
        # Verify str(value) => defaults.write(value)
        cider.set_default(domain, key, str(value), force=force)
        _assert_roughly_called_with(
            cider.defaults.write, domain, key, value, force
        )
@pytest.mark.randomize(domain=str, key=str)
def test_remove_default(tmpdir, domain, key):
    """remove_default() deletes via defaults and from the stored defaults."""
    cider = Cider(cider_dir=str(tmpdir))
    cider.defaults = MagicMock()
    cider.remove_default(domain, key)
    cider.defaults.delete.assert_called_with(domain, key)
    assert key not in cider.read_defaults().get(domain, [])
@pytest.mark.randomize(tap=str)
def test_tap(tmpdir, tap):
    """tap() forwards to brew and records the tap in the bootstrap."""
    cider = Cider(cider_dir=str(tmpdir))
    cider.brew = MagicMock()
    cider.tap(tap)
    cider.brew.tap.assert_called_with(tap)
    assert tap in cider.read_bootstrap().get("taps", [])
@pytest.mark.randomize(tap=str)
def test_untap(tmpdir, tap):
    """untap() forwards to brew and drops the tap from the bootstrap."""
    cider = Cider(cider_dir=str(tmpdir))
    cider.brew = MagicMock()
    cider.untap(tap)
    cider.brew.untap.assert_called_with(tap)
    assert tap not in cider.read_bootstrap().get("taps", [])
def _assert_roughly_called_with(mock_self, *args, **kwargs):
    """Assert the mock's last call matches, comparing floats approximately.

    NOTE(review): ``kwargs.iteritems()`` below is Python-2-only syntax.
    """
    def assert_roughly_equal(actual, expected):
        if isinstance(actual, float) and isinstance(expected, float):
            assert abs(actual - expected) <= threshold
        else:
            assert actual == expected
    threshold = 0.01
    # mock_calls[-1] is a (name, args, kwargs) triple for the latest call.
    _, actual_args, actual_kwargs = mock_self.mock_calls[-1]
    for actual, expected in zip(actual_args, args):
        assert_roughly_equal(actual, expected)
    for key, expected in kwargs.iteritems():
        assert_roughly_equal(actual_kwargs.get(key), expected)
| Python | 0.000012 | |
19b13f0fb9b86ec99025bd1baf2c4d5fe757f809 | Add a test to make sure exception is raised | tests/test_tests.py | tests/test_tests.py | import pytest
def test_BeautifulSoup_methods_are_overridden(
    client_request,
    mock_get_service_and_organisation_counts,
):
    """The test-client page object forbids BeautifulSoup's find/find_all
    in favour of select_one/select, raising a descriptive AttributeError."""
    client_request.logout()
    page = client_request.get("main.index", _test_page_title=False)
    with pytest.raises(AttributeError) as exception:
        page.find("h1")
    assert str(exception.value) == "Don’t use BeautifulSoup.find – try BeautifulSoup.select_one instead"
    with pytest.raises(AttributeError) as exception:
        page.find_all("h1")
    assert str(exception.value) == "Don’t use BeautifulSoup.find_all – try BeautifulSoup.select instead"
| Python | 0 | |
1f4190a6d4ef002e75a8ac5ef80d326c712c749c | add test to verify the trace assignment | tests/test_trace.py | tests/test_trace.py | from __future__ import absolute_import
import pytest
from tchannel import TChannel, schemes
from tchannel.errors import BadRequestError
from tchannel.event import EventHook
@pytest.mark.gen_test
def test_error_trace():
    """The tracing id attached to an error response must equal the tracing
    id of the request that produced it."""
    tchannel = TChannel('test')

    class ErrorEventHook(EventHook):
        # Captures the tracing objects seen on the request and error paths.
        def __init__(self):
            self.request_trace = None
            self.error_trace = None

        def before_receive_request(self, request):
            self.request_trace = request.tracing

        def after_send_error(self, error):
            self.error_trace = error.tracing

    hook = ErrorEventHook()
    tchannel.hooks.register(hook)
    tchannel.listen()

    # Calling an unregistered endpoint with a tiny timeout forces an error.
    with pytest.raises(BadRequestError):
        yield tchannel.call(
            scheme=schemes.RAW,
            service='test',
            arg1='endpoint',
            hostport=tchannel.hostport,
            timeout=0.02,
        )
    assert hook.error_trace
    assert hook.request_trace
    assert hook.error_trace == hook.request_trace
| Python | 0 | |
7fa8417cb7635e238f1e95971fa0a86a95b64dca | Migrate deleted_at fields away | aleph/migrate/versions/aa486b9e627e_hard_deletes.py | aleph/migrate/versions/aa486b9e627e_hard_deletes.py | """Hard delete various model types.
Revision ID: aa486b9e627e
Revises: 9dcef7592cea
Create Date: 2020-07-31 08:56:43.679019
"""
from alembic import op
import sqlalchemy as sa
revision = "aa486b9e627e"
down_revision = "9dcef7592cea"
def upgrade():
    """Purge soft-deleted rows, then remove the deleted_at columns.

    Also deletes non-read permissions and tightens two column
    nullability constraints.  Irreversible: downgrade is a no-op.
    """
    meta = sa.MetaData()
    meta.bind = op.get_bind()
    meta.reflect()
    # Hard-delete everything that was only soft-deleted before.
    for table_name in ("alert", "entity", "mapping", "permission"):
        table = meta.tables[table_name]
        # ``!= None`` (not ``is not None``) is required for SQLAlchemy
        # expression building, hence the noqa.
        q = sa.delete(table).where(table.c.deleted_at != None)  # noqa
        meta.bind.execute(q)
    # Permissions without read access carry no information any more.
    table = meta.tables["permission"]
    q = sa.delete(table).where(table.c.read == False)  # noqa
    meta.bind.execute(q)
    op.drop_column("alert", "deleted_at")
    op.drop_column("entity", "deleted_at")
    op.drop_column("mapping", "deleted_at")
    op.drop_column("permission", "deleted_at")
    op.alter_column("entityset", "label", existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column("role", "is_muted", existing_type=sa.BOOLEAN(), nullable=False)
def downgrade():
    # Deleted rows cannot be restored, so this migration is one-way.
    pass
| Python | 0 | |
c61d3d5b4b31912c48e86425fe7e4861fc2f8c28 | test for read_be_array that fails in Python 2.x (see GH-6) | tests/test_utils.py | tests/test_utils.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from io import BytesIO
from psd_tools.utils import read_be_array
def test_read_be_array_from_file_like_objects():
    """read_be_array must accept file-like objects, not just buffers:
    two big-endian unsigned shorts (0x0001, 0x0005) read from a BytesIO."""
    fp = BytesIO(b"\x00\x01\x00\x05")
    res = read_be_array("H", 2, fp)
    assert list(res) == [1, 5]
| Python | 0 | |
eb82e816e4dece07aeebd7b9112156dacdb2d9bc | Add set_global_setting.py, not sure how this file dissapeared | commands/set_global_setting.py | commands/set_global_setting.py | from .command import CmakeCommand
class CmakeSetGlobalSettingCommand(CmakeCommand):
    """Editor command that opens the cmake server's global settings.

    NOTE(review): despite the "set" name this delegates to
    ``global_settings()``; confirm the server method prompts for a value.
    """
    def run(self):
        self.server.global_settings()
| Python | 0.000002 | |
96bea6812919067c28e0c28883226434d81f6e8d | add locusattrs class | locuspocus/locusattrs.py | locuspocus/locusattrs.py |
class LocusAttrs():
    """A restricted, dict-like container for Locus attributes.

    The backing dict is created lazily: ``_attrs`` stays ``None`` until
    the first key is assigned, which is what ``empty`` reports.
    """
    def __init__(self, attrs=None):
        # attrs: optional pre-built mapping of attribute name -> value.
        self._attrs = attrs
    def __len__(self):
        """Number of stored attributes (0 while the backing dict is unset)."""
        if self.empty:
            return 0
        return len(self._attrs)
    def __eq__(self, other):
        """Two LocusAttrs are equal when they hold the same items.

        Fix: comparing against a non-LocusAttrs previously raised
        AttributeError (``other.empty``); equality should never raise,
        so we defer to the other operand instead.
        """
        if not isinstance(other, LocusAttrs):
            return NotImplemented
        if self.empty and other.empty:
            return True
        if len(self) != len(other):
            # Short circuit on length
            return False
        return sorted(self.items()) == sorted(other.items())
    @property
    def empty(self):
        """True until the first attribute has been stored."""
        return self._attrs is None
    def keys(self):
        """Stored attribute names; an empty list when none exist."""
        if self.empty:
            return []
        return self._attrs.keys()
    def values(self):
        """Stored attribute values; an empty list when none exist."""
        if self.empty:
            return []
        return self._attrs.values()
    def items(self):
        """(key, value) pairs; an empty list when no attrs exist.

        Fix: the empty case previously returned ``{}`` -- a dict, not an
        iterable of pairs like the non-empty branch's ``dict.items()``.
        """
        if self.empty:
            return []
        return self._attrs.items()
    def __contains__(self, key):
        if self.empty:
            return False
        return key in self._attrs
    def __getitem__(self, key):
        # Report which key was missing (the original raised a bare KeyError()).
        if self.empty:
            raise KeyError(key)
        return self._attrs[key]
    def __setitem__(self, key, val):
        # First assignment creates the backing dict.
        if self.empty:
            self._attrs = {}
        self._attrs[key] = val
    def __repr__(self):
        if self.empty:
            return repr({})
        return repr(self._attrs)
| Python | 0 | |
cebb3a9cdbdee7c02b0c86e1879d0c20d36b4276 | add example | examples/example_cityfynder.py | examples/example_cityfynder.py | # Which city would like to live?
# Created by City Fynders - University of Washington
import pandas as pd
import numpy as np
import geopy as gy
from geopy.geocoders import Nominatim
import data_processing as dp
from plotly_usmap import usmap
# Load the raw natural/human/economy/tertiary indicator tables.
(natural, human, economy, tertiary) = dp.read_data()
# Add per-category rank columns to each DataFrame.
(natural, human, economy, tertiary) = dp.data_rank(natural, human, economy, tertiary)
# Geocode the cities to latitude/longitude series.
(Lat, Lon) = dp.find_loc(human)
# Combine the category ranks and coordinates into one rank DataFrame
# (also saved as a csv file by the helper).
rank = dp.create_rank(natural, human, economy, tertiary, Lat, Lon)
# Plot the overall US city ranking on a plotly usmap.
usmap(rank)
| Python | 0.000002 | |
9269afee9099ef172ac2ef55ea0af85b0c77587a | Add databases.py | py/database.py | py/database.py | import sqlalchemy
import sqlalchemy.orm
import uuid
import configmanager
class ConnectionManager():
    """Registry of named DatabaseConnection objects.

    All state lives in the class-level ``_connections`` dict, so the
    methods are classmethods.  The original methods were declared
    ``@staticmethod`` yet took ``self`` and referenced ``_connections``
    and ``name`` as undefined bare names, and used ``else if`` (a
    SyntaxError) -- none of it could have run.
    """
    _connections = {}

    @classmethod
    def addConnection(cls, connection, connectionName=None):
        """Register *connection* under *connectionName*; return the name.

        *connection* may be a DatabaseConnection or a connection string
        (a DatabaseConnection is then created from it).  A fresh random
        name is generated when none is given -- the original put
        ``uuid.uuid4().hex`` in the default, which is evaluated only
        once at definition time and so reused for every call.
        """
        if connectionName is None:
            connectionName = uuid.uuid4().hex
        if not isinstance(connectionName, str):
            raise ValueError(
                "connectionName must be of type str, not %s" % type(connectionName))
        if isinstance(connection, DatabaseConnection):
            cls._connections[connectionName] = connection
            return connectionName
        elif isinstance(connection, str):
            cls._connections[connectionName] = DatabaseConnection(connection)
            return connectionName
        else:
            raise ValueError(
                "connection must be of type str, not %s" % type(connection))

    @classmethod
    def getConnection(cls, connectionName):
        """Return the connection registered under *connectionName*, or None
        (also None for a non-string name, matching the original fall-through)."""
        if not isinstance(connectionName, str):
            return None
        return cls._connections.get(connectionName)

    @classmethod
    def closeConnection(cls, connectionName):
        """Close the named connection's session and drop it from the registry."""
        if not isinstance(connectionName, str):
            raise ValueError(
                "connectionName must be of type str, not %s" % type(connectionName))
        # pop() both removes and returns; the original additionally called
        # ``_connections.close()``, which dicts do not have.
        conn = cls._connections.pop(connectionName)
        conn.session.close()
class DatabaseConnection():
    """Wraps a SQLAlchemy engine plus one live session for a single URL."""

    def __init__(self, connectionString):
        # Use the argument: the original read an undefined ``configs``
        # dict and ignored the parameter entirely.
        self.connectionString = connectionString
        # create_engine takes the URL positionally, not via ``bind=``.
        self.engine = sqlalchemy.create_engine(self.connectionString)
        # sessionmaker returns a Session factory bound to the engine; the
        # original called the non-factory ``create_session`` and then
        # instantiated an undefined ``Session`` name.
        self._Session = sqlalchemy.orm.sessionmaker(bind=self.engine)
        self.session = self._Session()

    def getSelectedDatabase(self):
        """Return the schema currently selected on this connection
        (MySQL ``SELECT DATABASE()``), or None when none is selected."""
        # The original referenced a bare ``session`` name (NameError).
        result = self.session.execute("SELECT DATABASE()").fetchone()
        if result is not None:
            return result[0]
        return None
| Python | 0.000001 | |
5dfb7ad67216b31544c5f4dc785930ef0d9ffd56 | add faceAssigned tester | python/medic/plugins/Tester/faceAssigned.py | python/medic/plugins/Tester/faceAssigned.py | from medic.core import testerBase
from maya import OpenMaya
class FaceAssigned(testerBase.TesterBase):
    """Medic tester that flags DAG nodes with per-face shader assignments."""
    Name = "FaceAssigned"
    def __init__(self):
        super(FaceAssigned, self).__init__()
    def Match(self, node):
        # Only DAG nodes can carry shading-group assignments.
        return node.object().hasFn(OpenMaya.MFn.kDagNode)
    def Test(self, node):
        """Return True when the node's instObjGroups carry face-level
        (objectGroups) connections, i.e. a per-face material assignment."""
        inst_grp = node.dg().findPlug("instObjGroups", True)
        if not inst_grp:
            return False
        obj_grp = None
        # Locate the objectGroups child plug ("iog[-1].og") of instObjGroups.
        for i in range(inst_grp.numChildren()):
            child = inst_grp.child(i)
            if child.partialName() == "iog[-1].og":
                obj_grp = child
                break
        if not obj_grp:
            return False
        # Any connected element means faces are assigned individually.
        if obj_grp.numConnectedElements() > 0:
            return True
        return False
Tester = FaceAssigned
| Python | 0 | |
971570b4288c9ac7131a1756e17574acbe6d1b9a | Add script for converting a solarized dark file to solarized dark high contrast | python/misc/solarized-dark-high-contrast.py | python/misc/solarized-dark-high-contrast.py | #!/usr/bin/env python
import sys
if sys.version_info < (3, 4):
sys.exit('ERROR: Requires Python 3.4')
from enum import Enum
def main():
    """Rewrite a solarized-dark themed file into the high-contrast variant.

    Each dark palette color is shifted one step lighter.  The two lists
    are index-aligned and iterated top-down, so a replacement value never
    gets re-replaced by a later pass (each newly introduced code was
    already handled in an earlier iteration).
    """
    Cases = Enum('Cases', 'lower upper')
    infile_case = None
    if len(sys.argv) < 2:
        sys.stderr.write('ERROR: Must provide a file to modify\n')
        sys.exit('Usage: {} FILE'.format(sys.argv[0]))
    # Keep these in lists instead of a dict to preserve ordering
    color_codes_dark = [
        'eee8d5',
        '93a1a1',
        '839496',
        '657b83',
        '586e75',
    ]
    color_codes_dark_high_contrast = [
        'fdf6e3',
        'eee8d5',
        '93a1a1',
        '839496',
        '657b83',
    ]
    with open(sys.argv[1], 'r') as infile:
        outfile_data = infile.read()
    # Figure out whether the input is using upper or lower case color codes
    for color_code in color_codes_dark:
        # Skip color codes that don't contain letters
        if color_code.lower() == color_code.upper():
            continue
        if outfile_data.find(color_code.lower()) != -1:
            infile_case = Cases.lower
            # Use the first one we find as the decisive case
            break
        elif outfile_data.find(color_code.upper()) != -1:
            infile_case = Cases.upper
            break
    # Normalise every occurrence (either case) to the detected case of
    # the input file while shifting to the high-contrast palette.
    for i in range(len(color_codes_dark)):
        if infile_case == Cases.lower:
            outfile_data = outfile_data.replace(color_codes_dark[i].lower(), color_codes_dark_high_contrast[i].lower())
            outfile_data = outfile_data.replace(color_codes_dark[i].upper(), color_codes_dark_high_contrast[i].lower())
        elif infile_case == Cases.upper:
            outfile_data = outfile_data.replace(color_codes_dark[i].lower(), color_codes_dark_high_contrast[i].upper())
            outfile_data = outfile_data.replace(color_codes_dark[i].upper(), color_codes_dark_high_contrast[i].upper())
    # NOTE(review): rsplit('.', 1) yields a single element for extensionless
    # paths, which would make this format() call raise IndexError.
    with open('{}-high-contrast.{}'.format(*sys.argv[1].rsplit('.', 1)), 'w') as outfile:
        outfile.write(outfile_data)
if __name__ == '__main__':
    main()
b72c421696b5714d256b7ac461833bc692ca5354 | Add an autonomous mode to strafe and shoot. Doesn't work | robot/robot/src/autonomous/hot_aim_shoot.py | robot/robot/src/autonomous/hot_aim_shoot.py |
try:
import wpilib
except ImportError:
from pyfrc import wpilib
import timed_shoot
class HotShootAutonomous(timed_shoot.TimedShootAutonomous):
'''
Based on the TimedShootAutonomous mode. Modified to allow
shooting based on whether the hot goal is enabled or not.
'''
DEFAULT = False
MODE_NAME = "Hot Aim shoot"
def __init__(self, components):
super().__init__(components)
wpilib.SmartDashboard.PutNumber('DriveStrafeSpeed', 0.5)
wpilib.SmartDashboard.PutBoolean('IsHotLeft', False)
wpilib.SmartDashboard.PutBoolean('IsHotRight', False)
def on_enable(self):
'''these are called when autonomous starts'''
super().on_enable()
self.drive_strafe_speed = wpilib.SmartDashboard.GetNumber('DriveStrafeSpeed')
print("-> Drive strafe:", self.drive_strafe_speed)
self.decided = False
self.start_time = None
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# decide if it's hot or not
if not self.decided:
self.hotLeft = wpilib.SmartDashboard.GetBoolean("IsHotLeft")
self.hotRight = wpilib.SmartDashboard.GetBoolean("IsHotRight")
if (self.hotLeft or self.hotRight) and not (self.hotLeft and self.hotRight):
self.decided = True
if self.hotLeft:
self.drive_strafe_speed *= -1
elif time_elapsed > 6:
# at 6 seconds, give up and shoot anyways
self.decided = True
# always keep the arm down
self.intake.armDown()
# wait a split second for the arm to come down, then
# keep bringing the catapult down so we're ready to go
if time_elapsed > 0.3:
self.catapult.pulldown()
# wait some period before we start driving
if time_elapsed < self.drive_wait:
pass
else:
if self.decided:
# only set this once, so we can calculate time from this
# point on
if self.start_time is None:
self.start_time = time_elapsed
time_elapsed = time_elapsed - self.start_time
if time_elapsed < self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(self.drive_strafe_speed, self.drive_speed, 0)
elif time_elapsed < self.drive_time + 1.0:
# Finally, fire and keep firing for 1 seconds
self.catapult.launchNoSensor()
| Python | 0.000002 | |
3c046062af376603145545f37b917a5c927b3aba | Create mergesort_recursive.py | recursive_algorithms/mergesort_recursive.py | recursive_algorithms/mergesort_recursive.py | def merge_sort(array):
temp = []
if( len(array) == 1):
return array;
half = len(array) / 2
lower = merge_sort(array[:half])
upper = merge_sort(array[half:])
lower_len = len(lower)
upper_len = len(upper)
i = 0
j = 0
while i != lower_len or j != upper_len:
if( i != lower_len and (j == upper_len or lower[i] < upper[j])):
temp.append(lower[i])
i += 1
else:
temp.append(upper[j])
j += 1
return temp
# Demo: sort a sample list and print it space-separated.
array = [11, 12, 3, 28, 41, 62,16, 10]
ar = merge_sort(array)
# print() call form: identical output under Python 2 (single-argument
# parenthesised print statement) and valid under Python 3.
print(" ".join(str(x) for x in ar))
| Python | 0.000004 | |
06b0f93ecd5fac8eda02fce96c1e4ec0306a7989 | Increase coverage | test/test_google.py | test/test_google.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from base import web, model, Fixtures
import pybossa.view.google as google
class TestGoogle:
    """Integration tests for the Google OAuth user-management view."""
    def setUp(self):
        # Fresh DB plus fixture users before every test.
        self.app = web.app
        model.rebuild_db()
        Fixtures.create()
    def test_manage_user(self):
        """Test GOOGLE manage_user works."""
        with self.app.test_request_context('/'):
            # First with a new user
            user_data = dict(id='1', name='google',
                             email='g@g.com')
            token = 't'
            user = google.manage_user(token, user_data, None)
            assert user.email_addr == user_data['email'], user
            assert user.name == user_data['name'], user
            assert user.fullname == user_data['name'], user
            assert user.google_user_id == user_data['id'], user
            # Second with the same user
            user = google.manage_user(token, user_data, None)
            assert user.email_addr == user_data['email'], user
            assert user.name == user_data['name'], user
            assert user.fullname == user_data['name'], user
            assert user.google_user_id == user_data['id'], user
            # Finally with a user that already is in the system
            user_data = dict(id='10', name=Fixtures.name,
                             email=Fixtures.email_addr)
            token = 'tA'
            user = google.manage_user(token, user_data, None)
            # An existing non-Google account must not be linked silently.
            assert user is None
| Python | 0 | |
17e2b9ecb67c8b1f3a6f71b752bc70b21584092e | Add initial tests for scriptserver. | tests/test_scriptserver.py | tests/test_scriptserver.py | import unittest
from mock import patch, Mock
import sys
sys.path.append(".")
from scriptserver import ZoneScriptRunner
class TestZoneScriptRunner(unittest.TestCase):
    """Unit tests for ZoneScriptRunner with mongoengine/Object mocked out."""
    @classmethod
    def setUpClass(cls):
        # Patch mongoengine for the whole test class (no real DB needed).
        cls.mongoengine_patch = patch('scriptserver.me')
        cls.mongoengine_patch.start()
    @classmethod
    def tearDownClass(cls):
        cls.mongoengine_patch.stop()
    def test___init__(self):
        """Constructing the runner triggers exactly one load_scripts()."""
        zoneid = "zoneid"
        with patch('scriptserver.Object'):
            with patch.object(ZoneScriptRunner, 'load_scripts') as mock_load_scripts:
                zone_script_runner = ZoneScriptRunner(zoneid)
                self.assertTrue(zone_script_runner)
                self.assertEqual(1, mock_load_scripts.call_count)
    def test_load_scripts(self):
        """load_scripts() imports each ScriptedObject's script modules."""
        expected = {}
        zoneid = "zoneid"
        with patch.object(ZoneScriptRunner, 'load_scripts'):
            with patch('scriptserver.Object'):
                zone_script_runner = ZoneScriptRunner(zoneid)
        with patch('scriptserver.ScriptedObject') as ScriptedObject:
            MockThing = Mock()
            # Fake the 'thing.fake.chicken' module tree in sys.modules so
            # the runner's dynamic import resolves to mocks.
            with patch.dict('sys.modules', {'thing': MockThing, 'thing.fake': MockThing.fake,
                                            'thing.fake.chicken': MockThing.fake.chicken}):
                MockThing.fake.chicken.Chicken.tick = Mock()
                MockScriptedObject = Mock()
                MockScriptedObject.scripts = ['thing.fake.chicken']
                ScriptedObject.objects.return_value = [MockScriptedObject]
                result = zone_script_runner.load_scripts()
                self.assertNotEqual(expected, result)
                self.assertIn('thing.fake.chicken', result)
    def test_start(self):
        # zone_script_runner = ZoneScriptRunner(zoneid)
        # self.assertEqual(expected, zone_script_runner.start())
        pass # TODO: implement your test here
    def test_tick(self):
        # zone_script_runner = ZoneScriptRunner(zoneid)
        # self.assertEqual(expected, zone_script_runner.tick())
        pass # TODO: implement your test here
if __name__ == '__main__':
    unittest.main()
| Python | 0 | |
81eea6809f369ccabc55f7b53edbc1c2b961f146 | Create get_random_generate_number_sendmessage.py | get_random_generate_number_sendmessage.py | get_random_generate_number_sendmessage.py | #!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
__author__ = 'https://github.com/password123456/'
import random
import numpy as np
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import requests
import urllib
import urllib2
import json
import datetime
import time
import dateutil.relativedelta as REL
class bcolors:
    # ANSI terminal escape sequences for coloured console output.
    # ENDC resets all attributes and must follow every coloured span.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def computer_random():
    """Draw 6 unique random integers from 1 to 45 (Lotto 6/45).

    Fixes versus the original:
    * ``np.arange(1, 45)`` only covered 1..44, silently excluding 45
      from the draw pool; the pool is now 1..45 as the game intends.
    * The retry loop that rejected draws containing 0 was dead code --
      0 is never in the pool -- so a single draw suffices.
    """
    lotto_num_list = np.arange(1, 46)
    return np.random.choice(lotto_num_list, 6, replace=False)
def user_random():
    """Print the start banner and draw the user's 6 unique numbers (1..45).

    Same fixes as computer_random(): the pool is corrected from 1..44 to
    1..45 and the dead reject-zero retry loop is removed.  The prints use
    the call form, which produces identical output under Python 2 while
    also being valid Python 3.
    """
    time_now = time.strftime('%Y-%m-%d %H:%M:%S')
    print("=============================")
    print(" 로또 번호 생성기 ")
    print("=============================")
    print("[+] 시작시간: %s" % time_now)
    print("[+] 번호생성: 1~45 중 임의의 번호 6 개를 만듭니다.")
    lotto_num_list = np.arange(1, 46)
    return np.random.choice(lotto_num_list, 6, replace=False)
def match_lists(list1 , list2):
    """Count how many distinct values appear in both input lists."""
    common = set(list1) & set(list2)
    return len(common)
def calculate():
    global user_list
    # Let the user draw 6 numbers (stored in the module-global user_list).
    user_list = user_random()
    print "[+] 생성번호: %s" % user_list
    # Global tally of how many simulated tickets matched 3/4/5/6 numbers.
    global match3
    global match4
    global match5
    global match6
    match3 = 0
    match4 = 0
    match5 = 0
    match6 = 0
    # The computer re-draws this many simulated tickets.
    # 8145060 = C(45, 6), the total number of possible Lotto 6/45 tickets.
    #tickets_sold = 1000
    tickets_sold = 8145060
    print "[+] 번호분석: 1/%d 개의 난수를 생성하여 생성된 번호와 일치할 확률을 계산" % tickets_sold
    for k in range(tickets_sold):
        comp_list = computer_random()
        # Compare the simulated ticket against the user's numbers.
        matches = match_lists(comp_list, user_list)
        if matches == 3:
            match3 += 1
        elif matches == 4:
            match4 += 1
        elif matches == 5:
            match5 += 1
        elif matches == 6:
            match6 += 1
def get_next_saturday():
    """Return the date of the next Saturday (Lotto draw day).

    ``days=1`` pushes past today first, so if today is Saturday the
    result is next week's Saturday, never today.
    """
    today = datetime.date.today()
    rd = REL.relativedelta(days=1, weekday=REL.SA)
    next_saturday = today + rd
    return next_saturday
def read_file(saved_lotto_file):
    """Return (contents, number_of_lines) for the given text file."""
    with open(saved_lotto_file, 'r') as handle:
        lines = handle.readlines()
    # Joining the readlines() result reproduces the file text verbatim.
    return ''.join(lines), len(lines)
def delete_file(saved_lotto_file):
    """Remove *saved_lotto_file*; report (without raising) when absent."""
    import os
    if not os.path.isfile(saved_lotto_file):
        print("Error: %s file not found" % saved_lotto_file)
        return
    os.remove(saved_lotto_file)
def main():
#saved_lotto_file = './lotto_numbers.txt'
count = 2
games = 0
while True:
calculate()
print "[+] 번호선택: 생성번호가 컴퓨터가 생성한 번호와 1개 이상 같을 경우 선택함"
print "[+] 분석결과"
print "----------------------------"
print " 1. 5등/3 개 번호일치: %d 번" % match3
print " 2. 4등/4 개 번호일치: %d 번" % match4
print " 3. 3등/5 개 번호일치: %d 번" % match5
print " 4. 1등/모두 일치: %d 번" % match6
print "----------------------------"
print ">>>>>>>>>>"
if (match6 >= count):
games += 1
print "[+] 생성 번호: %s" % user_list
print "[+] 6 개 번호가 모두 일치 하는 경우가 %d 번 탐지 / 이 번호 저장함." % (match6)
print "[+] 총5 게임을 진행합니다. 현재는 %d 게임째 입니다." % games
f = open(saved_lotto_file, 'a')
f.write('자 동 %s\n' % (user_list))
f.close()
else:
print " [+] 맞는 조건이 없어 처음부터 다시 번호를 뽑습니다."
print
continue
if games == 5:
print "[+] %d 게임이 완료되어 추첨을 종료합니다." % games
print "[+] 추첨된 번호는 $YOUR API 로 전송합니다."
next_saturday = get_next_saturday()
read_lotto_data, read_lotto_data_line_count = read_file(saved_lotto_file)
game_price = 1000
total_price = game_price * read_lotto_data_line_count
contents = '\
** 언제나 좋아요 로또 **\n\n\
「절반의 행운, 절반의 기부\n\
\t\t나눔 Lotto 6/45\n\
추 첨 일 : %s (토)\n\
-----------------------------------\n\
%s\
-----------------------------------\n\
총 %d 게임\n\
금액 %d원\n\
>> 걸리면 반띵알지?' % (next_saturday,read_lotto_data,read_lotto_data_line_count,total_price)
print
print ">>>>>> 메시지 시작 <<<<<<<"
print
print contents
print
print ">>>>>> 메시지 끝 <<<<<<<"
try:
# send contents your SMS, API
# to here
except Exception, e:
print '%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC)
pass
else:
delete_file(saved_lotto_file)
break
# Script entry point: Ctrl-C exits quietly, anything else is reported.
# NOTE(review): ``except Exception, e`` is Python-2-only syntax, matching
# the shebang of this file.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception, e:
        print '%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC)
| Python | 0.000189 | |
e5243d0fb792e82825633f1afdd6e799238a90f3 | Add portable buildtools update script (#46) | tools/buildtools/update.py | tools/buildtools/update.py | #!/usr/bin/python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Pulls down tools required to build flutter."""
import os
import subprocess
import sys
# Repo root = three directories above this file (tools/buildtools/update.py).
SRC_ROOT = (os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
BUILDTOOLS = os.path.join(SRC_ROOT, 'buildtools')
# Make tools/find_depot_tools importable, then put depot_tools on PATH.
sys.path.insert(0, os.path.join(SRC_ROOT, 'tools'))
import find_depot_tools
DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
def Update():
    """Run buildtools/update.sh (non-Windows); returns its exit code."""
    path = os.path.join(BUILDTOOLS, 'update.sh')
    return subprocess.call(['/bin/bash', path, '--toolchain', '--gn'], cwd=SRC_ROOT)
def UpdateOnWindows():
    """Fetch the pinned gn.exe from Google Storage; returns the exit code."""
    sha1_file = os.path.join(BUILDTOOLS, 'win', 'gn.exe.sha1')
    downloader_script = os.path.join(DEPOT_PATH, 'download_from_google_storage.py')
    download_cmd = [
        'python',
        downloader_script,
        '--no_auth',
        '--no_resume',
        '--quiet',
        '--platform=win*',
        '--bucket',
        'chromium-gn',
        '-s',
        sha1_file
    ]
    return subprocess.call(download_cmd)
def main(argv):
    # Dispatch on platform; the exit code is propagated to the shell.
    if sys.platform.startswith('win'):
        return UpdateOnWindows()
    return Update()
if __name__ == '__main__':
    sys.exit(main(sys.argv))
| Python | 0 | |
d1f02226fe805fb80a17f1d22b84b748b65b4e7f | add sam2fq.py | sam2fq.py | sam2fq.py | import sys
from collections import namedtuple
# One SAM alignment reduced to the fields needed for FASTQ output.
Read = namedtuple('Read', ['name','qual','seq'])
read1 = None
# Output streams for paired reads and for reads without a mate.
left = open('pe_1.fq', 'w')
right = open('pe_2.fq', 'w')
unpaired = open('unpaired.fq', 'w')
# SAM comes in on stdin; consecutive records with the same QNAME form a pair.
for line in sys.stdin:
    items = line.split('\t')
    # SAM columns: 0=QNAME, 9=SEQ, 10=QUAL.
    name, qual, seq = items[0], items[10], items[9]
    if not read1:
        # Hold the first record until we can compare it with the next one.
        read1 = Read(name, qual, seq)
        continue
    else:
        read2 = Read(name, qual, seq)
    if read1.name == read2.name:
        # Mate pair found; actual FASTQ writes are currently disabled.
        print read1.name, '<-->', read2.name
        #print >> left, '@%s\n%s\n+\n%s' % (read1.name, read1.seq, read1.qual)
        #print >> right, '@%s\n%s\n+\n%s' % (read2.name, read2.seq, read2.qual)
        read1 = None
    else:
        # read1 has no mate; read2 becomes the new pending candidate.
        print read1.name
        #print >> unpaired, '@%s\n%s\n+\n%s' % (read1.name, read1.seq, read1.qual)
        read1 = read2
        read2 = None
# Flush a trailing unmatched record, if any.
if read1:
    print read1.name
    #print >> unpaired, '@%s\n%s\n+\n%s' % (read1.name, read1.seq, read1.qual)
    read1 = read2
    read2 = None
| Python | 0.001125 | |
dbdfb1b5a703e0392ca67a03113e607678015a66 | add kattis/settlers2 | Kattis/settlers2.py | Kattis/settlers2.py | """
Problem: settlers2
Link: https://open.kattis.com/problems/settlers2
Source: NWERC 2009
"""
from collections import defaultdict
import math
# Precompute tile numbers for the first MAXN tiles of the hexagonal spiral.
MAXN = 10000
currentPosition = (0,0)
currentNum = 1
# How many tiles of each number (1..5) have been placed so far.
counter = defaultdict()
layers = 1
direction = 0
directionCounter = 0
# Steps remaining on each of the six spiral edges of the current layer.
limitDirectionCounter = [layers, layers-1, layers, layers, layers, layers]
# Hex-grid unit moves for the six spiral directions (axial-style coords).
transitionVectors = [(-1,1), (-2,0), (-1,-1), (1,-1), (2,0), (1,1)]
nMoves = 0
# tilesMap: position -> tile number; tilesList: 1-indexed placement order.
tilesMap = dict()
tilesMap[currentPosition] = currentNum
tilesList = [None, currentNum]
for num in [1,2,3,4,5]: counter[num] = 0
counter[currentNum] += 1
def add(position, vector):
    """Translate *position* by *vector*, component-wise."""
    px, py = position
    vx, vy = vector
    return (px + vx, py + vy)
# Preprocess: walk the spiral, numbering each new tile greedily.
while len(tilesList) - 1 < MAXN:
    currentPosition = add(currentPosition, transitionVectors[direction])
    directionCounter += 1
    # Turn the spiral corner once this edge's step budget is used up.
    while limitDirectionCounter[direction] == directionCounter:
        # Increase limit counter for next round
        limitDirectionCounter[direction] += 1
        # Change direction
        direction = (direction + 1) % len(transitionVectors)
        # Reset direction counter
        directionCounter = 0
    neighbors = [add(currentPosition, transitionVector) for transitionVector in transitionVectors]
    possibilities = set([1,2,3,4,5])
    # Eliminate numbers already used by an adjacent placed tile.
    for neighbor in neighbors:
        if neighbor in tilesMap and tilesMap[neighbor] in possibilities:
            possibilities.remove(tilesMap[neighbor])
    # Keep only the numbers placed the fewest times so far.
    minCounter = math.inf
    for possibility in possibilities:
        minCounter = min(minCounter, counter[possibility])
    possibilityToRemove = []
    for possibility in possibilities:
        if counter[possibility] != minCounter:
            possibilityToRemove.append(possibility)
    for possibility in possibilityToRemove:
        possibilities.remove(possibility)
    # Tie-break by taking the smallest remaining number.
    possibilities = sorted(possibilities)
    currentNum = possibilities[0]
    tilesMap[currentPosition] = currentNum
    tilesList.append(currentNum)
    counter[currentNum] += 1
# Post-process: answer each query n with the precomputed nth tile number.
C = int(input())
for i in range(C):
    n = int(input())
    print(tilesList[n])
48443f8a8f5a15b3116ba7b4a842189f5e659f26 | test script for pymatbridge | test_pymatbridge.py | test_pymatbridge.py | #!/usr/bin/python
from pymatbridge import Matlab
# Smoke test: start a MATLAB session, run two statements, shut down.
mlab = Matlab()
mlab.start()
print "Matlab started?", mlab.started
print "Matlab is connected?", mlab.is_connected()
# MATLAB statements executed inside the remote session.
mlab.run_code("conteo = 1:10")
mlab.run_code("magica = magic(5)")
mlab.stop()
| Python | 0 | |
4d08d50d73e8d3d3a954c9ef8ddffc23444d7d28 | Create script.py | script.py | script.py | #!/usr/bin/env python3
# première tentative de documenter l'API de coco.fr
import random
import requests
pseudo = "caecilius" # doit être en minuscule et de plus de 4 caractères
age = "22" # minimum "18"
sexe = "1" # "1" pour homme, "2" pour femme
codeville = "30929" # à récuperer ici http://coco.fr/cocoland/foo.js par exemple pour Paris 15 :
# http://coco.fr/cocoland/75015.js va donner "var cityco='30929*PARIS*'; procecodo();"
# le codeville est donc "30929"
referenz = "0" # aucune idée de ce que ça fait, pour l'instant la valeur est toujours "0"
salt = str(random.randrange(100000000, 999999999))
url = str("http://coco.fr#" + pseudo + "_" + sexe + "_" + age + "_" + codeville + "_0_" + salt + "_" + referenz)
r = requests.get(url)
| Python | 0.000002 | |
cc0d6a3b782c5646b9742ebe7308b42507ed2714 | Add python API draft interface | python/kmr4py.py | python/kmr4py.py | class MapReduce(object):
    # Draft API stubs: signatures only, bodies intentionally unimplemented.
    # "HIGH PRIORITY" marks the methods planned for the first implementation.
    def reply_to_spawner(self):
        pass
    def get_spawner_communicator(self, index):
        pass
    def send_kvs_to_spawner(self, kvs):
        pass
    def concatenate_kvs(self, kvss):
        pass
    def map_once(self, mapfn, kvo_key_type=None,
                 rank_zero_only=False, nothreading=False,
                 inspect=False, keep_open=False, take_ckpt=False):
        # HIGH PRIORITY
        pass
    def map_on_rank_zero(self, mapfn, kvo_key_type=None,
                         nothreading=False, inspect=False, keep_open=False,
                         take_ckpt=False):
        # HIGH PRIORITY
        pass
    def read_files_reassemble(self, filename, color, offset, bytes):
        pass
    def read_file_by_segments(self, filename, color):
        pass
class KVS(object):
    """Draft interface for a key-value stream; all methods are stubs.

    "HIGH PRIORITY" comments mark the subset needed first (enough for a
    wordcount example).
    """
    def map(self, mapfn, kvo_key_type=None,
            nothreading=False, inspect=False, keep_open=False, take_ckpt=False):
        # HIGH PRIORITY
        pass
    def map_rank_by_rank(self, mapfn, opt):
        pass
    def map_ms(self, mapfn, opt):
        pass
    def map_ms_commands(self, mapfn, opt, sopt):
        pass
    def map_for_some(self, mapfn, opt):
        pass
    def map_via_spawn(self, mapfn, sopt):
        pass
    def map_processes(self, mapfn, nonmpi, sopt):
        pass
    def map_parallel_processes(self, mapfn, sopt):
        pass
    def map_serial_processes(self, mapfn, sopt):
        pass
    def reverse(self, kvo_key_type=None,
                nothreading=False, inspect=False, keep_open=False,
                take_ckpt=False):
        # HIGH PRIORITY (as used in wordcount.py)
        pass
    def reduce(self, redfn, kvo_key_type=None,
               nothreading=False, inspect=False, take_ckpt=False):
        # HIGH PRIORITY
        pass
    def reduce_as_one(self, redfn, opt):
        pass
    def reduce_for_some(self, redfn, opt):
        pass
    def shuffle(self, kvo_key_type=None,
                key_as_rank=False, take_ckpt=False):
        # HIGH PRIORITY
        pass
    def replicate(self, kvo_key_type=None,
                  inspect=False, rank_zero=False, take_ckpt=False):
        # HIGH PRIORITY
        pass
    def distribute(self, cyclic, opt):
        pass
    def sort_locally(self, shuffling, opt):
        pass
    def sort(self, inspect=False):
        # HIGH PRIORITY (as used in wordcount.py)
        pass
    def sort_by_one(self, opt):
        pass
    def free(self):
        # HIGH PRIORITY
        pass
    def add_kv(self, kv_tuple):
        # HIGH PRIORITY
        pass
    def add_kv_done(self):
        # HIGH PRIORITY
        pass
    def get_element_count(self):
        # HIGH PRIORITY
        pass
    def local_element_count(self):
        pass
    def to_list(self):
        # retrieve_kvs_entries
        pass
    # def from_list():
    #     pass
    def dump(self, flag):
        pass
    def __str__(self):
        # NOTE(review): __str__ must return a str; returning the result of
        # dump() (currently None) makes str(kvs) raise TypeError.
        self.dump(0)
    def dump_stats(self, level):
        pass
| Python | 0.000001 | |
dbacf8cd0c2bae394b6c67a810836668d510787d | test for index (re)generation | tests/test_index.py | tests/test_index.py | from cheeseprism.utils import resource_spec
from itertools import count
from path import path
from pprint import pprint
import unittest
class IndexTestCase(unittest.TestCase):
    """Exercise cheeseprism.index.IndexManager against throwaway index dirs."""
    # Class-level counter gives each created index a unique directory name.
    # NOTE(review): the expected names below assume this test creates index
    # "0-test-index", i.e. that it runs first — confirm test ordering.
    counter = count()
    base = "egg:CheesePrism#tests/test-indexes"
    def make_one(self, index_name='test-index'):
        """Build an IndexManager rooted at a fresh, uniquely named directory."""
        from cheeseprism import index
        index_path = path(resource_spec(self.base)) / "%s-%s" %(next(self.counter), index_name)
        return index.IndexManager(index_path)
    def setUp(self):
        # Seed the fresh index with a single dummy sdist.
        self.im = self.make_one()
        dummy = path(__file__).parent / "dummypackage/dist/dummypackage-0.0dev.tar.gz"
        dummy.copy(self.im.path)
        self.dummypath = self.im.path / dummy.name
    def test_regenerate_index(self):
        """regenerate() must emit per-package and top-level index.html files."""
        self.im.regenerate(self.im.path)
        pth = self.im.path
        file_structure = [(x.parent.name, x.name) for x in pth.walk()]
        expected = [(u'0-test-index', u'dummypackage'),
                    (u'dummypackage', u'index.html'),
                    (u'0-test-index', u'dummypackage-0.0dev.tar.gz'),
                    (u'0-test-index', u'index.html')]
        # Format the data itself: the old message interpolated pprint(...),
        # which prints to stdout and returns None, so failures always read
        # "expected: None ... actual: None".
        assert file_structure == expected, "File structure does not match:\nexpected: %s.\n actual: %s" % (expected, file_structure)
| Python | 0 | |
554c6490330760690fbbd1cd5ece3da563e342eb | update queen4.py | python/queen4.py | python/queen4.py | f = lambda A, x, y: y < 0 or (not (A[y] in (A[x], A[x] + (x - y), A[x] - (x - y))))
g = lambda A, x, y: (not x) or (f(A, x, y) and ((y < 0) or g(A, x, y - 1)))
h = lambda A, x: sum([ g(A, x, x - 1) and 1 or 0 for A[x] in range(len(A)) ])
q = lambda A, x: h(A, x) if (x == 7) else sum([ q(A, x + 1) for A[x] in range(8) if g(A, x, x - 1) ])
print(q([ 0 for i in range(8) ], 0))
| Python | 0.000001 | |
7d2c728cb121a0aefef11fd3c8ab7b7f700516e8 | read grove pi sensors | readSensors.py | readSensors.py | import time
import decimal
import grovepi
import math
from grovepi import *
from grove_rgb_lcd import *
# GrovePi port assignments (analog A0/A1, digital D2/D?).
sound_sensor = 0 # port A0
light_sensor = 1 # port A1
temperature_sensor = 2 # port D2
led = 4 # NOTE(review): comment said "port D3" but the pin value is 4 — confirm wiring
# Previous readings; the main loop only updates the display when one changes.
lastTemp = 0.1 # initialize a floating point temp variable
lastHum = 0.1 # initialize a floating Point humidity variable
lastLight = 0.1
lastSound = 0.1
# Temperature comfort thresholds (degrees C) used by calcBG().
tooLow = 16.0 # Too low temp
justRight = 20.0 # OK temp
tooHigh = 23.0 # Too high temp
grovepi.pinMode(led,"OUTPUT")
grovepi.analogWrite(led,255) #turn led to max to show readiness
def calcColorAdj(variance):
    """Scale a temperature variance (degrees C) to an 8-bit colour adjustment.

    Six degrees map onto 255 levels, so 42.5 is the factor for a 12-degree
    spread; the result is truncated to an int, made positive, and capped
    at 255.
    """
    scaled = abs(int(42.5 * variance))
    return min(scaled, 255)
def calcBG(ftemp):
    """Map a temperature (degrees C) onto an RGB background colour.

    Blue-ish below the justRight setpoint, pure green at it, red-ish above.
    Returns a [R, G, B] list with each channel in 0-255.
    """
    variance = ftemp - justRight      # signed distance from the ideal temperature
    adj = calcColorAdj(variance)      # 0-255 magnitude of that distance
    if variance < 0:
        # Too cold: no red; blue rises and green falls with the deficit.
        bgR = 0
        bgB = adj
        bgG = 255 - adj
    elif variance == 0:
        # Perfect: solid green.
        bgR = 0
        bgB = 0
        bgG = 255
    else:
        # Too hot: no blue; red rises and green falls with the excess.
        # (Plain `else` makes the branch exhaustive — the old elif chain
        # left bgR/bgG/bgB unbound for NaN input.)
        bgB = 0
        bgR = adj
        bgG = 255 - adj
    return [bgR, bgG, bgB]
while True:
    # Poll the sensors once a second; refresh the LCD only when a reading
    # changed. Error handling covers GrovePi communication problems.
    try:
        time.sleep(1)
        light = grovepi.analogRead(light_sensor) / 10
        sound = grovepi.analogRead(sound_sensor)
        [t,h]=[0,0]
        [t,h] = grovepi.dht(temperature_sensor,0)
        # Mirror ambient light on the LED.
        # NOTE(review): light*2 can exceed 255 when analogRead > 1270 — confirm
        # grovepi.analogWrite clamps its value.
        grovepi.analogWrite(led,light*2)
        if (h != lastHum) or (t != lastTemp) or (sound != lastSound) or (light != lastLight):
            out_str ="Temperature:%d C; Humidity:%d %%; Light:%d; Sound:%d" %(t,h,light,sound)
            print (out_str)
            bgList = calcBG(t) # Calculate background colors
            setRGB(bgList[0],bgList[1],bgList[2]) # parse our list into the color settings
            out_str ="Tmp:%d Hum:%d\nLght:%d Snd:%d" %(t,h,light,sound)
            setText(out_str)
            lastHum = h
            lastTemp = t
            lastSound = sound
            lastLight = light
    except IOError:
        print("IO Error")
    except KeyboardInterrupt:
        # Blank the display and LED on Ctrl-C, then exit.
        # NOTE(review): "EXITNG" is a typo in a runtime string (left as-is here).
        print("EXITNG")
        setRGB(0,0,0)
        grovepi.analogWrite(led,0)
        exit()
    except Exception as e:
        print("Error: {}".format(e))
076fcbb4876bd76887f7d64b533fec66f8366b70 | Add tests for cancellation | openprocurement/tender/esco/tests/cancellation.py | openprocurement/tender/esco/tests/cancellation.py | # -*- coding: utf-8 -*-
import unittest
from openprocurement.api.tests.base import snitch
from openprocurement.tender.belowthreshold.tests.cancellation import (
TenderCancellationResourceTestMixin,
TenderCancellationDocumentResourceTestMixin
)
from openprocurement.tender.belowthreshold.tests.cancellation_blanks import (
# TenderLotsCancellationResourceTest
create_tender_lots_cancellation,
patch_tender_lots_cancellation,
# TenderLotCancellationResourceTest
create_tender_lot_cancellation,
patch_tender_lot_cancellation,
)
from openprocurement.tender.openua.tests.cancellation_blanks import (
# TenderCancellationResourceTest
create_tender_cancellation,
patch_tender_cancellation,
)
from openprocurement.tender.esco.tests.base import (
BaseESCOEUContentWebTest,
test_bids,
test_lots
)
class TenderCancellationResourceTest(BaseESCOEUContentWebTest, TenderCancellationResourceTestMixin):
    """Cancellation create/patch for a lot-less ESCO tender (shared blanks)."""
    initial_auth = ('Basic', ('broker', ''))
    test_create_tender_cancellation = snitch(create_tender_cancellation)
    test_patch_tender_cancellation = snitch(patch_tender_cancellation)
class TenderLotCancellationResourceTest(BaseESCOEUContentWebTest):
    """Cancellation create/patch for an ESCO tender with a single lot."""
    initial_lots = test_lots
    initial_auth = ('Basic', ('broker', ''))
    test_create_tender_cancellation = snitch(create_tender_lot_cancellation)
    test_patch_tender_cancellation = snitch(patch_tender_lot_cancellation)
class TenderLotsCancellationResourceTest(BaseESCOEUContentWebTest):
    """Cancellation create/patch for an ESCO tender with two lots."""
    initial_lots = 2 * test_lots
    initial_auth = ('Basic', ('broker', ''))
    test_create_tender_cancellation = snitch(create_tender_lots_cancellation)
    test_patch_tender_cancellation = snitch(patch_tender_lots_cancellation)
class TenderCancellationDocumentResourceTest(BaseESCOEUContentWebTest, TenderCancellationDocumentResourceTestMixin):
    """Cancellation document tests; setUp pre-creates one cancellation."""
    initial_auth = ('Basic', ('broker', ''))
    def setUp(self):
        super(TenderCancellationDocumentResourceTest, self).setUp()
        # Create a cancellation up front so the mixin's document tests can
        # attach files to self.cancellation_id.
        response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(
            self.tender_id, self.tender_token), {'data': {'reason': 'cancellation reason'}})
        cancellation = response.json['data']
        self.cancellation_id = cancellation['id']
def suite():
    """Assemble the suite run via `python cancellation.py`.

    NOTE(review): only the plain and document cancellation tests are added;
    the lot/lots test cases above are omitted — confirm intentional.
    """
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(TenderCancellationDocumentResourceTest))
    suite.addTest(unittest.makeSuite(TenderCancellationResourceTest))
    return suite
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| Python | 0 | |
77c582939734866eee09b55e9db02437b42c5451 | Create stemming.py | stemming.py | stemming.py | # -*- coding: utf-8 -*-
# Портирован с Java по мотивам http://www.algorithmist.ru/2010/12/porter-stemmer-russian.html
import re
class Porter:
    """Porter stemmer for Russian (port of a Java implementation).

    The class attributes are pre-compiled suffix patterns; stem() removes
    at most one suffix from each stage, in the canonical Porter order:
    perfective gerund -> reflexive -> adjective/participle or verb or noun,
    then the "and"-i, derivational -ost', soft sign / superlative / double-n
    cleanup. Python 2 era code: patterns and inputs are unicode strings.
    """
    PERFECTIVEGROUND = re.compile(u"((ив|ивши|ившись|ыв|ывши|ывшись)|((?<=[ая])(в|вши|вшись)))$")
    REFLEXIVE = re.compile(u"(с[яь])$")
    ADJECTIVE = re.compile(u"(ее|ие|ые|ое|ими|ыми|ей|ий|ый|ой|ем|им|ым|ом|его|ого|ему|ому|их|ых|ую|юю|ая|яя|ою|ею)$")
    PARTICIPLE = re.compile(u"((ивш|ывш|ующ)|((?<=[ая])(ем|нн|вш|ющ|щ)))$")
    VERB = re.compile(u"((ила|ыла|ена|ейте|уйте|ите|или|ыли|ей|уй|ил|ыл|им|ым|ен|ило|ыло|ено|ят|ует|уют|ит|ыт|ены|ить|ыть|ишь|ую|ю)|((?<=[ая])(ла|на|ете|йте|ли|й|л|ем|н|ло|но|ет|ют|ны|ть|ешь|нно)))$")
    NOUN = re.compile(u"(а|ев|ов|ие|ье|е|иями|ями|ами|еи|ии|и|ией|ей|ой|ий|й|иям|ям|ием|ем|ам|ом|о|у|ах|иях|ях|ы|ь|ию|ью|ю|ия|ья|я)$")
    # RVRE splits the word into the prefix up to the first vowel and the
    # region (RV) that all suffix stripping operates on.
    RVRE = re.compile(u"^(.*?[аеиоуыэюя])(.*)$")
    DERIVATIONAL = re.compile(u".*[^аеиоуыэюя]+[аеиоуыэюя].*ость?$")
    DER = re.compile(u"ость?$")
    SUPERLATIVE = re.compile(u"(ейше|ейш)$")
    I = re.compile(u"и$")
    P = re.compile(u"ь$")
    NN = re.compile(u"нн$")
    def stem(word):
        # Normalize: lowercase and fold "ё" into "е".
        word = word.lower()
        word = word.replace(u'ё', u'е')
        m = re.match(Porter.RVRE, word)
        # NOTE(review): for a word with no vowel, re.match returns None and
        # the next line raises AttributeError — confirm callers never pass one.
        if m.groups():
            pre = m.group(1)
            rv = m.group(2)
            # Step 1: perfective gerund; otherwise reflexive, then one of
            # adjective(+participle) / verb / noun.
            temp = Porter.PERFECTIVEGROUND.sub('', rv, 1)
            if temp == rv:
                rv = Porter.REFLEXIVE.sub('', rv, 1)
                temp = Porter.ADJECTIVE.sub('', rv, 1)
                if temp != rv:
                    rv = temp
                    rv = Porter.PARTICIPLE.sub('', rv, 1)
                else:
                    temp = Porter.VERB.sub('', rv, 1)
                    if temp == rv:
                        rv = Porter.NOUN.sub('', rv, 1)
                    else:
                        rv = temp
            else:
                rv = temp
            # Step 2: drop a trailing "и".
            rv = Porter.I.sub('', rv, 1)
            # Step 3: derivational "-ость/-ост".
            if re.match(Porter.DERIVATIONAL, rv):
                rv = Porter.DER.sub('', rv, 1)
            # Step 4: soft sign, or superlative + double-н collapse.
            temp = Porter.P.sub('', rv, 1)
            if temp == rv:
                rv = Porter.SUPERLATIVE.sub('', rv, 1)
                rv = Porter.NN.sub(u'н', rv, 1)
            else:
                rv = temp
            word = pre+rv
        return word
    stem=staticmethod(stem)
if __name__ == '__main__':
    # Parenthesized so this demo line is valid on both Python 2 and Python 3
    # (the old `print X` statement form is a SyntaxError under Python 3).
    print(Porter.stem(u'устойчивость'))
| Python | 0.000001 | |
abe586ac1275901fc9d9cf1bde05b225a9046ab7 | add admin tests | test/test_admin.py | test/test_admin.py | from werkzeug.exceptions import Unauthorized
from flask import url_for
from flask_login import current_user
from .conftest import logged_in, assert_logged_in, assert_not_logged_in, create_user
from app.user import random_string
from app.form.login import ERROR_ACCOUNT_DISABLED
# Fixture credentials: one throwaway account (random password each run)
# shared by the admin tests below.
USERNAME = 'cyberwehr87654321'
PASSWORD = random_string()
EMAIL = '{}@cyber.cyber'.format(USERNAME)
@create_user(username=USERNAME, password=PASSWORD)
@logged_in
def test_delete_user(db, client):
    """Deleting a user must make their credentials unusable for login."""
    resp = client.post(url_for('delete_user', username=USERNAME), follow_redirects=True,
                       data=dict(confirm='confirm'))
    resp = client.post(url_for('logout'), follow_redirects=True)
    assert_not_logged_in(resp)
    # Logging in as the deleted account must be rejected with 401.
    resp = client.post(url_for('login'), follow_redirects=True,
                       data=dict(username=USERNAME, password=PASSWORD))
    assert_not_logged_in(resp, status_code=Unauthorized.code)
@logged_in
def test_create_user(db, client):
    """An admin-created, active user can immediately log in."""
    resp = client.post(url_for('create_user'), follow_redirects=True,
                       data=dict(username=USERNAME, password=PASSWORD,
                                 email=EMAIL, active=True))
    assert resp.status_code == 200
    # Switch from the admin session to the new account.
    resp = client.post(url_for('logout'), follow_redirects=True)
    assert_not_logged_in(resp)
    resp = client.post(url_for('login'), follow_redirects=True,
                       data=dict(username=USERNAME, password=PASSWORD))
    assert_logged_in(resp)
    assert USERNAME == current_user.name
@create_user(username=USERNAME, password=PASSWORD)
@logged_in
def test_edit_user(db, client):
    """Editing a user without re-activating them disables the account.

    The edit form is posted without `active`, so the subsequent login is
    rejected (401) with the ERROR_ACCOUNT_DISABLED message.
    """
    resp = client.post(url_for('edit_user', username=USERNAME), follow_redirects=True,
                       data=dict(username=USERNAME, email=EMAIL, password=PASSWORD))
    assert resp.status_code == 200
    resp = client.post(url_for('logout'), follow_redirects=True)
    assert_not_logged_in(resp)
    resp = client.post(url_for('login'), data={'username': USERNAME, 'password': PASSWORD})
    assert_not_logged_in(resp, status_code=Unauthorized.code)
    assert ERROR_ACCOUNT_DISABLED in resp.data.decode()
| Python | 0 | |
b63e65b1a41f809caf1c2dcd689955df76add20f | Add a plot just of backscatter phase vs. diameter. | test/test_delta.py | test/test_delta.py | import matplotlib.pyplot as plt
import numpy as np
import scattering
import scipy.constants as consts
def plot_csec(scatterer, d, var, name):
    """Plot `var` against diameter in cm, labelling the curve with the
    scatterer's wavelength (also in cm); `name` becomes the y-axis label."""
    wavelength_cm = scatterer.wavelength / consts.centi
    diameters_cm = d / consts.centi
    plt.plot(diameters_cm, var, label='%.1f cm' % (wavelength_cm))
    plt.xlabel('Diameter (cm)')
    plt.ylabel(name)
def plot_csecs(d, scatterers):
    """Plot the backscatter differential phase (delta, degrees) versus
    diameter for each scatterer, clamping the y-axis to [-4, 20]."""
    for scat in scatterers:
        plt.subplot(1, 1, 1)
        phase = -np.angle(-scat.S_bkwd[0, 0].conj() * scat.S_bkwd[1, 1]).squeeze()
        delta_deg = np.rad2deg(np.unwrap(phase))
        plot_csec(scat, d, delta_deg, 'delta')
        plt.gca().set_ylim(-4, 20)
# Drop diameters: 0.01-0.7 cm as a (200, 1) column, converted to metres.
d = np.linspace(0.01, 0.7, 200).reshape(200, 1) * consts.centi
# Radar wavelengths (m) from c / f for S-, C- and X-band frequencies.
sband = 3e8 / 2.8e9
cband = 3e8 / 5.4e9
xband = 3e8 / 9.4e9
temp = 10.0
# One oblate water scatterer per band, all using the T-matrix model.
x_fixed = scattering.scatterer(xband, temp, 'water', diameters=d, shape='oblate')
x_fixed.set_scattering_model('tmatrix')
c_fixed = scattering.scatterer(cband, temp, 'water', diameters=d, shape='oblate')
c_fixed.set_scattering_model('tmatrix')
s_fixed = scattering.scatterer(sband, temp, 'water', diameters=d, shape='oblate')
s_fixed.set_scattering_model('tmatrix')
plot_csecs(d, [x_fixed, c_fixed, s_fixed])
plt.legend(loc = 'upper left')
plt.show()
| Python | 0 | |
e1d8c17746497a46c864f352823cd86b2216781c | Add commit ID milestone helper script (#7100) | docs/_bin/get-milestone-prs.py | docs/_bin/get-milestone-prs.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import requests
import subprocess
import sys
import time
# For every commit between the previous release branch point and the current
# release branch, find the merged PR(s) via the GitHub search API and report
# each PR's milestone (or its absence).
if len(sys.argv) != 5:
    sys.stderr.write('usage: program <github-username> <upstream-remote> <previous-release-branch> <current-release-branch>\n')
    sys.stderr.write("  e.g., program myusername upstream 0.13.0-incubating 0.14.0-incubating")
    sys.stderr.write("  It is also necessary to set a GIT_TOKEN environment variable containing a personal access token.")
    sys.exit(1)

github_username = sys.argv[1]
upstream_remote = sys.argv[2]
previous_branch = sys.argv[3]
release_branch = sys.argv[4]

master_branch = "master"
upstream_master = "{}/{}".format(upstream_remote, master_branch)
upstream_previous = "{}/{}".format(upstream_remote, previous_branch)
upstream_release = "{}/{}".format(upstream_remote, release_branch)

command = "git log {}..{} --oneline | tail -1".format(upstream_master, upstream_previous)

# Find the commit where the previous release branch was cut from master
previous_branch_first_commit = subprocess.check_output(command, shell=True).decode('UTF-8')
# Raw string: "\w" in a plain literal is an invalid escape sequence
# (SyntaxWarning on Python 3.12+).
match_result = re.match(r"(\w+) .*", previous_branch_first_commit)
previous_branch_first_commit = match_result.group(1)

print("Previous branch: {}, first commit: {}".format(upstream_previous, previous_branch_first_commit))

# Find all commits between that commit and the current release branch
command = "git rev-list {}..{}".format(previous_branch_first_commit, upstream_release)
all_release_commits = subprocess.check_output(command, shell=True).decode('UTF-8')

for commit_id in all_release_commits.splitlines():
    try:
        # wait 3 seconds between calls to avoid hitting the rate limit
        time.sleep(3)
        search_url = "https://api.github.com/search/issues?q=type:pr+is:merged+is:closed+repo:apache/incubator-druid+SHA:{}"
        resp = requests.get(search_url.format(commit_id), auth=(github_username, os.environ["GIT_TOKEN"]))
        resp_json = resp.json()
        milestone_found = False
        closed_pr_nums = []
        if (resp_json.get("items") is None):
            # Search failed (rate limit, auth, etc.) — report and move on.
            print("Could not get PRs for commit ID {}, resp: {}".format(commit_id, resp_json))
            continue
        for pr in resp_json["items"]:
            closed_pr_nums.append(pr["number"])
            milestone = pr["milestone"]
            if milestone is not None:
                milestone_found = True
                print("COMMIT: {}, PR#: {}, MILESTONE: {}".format(commit_id, pr["number"], pr["milestone"]["url"]))
        if not milestone_found:
            print("NO MILESTONE FOUND FOR COMMIT: {}, CLOSED PRs: {}".format(commit_id, closed_pr_nums))
    except Exception as e:
        # Best-effort tool: keep scanning the remaining commits.
        print("Got exception for commitID: {} ex: {}".format(commit_id, e))
        continue
55768b5133d8155b16e798a335cc0f46930aab12 | create my own .py for question 5 | totaljfb/Q5.py | totaljfb/Q5.py | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Jason Zhang
#
# Created: 15/11/2017
# Copyright: (c) Jason Zhang 2017
# Licence: <your licence>
#-------------------------------------------------------------------------------
def main():
    # Placeholder from the IDE template — the real work happens in the
    # module-level statements at the bottom of the file.
    pass
# NOTE(review): this guard sits above the class/function definitions; the
# interactive code at the bottom runs at import time regardless of it.
if __name__ == '__main__':
    main()
import re
#create a URL class
class URL:
    """Value object holding the five parsed pieces of a URL.

    Python 2 era code: display_result uses print statements.
    """
    def __init__(self, url_scheme, url_netloc, url_path, url_query_params, url_fragment):
        self.scheme = url_scheme
        self.netloc = url_netloc
        self.path = url_path
        self.query_params = url_query_params
        self.fragment = url_fragment
    def display_result(self):
        # Dump each component on its own line.
        print 'scheme: ' + self.scheme
        print 'netloc: ' + self.netloc
        print 'path: ' + self.path
        print 'query_params: '+ self.query_params
        print 'fragment: '+ self.fragment
# Parse a URL string of the form scheme://netloc/path?query#fragment.
def url_parse(url):
    """Split `url` into its five components and wrap them in a URL object."""
    pattern = re.compile(r'''(
                       \w*) #scheme
                       :\/\/ #://, separator
                       (.*) #netloc
                       (\/.*) #path
                       \? #?, separator
                       (.*) #query_params
                       \# # #, separator
                       (.* #fragment
                       )''',re.VERBOSE)
    match = pattern.search(url)
    #TODO: parse the query_params to get a dictionary
    scheme, netloc, path, query_params, fragment = match.groups()
    return URL(scheme, netloc, path, query_params, fragment)
# Interactive entry point (Python 2: raw_input); runs at import time.
url = raw_input("Enter an url address to parse: ")
test = url_parse(url)
test.display_result()
| Python | 0.000092 | |
3cb42b54fa8ed2cac6e05aa521a3a61a037a35ee | add rest cliant on python | rest/client.py | rest/client.py | # pip install requests
import requests
# Exercise the local REST service: create record id 3, read it back, then
# request a missing id (4). Python 2 print statements; the server must be
# running on 127.0.0.1:8008.
# NOTE(review): json= is given a *string*, so requests sends a JSON-encoded
# string rather than an object — confirm the server expects that.
resp = requests.post("http://127.0.0.1:8008/api/v1/addrecord/3", json='{"id":"name"}')
print resp.status_code
print resp.text
resp = requests.get("http://127.0.0.1:8008/api/v1/getrecord/3")
print resp.status_code
print resp.json()
resp = requests.get("http://127.0.0.1:8008/api/v1/getrecord/4")
print resp.status_code
print resp.json()
d0f6167cb7e95c17997bc42af6cd1766b1ac7864 | add related_name migration | paralapraca/migrations/0005_auto_20171204_1006.py | paralapraca/migrations/0005_auto_20171204_1006.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add related_name='contract' to Contract.classes and Contract.groups."""
    dependencies = [
        ('paralapraca', '0004_contract_classes'),
    ]
    operations = [
        migrations.AlterField(
            model_name='contract',
            name='classes',
            field=models.ManyToManyField(related_name='contract', to='core.Class'),
        ),
        migrations.AlterField(
            model_name='contract',
            name='groups',
            field=models.ManyToManyField(help_text='Groups created to enforce this contract restrictions in several other models', related_name='contract', verbose_name='groups', to='auth.Group', blank=True),
        ),
    ]
| Python | 0.000002 | |
e84d19bdc580f4d392f5b7abdc4eb8eb30919cf5 | add example: negative binomial maximum likelihood via newton's method | examples/negative_binomial_maxlike.py | examples/negative_binomial_maxlike.py | from __future__ import division, print_function
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd.scipy.special import gammaln
from autograd import grad
import scipy.optimize
# The code in this example implements a method for finding a stationary point of
# the negative binomial likelihood via Newton's method, described here:
# https://en.wikipedia.org/wiki/Negative_binomial_distribution#Maximum_likelihood_estimation
def newton(f, x0):
    """Find a root of f starting at x0 with scipy's Newton iteration,
    supplying first and second derivatives via autograd."""
    fprime = grad(f)
    fprime2 = grad(fprime)
    return scipy.optimize.newton(f, x0, fprime=fprime, fprime2=fprime2)
def negbin_loglike(r, p, x):
    """Negative-binomial log likelihood of count(s) x given (r, p);
    this is the objective being maximized during fitting."""
    norm_term = gammaln(r + x) - gammaln(r) - gammaln(x + 1)
    data_term = x * np.log(p) + r * np.log(1 - p)
    return norm_term + data_term
def fit_maxlike(x, r_guess):
    """Maximum-likelihood (r, p) for negative-binomial data x, found by
    Newton's method on the stationarity condition in r."""
    # The MLE only exists for overdispersed data (variance > mean).
    assert np.var(x) > np.mean(x), "Likelihood-maximizing parameters don't exist!"

    def loglike(r, p):
        return np.sum(negbin_loglike(r, p, x))

    def p_of_r(r):
        # p has a closed form given r.
        return np.sum(x) / np.sum(r + x)

    def rprime(r):
        # d/dr of the profile log likelihood.
        return grad(loglike)(r, p_of_r(r))

    r = newton(rprime, r_guess)
    return r, p_of_r(r)
def negbin_sample(r, p, size):
    """Draw negative-binomial variates via the gamma-Poisson mixture:
    Poisson rates drawn from Gamma(r, p/(1-p))."""
    rates = npr.gamma(r, p / (1 - p), size=size)
    return npr.poisson(rates)
if __name__ == "__main__":
# generate data
npr.seed(0)
data = negbin_sample(r=5, p=0.5, size=1000)
# fit likelihood-extremizing parameters
r, p = fit_maxlike(data, r_guess=1)
print('Check that we are at a local stationary point:')
print(grad(lambda rp: np.sum(negbin_loglike(rp[0], rp[1], data)))((r, p)))
print('Fit parameters:')
print('r={r}, p={p}'.format(r=r, p=p))
# plot data and fit
import matplotlib.pyplot as plt
xm = data.max()
plt.figure()
plt.hist(data, bins=np.arange(xm+1)-0.5, normed=True, label='normed data counts')
plt.xlim(0,xm)
plt.plot(np.arange(xm), np.exp(negbin_loglike(r, p, np.arange(xm))), label='maxlike fit')
plt.xlabel('k')
plt.ylabel('p(k)')
plt.legend(loc='best')
plt.show()
| Python | 0.003656 | |
6d96e9d67e50d7806be175577968ec8fed8393d7 | Create libBase.py | test/libBase.py | test/libBase.py | # ./test/testCommon.py
''' There are some assumptions made by this unittest
the directory structure
+ ./
| files -> lib*.py
+----./local/*
| | files -> *.ini
| | files -> *.json
| | files ->*.csv
+----./log/*
| | files -> *.log
+----./test/*
| files -> test*.py
+----./test_input/*
| see ../local
+----./test_output/*
'''
import sys
sys.path.append('../')
import logging as log
import unittest
import libCommon as TEST
class TestFILL_IN_THE_BLANK(unittest.TestCase) :
    """Smoke tests for the libCommon helpers (imported as TEST).

    NOTE(review): FindFiles lacks the `test` prefix so unittest never runs
    it, and testBuidlPath is a typo for testBuildPath — confirm whether
    either is intentional before renaming.
    """
    def setUp(self) : pass
    def testEnviron(self) :
        # Only logs the environment; passes as long as load_environ() runs.
        log.debug(TEST.load_environ())
    def FindFiles(self) :
        log.debug(TEST.find_files('test*.py'))
        log.debug(TEST.find_files('test_input/*'))
        log.debug(TEST.find_files('test_input/'))
    def testBuildArgs(self) :
        # build_arg concatenates its arguments with no separator.
        expected = 'test102020'
        results = TEST.build_arg('test',10,2020)
        log.debug(results)
        self.assertTrue( results == expected)
        expected = "test102020{'something': 10}"
        results = TEST.build_arg('test',10,2020,{'something' : 10})
        log.debug(results)
        self.assertTrue( results == expected)
    def testBuidlPath(self) :
        # build_path joins arguments with '/'.
        expected = 'test/10/2020'
        results = TEST.build_path('test',10,2020)
        log.debug(results)
        self.assertTrue( results == expected)
        expected = "test/10/2020/{'something': 10}"
        results = TEST.build_path('test',10,2020,{'something' : 10})
        log.debug(results)
        self.assertTrue( results == expected)
    def testBuildCommand(self) :
        # build_command joins arguments with spaces.
        expected = 'test 10 2020'
        results = TEST.build_command('test',10,2020)
        log.debug(results)
        self.assertTrue( results == expected)
        expected = "test 10 2020 {'something': 10}"
        results = TEST.build_command('test',10,2020,{'something' : 10})
        log.debug(results)
        self.assertTrue( results == expected)
    def testJson(self) :
        log.debug(TEST.pretty_print(TEST.load_json('test_input/json_test.json')))
    def testConfig(self) :
        log.debug(TEST.pretty_print(TEST.load_config('test_input/conf_test.ini')))
if __name__ == '__main__' :
    # Derive the log file name from the invoked script (e.g. testBase.py ->
    # ../log/testBase.log), truncate it, then run the suite with DEBUG logging.
    log_file = TEST.build_arg(*sys.argv).replace('.py','') + '.log'
    log_file = TEST.build_path('../log',log_file)
    TEST.remove_file(log_file)
    log.basicConfig(filename=log_file, format=TEST.LOG_FORMAT_TEST, level=log.DEBUG)
    unittest.main()
| Python | 0.000001 | |
4172b35bfdd1b4b12592d81b07843ca2c902a732 | solved 45 | 045/eul045.py | 045/eul045.py | #! /usr/bin/python
from __future__ import print_function
from time import time
from math import sqrt
# Project Euler # 45
# Since an integer x is triangular if and only if 8x + 1 is a square
def is_triangle(x):
    """Return True when x is a triangular number.

    Tests whether 8x + 1 is a perfect square using exact integer
    arithmetic: float sqrt(...).is_integer() can misclassify large inputs
    once 8x + 1 exceeds the precision of a double.
    """
    target = 8 * x + 1
    s = int(sqrt(target))
    # Check the float root's integer neighbourhood to absorb rounding error.
    return any((s + d) * (s + d) == target for d in (-1, 0, 1))
def is_pentagonal(x):
    """Return the pentagonal index n when x = n(3n-1)/2, else False.

    x is pentagonal iff 24x + 1 is a perfect square whose root c satisfies
    c = 6n - 1. Uses exact integer arithmetic (the old float
    n.is_integer() test is precision-unsafe for large x); the returned
    index is an int, numerically equal to the float the old code returned.
    """
    if x < 1:
        return False
    target = 24 * x + 1
    s = int(sqrt(target))
    # Check the float root's integer neighbourhood to absorb rounding error.
    for c in (s - 1, s, s + 1):
        if c * c == target and (c + 1) % 6 == 0:
            return (c + 1) // 6
    return False
if __name__ == '__main__':
    start = time()
    # Search hexagonal numbers H(n) = n(2n-1) for the next one that is also
    # triangular and pentagonal; start one higher than H143 (= 40755, the
    # example given in the problem).
    n = 144
    while True:
        Hn = n * (2 * n - 1)
        if is_triangle(Hn) and is_pentagonal(Hn):
            break
        n += 1
    print("TP and H value:", Hn, "in", time() - start, "seconds")
| Python | 0.999983 | |
b209c45fe32ee7b73bddff5419c1931a16da0bbd | Test file request.py | test/request.py | test/request.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2
import threading
def worker():
    # Fire one GET at the local server and read the full body
    # (Python 2 urllib2; no error handling — failures kill the thread).
    urllib2.urlopen('http://localhost:8080').read()
if __name__ == '__main__':
    # Crude load test: fire 1024 concurrent requests
    # (Python 2 code: xrange and a print statement).
    for i in xrange(1024):
        threading.Thread(target=worker).start()
    print 'Partiti...'
| Python | 0.000003 | |
be544817908ba3f9377d24a61047496c3dbf4f7a | Add test | test_rlev_model.py | test_rlev_model.py | import os
import unittest
from click.testing import CliRunner
from rlev_model import cli
class TestCli(unittest.TestCase):
    """End-to-end check of the rlev_model command line interface."""
    def test_cli(self):
        """Running the CLI on the sample input reproduces the golden output."""
        runner = CliRunner()
        sample_filename = os.path.join('data', 'sample-data.txt')
        result = runner.invoke(cli, [sample_filename])
        assert result.exit_code == 0
        # Golden file comparison against data/sample-output.txt.
        output_filename = os.path.join('data', 'sample-output.txt')
        with open(output_filename) as fp:
            expected_output = fp.read()
        assert result.output == expected_output
# Allow running this module directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
| Python | 0.000005 | |
63e28f211d49a5f5d7be011071e5e16595f8b881 | Create SixChannelReader.py | SixChannelReader.py | SixChannelReader.py | """
Interface between python and arduino for 6-channel data logger
Author: James Keaveney
18/05/2015
"""
# Standard library
import csv
import os
import sys
import time
import cPickle as pickle

# Third-party
import serial
import numpy as np
import matplotlib.pyplot as plt
# Framing constants — must match the values in the Arduino sketch.
nchannels = 7 # number of total channels (time axis + ADC channels)
datalen = 200 # numbers in each array that serial.print does in arduino
class SerialDataLogger:
    """
    class for interfacing with the Arduino Data Logger

    The data logger runs on an Arduino DUE; the sketch is "SixChannelLogger.ino"
    and should also be in this directory

    Python 2 era code (print statements, cPickle, raw_input).
    NOTE(review): time.clock() is deprecated/removed in modern Python, and
    save_data() needs `import os` at module level — confirm when porting.
    """
    def __init__(self,recording_time=30,verbose=True):
        # recording_time: seconds to listen on the serial port before timeout.
        self.recording_time = recording_time
        self.verbose = verbose
        # time_axis stays None until get_data() succeeds; save_data() keys on it.
        self.time_axis = None
    def get_data(self):
        """
        Initialise serial port and listen for data until timeout.
        Convert the bytestream into numpy arrays for each channel

        Returns:
            7 numpy arrays (1D) representing time and ADC channels 0-5
        """
        # setup serial port - it's the native USB port so baudrate is irrelevant,
        # the data is always transferred at full USB speed
        ser = serial.Serial(
            port='COM4',
            baudrate=115200,
            parity=serial.PARITY_NONE,
            stopbits=serial.STOPBITS_ONE,
            bytesize=serial.EIGHTBITS,
            timeout=self.recording_time # seconds - should be the same amount of time as the arduino will send data for + 1
        )
        #testing - repeat serial read to confirm data arrays are always predictable
        #n_reps = 2
        #for i in range(n_reps):
        st = time.clock()
        self._handshake(ser)
        data = ser.read(2**27) # this number should be larger than the number of
                               # bytes that will actually be sent
        ser.close() # close serial port
        et = time.clock() - st
        if self.verbose:
            print 'Elapsed time reading data (s): ', et
        #make string into list of strings, comma separated
        data_list = data.split(',')
        # remove new line characters (are any present?)
        #data_list = filter(lambda a: a != '\n', data_list)
        # make list of strings into 1D numpy array of floats (ignore last point as it's an empty string)
        data_array = np.array([float(i) for i in data_list[:-1]])
        if self.verbose:
            print 'Length of array:', len(data_array)
        # reshape array into 3D array: (repeats, channel, datalen chunk)
        data_array_3d = data_array.reshape(-1,nchannels,datalen)
        # then separate 1d arrays: concatenate each channel's chunks in order
        self.time_axis = data_array_3d[0][0]
        for i in range(1,len(data_array_3d)):
            self.time_axis = np.append(self.time_axis, data_array_3d[i][0])
        # convert time axis into ms, and zero the axis
        self.time_axis = (self.time_axis - self.time_axis[0])/1e3
        self.channel1 = data_array_3d[0][1]
        for i in range(1,len(data_array_3d)):
            self.channel1 = np.append(self.channel1, data_array_3d[i][1])
        self.channel2 = data_array_3d[0][2]
        for i in range(1,len(data_array_3d)):
            self.channel2 = np.append(self.channel2, data_array_3d[i][2])
        self.channel3 = data_array_3d[0][3]
        for i in range(1,len(data_array_3d)):
            self.channel3 = np.append(self.channel3, data_array_3d[i][3])
        self.channel4 = data_array_3d[0][4]
        for i in range(1,len(data_array_3d)):
            self.channel4 = np.append(self.channel4, data_array_3d[i][4])
        self.channel5 = data_array_3d[0][5]
        for i in range(1,len(data_array_3d)):
            self.channel5 = np.append(self.channel5, data_array_3d[i][5])
        self.channel6 = data_array_3d[0][6]
        for i in range(1,len(data_array_3d)):
            self.channel6 = np.append(self.channel6, data_array_3d[i][6])
        if self.verbose:
            print 'Data acquisition complete.'
        return self.time_axis,self.channel1,self.channel2,self.channel3, \
            self.channel4,self.channel5,self.channel6
    def _handshake(self,serialinst):
        """ Send/receive pair of bytes to synchronize data gathering """
        nbytes = serialinst.write('A') # can write anything here, just a single byte (any ASCII char)
        if self.verbose:
            print 'Wrote bytes to serial port: ', nbytes
        #wait for byte to be received before returning
        st = time.clock()
        byte_back = serialinst.readline()
        et = time.clock()
        if self.verbose:
            print 'Received handshake data from serial port: ',byte_back
            print 'Time between send and receive: ',et-st
    def save_data(self,filename):
        """ Save generated data to pickle file for use later """
        # No-op (with a message) when get_data() has not been run yet.
        if self.time_axis is not None:
            timestamp = time.strftime("-%Y-%m-%d-%H%M")
            full_filename = filename + timestamp + '.pkl'
            #check if the file already exists - either overwrite or append
            if os.path.isfile(full_filename):
                print '\n\n WARNING - FILE ALREADY EXISTS !!!'
                if raw_input('Overwrite (y/n)?') in ['y','Y']:
                    pass
                else:
                    full_filename = full_filename[:-4] + '_new.pkl'
            with open(full_filename,'wb') as fileobj:
                pickle.dump((self.time_axis,self.channel1,self.channel2,self.channel3, \
                    self.channel4,self.channel5,self.channel6), fileobj)
            if self.verbose:
                print 'Output saved'
        else:
            print 'No data to save yet'
    def load_data(self,full_filename):
        """ Load previously generated and pickled data and return it """
        with open(full_filename,'rb') as fileobj:
            self.time_axis,self.channel1,self.channel2,self.channel3, \
                self.channel4,self.channel5,self.channel6 = pickle.load(fileobj)
        return self.time_axis,self.channel1,self.channel2,self.channel3, \
            self.channel4,self.channel5,self.channel6
    def cleanup(self):
        # delete serial port instance?
        pass
def main():
    """ Grab data once and save it to file, with current timestamp """
    # 6 s window; the Arduino must be connected and running SixChannelLogger.ino.
    SR = SerialDataLogger(recording_time=6)
    filename = "TestData"
    t, C1, C2, C3, C4, C5, C6 = SR.get_data()
    SR.save_data(filename)
if __name__ == '__main__':
    main()
| Python | 0 | |
f211fc4b0467c2d12d0ee60caed4c76910684f65 | Create game.py | Source-Code/game.py | Source-Code/game.py | # ******************************************************************************
# Bingo
#
# @author: Elisha Lai
# @desciption: Program that allows a player to play Bingo
# @version: 1.3 12/03/2014
# ******************************************************************************
# negate_mini_bingo_card: (listof Int) (listof Int) Nat -> None
# Conditions:
#     PRE: lst1 is the 9-value card (columns of 1-15, 16-30, 31-45);
#          lst2 holds the called numbers (values between 1 and 45);
#          ind is the position in lst2 to start processing from.
# Purpose: Mutates lst1 so the first occurrence of every called number in
#          lst2[ind:] is negated. Returns None (the card is changed in place).
def negate_mini_bingo_card(lst1, lst2, ind):
    for pos in range(ind, len(lst2)):
        called = lst2[pos]
        if called in lst1:
            lst1[lst1.index(called)] = -called
# is_row_negative: (listof Int) -> Bool
# Conditions:
#     PRE: card is a non-empty list, len(card) = 9, no values are zero.
#          The card is stored column-major: card[0:3] is the first column.
# Purpose: Produces True if any of the three rows (index strides of 3)
#          holds three negated numbers; False otherwise.
def is_row_negative(card):
    for start in (0, 1, 2):
        if card[start] < 0 and card[start + 3] < 0 and card[start + 6] < 0:
            return True
    return False
# is_col_negative: (listof Int) -> Bool
# Conditions:
#     PRE: card is a non-empty list, len(card) = 9, no values are zero.
#          The card is stored column-major: each run of 3 is one column.
# Purpose: Produces True if any column holds three negated numbers;
#          False otherwise.
def is_col_negative(card):
    columns = ((0, 1, 2), (3, 4, 5), (6, 7, 8))
    return any(all(card[i] < 0 for i in col) for col in columns)
# is_diag_negative: (listof Int) -> Bool
# Conditions:
# PRE: card must be a non-empty list.
# len(card) = 9
# No values in card are zero.
# Purpose: Consumes a list of integers, card. Produces True if diagonal of
# card contains three negated numbers. Otherwise, False is produced.
def is_diag_negative(card):
    """Return True if either diagonal of the card has three negated numbers.

    card is a flat list of 9 non-zero ints; the diagonals are indices
    (0,4,8) and (2,4,6).
    """
    # Return the boolean directly instead of `if ...: return True else False`.
    return ((card[0] < 0 and card[4] < 0 and card[8] < 0) or
            (card[2] < 0 and card[4] < 0 and card[6] < 0))
# mini_bingo: (listof Int) (listof Int) -> Bool
# Conditions:
# PRE: mini_bingo_card and numbers_called must be non-empty lists.
# len(mini_bingo_card) = 9
# The first three values in lst1 are between 1 and 15 inclusively.
# The next three values in lst1 are between 16 and 30 inclusively.
# The last three values in lst1 are between 31 and 45 inclusively.
# len(numbers_called) = 5
# The values in numbers_called are between 1 and 45 inclusively.
# Purpose: Consumes two lists of integers, mini_bingo_card and numbers_called.
# Produces True if one or more rows, columns, or diagonals have all
# negative numbers. Otherwise, False is produced.
# Effects: Mutates mini_bingo_card so that all values in mini_bingo_card, which
# are also in numbers_called, are negated. Prints out the mutated
# mini_bingo_card only if one or more rows, columns, or diagonals have
# all negative numbers.
# Examples:
# mini_bingo([5,2,9,17,23,26,33,38,44],[5,10,23,31,44]) will print the
# following screen output:
# -5 17 33
# 2 -23 38
# 9 26 -44
# and True is produced.
# mini_bingo([5,2,9,17,23,26,33,38,44],[1,2,3,4,5]) will have no screen output
# and False is produced.
def mini_bingo(mini_bingo_card, numbers_called):
    """Play one round of mini bingo.

    Mutates mini_bingo_card so every value also present in numbers_called is
    negated, then reports whether any row, column or diagonal is fully
    negated.  If so, the (mutated) card is printed one row per line and True
    is returned; otherwise nothing is printed and False is returned.
    """
    negate_mini_bingo_card(mini_bingo_card, numbers_called, 0)
    won = (is_row_negative(mini_bingo_card) or
           is_col_negative(mini_bingo_card) or
           is_diag_negative(mini_bingo_card))
    if won:
        # The card is stored column-major; print row r as indices r, r+3, r+6.
        # Single-argument print(...) with pre-formatted text produces the same
        # space-separated output on both Python 2 and Python 3, unlike the
        # Python-2-only `print a, b, c` statement used previously.
        for r in range(3):
            print('{0} {1} {2}'.format(mini_bingo_card[r],
                                       mini_bingo_card[r + 3],
                                       mini_bingo_card[r + 6]))
    return won
| Python | 0 | |
da2a4fa9e618b212ddbb2fcbc079fa37970ae596 | Add handler for concurrently logging to a file | tfd/loggingutil.py | tfd/loggingutil.py |
'''
Utilities to assist with logging in python
'''
import logging
import os
class ConcurrentFileHandler(logging.Handler):
    """
    A handler class which writes logging records to a file. Every time it
    writes a record it opens the file, writes to it, flushes the buffer, and
    closes the file. Perhaps this could create problems in a very tight loop.

    This handler is an attempt to overcome concurrent write issues that
    the standard FileHandler has when multiple processes distributed across
    a cluster are all writing to the same log file. Specifically, the records
    can become interleaved/garbled with one another.
    """
    def __init__(self, filename, mode="a"):
        """
        Remember the target file; it is opened per-emit, not here.
        :param filename: path of the log file (stored as an absolute path).
        :param mode: open mode for each write, defaults to 'a' (append).
        """
        logging.Handler.__init__(self)
        # Keep the absolute path, otherwise derived classes which use this
        # may come a cropper when the current directory changes.
        # (Requires `import os` at module level.)
        self.filename = os.path.abspath(filename)
        self.mode = mode
    def _openWriteClose(self, msg):
        """Open the file, write *msg*, flush and close in one short window."""
        f = open(self.filename, self.mode)
        f.write(msg)
        f.flush()  # improves consistency of writes in a concurrent environment
        f.close()
    def emit(self, record):
        """
        Emit a record.

        If a formatter is specified, it is used to format the record.
        The formatted record plus a trailing newline is then written to the
        file via _openWriteClose.
        """
        try:
            msg = self.format(record)
            fs = "%s\n"
            self._openWriteClose(fs % msg)
        except Exception:
            # Catch Exception (not a bare except) so KeyboardInterrupt and
            # SystemExit still propagate; delegate the rest to logging's
            # standard error handling.
            self.handleError(record)
| Python | 0 | |
ba599deb23c75a6dbcbc0de897afedc287c2ea94 | Create 02str_format.py | 02str_format.py | 02str_format.py | age = 38
name = 'Murphy Wan'
print('{0} is {1} yeaers old'.format(name, age))
print('why is {0} playing with that python?'.format(name))
| Python | 0.000019 | |
464f011b2a87d18ea3e8d16339898987cf190a72 | Task2_1 | test_example.py | test_example.py | import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
#from selenium.webdriver.support import expected_conditions as EC
@pytest.fixture
def driver(request):
    """Start a Safari WebDriver session and guarantee it quits after the test."""
    wd = webdriver.Safari()
    # Dump browser capabilities for debugging the session setup.
    print(wd.capabilities)
    # Finalizer runs even if the test fails, so the browser always closes.
    request.addfinalizer(wd.quit)
    return wd
def test_example(driver):
    """Open google.com; the search/assert steps below are currently disabled."""
    driver.get("http://www.google.com/")
    # NOTE(review): this WebDriverWait instance is created but never used with
    # .until(), so it does not actually wait for anything - confirm intent.
    WebDriverWait(driver, 10)
    # driver.find_element_by_name("q").send_keys("webdriver")
    # driver.find_element_by_name("btnG").click()
    # WebDriverWait(driver, 10).until(EC.title_is("webdriver - Поиск в Google"))
# Question for the trainer: the test finishes with "1 test passed" and I can
# see the Safari browser start, but when it finishes the browser window does
# not close and instead shows the home page. Is this expected, or is
# something wrong?
700db5c742be8a893b1c362ae0955a934b88c39b | Add test_learning_journal.py with test_app() for configuring the app for testing | test_journal.py | test_journal.py | # -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
    """Drop the ``entries`` table so the next test run starts from a clean DB."""
    with closing(connect_db()) as db:
        db.cursor().execute("DROP TABLE entries")
        db.commit()
@pytest.fixture(scope='session')
def test_app():
    """configure our app for use in testing"""
    # Point the app at the dedicated test database (TEST_DSN) and enable
    # TESTING mode; session scope means this runs once per test session.
    app.config['DATABASE'] = TEST_DSN
    app.config['TESTING'] = True
59d51e90203a20f9e0b01eda43afc268311009e7 | Comment about JSON | firefox/src/py/extensionconnection.py | firefox/src/py/extensionconnection.py | # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Communication with the firefox extension."""
import logging
import socket
import time
try:
import json
except ImportError: # Python < 2.6
import simplejson as json
# Some old JSON libraries don't have "dumps", make sure we have a good one
if not hasattr(json, 'dumps'):
import simplejson as json
from selenium.remote.command import Command
from selenium.remote.remote_connection import RemoteConnection
_DEFAULT_TIMEOUT = 20
_DEFAULT_PORT = 7055
LOGGER = logging.getLogger("webdriver.ExtensionConnection")
class ExtensionConnection(RemoteConnection):
    """This class maintains a connection to the firefox extension.
    """
    def __init__(self, timeout=_DEFAULT_TIMEOUT):
        """Connect to the extension's HTTP hub on localhost port 7055.

        :param timeout: seconds to wait on extension operations (default 20).
        """
        RemoteConnection.__init__(
            self, "http://localhost:%d/hub" % _DEFAULT_PORT)
        LOGGER.debug("extension connection initiated")
        self.timeout = timeout
    def quit(self, sessionId=None):
        """Send the QUIT command, then poll until the extension port closes."""
        self.execute(Command.QUIT, {'sessionId':sessionId})
        while self.is_connectable():
            logging.info("waiting to quit")
            time.sleep(1)
    def connect(self):
        """Connects to the extension and retrieves the session id."""
        return self.execute(Command.NEW_SESSION, {'desiredCapabilities':{
            'browserName': 'firefox',
            'platform': 'ANY',
            'version': '',
            'javascriptEnabled': True}})
    def connect_and_quit(self):
        """Connects to an running browser and quit immediately."""
        self._request('%s/extensions/firefox/quit' % self._url)
    def is_connectable(self):
        """Return True if the extension port accepts a TCP connection.

        Only probes the socket; does not retrieve any context.
        """
        try:
            socket_ = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            socket_.settimeout(1)
            socket_.connect(("localhost", _DEFAULT_PORT))
            socket_.close()
            return True
        except socket.error:
            return False
class ExtensionConnectionError(Exception):
    """Raised when the Firefox extension reports an internal failure.

    Typically triggered by malformed input or a bug inside webdriver itself.
    """
    pass
| # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Communication with the firefox extension."""
import logging
import socket
import time
try:
import json
except ImportError: # Python < 2.6
import simplejson as json
# Some old JSON libraries don't have "dumps"; fall back to simplejson,
# which does, so we always have a usable serializer.
if not hasattr(json, 'dumps'):
import simplejson as json
from selenium.remote.command import Command
from selenium.remote.remote_connection import RemoteConnection
_DEFAULT_TIMEOUT = 20
_DEFAULT_PORT = 7055
LOGGER = logging.getLogger("webdriver.ExtensionConnection")
class ExtensionConnection(RemoteConnection):
"""This class maintains a connection to the firefox extension.
"""
def __init__(self, timeout=_DEFAULT_TIMEOUT):
RemoteConnection.__init__(
self, "http://localhost:%d/hub" % _DEFAULT_PORT)
LOGGER.debug("extension connection initiated")
self.timeout = timeout
def quit(self, sessionId=None):
self.execute(Command.QUIT, {'sessionId':sessionId})
while self.is_connectable():
logging.info("waiting to quit")
time.sleep(1)
def connect(self):
"""Connects to the extension and retrieves the session id."""
return self.execute(Command.NEW_SESSION, {'desiredCapabilities':{
'browserName': 'firefox',
'platform': 'ANY',
'version': '',
'javascriptEnabled': True}})
def connect_and_quit(self):
"""Connects to an running browser and quit immediately."""
self._request('%s/extensions/firefox/quit' % self._url)
def is_connectable(self):
"""Trys to connect to the extension but do not retrieve context."""
try:
socket_ = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_.settimeout(1)
socket_.connect(("localhost", _DEFAULT_PORT))
socket_.close()
return True
except socket.error:
return False
class ExtensionConnectionError(Exception):
"""An internal error occurred int the extension.
Might be caused by bad input or bugs in webdriver
"""
pass
| Python | 0 |
1f3c1af308be68393ac8f7caab17d04cdd632d2b | Add the get_arguments function in include | survey_creation/include/get_arguments.py | survey_creation/include/get_arguments.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import getopt
"""
Short script to parse
the argments from the command line
"""
def get_arguments(argv):
    """Parse -c/--country and -y/--year options from *argv*.

    Returns (year, country) when both are supplied; otherwise prints usage
    (or the missing-argument message) and exits.
    """
    usage = 'run.py -c <country> -y <year>'
    try:
        opts, _ = getopt.getopt(argv, 'hc:y:', ['country=', 'year='])
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    country = None
    year = None
    for flag, value in opts:
        if flag == '-h':
            print(usage)
            sys.exit()
        if flag in ('-c', '--country'):
            country = value
        elif flag in ('-y', '--year'):
            year = value
    if country and year:
        # folder_path = os.path.join(year, country)
        return year, country
    print('Need a country and a year. Please use the following command:\n' +
          '\trun.py -c <country> -y <year>')
    sys.exit(2)
| Python | 0.000027 | |
0d1816fe41f0e380705fb156fd7b0cd175ad3552 | Add a command to import some initial candidate data | elections/ar_elections_2015/management/commands/ar_elections_2015_import_candidates.py | elections/ar_elections_2015/management/commands/ar_elections_2015_import_candidates.py | # -*- coding: utf-8 -*-
from datetime import date
import dateutil.parser
import json
from os.path import dirname, join
import re
import requests
from slumber.exceptions import HttpClientError
from django.conf import settings
from django.contrib.auth.models import User
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand, CommandError
from candidates.cache import get_post_cached
from candidates.election_specific import MAPIT_DATA, PARTY_DATA, AREA_POST_DATA
from candidates.models import PopItPerson
from candidates.popit import create_popit_api_object
from candidates.utils import strip_accents
from candidates.views.version_data import get_change_metadata
from moderation_queue.models import QueuedImage
UNKNOWN_PARTY_ID = 'unknown'
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
'(KHTML, like Gecko) Ubuntu Chromium/38.0.2125.111 '
'Chrome/38.0.2125.111Safari/537.36'
)
def get_post_data(api, json_election_id, json_election_id_to_name):
    """Map a JSON-file election id to (YNR election data, post data).

    Gubernatorial elections are resolved to a provincial MapIt area; anything
    else is assumed to be the presidential election ('presidente' post).
    """
    json_election_name = json_election_id_to_name[json_election_id]
    # Hard-coded mapping from the JSON file's election names to YNR ids.
    ynr_election_id = {
        u'Pre-candidatos a Presidente':
        'presidentes-argentina-paso-2015',
        u'Pre-candidatos a Gobernador de Buenos Aires':
        'gobernadores-argentina-paso-2015',
        u'Pre-candidatos a Gobernador de Tucumán':
        'gobernadores-argentina-paso-2015',
        u'Pre-candidatos a Gobernador de Entre Ríos':
        'gobernadores-argentina-paso-2015',
        u'Pre-Candidatos a Gobernador de San Juan':
        'gobernadores-argentina-paso-2015',
    }[json_election_name]
    ynr_election_data = settings.ELECTIONS[ynr_election_id]
    # NOTE(review): this mutates the shared settings.ELECTIONS dict entry by
    # adding an 'id' key - confirm that is intended.
    ynr_election_data['id'] = ynr_election_id
    province = None
    m = re.search(r'a Gobernador de (?P<province>.*)', json_election_name)
    if m:
        province = m.group('province')
        # Provincial areas are looked up accent-stripped and upper-cased.
        mapit_areas_by_name = MAPIT_DATA.areas_by_name[('PRV', 1)]
        mapit_area = mapit_areas_by_name[strip_accents(province).upper()]
        post_id = AREA_POST_DATA.get_post_id(
            ynr_election_id, mapit_area['type'], mapit_area['id']
        )
    else:
        # It must be the presidential election:
        post_id = 'presidente'
    post_data = get_post_cached(api, post_id)['result']
    return ynr_election_data, post_data
def enqueue_image(person, user, image_url):
    """Download *image_url* and add it to the image moderation queue for *person*.

    Raises Exception if the download does not return HTTP 200.
    """
    r = requests.get(
        image_url,
        headers={
            'User-Agent': USER_AGENT,
        },
        stream=True
    )
    if not r.status_code == 200:
        message = "HTTP status code {0} when downloading {1}"
        # Call syntax (works on Python 2 and 3) instead of the Python-2-only
        # `raise Exception, message.format(...)` statement.
        raise Exception(message.format(r.status_code, image_url))
    storage = FileSystemStorage()
    # :02d (zero-padded decimal), not :02x - the previous hex format would
    # render October-December as 0a/0b/0c in the path.
    suggested_filename = \
        'queued_image/{d.year}/{d.month:02d}/{d.day:02d}/ci-upload'.format(
            d=date.today()
        )
    storage_filename = storage.save(suggested_filename, r.raw)
    QueuedImage.objects.create(
        why_allowed=QueuedImage.OTHER,
        justification_for_use="Downloaded from {0}".format(image_url),
        decision=QueuedImage.UNDECIDED,
        image=storage_filename,
        popit_person_id=person.id,
        user=user
    )
class Command(BaseCommand):
    # Django management command that seeds candidate data from a bundled
    # JSON fixture into PopIt, queuing photos for moderation.
    args = 'USERNAME-FOR-UPLOAD'
    help = "Import inital candidate data"
    def handle(self, username=None, **options):
        """Load data/candidates.json and create one PopIt person per candidate.

        *username* names the existing Django user credited with the image
        uploads; raises CommandError if missing or unknown.
        """
        if username is None:
            message = "You must supply the name of a user to be associated with the image uploads."
            raise CommandError(message)
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            message = "No user with the username '{0}' could be found"
            raise CommandError(message.format(username))
        api = create_popit_api_object()
        # Fixture lives two directories up from this management command.
        json_filename = join(
            dirname(__file__), '..', '..','data', 'candidates.json'
        )
        with open(json_filename) as f:
            all_data = json.load(f)
        # This map is needed for getting YNR election data from
        # the election ID used in the JSON file.
        json_election_id_to_name = {
            e['pk']: e['fields']['name']
            for e in all_data if e['model'] == 'elections.election'
        }
        person_dict = {
            e['pk']: e['fields']
            for e in all_data if e['model'] == 'popolo.person'
        }
        candidate_list = [
            dict(person_id=e['pk'], election_id=e['fields']['election'])
            for e in all_data if e['model'] == 'elections.candidate'
        ]
        for candidate in candidate_list:
            vi_person_id = candidate['person_id']
            person_data = person_dict[vi_person_id]
            election_data, post_data = get_post_data(
                api, candidate['election_id'], json_election_id_to_name
            )
            # Dates in the fixture are day-first (e.g. 31/12/1970).
            birth_date = None
            if person_data['birth_date']:
                birth_date = str(dateutil.parser.parse(
                    person_data['birth_date'], dayfirst=True
                ).date())
            name = person_data['name']
            gender = person_data['gender']
            image_url = person_data['image']
            # For the moment, assume the person doesn't exist:
            person = PopItPerson()
            person.name = name
            person.gender = gender
            if birth_date:
                person.birth_date = str(birth_date)
            standing_in_election = {
                'post_id': post_data['id'],
                'name': AREA_POST_DATA.shorten_post_label(
                    election_data['id'],
                    post_data['label'],
                ),
            }
            if 'area' in post_data:
                standing_in_election['mapit_url'] = post_data['area']['identifier']
            person.standing_in = {
                election_data['id']: standing_in_election
            }
            # Party is unknown at import time; a placeholder party is used.
            person.party_memberships = {
                election_data['id']: {
                    'id': UNKNOWN_PARTY_ID,
                    'name': PARTY_DATA.party_id_to_name[UNKNOWN_PARTY_ID],
                }
            }
            change_metadata = get_change_metadata(
                None,
                'Imported candidate from JSON',
            )
            person.record_version(change_metadata)
            try:
                person.save_to_popit(api)
                if image_url:
                    enqueue_image(person, user, image_url)
            except HttpClientError as hce:
                # NOTE(review): Python-2-only print statement below.
                print "Got an HttpClientError:", hce.content
                raise
| Python | 0.000002 | |
4276e1b991ea923a2a3bdd227bb3d98ced1fd4e2 | add alias function | thefuck/main.py | thefuck/main.py | from imp import load_source
from pathlib import Path
from os.path import expanduser
from subprocess import Popen, PIPE
import os
import sys
from psutil import Process, TimeoutExpired
import colorama
from . import logs, conf, types
def setup_user_dir():
    """Returns user config dir, create it when it doesn't exist."""
    user_dir = Path(expanduser('~/.thefuck'))
    rules_dir = user_dir.joinpath('rules')
    if not rules_dir.is_dir():
        rules_dir.mkdir(parents=True)
    # The settings file is checked/initialized on every startup, not only
    # when the directory is first created.
    conf.initialize_settings_file(user_dir)
    return user_dir
def load_rule(rule):
    """Imports rule module and returns it."""
    # rule.name[:-3] strips the ".py" suffix to get the module name.
    rule_module = load_source(rule.name[:-3], str(rule))
    # Rules missing `enabled_by_default` are treated as enabled.
    return types.Rule(rule.name[:-3], rule_module.match,
                      rule_module.get_new_command,
                      getattr(rule_module, 'enabled_by_default', True))
def get_rules(user_dir, settings):
    """Returns all enabled rules."""
    bundled = Path(__file__).parent \
                            .joinpath('rules') \
                            .glob('*.py')
    user = user_dir.joinpath('rules').glob('*.py')
    # Bundled rules come first (sorted for a stable order), then user rules.
    for rule in sorted(list(bundled)) + list(user):
        if rule.name != '__init__.py':
            loaded_rule = load_rule(rule)
            # settings.rules acts as the set of enabled rules.
            if loaded_rule in settings.rules:
                yield loaded_rule
def wait_output(settings, popen):
    """Returns `True` if we can get output of the command in the
    `wait_command` time.
    Command will be killed if it wasn't finished in the time.
    """
    proc = Process(popen.pid)
    try:
        proc.wait(settings.wait_command)
        return True
    except TimeoutExpired:
        # Kill the whole process tree, children first.
        # NOTE(review): get_children() is the pre-2.0 psutil API (renamed
        # children() later) - confirm which psutil version is pinned.
        for child in proc.get_children(recursive=True):
            child.kill()
        proc.kill()
        return False
def get_command(settings, args):
    """Creates command from `args` and executes it."""
    # args[0] is the program name; only the remainder forms the script.
    if sys.version_info[0] < 3:
        script = ' '.join(arg.decode('utf-8') for arg in args[1:])
    else:
        script = ' '.join(args[1:])
    if not script:
        return
    # LANG=C keeps the child's output in English so rules can match it.
    result = Popen(script, shell=True, stdout=PIPE, stderr=PIPE,
                   env=dict(os.environ, LANG='C'))
    # Returns None when the command did not finish within the timeout.
    if wait_output(settings, result):
        return types.Command(script, result.stdout.read().decode('utf-8'),
                             result.stderr.read().decode('utf-8'))
def get_matched_rule(command, rules, settings):
    """Return the first rule whose match() accepts *command*, or None.

    A rule whose match() raises is logged and skipped.
    """
    for candidate in rules:
        try:
            matched = candidate.match(command, settings)
        except Exception:
            logs.rule_failed(candidate, sys.exc_info(), settings)
        else:
            if matched:
                return candidate
def confirm(new_command, settings):
    """Returns `True` when running of new command confirmed."""
    # Without confirmation required, just show the command and proceed.
    if not settings.require_confirmation:
        logs.show_command(new_command, settings)
        return True
    logs.confirm_command(new_command, settings)
    try:
        # Any keypress confirms; Ctrl+C aborts.
        sys.stdin.read(1)
        return True
    except KeyboardInterrupt:
        logs.failed('Aborted', settings)
        return False
def run_rule(rule, command, settings):
    """Print the rule's fixed command once the user confirms it."""
    fixed_command = rule.get_new_command(command, settings)
    if not confirm(fixed_command, settings):
        return
    print(fixed_command)
def is_second_run(command):
    """Is it the second run of `fuck`?"""
    script = command.script
    return script.startswith('fuck')
def alias():
    # Print the shell alias users add to their rc file: `fc -ln -1` expands
    # to the previous command line, and its thefuck output is re-run via eval.
    print("\nalias fuck='eval $(thefuck $(fc -ln -1))'\n")
def main():
    """Entry point: run the previous command through the first matching rule."""
    colorama.init()
    user_dir = setup_user_dir()
    settings = conf.get_settings(user_dir)
    # sys.argv carries the previous command line (via the shell alias).
    command = get_command(settings, sys.argv)
    if command:
        # Guard against recursively fixing a `fuck` invocation.
        if is_second_run(command):
            logs.failed("Can't fuck twice", settings)
            return
        rules = get_rules(user_dir, settings)
        matched_rule = get_matched_rule(command, rules, settings)
        if matched_rule:
            run_rule(matched_rule, command, settings)
            return
    logs.failed('No fuck given', settings)
| from imp import load_source
from pathlib import Path
from os.path import expanduser
from subprocess import Popen, PIPE
import os
import sys
from psutil import Process, TimeoutExpired
import colorama
from . import logs, conf, types
def setup_user_dir():
"""Returns user config dir, create it when it doesn't exist."""
user_dir = Path(expanduser('~/.thefuck'))
rules_dir = user_dir.joinpath('rules')
if not rules_dir.is_dir():
rules_dir.mkdir(parents=True)
conf.initialize_settings_file(user_dir)
return user_dir
def load_rule(rule):
"""Imports rule module and returns it."""
rule_module = load_source(rule.name[:-3], str(rule))
return types.Rule(rule.name[:-3], rule_module.match,
rule_module.get_new_command,
getattr(rule_module, 'enabled_by_default', True))
def get_rules(user_dir, settings):
"""Returns all enabled rules."""
bundled = Path(__file__).parent \
.joinpath('rules') \
.glob('*.py')
user = user_dir.joinpath('rules').glob('*.py')
for rule in sorted(list(bundled)) + list(user):
if rule.name != '__init__.py':
loaded_rule = load_rule(rule)
if loaded_rule in settings.rules:
yield loaded_rule
def wait_output(settings, popen):
"""Returns `True` if we can get output of the command in the
`wait_command` time.
Command will be killed if it wasn't finished in the time.
"""
proc = Process(popen.pid)
try:
proc.wait(settings.wait_command)
return True
except TimeoutExpired:
for child in proc.get_children(recursive=True):
child.kill()
proc.kill()
return False
def get_command(settings, args):
"""Creates command from `args` and executes it."""
if sys.version_info[0] < 3:
script = ' '.join(arg.decode('utf-8') for arg in args[1:])
else:
script = ' '.join(args[1:])
if not script:
return
result = Popen(script, shell=True, stdout=PIPE, stderr=PIPE,
env=dict(os.environ, LANG='C'))
if wait_output(settings, result):
return types.Command(script, result.stdout.read().decode('utf-8'),
result.stderr.read().decode('utf-8'))
def get_matched_rule(command, rules, settings):
"""Returns first matched rule for command."""
for rule in rules:
try:
if rule.match(command, settings):
return rule
except Exception:
logs.rule_failed(rule, sys.exc_info(), settings)
def confirm(new_command, settings):
"""Returns `True` when running of new command confirmed."""
if not settings.require_confirmation:
logs.show_command(new_command, settings)
return True
logs.confirm_command(new_command, settings)
try:
sys.stdin.read(1)
return True
except KeyboardInterrupt:
logs.failed('Aborted', settings)
return False
def run_rule(rule, command, settings):
"""Runs command from rule for passed command."""
new_command = rule.get_new_command(command, settings)
if confirm(new_command, settings):
print(new_command)
def is_second_run(command):
"""Is it the second run of `fuck`?"""
return command.script.startswith('fuck')
def main():
colorama.init()
user_dir = setup_user_dir()
settings = conf.get_settings(user_dir)
command = get_command(settings, sys.argv)
if command:
if is_second_run(command):
logs.failed("Can't fuck twice", settings)
return
rules = get_rules(user_dir, settings)
matched_rule = get_matched_rule(command, rules, settings)
if matched_rule:
run_rule(matched_rule, command, settings)
return
logs.failed('No fuck given', settings)
| Python | 0.000005 |
fc103544a7fcd8506e4d1612f70ff4b5d3eb6dfe | add command to set prices and commissions of teacher levels | server/app/management/commands/set_level_price.py | server/app/management/commands/set_level_price.py | from django.core.management.base import BaseCommand
from app.models import Region, Grade, Subject, Ability, Level, Price
class Command(BaseCommand):
    # Management command: set per-level teacher prices and commission
    # percentages for one region (help text below is in Chinese).
    help = "设置教师级别的价格和佣金比例\n" \
           "例如: \n" \
           "    python manage.py set_level_price 郑州市 --percentages '20,20,20,20,20,20,20,20,20,20' --prices '2000,3000,4000,5000,6000,7000,8000,9000,10000,11000'"
    def create_parser(self, prog_name, subcommand):
        """Use a raw-text formatter so the multi-line help keeps its layout."""
        from argparse import RawTextHelpFormatter
        parser = super(Command, self).create_parser(prog_name, subcommand)
        parser.formatter_class = RawTextHelpFormatter
        return parser
    def add_arguments(self, parser):
        """Register region_name plus --open/--prices/--percentages options."""
        parser.add_argument(
            'region_name',
            help='地区名称',
        )
        parser.add_argument(
            '--open',
            type=int,
            default=1,
            help='是否设置开发此地区. 0[默认] or 1',
        )
        parser.add_argument(
            '--prices',
            required=True,
            help='价格数字串, 英文逗号分隔\n单位是分',
        )
        parser.add_argument(
            '--percentages',
            required=True,
            help='佣金比例数字串, 引文逗号分隔\n每个数在0-100之间',
        )
    def handle(self, *args, **options):
        """Apply one price/percentage per level to every ability in the region.

        Returns 0 on success, -1/-2 on input-count mismatches.
        """
        # print(args)
        # print(options)
        region_name = options.get('region_name')
        # `x and True or False` is equivalent to bool(x) here.
        is_open = options.get('open') and True or False
        prices = options.get('prices')
        percentages = options.get('percentages')
        price_cfg = [int(p) for p in prices.split(',')]
        commission_percentages = [int(p) for p in percentages.split(',')]
        if len(price_cfg) != len(commission_percentages):
            print("价格和佣金比例个数不同")
            return -1
        levels = list(Level.objects.all())
        if len(levels) != len(price_cfg):
            print("价格和佣金比例个数和现有级别数不同")
            return -2
        region = Region.objects.get(name=region_name)
        if is_open != region.opened:
            region.opened = is_open
            region.save()
        abilities = Ability.objects.all()
        for level in levels:
            # print("  {level_name}".format(level_name=level.name))
            # NOTE(review): assumes level ids are contiguous starting at 1,
            # and index the price/percentage lists directly - confirm.
            i = level.id - 1
            for ability in abilities:
                c = price_cfg[i]
                price, _ = Price.objects.get_or_create(region=region, level=level, ability=ability,
                                                       defaults={'price': c})
                price.price = c
                price.commission_percentage = commission_percentages[i]
                price.save()
        print('设置完毕')
        return 0
| Python | 0 | |
d7945d85dcce968d6430e079662b1ef9fc464c97 | update ukvi org branding spelling | migrations/versions/0047_ukvi_spelling.py | migrations/versions/0047_ukvi_spelling.py | """empty message
Revision ID: 0047_ukvi_spelling
Revises: 0046_organisations_and_branding
Create Date: 2016-08-22 16:06:32.981723
"""
# revision identifiers, used by Alembic.
revision = '0047_ukvi_spelling'
down_revision = '0046_organisations_and_branding'
from alembic import op
def upgrade():
    """Rename the UKVI organisation to use an ampersand ('Visas & Immigration')."""
    op.execute("""
        UPDATE organisation
        SET name = 'UK Visas & Immigration'
        WHERE id = '9d25d02d-2915-4e98-874b-974e123e8536'
    """)
def downgrade():
    """Restore the previous UKVI organisation name ('Visas and Immigration')."""
    op.execute("""
        UPDATE organisation
        SET name = 'UK Visas and Immigration'
        WHERE id = '9d25d02d-2915-4e98-874b-974e123e8536'
    """)
| Python | 0 | |
a2463270e6850b0e7df210c03946bfba449f29d7 | Add a test for simultaneous device access | test/parallel_test.py | test/parallel_test.py | """
mbed CMSIS-DAP debugger
Copyright (c) 2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
from pyOCD.board import MbedBoard
from pyOCD.pyDAPAccess import DAPAccess
import threading
import multiprocessing
def run_in_parallel(function, args_list):
    """Create and run a thread in parallel for each element in args_list

    Wait until all threads finish executing. Throw an exception if an exception
    occurred on any of the threads.
    """
    ok_flags = [False] * len(args_list)

    def _runner(slot, call_args):
        """Mark the slot as successful only if the call returns normally."""
        function(*call_args)
        ok_flags[slot] = True

    workers = [threading.Thread(target=_runner, args=(position, call_args))
               for position, call_args in enumerate(args_list)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    if not all(flag is True for flag in ok_flags):
        raise Exception("Running in thread failed")
def run_in_processes(function, args_list):
    """Create and run a processes in parallel for each element in args_list
    Wait until all processes finish executing. Throw an exception if an
    exception occurred on any of the processes.
    """
    process_list = []
    for args in args_list:
        process = multiprocessing.Process(target=function, args=args)
        process.start()
        process_list.append(process)
    error_during_run = False
    for process in process_list:
        process.join()
        # A non-zero exit code means the child raised or exited abnormally.
        if process.exitcode != 0:
            error_during_run = True
    if error_during_run:
        raise Exception("Running in process failed")
def list_boards(id_list):
    """List all connected DAPLink boards repeatedly
    Assert that they are the same as the id list passed in.
    """
    # 20 iterations to exercise enumeration racing against other workers.
    for _ in range(0, 20):
        device_list = DAPAccess.get_connected_devices()
        found_id_list = [device.get_unique_id() for device in device_list]
        found_id_list.sort()
        assert id_list == found_id_list, "Expected %s, got %s" % \
            (id_list, found_id_list)
def search_and_lock(board_id):
    """Repeatedly lock a board with the given ID"""
    for _ in range(0, 20):
        device = DAPAccess.get_device(board_id)
        device.open()
        device.close()
        # Also exercise the higher-level board API for the same ID.
        with MbedBoard.chooseBoard(board_id=board_id):
            pass
def open_already_opened(board_id):
    """Open a device that is already open to verify it gives an error."""
    device = DAPAccess.get_device(board_id)
    try:
        device.open()
    except DAPAccess.DeviceError:
        # Expected: the device is already open elsewhere.
        pass
    else:
        # Raise explicitly rather than `assert False`, which is silently
        # stripped under `python -O` and would make the check pass.
        raise AssertionError(
            "Opening an already-open device did not raise DeviceError")
def parallel_test():
    """Test that devices can be found and opened in parallel"""
    device_list = DAPAccess.get_connected_devices()
    id_list = [device.get_unique_id() for device in device_list]
    id_list.sort()
    # At least two physical boards are required for the interference checks.
    if len(id_list) < 2:
        print("Need at least 2 boards to run the parallel test")
        exit(-1)
    # Goal of this file is to test that:
    # -The process of listing available boards does not interfere
    #  with other processes enumerating, opening, or using boards
    # -Opening and using a board does not interfere with another process
    #  processes which is enumerating, opening, or using boards as
    #  long as that is not the current board
    print("Listing board from multiple threads at the same time")
    args_list = [(id_list,) for _ in range(5)]
    run_in_parallel(list_boards, args_list)
    print("Listing board from multiple processes at the same time")
    run_in_processes(list_boards, args_list)
    print("Opening same board from multiple threads at the same time")
    device = DAPAccess.get_device(id_list[0])
    device.open()
    open_already_opened(id_list[0])
    args_list = [(id_list[0],) for _ in range(5)]
    run_in_parallel(open_already_opened, args_list)
    device.close()
    print("Opening same board from multiple processes at the same time")
    device = DAPAccess.get_device(id_list[0])
    device.open()
    open_already_opened(id_list[0])
    args_list = [(id_list[0],) for _ in range(5)]
    run_in_processes(open_already_opened, args_list)
    device.close()
    print("Opening different boards from different threads")
    args_list = [(board_id,) for board_id in id_list]
    run_in_parallel(search_and_lock, args_list)
    print("Opening different boards from different processes")
    run_in_processes(search_and_lock, args_list)
    print("Test passed")
if __name__ == "__main__":
parallel_test()
| Python | 0 | |
be3acc4a869c9e45e4d1fdd563571da0d12ae85f | Add modify code Hello World code | HelloWorld.py | HelloWorld.py | print("HelloWorld")
text="HelloWorld_Text"
print(text) | Python | 0.000857 | |
f2864289f02a6e221d6fafbb1885d20aa26417fd | Add default conftest | test/unit/conftest.py | test/unit/conftest.py | # :coding: utf-8
import os
import shutil
import tempfile
import uuid
import pytest
@pytest.fixture()
def unique_name():
    """Return a unique name."""
    suffix = str(uuid.uuid4())
    return "unique-" + suffix
@pytest.fixture()
def temporary_file(request):
    """Return a temporary file path."""
    file_handle, path = tempfile.mkstemp()
    # mkstemp returns an open OS-level handle; close it right away so the
    # test can reopen the path freely.
    os.close(file_handle)
    def cleanup():
        """Remove temporary file."""
        try:
            os.remove(path)
        except OSError:
            # File already removed by the test itself - nothing to do.
            pass
    request.addfinalizer(cleanup)
    return path
@pytest.fixture()
def temporary_directory(request):
    """Return a temporary directory path."""
    path = tempfile.mkdtemp()
    def cleanup():
        """Remove temporary directory."""
        # Recursively delete everything the test created under the directory.
        shutil.rmtree(path)
    request.addfinalizer(cleanup)
    return path
@pytest.fixture()
def code_folder():
    """Return the path of the 'example' folder next to this test module."""
    here = os.path.dirname(__file__)
    return os.path.join(here, "example")
@pytest.fixture()
def docs_folder(temporary_directory):
    """Create a minimal Sphinx 'docs' folder inside *temporary_directory*.

    Writes a bare-bones conf.py that enables the sphinxcontrib-es6 extension
    so Sphinx can run against the folder, and returns the folder path.
    """
    docs_path = os.path.join(temporary_directory, "docs")
    os.makedirs(docs_path)

    # Minimal Sphinx configuration (no trailing newline after the last line,
    # matching the historical fixture contents exactly).
    conf_lines = [
        "# :coding: utf-8",
        "extensions=['sphinxcontrib.es6']",
        "source_suffix = '.rst'",
        "master_doc = 'index'",
        "author = u'Jeremy Retailleau'",
        "exclude_patterns = ['Thumbs.db', '.DS_Store']",
        "js_source='./example'",
    ]
    with open(os.path.join(docs_path, "conf.py"), "w") as conf_file:
        conf_file.write("\n".join(conf_lines))

    return docs_path
| Python | 0.000001 | |
1dba0fb24f98bb09bd0c918439c0457c603e1386 | Create ullman.py | ullman.py | ullman.py | import requests
from multiprocessing import Pool
import time
# Base URL of the "Foundations of Computer Science" chapter PDFs; the
# zero-padded chapter number and `extension` are appended to form each URL.
startURL = 'http://i.stanford.edu/~ullman/focs/ch'
extension = '.pdf'
savePath = '' #enter the path for the pdfs to be stored on your system (default: current directory)
chapters = range(1,15) #chapters 1-14, one download per chapter
def chapterStringManipulate(chapter):
    """Format *chapter* as the zero-padded string the server expects and download it."""
    # Single-digit chapters become '01'..'09'; the rest pass through unchanged.
    padded = '0{}'.format(chapter) if chapter < 10 else '{}'.format(chapter)
    download(padded)
    return None
def download(chapter):
    """Fetch one chapter PDF and save it under savePath.

    chapter: zero-padded chapter number as a string (e.g. '03').
    Raises requests.HTTPError if the server does not return a success status.
    """
    url = '{}{}{}'.format(startURL, chapter, extension)
    r = requests.get(url, stream=True)
    # Fail loudly on HTTP errors instead of silently saving an error page
    # with a .pdf extension (the previous behaviour on 404/500 responses).
    r.raise_for_status()
    path = '{}{}{}'.format(savePath, chapter, extension)
    with open(path, 'wb') as fd:
        # Stream in 2 KiB chunks so large PDFs never sit fully in memory.
        for chunk in r.iter_content(2048):
            fd.write(chunk)
    print('{} downloaded'.format(chapter))
    return None
if __name__ == '__main__':
    # One worker process per chapter so all 14 PDFs download concurrently.
    pool = Pool(processes=len(chapters))
    results = pool.map(chapterStringManipulate, chapters)
| Python | 0.000001 | |
9d3925c4809791d2366bc1d6fd6b04bc8a710c9b | add fmt to repo | toolshed/fmt.py | toolshed/fmt.py | import re
def fmt2header(fmt):
    r"""
    Turn a python format string into a usable header:

    >>> fmt = "{chrom}\t{start:d}\t{end:d}\t{pvalue:.4g}"
    >>> fmt2header(fmt)
    'chrom\tstart\tend\tpvalue'
    >>> fmt.format(chrom='chr1', start=1234, end=3456, pvalue=0.01232432)
    'chr1\t1234\t3456\t0.01232'
    """
    # Raw string fixes the invalid '\:' escape sequence the original pattern
    # relied on (a SyntaxWarning on modern Python).  The pattern strips '{',
    # and an optional ':<spec>' followed by '}', leaving only field names and
    # any literal separators.
    return re.sub(r"{|(?:\:.+?)?}", "", fmt)
if __name__ == "__main__":
    # When run as a script, execute the doctests above and print the summary.
    import doctest
    print(doctest.testmod())
| Python | 0 | |
dd9b683b24cea02c93a6e23a163065c0f26f6a68 | Test manager | tests/test.manager.py | tests/test.manager.py | import opensim
# Path of the 2-DOF, 6-muscle arm model relative to this test script.
model_path = "../osim/models/arm2dof6musc.osim"
model = opensim.Model(model_path)
model.setUseVisualizer(True)
state = model.initSystem()
manager = opensim.Manager(model)
muscleSet = model.getMuscles()
# Integration window length in seconds for each manager step.
stepsize = 0.01
for i in range(10):
    # Drive every muscle at full excitation before integrating this window.
    for j in range(muscleSet.getSize()):
        # muscleSet.get(j).setActivation(state, 1.0)
        muscleSet.get(j).setExcitation(state, 1.0)
    t = state.getTime()
    # The manager integrates from i*stepsize to (i+1)*stepsize, advancing
    # `state` in place; the call order below is required by the OpenSim API.
    manager.setInitialTime(stepsize * i)
    manager.setFinalTime(stepsize * (i + 1))
    manager.integrate(state)
    model.realizeDynamics(state)
    # Print time and the first muscle's activation so the ramp-up is visible.
    print("%f %f" % (t,muscleSet.get(0).getActivation(state)))
| Python | 0.000003 | |
5d18f7c7145bf8d5e7248392d644e521222929b8 | add tests for _extras | tests/test__extras.py | tests/test__extras.py | """Tests for ``_extras`` module."""
import pytest
from rstcheck import _compat, _extras
class TestInstallChecker:
    """Test ``is_installed_with_supported_version``."""
    # The skipif-guarded tests are complementary: which ones run depends on
    # whether sphinx is importable in the current environment.
    @staticmethod
    @pytest.mark.skipif(_extras.SPHINX_INSTALLED, reason="Test for absence of sphinx.")
    def test_false_on_missing_sphinx_package() -> None:
        """Test install-checker returns ``False`` when ``sphinx`` is missing."""
        result = _extras.is_installed_with_supported_version("sphinx")
        assert result is False
    @staticmethod
    @pytest.mark.skipif(not _extras.SPHINX_INSTALLED, reason="Test for presence of sphinx.")
    def test_true_on_installed_sphinx_package() -> None:
        """Test install-checker returns ``True`` when ``sphinx`` is installed with good version."""
        result = _extras.is_installed_with_supported_version("sphinx")
        assert result is True
    @staticmethod
    @pytest.mark.skipif(not _extras.SPHINX_INSTALLED, reason="Test for presence of sphinx.")
    def test_false_on_installed_sphinx_package_too_old(monkeypatch: pytest.MonkeyPatch) -> None:
        """Test install-checker returns ``False`` when ``sphinx`` is installed with bad version."""
        # Force the metadata lookup to report an unsupported (too old) version.
        monkeypatch.setattr(_compat, "metadata", lambda _: {"Version": "0.0"})
        result = _extras.is_installed_with_supported_version("sphinx")
        assert result is False
class TestInstallGuard:
    """Test ``install_guard``."""
    @staticmethod
    @pytest.mark.skipif(_extras.SPHINX_INSTALLED, reason="Test for absence of sphinx.")
    def test_false_on_missing_sphinx_package() -> None:
        """Test install-guard raises exception when ``sphinx`` is missing."""
        with pytest.raises(ModuleNotFoundError):
            _extras.install_guard("sphinx")  # act
    @staticmethod
    @pytest.mark.skipif(not _extras.SPHINX_INSTALLED, reason="Test for presence of sphinx.")
    def test_true_on_installed_sphinx_package() -> None:
        """Test install-guard doesn't raise when ``sphinx`` is installed."""
        _extras.install_guard("sphinx")  # act
| Python | 0.000001 | |
eb1c7d1c2bfaa063c98612d64bbe35dedf217143 | Add initial tests for alerter class | tests/test_alerter.py | tests/test_alerter.py | import unittest
import datetime
import Alerters.alerter
class TestAlerter(unittest.TestCase):
    """Unit tests for the basic configuration handling of Alerter."""

    def test_groups(self):
        """A comma-separated 'groups' option is split into a list."""
        alerter = Alerters.alerter.Alerter({'groups': 'a,b,c'})
        self.assertEqual(['a', 'b', 'c'], alerter.groups)

    def test_times_always(self):
        """times_type 'always' yields an unbounded time window."""
        alerter = Alerters.alerter.Alerter({'times_type': 'always'})
        self.assertEqual(alerter.times_type, 'always')
        self.assertEqual(alerter.time_info, [None, None])

    def test_times_only(self):
        """times_type 'only' parses the bounds into datetime.time objects."""
        options = {
            'times_type': 'only',
            'time_lower': '10:00',
            'time_upper': '11:00',
        }
        alerter = Alerters.alerter.Alerter(options)
        self.assertEqual(alerter.times_type, 'only')
        expected_window = [datetime.time(10, 0), datetime.time(11, 0)]
        self.assertEqual(alerter.time_info, expected_window)
| Python | 0 | |
33658163b909073aae074b5b2cdae40a0e5c44e8 | Add unit tests for asyncio coroutines | tests/test_asyncio.py | tests/test_asyncio.py | from trollius import test_utils
from trollius import From, Return
import trollius
import unittest
try:
import asyncio
except ImportError:
from trollius.test_utils import SkipTest
raise SkipTest('need asyncio')
# "yield from" syntax cannot be used directly, because Python 2 should be able
# to execute this file (to raise SkipTest)
code = '''
@asyncio.coroutine
def asyncio_noop(value):
    yield from []
    return (value,)
@asyncio.coroutine
def asyncio_coroutine(coro, value):
    res = yield from coro
    return res + (value,)
'''
# exec defines asyncio_noop/asyncio_coroutine at module level; the string is
# only compiled here, after the Python-2 SkipTest bail-out above.
exec(code)
@trollius.coroutine
def trollius_noop(value):
    # Trollius spelling of an "empty" coroutine that returns a 1-tuple.
    yield From(None)
    raise Return((value,))
@trollius.coroutine
def trollius_coroutine(coro, value):
    # Await *coro*, then append *value* to its tuple result.
    res = yield trollius.From(coro)
    raise trollius.Return(res + (value,))
class AsyncioTests(test_utils.TestCase):
    def setUp(self):
        """Install one trollius policy/loop shared by both frameworks."""
        policy = trollius.get_event_loop_policy()
        self.loop = policy.new_event_loop()
        self.set_event_loop(self.loop)
        asyncio_policy = asyncio.get_event_loop_policy()
        self.addCleanup(asyncio.set_event_loop_policy, asyncio_policy)
        asyncio.set_event_loop_policy(policy)
    def test_policy(self):
        """asyncio must see the event loop installed through trollius."""
        trollius.set_event_loop(self.loop)
        self.assertIs(asyncio.get_event_loop(), self.loop)
    def test_asyncio(self):
        """A pure asyncio coroutine runs on the trollius loop."""
        coro = asyncio_noop("asyncio")
        res = self.loop.run_until_complete(coro)
        self.assertEqual(res, ("asyncio",))
    def test_asyncio_in_trollius(self):
        """asyncio coroutines can be awaited from a trollius coroutine."""
        coro1 = asyncio_noop(1)
        coro2 = asyncio_coroutine(coro1, 2)
        res = self.loop.run_until_complete(trollius_coroutine(coro2, 3))
        self.assertEqual(res, (1, 2, 3))
    def test_trollius_in_asyncio(self):
        """trollius coroutines can be awaited from an asyncio coroutine."""
        coro1 = trollius_noop(4)
        coro2 = trollius_coroutine(coro1, 5)
        res = self.loop.run_until_complete(asyncio_coroutine(coro2, 6))
        self.assertEqual(res, (4, 5, 6))
if __name__ == '__main__':
    unittest.main()
| Python | 0.000001 | |
72ff3bfcbfb9e4144d43ca03c77f0692cccd0fc2 | add small interface for DHT Adafruit lib (in rpi.py) | gsensors/rpi.py | gsensors/rpi.py | #-*- coding:utf-8 -*-
""" Drivers for common sensors on a rPi
"""
import sys
import gevent
from gsensors import AutoUpdateValue
class DHTTemp(AutoUpdateValue):
    """Temperature source backed by a DHT11/DHT22/AM2302 sensor on a GPIO pin."""
    def __init__(self, pin, stype="2302", name=None):
        # The DHT sensors are slow; poll at most every 10 seconds.
        update_freq = 10 #seconds
        super(DHTTemp, self).__init__(name=name, unit="°C", update_freq=update_freq)
        import Adafruit_DHT
        self.Adafruit_DHT = Adafruit_DHT #XXX: move into a separate module to avoid this messy import here
        # Map the user-facing sensor-type string to the Adafruit constant.
        TYPES = {
            '11': Adafruit_DHT.DHT11,
            '22': Adafruit_DHT.DHT22,
            '2302': Adafruit_DHT.AM2302
        }
        # Unknown stype values are passed through as-is -- TODO confirm that
        # is intended rather than raising on an invalid type.
        self.sensor = TYPES.get(stype, stype) #TODO: check stype
        self.pin = pin
    def update(self):
        # read_retry retries until it gets a reading (per the Adafruit_DHT
        # API); humidity is currently discarded, only temperature is kept.
        humidity, temperature = self.Adafruit_DHT.read_retry(self.sensor, self.pin)
        self.value = temperature
def main():
    """Instantiate the sensor sources, print every value change, block forever."""
    sources = [
        DHTTemp(18, "22"),
    ]

    def report(source):
        """Print '<name>: <value> <unit>' for one updated source."""
        print("%s: %s %s" % (source.name, source.value, source.unit))

    # Register the callback on every source before starting any of them.
    for source in sources:
        source.on_change(report)

    for source in sources:
        source.start()

    # Keep the process alive so the update loops can run.
    gevent.wait()
if __name__ == '__main__':
    # Exit code mirrors main()'s return value (None maps to 0).
    sys.exit(main())
| Python | 0 | |
3bc341036730ab8e9fd5ac61e10556af028813e2 | Add migration -Remove dupes -Add index preventing dupe creation | osf/migrations/0044_basefilenode_uniqueness_index.py | osf/migrations/0044_basefilenode_uniqueness_index.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.db import connection
from django.db import migrations
logger = logging.getLogger(__name__)
def remove_duplicate_filenodes(*args):
    """Merge and remove duplicate BaseFileNodes created by bad data.

    Duplicates are active (non-trashed) rows sharing
    (node_id, name, parent_id, type, _path) where at least one of those
    columns is NULL.  For every duplicate except the last of its group, guids
    are repointed to the next surviving duplicate and the row is deleted.
    """
    from osf.models.files import BaseFileNode
    sql = """
    SELECT id
    FROM (SELECT
            *,
            LEAD(row, 1)
            OVER () AS nextrow
          FROM (SELECT
                  *,
                  ROW_NUMBER()
                  OVER (w) AS row
                FROM (SELECT *
                      FROM osf_basefilenode
                      WHERE (node_id IS NULL OR name IS NULL OR parent_id IS NULL OR type IS NULL OR _path IS NULL) AND
                            type NOT IN ('osf.trashedfilenode', 'osf.trashedfile', 'osf.trashedfolder')) AS null_files
                WINDOW w AS (
                  PARTITION BY node_id, name, parent_id, type, _path
                  ORDER BY id )) AS x) AS y
    WHERE row > 1 OR nextrow > 1;
    """
    visited = []
    with connection.cursor() as cursor:
        cursor.execute(sql)
        dupes = BaseFileNode.objects.filter(id__in=[t[0] for t in cursor.fetchall()])
    logger.info('\nFound {} dupes, merging and removing'.format(dupes.count()))
    for dupe in dupes:
        visited.append(dupe.id)
        force = False
        next_dupe = dupes.exclude(id__in=visited).filter(node_id=dupe.node_id, name=dupe.name, parent_id=dupe.parent_id, type=dupe.type, _path=dupe._path).first()
        if dupe.node_id is None:
            # Bad data, force-delete
            force = True
        if not next_dupe:
            # Last one, don't delete
            continue
        if dupe.versions.count() > 1:
            # logger.warning instead of the deprecated logger.warn alias.
            logger.warning('{} Expected 0 or 1 versions, got {}'.format(dupe.id, dupe.versions.count()))
            # Don't modify versioned files
            continue
        for guid in list(dupe.guids.all()):
            guid.referent = next_dupe
            guid.save()
        if force:
            BaseFileNode.objects.filter(id=dupe.id).delete()
        else:
            dupe.delete()
    with connection.cursor() as cursor:
        logger.info('Validating clean-up success...')
        cursor.execute(sql)
        # BUGFIX: fetchall() yields 1-tuples; extract the id column (as done
        # above) so the id__in lookup actually matches.  Previously the raw
        # tuples were passed in, so validation could never find the dupes.
        dupes = BaseFileNode.objects.filter(id__in=[t[0] for t in cursor.fetchall()])
        if dupes.exists():
            logger.error('Dupes exist after migration, failing\n{}'.format(dupes.values_list('id', flat=True)))
    logger.info('Indexing...')
def noop(*args):
    """No-op used as the reverse operation of the data migration."""
    return None
class Migration(migrations.Migration):
    """Removes duplicate file nodes, then adds a partial uniqueness index that
    prevents new duplicates among active (non-trashed, parentless) nodes."""
    dependencies = [
        ('osf', '0043_set_share_title'),
    ]
    operations = [
        migrations.RunPython(remove_duplicate_filenodes, noop),
        # Partial unique index: only enforced for live, parentless file nodes,
        # so trashed rows may still share (node_id, _path, name, type).
        migrations.RunSQL(
            [
                """
                CREATE UNIQUE INDEX active_file_node_path_name_type_unique_index
                ON public.osf_basefilenode (node_id, _path, name, type)
                WHERE (type NOT IN ('osf.trashedfilenode', 'osf.trashedfile', 'osf.trashedfolder')
                AND parent_id IS NULL);
                """
            ], [
                """
                DROP INDEX IF EXISTS active_file_node_path_name_type_unique_index RESTRICT;
                """
            ]
        )
    ]
| Python | 0 | |
e322daa6d92a9dad8db9b8c9b6085aded90bef39 | add beta release script | scripts/release_beta.py | scripts/release_beta.py |
import argparse
import subprocess
# Command-line interface: both the current and the new version string are
# expected for a release; parsed once at import, read via the global `args`.
parser = argparse.ArgumentParser(description='Release Mixpanel Objective-C SDK')
parser.add_argument('--old', help='old version number', action="store")
parser.add_argument('--new', help='new version number for the release', action="store")
args = parser.parse_args()
def bump_version():
    """Rewrite the version in the release files, then commit and push.

    Touches the podspec and the main Objective-C source, stages both,
    commits with message 'Version <new>' and pushes to the remote.
    """
    # List-form argv (shell=False) so the version string from --new is passed
    # verbatim and cannot be interpreted by a shell.
    for file_name in ['Mixpanel.podspec', 'Mixpanel/Mixpanel.m']:
        replace_version(file_name, args.old, args.new)
        subprocess.call(['git', 'add', file_name])
    subprocess.call(['git', 'commit', '-m', 'Version {}'.format(args.new)])
    subprocess.call(['git', 'push'])
def replace_version(file_name, old_version, new_version):
    """Rewrite *file_name* in place, replacing every occurrence of *old_version*.

    Raises ValueError if the old version string is not present, so a typo in
    --old cannot silently produce a no-op release commit.  (The original used
    a bare assert, which disappears under ``python -O``.)
    """
    with open(file_name) as f:
        file_str = f.read()
    if old_version not in file_str:
        raise ValueError(
            'version {!r} not found in {!r}'.format(old_version, file_name))
    with open(file_name, "w") as f:
        f.write(file_str.replace(old_version, new_version))
def add_tag():
    """Create the annotated tag v<new> and push all tags to origin."""
    # List-form argv (shell=False): the version string is passed verbatim,
    # with no shell quoting/injection concerns.
    subprocess.call(['git', 'tag', '-a', 'v{}'.format(args.new),
                     '-m', 'version {}'.format(args.new)])
    subprocess.call(['git', 'push', 'origin', '--tags'])
def main():
    """Run the full beta release: bump files and push the commit, then tag."""
    bump_version()
    add_tag()
    print("Congratulations, done!")
if __name__ == '__main__':
main() | Python | 0 | |
c783c26c6362cdd0702211552578a09f380e9dac | Add tags module. | src/pu/tags.py | src/pu/tags.py | #
# Copyright (c) 2013 Joshua Hughes <kivhift@gmail.com>
#
import sys
class Tag(object):
    """A minimal XML/HTML tag builder.

    Instances stringify to '<name attrs>content</name>', or to a
    self-closing '<name attrs/>' when there is no content.
    """
    # Tag name used when rendering; subclasses made by factory() override it.
    _name = ''
    def __init__(self, *a, **kw):
        """Positional args become child content; keyword args become attributes."""
        super(Tag, self).__init__()
        self.content = list(a)
        self.attributes = kw
    def __str__(self):
        name = self._name
        content = ''.join(str(c) for c in self.content)
        # .items() instead of the Python-2-only .iteritems(): the original
        # crashed with AttributeError on Python 3 for any tag that had
        # attributes; .items() behaves the same on both versions.
        atts = ''.join(
            ' {}="{}"'.format(k, v) for k, v in self.attributes.items())
        if content:
            return '<{0}{1}>{2}</{0}>'.format(name, atts, content)
        else:
            return '<{0}{1}/>'.format(name, atts)
    def add(self, *content):
        """Append children; return the single child, or the tuple of children."""
        self.content.extend(content)
        if 1 == len(content):
            return content[0]
        else:
            return content
    @staticmethod
    def factory(name):
        """Return (identifier, class) for a new Tag subclass rendering <name>.

        '-' and '.' in *name* map to '_' so the identifier is attribute-safe.
        """
        class NT(Tag): _name = name
        NT.__name__ = name.replace('-', '_').replace('.', '_')
        return NT.__name__, NT
    @staticmethod
    def vivify(tags, into = None):
        """Create a Tag subclass for each name in *tags* and bind it on *into*
        (default: this module's namespace)."""
        if into is None:
            into = sys.modules[__name__]
        for tag in tags:
            setattr(into, *Tag.factory(tag))
| Python | 0 | |
9c6130b5f9337b428f530cfae7036b7be2a9eea4 | test commit | etk2/DefaultDocumentSelector.py | etk2/DefaultDocumentSelector.py | import json
import re
from typing import List

import jsonpath_rw

from document import Document
class DefaultDocumentSelector(DocumentSelector):
    """
    A concrete implementation of DocumentSelector that supports commonly used
    methods for selecting documents.

    Compiled regexes and parsed JSONPaths are cached per instance, since the
    same selectors are typically reused for every document in a stream.

    NOTE(review): ``DocumentSelector`` is referenced but never imported in
    this module -- confirm where the base class lives and import it.
    """
    def __init__(self):
        # Lazily filled caches: pattern/path string -> compiled object.
        self._regex_cache = {}
        self._json_path_cache = {}

    def _compile_regexes(self, patterns):
        """Compile each regex in *patterns*, reusing cached compilations.

        (The original ``map(lambda re: re.compile(), patterns)`` shadowed the
        ``re`` module and never passed a pattern; fixed here.)
        """
        return [self._regex_cache.setdefault(p, re.compile(p)) for p in patterns]

    def _parse_json_paths(self, json_paths):
        """Parse each JSONPath in *json_paths*, reusing cached parses."""
        return [self._json_path_cache.setdefault(p, jsonpath_rw.parse(p))
                for p in json_paths]

    def select_document(self,
                        document: Document,
                        datasets: List[str] = None,
                        url_patterns: List[str] = None,
                        website_patterns: List[str] = None,
                        json_paths: List[str] = None,
                        json_paths_regex: List[str] = None) -> bool:
        """
        Args:
            document: document to test.
            datasets: test that the doc "dataset" contains any of the strings.
            url_patterns: test that the doc "url" matches any regex (regex.search).
            website_patterns: test that the doc "website" matches any regex.
            json_paths: test existence of any of the given JSONPaths in the doc.
            json_paths_regex: test that any value selected by 'json_paths'
                satisfies any of the regexes (regex.search); requires json_paths.

        Returns:
            True if the document satisfies all non-None arguments.  Each
            argument is a list; satisfaction means one element of the list is
            satisfied, i.e. the whole test is an AND of ORs.
        """
        if json_paths_regex is not None and json_paths is None:
            raise ValueError("json_paths_regex requires json_paths to be given")
        # Read the JSON body off the instance (the original mistakenly used
        # the class attribute Document.cdr_document).
        json_doc = document.cdr_document
        if datasets is not None \
                and not self.check_datasets_condition(json_doc, datasets):
            return False
        if url_patterns is not None \
                and not self.check_url_patterns_condition(
                    json_doc, self._compile_regexes(url_patterns)):
            return False
        if website_patterns is not None \
                and not self.check_website_patterns_condition(
                    json_doc, self._compile_regexes(website_patterns)):
            return False
        if json_paths is not None:
            compiled_regexes = (self._compile_regexes(json_paths_regex)
                                if json_paths_regex is not None else None)
            if not self.check_json_path_codition(
                    json_doc, self._parse_json_paths(json_paths), compiled_regexes):
                return False
        return True

    def check_datasets_condition(self, json_doc: dict, datasets: List[str]) -> bool:
        raise NotImplementedError

    def check_url_patterns_condition(self, json_doc: dict,
                                     compiled_url_patterns: List[str]) -> bool:
        raise NotImplementedError

    def check_website_patterns_condition(self, json_doc: dict,
                                         compiled_website_patterns: List[str]) -> bool:
        raise NotImplementedError

    def check_json_path_codition(self, json_doc: dict,
                                 rw_json_paths: List[jsonpath_rw.jsonpath.Child],
                                 compiled_json_paths_regex: List[str]) -> bool:
        # NOTE(review): keeps the original (misspelled) public name 'codition'
        # so existing callers/subclasses are not broken.  The original body was
        # a half-written loop with a bare 'for' (a syntax error); it is left as
        # an explicit stub here.
        raise NotImplementedError
| Python | 0.000001 | |
e5f77ccc2f51fdcaa32c55b56f6b801b3ae2e0e2 | Add triggered oscilloscope example | example/pyaudio_triggerscope.py | example/pyaudio_triggerscope.py | """
Simple demonstration of streaming data from a PyAudio device to a QOscilloscope
viewer.
Both device and viewer nodes are created locally without a manager.
"""
from pyacq.devices.audio_pyaudio import PyAudio
from pyacq.viewers import QTriggeredOscilloscope
import pyqtgraph as pg
# Start Qt application
app = pg.mkQApp()

# Create PyAudio device node
dev = PyAudio()

# Print a list of available input devices (but ultimately we will just use the
# default device).
default_input = dev.default_input_device()
print("\nAvailable devices:")  # fixed misspelled 'Avaliable' in user output
for device in dev.list_device_specs():
    index = device['index']
    star = "*" if index == default_input else " "
    print("  %s %d: %s" % (star, index, device['name']))

# Configure PyAudio device with a single (default) input channel.
dev.configure(nb_channel=1, sample_rate=44100., input_device_index=default_input,
              format='int16', chunksize=1024)
dev.output.configure(protocol='tcp', interface='127.0.0.1', transfertmode='plaindata')
dev.initialize()

# Create a triggered oscilloscope to display data.
viewer = QTriggeredOscilloscope()
viewer.configure(with_user_dialog = True)

# Connect audio stream to oscilloscope
viewer.input.connect(dev.output)
viewer.initialize()
viewer.show()

# Optional display tweaks, kept as examples:
#viewer.params['decimation_method'] = 'min_max'
#viewer.by_channel_params['Signal0', 'gain'] = 0.001

# Trigger on a rising edge crossing 1.0, debounced over 0.1 s of stability.
viewer.trigger.params['threshold'] = 1.
viewer.trigger.params['debounce_mode'] = 'after-stable'
viewer.trigger.params['front'] = '+'
viewer.trigger.params['debounce_time'] = 0.1
# Keep a stack of 3 triggered sweeps, 0.2 s before to 0.5 s after the trigger.
viewer.triggeraccumulator.params['stack_size'] = 3
viewer.triggeraccumulator.params['left_sweep'] = -.2
viewer.triggeraccumulator.params['right_sweep'] = .5

# Start both nodes
dev.start()
viewer.start()

if __name__ == '__main__':
    import sys
    if sys.flags.interactive == 0:
        app.exec_()
| Python | 0.000001 | |
5c5c2e9ae69b6543830975239068d34620205119 | add context logger | logging/logger_reload_with_context.py | logging/logger_reload_with_context.py | import logging
import traceback
from yaml_config import yaml_config
# Configure the logging system from the YAML file before any logger is used.
yaml_config('yaml.conf')
class ContextLogger(object):
    """Wrapper around a stdlib logger that appends the caller's location."""

    def __init__(self, name):
        self.logger = logging.getLogger(name)

    def _context(self):
        """Return ' [loc] <file>:<function>:<line>' for our caller's caller."""
        # extract_stack()[-1] is _context itself, [-2] the logging method
        # below, and [-3] the user code whose location we want to report.
        filename, line, procname, _text = traceback.extract_stack()[-3]
        return ' [loc] {0}:{1}:{2}'.format(filename, procname, line)

    def critical(self, msg):
        self.logger.critical('[msg] {0}{1}'.format(msg, self._context()))

    def error(self, msg):
        self.logger.error('[msg] {0}{1}'.format(msg, self._context()))

    def warning(self, msg):
        self.logger.warning('[msg] {0}{1}'.format(msg, self._context()))

    def info(self, msg):
        self.logger.info('[msg] {0}{1}'.format(msg, self._context()))

    def debug(self, msg):
        self.logger.debug('[msg] {0}{1}'.format(msg, self._context()))
# Module-level logger used by the demo below.
logger = ContextLogger('root') # logging.getLogger('test')
class A(object):
    """Demo class: logs a caught exception together with the call-site location."""
    def test(self):
        try:
            raise Exception('WTF!')
        except Exception as e:
            logger.error(e)
# Demo run: logs the caught exception once at import time.
a = A()
a.test()
| Python | 0.000007 | |
bc7f1363a7da1375b62f70caf441423af2718641 | Create example.py | BMP085/example.py | BMP085/example.py | # Continuously polls the BMP180 Pressure Sensor
import pyb
import BMP085
# creating objects: status LED and the BMP085/BMP180 sensor on I2C bus 2
blue = pyb.LED(4)
bmp180 = BMP085.BMP085(port=2,address=0x77,mode=3,debug=False)
# Poll forever, roughly every 100 ms, toggling the LED on each reading.
while True:
    blue.toggle()
    temperature = bmp180.readTemperature()
    print("%f celsius" % temperature)  # fixed misspelled 'celcius' in output
    pressure = bmp180.readPressure()
    print("%f pascal" % pressure)
    altitude = bmp180.readAltitude()
    print("%f meters" % altitude)
    pyb.delay(100)
| Python | 0.000001 | |
91827cdbc202950d05053cd82147828e02b861c0 | Add migration to backfill flowpathcounts | temba/flows/migrations/0076_auto_20161202_2114.py | temba/flows/migrations/0076_auto_20161202_2114.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.db.models import Func, Value, Count
from temba.utils import chunk_list
from django_redis import get_redis_connection
from django.db import connection
import time
# Redis key recording how far the backfill has progressed (a FlowStep id).
HIGHPOINT_KEY = 'flowpathcount_backfill_highpoint'
# Number of FlowStep ids scanned per backfill iteration.
CHUNK_SIZE = 200000
# Largest 32-bit signed integer: upper bound for the id scan.
MAX_INT = 2147483647
# Redis key holding the id of the last FlowPathCount that was squashed.
LAST_SQUASH_KEY = 'last_flowpathcount_squash'
class DateTrunc(Func):
    """Django expression for the SQL DATE_TRUNC(<unit>, <field>) function."""
    function = 'DATE_TRUNC'
    def __init__(self, trunc_type, field_expression, **extra):
        super(DateTrunc, self).__init__(Value(trunc_type), field_expression, **extra)
def dictfetchall(cursor):
    """Return every remaining row from *cursor* as a column-name -> value dict."""
    column_names = [description[0] for description in cursor.description]
    rows = cursor.fetchall()
    return [dict(zip(column_names, row)) for row in rows]
def squash_counts(FlowPathCount):
    """Collapse multiple FlowPathCount rows per (flow, from, to, period) into one.

    Calls the temba_squash_flowpathcount() SQL function for each distinct
    combination and checkpoints the last squashed id in redis so only newer
    rows are considered next time.  NOTE: Python 2 module (print statements).
    """
    # get the id of the last count we squashed
    r = get_redis_connection()
    last_squash = r.get(LAST_SQUASH_KEY)
    if not last_squash:
        last_squash = 0
    # get the unique ids for all new ones
    start = time.time()
    squash_count = 0
    for count in FlowPathCount.objects.filter(id__gt=last_squash).order_by('flow_id', 'from_uuid', 'to_uuid', 'period') \
            .distinct('flow_id', 'from_uuid', 'to_uuid', 'period'):
        # perform our atomic squash in SQL by calling our squash method
        with connection.cursor() as c:
            c.execute("SELECT temba_squash_flowpathcount(%s, uuid(%s), uuid(%s), %s);",
                      (count.flow_id, count.from_uuid, count.to_uuid, count.period))
        squash_count += 1
    # insert our new top squashed id
    max_id = FlowPathCount.objects.all().order_by('-id').first()
    if max_id:
        r.set(LAST_SQUASH_KEY, max_id.id)
    print "Squashed flowpathcounts for %d combinations in %0.3fs" % (squash_count, time.time() - start)
def do_populate(Contact, FlowRun, FlowStep, FlowPathCount):
    """Backfill FlowPathCount from historical FlowSteps, CHUNK_SIZE ids at a time.

    Progress is checkpointed in redis under HIGHPOINT_KEY so an interrupted
    run resumes where it left off; counts are squashed after every chunk.
    """
    r = get_redis_connection()
    highpoint = r.get(HIGHPOINT_KEY)
    if highpoint is None:
        highpoint = 0
    else:
        highpoint = int(highpoint)
    last_add = None
    print '\nStarting at %d' % highpoint
    while last_add is None or last_id < MAX_INT:
        # NOTE(review): test_contacts is computed but never used and is
        # re-queried on every iteration -- candidate for removal.
        test_contacts = Contact.objects.filter(is_test=True).values_list('id', flat=True)
        counts = []
        last_id = highpoint + CHUNK_SIZE
        # jump to the end if we didnt record any last time
        if last_add == 0:
            last_id = MAX_INT
        query = "SELECT fr.flow_id as \"flow_id\", step_uuid, next_uuid, rule_uuid, count(*), date_trunc('hour', left_on) as \"period\" "
        query += "FROM flows_flowstep fs, flows_flowrun fr, contacts_contact c "
        query += "WHERE fs.run_id = fr.id AND fs.contact_id = c.id AND c.is_test = False "
        query += "AND fs.id > %s AND fs.id <= %s AND fs.next_uuid IS NOT NULL GROUP BY fr.flow_id, fs.step_uuid, fs.next_uuid, fs.rule_uuid, period;"
        with connection.cursor() as cursor:
            cursor.execute(query, [highpoint, last_id])
            results = dictfetchall(cursor)
            for result in results:
                # Rule-split exits are keyed by rule uuid; plain steps by step uuid.
                from_uuid = result.get('rule_uuid')
                if not from_uuid:
                    from_uuid = result.get('step_uuid')
                counts.append(FlowPathCount(flow_id=result.get('flow_id'),
                                            from_uuid=from_uuid,
                                            to_uuid=result.get('next_uuid'),
                                            period=result.get('period'),
                                            count=result.get('count')))
        FlowPathCount.objects.bulk_create(counts)
        last_add = len(counts)
        print 'Added %d counts' % len(counts)
        counts = []
        squash_counts(FlowPathCount)
        highpoint += CHUNK_SIZE
    r.delete(HIGHPOINT_KEY)
def apply_manual():
    """Entry point for running the backfill by hand from a Django shell."""
    from temba.flows.models import FlowRun, FlowStep, FlowPathCount
    from temba.contacts.models import Contact
    do_populate(Contact, FlowRun, FlowStep, FlowPathCount)
def apply_as_migration(apps, schema_editor):
    """RunPython hook: resolve historical models and run the backfill."""
    FlowRun = apps.get_model('flows', 'FlowRun')
    FlowStep = apps.get_model('flows', 'FlowStep')
    FlowPathCount = apps.get_model('flows', 'FlowPathCount')
    Contact = apps.get_model('contacts', 'Contact')
    do_populate(Contact, FlowRun, FlowStep, FlowPathCount)
class Migration(migrations.Migration):
    """Backfills FlowPathCount rows from historical FlowStep data."""

    dependencies = [
        ('flows', '0075_auto_20161201_1536'),
    ]

    operations = [
        # A noop reverse makes the migration reversible: the backfilled
        # counts are derived data, so unapplying can safely leave them alone.
        migrations.RunPython(apply_as_migration, migrations.RunPython.noop)
    ]
| Python | 0 | |
153d36cd0c9eb4229156ac944c2ba64f2f9e72d6 | fix test | msmbuilder/tests/test_featurizer.py | msmbuilder/tests/test_featurizer.py | import numpy as np
from mdtraj import compute_dihedrals, compute_phi
from mdtraj.testing import eq
from msmbuilder.example_datasets import fetch_alanine_dipeptide
from msmbuilder.featurizer import get_atompair_indices, FunctionFeaturizer, \
DihedralFeaturizer, AtomPairsFeaturizer, SuperposeFeaturizer, \
RMSDFeaturizer, VonMisesFeaturizer, Slicer
def test_function_featurizer():
    """FunctionFeaturizer must reproduce DihedralFeaturizer for the phi angle."""
    dataset = fetch_alanine_dipeptide()
    trajectories = dataset["trajectories"]
    trj0 = trajectories[0]
    # use the dihedral to compute phi for ala
    atom_ind = [[4, 6, 8, 14]]
    func = compute_dihedrals
    # test with args
    f = FunctionFeaturizer(func, func_args={"indices": atom_ind})
    res1 = f.transform([trj0])
    # test with a function wrapped in a function, without any args
    def funcception(trj):
        return compute_phi(trj)[1]
    f = FunctionFeaturizer(funcception)
    res2 = f.transform([trj0])
    # known results
    f3 = DihedralFeaturizer(['phi'], sincos=False)
    res3 = f3.transform([trj0])
    # all three routes must agree numerically
    for r in [res2, res3]:
        np.testing.assert_array_almost_equal(res1, r)
def test_that_all_featurizers_run():
    """Smoke test: each featurizer transforms the dataset without raising."""
    # TODO: include all featurizers, perhaps with generator tests
    dataset = fetch_alanine_dipeptide()
    trajectories = dataset["trajectories"]
    trj0 = trajectories[0][0]
    atom_indices, pair_indices = get_atompair_indices(trj0)
    featurizer = AtomPairsFeaturizer(pair_indices)
    X_all = featurizer.transform(trajectories)
    featurizer = SuperposeFeaturizer(np.arange(15), trj0)
    X_all = featurizer.transform(trajectories)
    featurizer = DihedralFeaturizer(["phi", "psi"])
    X_all = featurizer.transform(trajectories)
    featurizer = VonMisesFeaturizer(["phi", "psi"])
    X_all = featurizer.transform(trajectories)
    # Below doesn't work on ALA dipeptide
    # featurizer = msmbuilder.featurizer.ContactFeaturizer()
    # X_all = featurizer.transform(trajectories)
    featurizer = RMSDFeaturizer(trj0)
    X_all = featurizer.transform(trajectories)
def test_von_mises_featurizer():
    """Output width must be n_bins per feature (2 features: phi and psi)."""
    dataset = fetch_alanine_dipeptide()
    trajectories = dataset["trajectories"]
    featurizer = VonMisesFeaturizer(["phi", "psi"], n_bins=18)
    X_all = featurizer.transform(trajectories)
    # Compare against the actual frame count rather than a hard-coded length.
    n_frames = trajectories[0].n_frames
    assert X_all[0].shape == (n_frames, 36), ("unexpected shape returned: (%s, %s)" %
                                              X_all[0].shape)
    featurizer = VonMisesFeaturizer(["phi", "psi"], n_bins=10)
    X_all = featurizer.transform(trajectories)
    assert X_all[0].shape == (n_frames, 20), ("unexpected shape returned: (%s, %s)" %
                                              X_all[0].shape)
def test_slicer():
    """Slicer(index=[0, 1]) and Slicer(first=2) must select the same columns."""
    X = ([np.random.normal(size=(50, 5), loc=np.arange(5))]
         + [np.random.normal(size=(10, 5), loc=np.arange(5))])
    by_index = Slicer(index=[0, 1]).transform(X)
    eq(len(by_index), len(X))
    eq(by_index[0].shape, (50, 2))
    by_first = Slicer(first=2).transform(X)
    eq(len(by_first), len(X))
    eq(by_first[0].shape, (50, 2))
    # Both slicing modes must agree element-wise on every trajectory.
    eq(by_index[0], by_first[0])
    eq(by_index[1], by_first[1])
| import numpy as np
from mdtraj import compute_dihedrals, compute_phi
from mdtraj.testing import eq
from msmbuilder.example_datasets import fetch_alanine_dipeptide
from msmbuilder.featurizer import get_atompair_indices, FunctionFeaturizer, \
DihedralFeaturizer, AtomPairsFeaturizer, SuperposeFeaturizer, \
RMSDFeaturizer, VonMisesFeaturizer, Slicer
def test_function_featurizer():
dataset = fetch_alanine_dipeptide()
trajectories = dataset["trajectories"]
trj0 = trajectories[0]
# use the dihedral to compute phi for ala
atom_ind = [[4, 6, 8, 14]]
func = compute_dihedrals
# test with args
f = FunctionFeaturizer(func, func_args={"indices": atom_ind})
res1 = f.transform([trj0])
# test with function in a fucntion without any args
def funcception(trj):
return compute_phi(trj)[1]
f = FunctionFeaturizer(funcception)
res2 = f.transform([trj0])
# know results
f3 = DihedralFeaturizer(['phi'], sincos=False)
res3 = f3.transform([trj0])
# compare all
for r in [res2, res3]:
np.testing.assert_array_almost_equal(res1, r)
def test_that_all_featurizers_run():
# TODO: include all featurizers, perhaps with generator tests
dataset = fetch_alanine_dipeptide()
trajectories = dataset["trajectories"]
trj0 = trajectories[0][0]
atom_indices, pair_indices = get_atompair_indices(trj0)
featurizer = AtomPairsFeaturizer(pair_indices)
X_all = featurizer.transform(trajectories)
featurizer = SuperposeFeaturizer(np.arange(15), trj0)
X_all = featurizer.transform(trajectories)
featurizer = DihedralFeaturizer(["phi", "psi"])
X_all = featurizer.transform(trajectories)
featurizer = VonMisesFeaturizer(["phi", "psi"])
X_all = featurizer.transform(trajectories)
# Below doesn't work on ALA dipeptide
# featurizer = msmbuilder.featurizer.ContactFeaturizer()
# X_all = featurizer.transform(trajectories)
featurizer = RMSDFeaturizer(trj0)
X_all = featurizer.transform(trajectories)
def test_von_mises_featurizer():
dataset = fetch_alanine_dipeptide()
trajectories = dataset["trajectories"]
featurizer = VonMisesFeaturizer(["phi", "psi"], n_bins=18)
X_all = featurizer.transform(trajectories)
assert X_all[0].shape == (9999, 36), ("unexpected shape returned: (%s, %s)" %
X_all[0].shape)
featurizer = VonMisesFeaturizer(["phi", "psi"], n_bins=10)
X_all = featurizer.transform(trajectories)
assert X_all[0].shape == (9999, 20), ("unexpected shape returned: (%s, %s)" %
X_all[0].shape)
def test_slicer():
X = ([np.random.normal(size=(50, 5), loc=np.arange(5))]
+ [np.random.normal(size=(10, 5), loc=np.arange(5))])
slicer = Slicer(index=[0, 1])
Y = slicer.transform(X)
eq(len(Y), len(X))
eq(Y[0].shape, (50, 2))
slicer = Slicer(first=2)
Y2 = slicer.transform(X)
eq(len(Y2), len(X))
eq(Y2[0].shape, (50, 2))
eq(Y[0], Y2[0])
eq(Y[1], Y2[1])
| Python | 0.000002 |
6c08209ee26210df07b9d80da45d815b595205ae | test for new Wigner function | test_wigner.py | test_wigner.py | from qutip import *
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from pylab import *
# Hilbert-space truncation: number of Fock states kept.
N = 20;
# Normalized superposition of two opposite coherent states (a "cat" state).
psi=(coherent(N,-2-2j)+coherent(N,2+2j)).unit()
#psi = ket2dm(basis(N,0))
# Phase-space grid on which the Wigner function is evaluated.
xvec = linspace(-5.,5.,200)
yvec = xvec
X,Y = meshgrid(xvec, yvec)
W = wigner(psi,xvec,xvec);
# 3-D surface plot of W, written to test.png (no interactive window shown).
fig2 = plt.figure(figsize=(9, 6))
ax = Axes3D(fig2,azim=-107,elev=49)
surf=ax.plot_surface(X, Y, W, rstride=1, cstride=1, cmap=cm.jet, alpha=1.0,linewidth=0.05)
fig2.colorbar(surf, shrink=0.65, aspect=20)
savefig("test.png")
#show()
| Python | 0.000001 | |
1ffdfc3c7ae11c583b2ea4d45b50136996bcf3e3 | Add mock HTTP server to respond to requests from web UI | tests/mocks.py | tests/mocks.py | from http.server import BaseHTTPRequestHandler, HTTPServer
import json
import socket
from threading import Thread
import requests
# https://realpython.com/blog/python/testing-third-party-apis-with-mock-servers/
class MockHTTPServerRequestHandler(BaseHTTPRequestHandler):
    """Handler returning canned JSON responses for OPTIONS/GET/POST.

    Used by UI tests as a stand-in for the real backend; the CORS headers in
    the OPTIONS/POST responses let a browser-hosted web UI call it.
    """
    def do_OPTIONS(self):
        """CORS preflight: 200 with permissive CORS headers, no body."""
        self.send_response(requests.codes.okay)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Credentials', 'true')
        self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
        # BUGFIX: 'authoriziation' was misspelled, so the browser's
        # Authorization header was never allowed through the preflight.
        self.send_header('Access-Control-Allow-Headers', 'dataType, accept, authorization')
        self.end_headers()
    def do_GET(self):
        """Return 200 with a fixed JSON success message."""
        self.send_response(requests.codes.ok)
        self.send_header('Content-Type', 'application/json; charset=utf-8')
        self.end_headers()
        response_content = json.dumps({'Message': 'Success'})
        self.wfile.write(response_content.encode('utf-8'))
        return
    def do_POST(self):
        """Return 201 with a fixed JSON task-id payload and CORS headers."""
        self.send_response(requests.codes.created)
        self.send_header('Content-Type', 'application/json; charset=utf-8')
        self.send_header('Access-Control-Allow-Origin', '*')
        # BUGFIX: header name was 'Access=Control-Allow-Methods' ('=' typo),
        # which is not a valid CORS header and broke cross-origin POSTs.
        self.send_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS, DELETE, PUT')
        self.end_headers()
        response_content = json.dumps({'Message': {'task_ids': [1234]}})
        self.wfile.write(response_content.encode('utf-8'))
        return
def get_free_server_port():
    """Ask the OS for a free TCP port on localhost and return its number."""
    probe = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
    try:
        # Binding to port 0 makes the OS pick an unused ephemeral port.
        probe.bind(('localhost', 0))
        _address, port = probe.getsockname()
    finally:
        probe.close()
    return port
def start_mock_server(port=8080):
    """Serve MockHTTPServerRequestHandler on localhost:*port* in a daemon thread.

    The daemon flag ensures a test run never hangs waiting for the server to
    stop; the server dies with the process.
    """
    mock_server = HTTPServer(('localhost', port), MockHTTPServerRequestHandler)
    mock_server_thread = Thread(target=mock_server.serve_forever)
    # Thread.setDaemon() is deprecated (since Python 3.10); set the attribute.
    mock_server_thread.daemon = True
    mock_server_thread.start()
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.