commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
73bccf55b9d6882fed29ab38e0188d7c62370664 | Create prueba.py | aescoda/TFG | prueba.py | prueba.py | from flask import Flask
from flask import request
import os
import xml.etree.ElementTree as ET
from threading import Thread
app = Flask(__name__)
def send_email(xml):
print "2"
prueba()
print xml
return None
@app.route('/webhook', methods=['POST','GET'])
def webhook():
print "webhook"
xml = "hola"
t = Thread(target=send_email, args=(xml,))
t.start()
print "acabando"
#Jasper resend the notification unless it receives a status 200 confirming the reception
return '',200
app.route('/response', methods=['POST','GET'])
def response():
print xml #Comprobar como comparto la variable.
return "Acabamos de procesar su peticion, en breve recibira un email con los detalles"
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
app.run(debug=True, port=port, host='0.0.0.0', threaded=True)
| from flask import Flask
from flask import request
import xml.etree.ElementTree as ET
from threading import Thread
app = Flask(__name__)
def send_email(xml):
print "2"
prueba()
print xml
return None
@app.route('/webhook', methods=['POST','GET'])
def webhook():
print "webhook"
xml = "hola"
t = Thread(target=send_email, args=(xml,))
t.start()
print "acabando"
#Jasper resend the notification unless it receives a status 200 confirming the reception
return '',200
app.route('/response', methods=['POST','GET'])
def response():
print xml #Comprobar como comparto la variable.
return "Acabamos de procesar su peticion, en breve recibira un email con los detalles"
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
app.run(debug=True, port=port, host='0.0.0.0', threaded=True)
| apache-2.0 | Python |
b9261b8be00d431bdfe4b3090d0d21904ba29620 | Fix ulozto account | vuolter/pyload,vuolter/pyload,vuolter/pyload | module/plugins/accounts/UlozTo.py | module/plugins/accounts/UlozTo.py | # -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.internal.Account import Account
class UlozTo(Account):
__name__ = "UlozTo"
__type__ = "account"
__version__ = "0.22"
__status__ = "testing"
__description__ = """Uloz.to account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
("pulpe", None),
("ondrej", "git@ondrej.it"),]
TRAFFIC_LEFT_PATTERN = r'<span class="user"><i class="fi fi-user"></i> <em>.+</em> \(([^ ]+) ([MGT]+B)\)</span>'
def grab_info(self, user, password, data):
html = self.load("https://www.ulozto.net/")
m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
trafficleft = self.parse_traffic(m.group(1), m.group(2))
premium = True if trafficleft else False
return {'validuntil': -1, 'trafficleft': trafficleft, 'premium': premium}
def signin(self, user, password, data):
login_page = self.load('https://www.ulozto.net/?do=web-login')
action = re.findall('<form action="(.+?)"', login_page)[1].replace('&', '&')
token = re.search('_token_" value="(.+?)"', login_page).group(1)
html = self.load(urlparse.urljoin("https://www.ulozto.net/", action),
post={'_token_' : token,
'_do' : "loginForm-submit",
'login' : u"Submit",
'password': password,
'username': user})
if '<div class="flash error">' in html:
self.fail_login()
| # -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.internal.Account import Account
class UlozTo(Account):
__name__ = "UlozTo"
__type__ = "account"
__version__ = "0.22"
__status__ = "testing"
__description__ = """Uloz.to account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
("pulpe", None),
("ondrej", "git@ondrej.it"),]
TRAFFIC_LEFT_PATTERN = r'<span class="user"><i class="fi fi-user"></i> <em>.+</em> \(([^ ]+) ([MGT]+B)\)</span>'
def grab_info(self, user, password, data):
html = self.load("https://www.ulozto.net/")
m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
print m
trafficleft = self.parse_traffic(m.group(1), m.group(2))
premium = True if trafficleft else False
return {'validuntil': -1, 'trafficleft': trafficleft, 'premium': premium}
def signin(self, user, password, data):
login_page = self.load('https://www.ulozto.net/?do=web-login')
action = re.findall('<form action="(.+?)"', login_page)[1].replace('&', '&')
token = re.search('_token_" value="(.+?)"', login_page).group(1)
html = self.load(urlparse.urljoin("https://www.ulozto.net/", action),
post={'_token_' : token,
'_do' : "loginForm-submit",
'login' : u"Submit",
'password': password,
'username': user})
if '<div class="flash error">' in html:
self.fail_login()
| agpl-3.0 | Python |
7fbeb4c9823b03c00ad73acd53085caab304917c | upgrade will now detect if the installation is from git and update accordingly | purduesigbots/purdueros-cli,purduesigbots/pros-cli | proscli/upgrade.py | proscli/upgrade.py | import click
from proscli.utils import default_cfg
import os
import os.path
import subprocess
import sys
import json
@click.group()
def upgrade_cli():
pass
def get_upgrade_command():
if getattr(sys, 'frozen', False):
cmd = os.path.abspath(os.path.join(sys.executable, '..', '..', 'updater.exe'))
if os.path.exists(cmd):
return [cmd, '/silentall', '-nofreqcheck']
else:
return False
else:
try:
from pip._vendor import pkg_resources
results = [p for p in pkg_resources.working_set if p.project_name == 'pros-cli']
if os.path.exists(os.path.join(results[0].location, '.git')):
click.echo('Development environment detected.')
with open(os.devnull) as devnull:
if subprocess.run('where git', stdout=devnull).returncode == 0:
click.echo('Using git.exe')
return ['git', '-C', results[0].location, 'pull']
else:
click.echo('No suitable Git executable found.')
return False
if len(results) == 0 or not hasattr(results[0], 'location'):
return False
else:
return ['pip', 'install', '-U', '-t', results[0].location, 'pros-cli']
except Exception:
return False
@upgrade_cli.command('upgrade', help='Provides a facility to run upgrade the PROS CLI')
@default_cfg
def upgrade(cfg):
cmd = get_upgrade_command()
if cmd is False:
click.echo('Could not determine installation type.')
sys.exit(1)
return
elif not cfg.machine_output:
try:
for line in execute(cmd):
click.echo(line)
except subprocess.CalledProcessError:
click.echo('An error occurred. Aborting...')
sys.exit(1)
sys.exit()
else:
for piece in cmd:
click.echo(piece)
def execute(cmd):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
for stdout_line in iter(p.stdout.readline, ""):
yield stdout_line
p.stdout.close()
r = p.wait()
if r:
raise subprocess.CalledProcessError(r, cmd)
| import click
from proscli.utils import default_cfg
import os
import os.path
import subprocess
import sys
import json
@click.group()
def upgrade_cli():
pass
def get_upgrade_command():
if getattr(sys, 'frozen', False):
cmd = os.path.abspath(os.path.join(sys.executable, '..', '..', 'updater.exe'))
if os.path.exists(cmd):
return [cmd, '/silentall', '-nofreqcheck']
else:
return False
else:
try:
from pip._vendor import pkg_resources
results = [p for p in pkg_resources.working_set if p.project_name == 'pros-cli']
if os.path.exists(os.path.join(results[0].location, '.git')):
if subprocess.run('where git').returncode == 0:
return ['git', 'pull']
elif subprocess.run('where bash').returncode == 0:
return ['bash', '-c', 'git pull']
else:
return False
if len(results) == 0 or not hasattr(results[0], 'location'):
return False
else:
return ['pip', 'install', '-U', '-t', results[0].location, 'pros-cli']
except Exception:
return False
@upgrade_cli.command('upgrade', help='Provides a facility to run upgrade the PROS CLI')
@default_cfg
def upgrade(cfg):
cmd = get_upgrade_command()
if cmd is False:
click.echo('Could not determine installation type.')
sys.exit(1)
return
elif not cfg.machine_output:
sys.exit(subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).returncode
else:
for piece in cmd:
click.echo(piece)
| bsd-3-clause | Python |
8c93043e4e3c27fd42873766b6cc677cd60818db | Remove now obsolete comment. | StackStorm/st2,StackStorm/st2,tonybaloney/st2,StackStorm/st2,Plexxi/st2,tonybaloney/st2,Plexxi/st2,Plexxi/st2,lakshmi-kannan/st2,peak6/st2,Plexxi/st2,peak6/st2,nzlosh/st2,lakshmi-kannan/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,lakshmi-kannan/st2,nzlosh/st2,peak6/st2,tonybaloney/st2 | st2common/st2common/models/db/pack.py | st2common/st2common/models/db/pack.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mongoengine as me
from st2common.models.db import MongoDBAccess
from st2common.models.db import stormbase
from st2common.constants.types import ResourceType
__all__ = [
'PackDB',
'ConfigSchemaDB',
'ConfigDB'
]
class PackDB(stormbase.StormFoundationDB, stormbase.UIDFieldMixin):
"""
System entity which represents a pack.
"""
RESOURCE_TYPE = ResourceType.PACK
UID_FIELDS = ['ref']
ref = me.StringField(required=True, unique=True)
name = me.StringField(required=True, unique=True)
description = me.StringField(required=True)
keywords = me.ListField(field=me.StringField())
version = me.StringField(required=True)
author = me.StringField(required=True)
email = me.EmailField(required=True)
files = me.ListField(field=me.StringField())
meta = {
'indexes': stormbase.UIDFieldMixin.get_indexes()
}
def __init__(self, *args, **values):
super(PackDB, self).__init__(*args, **values)
self.uid = self.get_uid()
class ConfigSchemaDB(stormbase.StormFoundationDB):
"""
System entity representing a config schema for a particular pack.
"""
pack = me.StringField(
required=True,
unique=True,
help_text='Name of the content pack this schema belongs to.')
attributes = stormbase.EscapedDynamicField(
help_text='The specification for config schema attributes.')
class ConfigDB(stormbase.StormFoundationDB):
"""
System entity representing pack config.
"""
pack = me.StringField(
required=True,
unique=True,
help_text='Name of the content pack this config belongs to.')
values = stormbase.EscapedDynamicField(
help_text='Config values.')
# specialized access objects
pack_access = MongoDBAccess(PackDB)
config_schema_access = MongoDBAccess(ConfigSchemaDB)
config_access = MongoDBAccess(ConfigDB)
MODELS = [PackDB, ConfigSchemaDB, ConfigDB]
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mongoengine as me
from st2common.models.db import MongoDBAccess
from st2common.models.db import stormbase
from st2common.constants.types import ResourceType
__all__ = [
'PackDB',
'ConfigSchemaDB',
'ConfigDB'
]
class PackDB(stormbase.StormFoundationDB, stormbase.UIDFieldMixin):
"""
System entity which represents a pack.
"""
RESOURCE_TYPE = ResourceType.PACK
UID_FIELDS = ['ref']
ref = me.StringField(required=True, unique=True)
name = me.StringField(required=True, unique=True)
description = me.StringField(required=True)
keywords = me.ListField(field=me.StringField())
version = me.StringField(required=True) # TODO: Enforce format
author = me.StringField(required=True)
email = me.EmailField(required=True)
files = me.ListField(field=me.StringField())
meta = {
'indexes': stormbase.UIDFieldMixin.get_indexes()
}
def __init__(self, *args, **values):
super(PackDB, self).__init__(*args, **values)
self.uid = self.get_uid()
class ConfigSchemaDB(stormbase.StormFoundationDB):
"""
System entity representing a config schema for a particular pack.
"""
pack = me.StringField(
required=True,
unique=True,
help_text='Name of the content pack this schema belongs to.')
attributes = stormbase.EscapedDynamicField(
help_text='The specification for config schema attributes.')
class ConfigDB(stormbase.StormFoundationDB):
"""
System entity representing pack config.
"""
pack = me.StringField(
required=True,
unique=True,
help_text='Name of the content pack this config belongs to.')
values = stormbase.EscapedDynamicField(
help_text='Config values.')
# specialized access objects
pack_access = MongoDBAccess(PackDB)
config_schema_access = MongoDBAccess(ConfigSchemaDB)
config_access = MongoDBAccess(ConfigDB)
MODELS = [PackDB, ConfigSchemaDB, ConfigDB]
| apache-2.0 | Python |
c28de15fd8cade476fa8d7af904826dcea3c0f3e | Add Python note on simple unit test | erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes | python.py | python.py | # Python Notes
# Version 2.7
# for loop
for i in range(10):
print i
# check list elements of matching string
randList = ["a", "ab", "bc", "de", "abc"]
toFind = "a"
print [x for x in randList if toFind in x]
# read file
with open("filename.txt", "r") as fh:
data = fh.readline() # read line by line
# data = fh.read() # read entire file
# reload local package if modified
reload(package)
# check if file/directory exists
import os.path
os.path.isfile(FILENAME) # tests specifically files
os.path.exists(ANYTHING) # tests files and directories
# create generator with yield command
def createGenerator():
mylist = range(3)
for i in mylist:
yield i * i
# clone instead of point to a set object
setA = set([1, 2, 3, 4])
setB = set(setA)
# unit testing with unittest
def fun(x):
return x + 1
class TestAddingMethod(unittest.TestCase):
def test_three(self):
self.assertEqual(fun(3), 4)
| # Python Notes
# Version 2.7
# for loop
for i in range(10):
print i
# check list elements of matching string
randList = ["a", "ab", "bc", "de", "abc"]
toFind = "a"
print [x for x in randList if toFind in x]
# read file
with open("filename.txt", "r") as fh:
data = fh.readline() # read line by line
# data = fh.read() # read entire file
# reload local package if modified
reload(package)
# check if file/directory exists
import os.path
os.path.isfile(FILENAME) # tests specifically files
os.path.exists(ANYTHING) # tests files and directories
# create generator with yield command
def createGenerator():
mylist = range(3)
for i in mylist:
yield i * i
# clone instead of point to a set object
setA = set([1, 2, 3, 4])
setB = set(setA)
| cc0-1.0 | Python |
80cdc54dbe41c243c4620472aa8ba5c6ece40324 | Add target_table attribute to DataRow | pantheon-systems/etl-framework | etl_framework/DataTable.py | etl_framework/DataTable.py | class DataRow(dict):
"""object for holding row of data"""
def __init__(self, *args, **kwargs):
"""creates instance of DataRow"""
super(DataRow, self).__init__(*args, **kwargs)
self.target_table = None
def row_values(self, field_names, default_value=None):
"""returns row value of specified field_names"""
return tuple(self.get(field_name, default_value) for field_name in field_names)
def set_target_table(self, target_table):
"""sets target table attribute"""
self.target_table = target_table
def get_target_table(self):
"""returns target table attribute"""
return self.target_table
class DataTable(object):
"""object for holding data"""
def __init__(self, data, keys=None):
"""instantiates Table object with rows(which should be a list of dictionaries)"""
self.rows = list(data)
#set keys as _keys of first row by default
if keys:
self._keys = keys
else:
self._keys = self.rows[0].keys()
def keys(self):
"""returns keys of Table"""
return self._keys
def append_row(self, row):
"""adds another row to table"""
self.rows.append(row)
def iterrows(self, field_names, default_value=None):
"""generator that yields specified fields for each row"""
for row in self.rows:
yield tuple(row.get(field_name, default_value) for field_name in field_names)
| class DataRow(dict):
"""object for holding row of data"""
def row_values(self, field_names, default_value=None):
"""returns row value of specified field_names"""
return tuple(self.get(field_name, default_value) for field_name in field_names)
class DataTable(object):
"""object for holding data"""
def __init__(self, data, keys=None):
"""instantiates Table object with rows(which should be a list of dictionaries)"""
self.rows = list(data)
#set keys as _keys of first row by default
if keys:
self._keys = keys
else:
self._keys = self.rows[0].keys()
def keys(self):
"""returns keys of Table"""
return self._keys
def append_row(self, row):
"""adds another row to table"""
self.rows.append(row)
def iterrows(self, field_names, default_value=None):
"""generator that yields specified fields for each row"""
for row in self.rows:
yield tuple(row.get(field_name, default_value) for field_name in field_names)
| mit | Python |
9d1d8ed852b329d9f9465218b516840f308c9340 | Remove errant comma in capabilities policies | mahak/cinder,openstack/cinder,phenoxim/cinder,mahak/cinder,j-griffith/cinder,Datera/cinder,Datera/cinder,openstack/cinder,j-griffith/cinder,phenoxim/cinder | cinder/policies/capabilities.py | cinder/policies/capabilities.py | # Copyright (c) 2017 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from cinder.policies import base
CAPABILITIES_POLICY = "volume_extension:capabilities"
capabilities_policies = [
policy.DocumentedRuleDefault(
name=CAPABILITIES_POLICY,
check_str=base.RULE_ADMIN_API,
description="Show backend capabilities.",
operations=[
{
'method': 'GET',
'path': '/capabilities/{host_name}'
}
])
]
def list_rules():
return capabilities_policies
| # Copyright (c) 2017 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from cinder.policies import base
CAPABILITIES_POLICY = "volume_extension:capabilities",
capabilities_policies = [
policy.DocumentedRuleDefault(
name=CAPABILITIES_POLICY,
check_str=base.RULE_ADMIN_API,
description="Show backend capabilities.",
operations=[
{
'method': 'GET',
'path': '/capabilities/{host_name}'
}
])
]
def list_rules():
return capabilities_policies
| apache-2.0 | Python |
782e0305c1a385775eda129f8d526e4a58d78b7b | Add new Ozwillo footer link | ozwillo/ckanext-ozwillo-theme,ozwillo/ckanext-ozwillo-theme,ozwillo/ckanext-ozwillo-theme,ozwillo/ckanext-ozwillo-theme | ckanext/ozwillo_theme/plugin.py | ckanext/ozwillo_theme/plugin.py | import requests
import xml.etree.ElementTree as ET
from slugify import slugify
from pylons import config as pconfig
import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
from ckan.lib.app_globals import set_app_global
from ckan.lib.plugins import DefaultTranslation
def footer_links():
url = 'https://www.ozwillo.com/footer.xml'
langs = {}
response = requests.get(url)
menuset = ET.fromstring(response.text.encode('utf-8'))
items = ('Association', 'Governance', 'Community', 'Team',
'Data', 'Portal', 'Projects',
'Genesis', 'Contributions', 'Developers',
'News', 'Contact', 'Legal Notices', 'Terms')
for menu in menuset.findall('menu'):
locale = menu.find('locale').text
c = 0
langs[locale] = {}
for item in menu.findall('item'):
if 'href' in item.attrib:
langs[locale][slugify(items[c])] = item.get('href')
c += 1
return langs
class OzwilloThemePlugin(plugins.SingletonPlugin, DefaultTranslation):
plugins.implements(plugins.ITranslation)
plugins.implements(plugins.IConfigurer)
def update_config(self, config_):
set_app_global('ckan.ozwillo_url',
pconfig.get('%s.ozwillo_url' % __name__))
set_app_global('ckan.ozwillo_portal_url',
pconfig.get('%s.ozwillo_portal_url' % __name__))
set_app_global('ckan.ozwillo_ckan_app_id',
pconfig.get('%s.ozwillo_ckan_app_id' % __name__))
set_app_global('ckan.localized_links', footer_links())
toolkit.add_template_directory(config_, 'templates')
toolkit.add_public_directory(config_, 'public')
toolkit.add_resource('fanstatic', 'theme')
| import requests
import xml.etree.ElementTree as ET
from slugify import slugify
from pylons import config as pconfig
import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
from ckan.lib.app_globals import set_app_global
from ckan.lib.plugins import DefaultTranslation
def footer_links():
url = 'https://www.ozwillo.com/footer.xml'
langs = {}
response = requests.get(url)
menuset = ET.fromstring(response.text.encode('utf-8'))
items = ('Association', 'Governance', 'Community', 'Team',
'Data', 'Portal', 'Projects',
'Genesis', 'Contributions', 'Developers',
'News', 'Contact', 'Legal Notices')
for menu in menuset.findall('menu'):
locale = menu.find('locale').text
c = 0
langs[locale] = {}
for item in menu.findall('item'):
if 'href' in item.attrib:
langs[locale][slugify(items[c])] = item.get('href')
c += 1
return langs
class OzwilloThemePlugin(plugins.SingletonPlugin, DefaultTranslation):
    """Theme plugin wiring Ozwillo configuration and localized footer
    links into CKAN's application globals.
    """

    plugins.implements(plugins.ITranslation)
    plugins.implements(plugins.IConfigurer)

    # IConfigurer
    def update_config(self, config_):
        """Copy Ozwillo settings into app globals and register theme assets."""
        # Each plugin-scoped setting becomes a ``ckan.``-prefixed global.
        for setting in ('ozwillo_url', 'ozwillo_portal_url',
                        'ozwillo_ckan_app_id'):
            set_app_global('ckan.' + setting,
                           pconfig.get('%s.%s' % (__name__, setting)))
        # Footer links are resolved once when the config is loaded.
        set_app_global('ckan.localized_links', footer_links())
        toolkit.add_template_directory(config_, 'templates')
        toolkit.add_public_directory(config_, 'public')
        toolkit.add_resource('fanstatic', 'theme')
| agpl-3.0 | Python |
10116f3166d7754fadebda12c23235001447fce9 | Add pls to __init__ | treycausey/scikit-learn,jm-begon/scikit-learn,Barmaley-exe/scikit-learn,sonnyhu/scikit-learn,YinongLong/scikit-learn,dsullivan7/scikit-learn,hsiaoyi0504/scikit-learn,hainm/scikit-learn,ycaihua/scikit-learn,fzalkow/scikit-learn,466152112/scikit-learn,mattgiguere/scikit-learn,tmhm/scikit-learn,rvraghav93/scikit-learn,0asa/scikit-learn,MartinSavc/scikit-learn,beepee14/scikit-learn,AnasGhrab/scikit-learn,manhhomienbienthuy/scikit-learn,JosmanPS/scikit-learn,giorgiop/scikit-learn,Windy-Ground/scikit-learn,liangz0707/scikit-learn,Windy-Ground/scikit-learn,yonglehou/scikit-learn,IndraVikas/scikit-learn,shangwuhencc/scikit-learn,xiaoxiamii/scikit-learn,victorbergelin/scikit-learn,AlexRobson/scikit-learn,moutai/scikit-learn,quheng/scikit-learn,wazeerzulfikar/scikit-learn,ashhher3/scikit-learn,hsuantien/scikit-learn,RPGOne/scikit-learn,mxjl620/scikit-learn,sonnyhu/scikit-learn,anirudhjayaraman/scikit-learn,trungnt13/scikit-learn,henrykironde/scikit-learn,Sentient07/scikit-learn,scikit-learn/scikit-learn,joernhees/scikit-learn,ominux/scikit-learn,zuku1985/scikit-learn,MatthieuBizien/scikit-learn,vigilv/scikit-learn,pv/scikit-learn,frank-tancf/scikit-learn,etkirsch/scikit-learn,LiaoPan/scikit-learn,aflaxman/scikit-learn,mjgrav2001/scikit-learn,DonBeo/scikit-learn,cauchycui/scikit-learn,kashif/scikit-learn,henridwyer/scikit-learn,PatrickOReilly/scikit-learn,aetilley/scikit-learn,Aasmi/scikit-learn,Achuth17/scikit-learn,chrisburr/scikit-learn,mattgiguere/scikit-learn,alexsavio/scikit-learn,B3AU/waveTree,akionakamura/scikit-learn,Achuth17/scikit-learn,Adai0808/scikit-learn,fbagirov/scikit-learn,abimannans/scikit-learn,macks22/scikit-learn,Windy-Ground/scikit-learn,nesterione/scikit-learn,xavierwu/scikit-learn,wazeerzulfikar/scikit-learn,idlead/scikit-learn,vybstat/scikit-learn,zhenv5/scikit-learn,UNR-AERIAL/scikit-learn,hugobowne/scikit-learn,henrykironde/scikit-learn,scikit-learn/scikit-learn,thientu/scikit-learn,fz
alkow/scikit-learn,LiaoPan/scikit-learn,CforED/Machine-Learning,evgchz/scikit-learn,meduz/scikit-learn,krez13/scikit-learn,tmhm/scikit-learn,potash/scikit-learn,Lawrence-Liu/scikit-learn,arahuja/scikit-learn,lin-credible/scikit-learn,fyffyt/scikit-learn,zaxtax/scikit-learn,xuewei4d/scikit-learn,ephes/scikit-learn,anntzer/scikit-learn,BiaDarkia/scikit-learn,ChanderG/scikit-learn,icdishb/scikit-learn,sarahgrogan/scikit-learn,sanketloke/scikit-learn,hainm/scikit-learn,nomadcube/scikit-learn,arjoly/scikit-learn,betatim/scikit-learn,samuel1208/scikit-learn,hugobowne/scikit-learn,bhargav/scikit-learn,jmschrei/scikit-learn,xwolf12/scikit-learn,larsmans/scikit-learn,bhargav/scikit-learn,michigraber/scikit-learn,Akshay0724/scikit-learn,mfjb/scikit-learn,vortex-ape/scikit-learn,vinayak-mehta/scikit-learn,kylerbrown/scikit-learn,sgenoud/scikit-learn,466152112/scikit-learn,jaidevd/scikit-learn,Vimos/scikit-learn,lazywei/scikit-learn,herilalaina/scikit-learn,xwolf12/scikit-learn,yonglehou/scikit-learn,Srisai85/scikit-learn,costypetrisor/scikit-learn,nikitasingh981/scikit-learn,altairpearl/scikit-learn,vybstat/scikit-learn,0asa/scikit-learn,ChanChiChoi/scikit-learn,ChanderG/scikit-learn,RachitKansal/scikit-learn,lenovor/scikit-learn,jakobworldpeace/scikit-learn,cainiaocome/scikit-learn,glouppe/scikit-learn,deepesch/scikit-learn,andaag/scikit-learn,victorbergelin/scikit-learn,3manuek/scikit-learn,iismd17/scikit-learn,AIML/scikit-learn,abhishekkrthakur/scikit-learn,walterreade/scikit-learn,pythonvietnam/scikit-learn,xyguo/scikit-learn,Lawrence-Liu/scikit-learn,thilbern/scikit-learn,voxlol/scikit-learn,loli/semisupervisedforests,chrisburr/scikit-learn,zihua/scikit-learn,AlexRobson/scikit-learn,JsNoNo/scikit-learn,AlexandreAbraham/scikit-learn,Aasmi/scikit-learn,shyamalschandra/scikit-learn,ChanChiChoi/scikit-learn,mayblue9/scikit-learn,Windy-Ground/scikit-learn,victorbergelin/scikit-learn,anirudhjayaraman/scikit-learn,UNR-AERIAL/scikit-learn,Myasuka/scikit-learn,dsquareindia/scikit-
learn,hrjn/scikit-learn,shahankhatch/scikit-learn,Clyde-fare/scikit-learn,mhdella/scikit-learn,DSLituiev/scikit-learn,btabibian/scikit-learn,ningchi/scikit-learn,xiaoxiamii/scikit-learn,mhdella/scikit-learn,DSLituiev/scikit-learn,0asa/scikit-learn,jayflo/scikit-learn,PatrickOReilly/scikit-learn,ChanChiChoi/scikit-learn,nomadcube/scikit-learn,ZenDevelopmentSystems/scikit-learn,ashhher3/scikit-learn,altairpearl/scikit-learn,mrshu/scikit-learn,Achuth17/scikit-learn,trungnt13/scikit-learn,wanggang3333/scikit-learn,hdmetor/scikit-learn,anirudhjayaraman/scikit-learn,glennq/scikit-learn,JsNoNo/scikit-learn,cwu2011/scikit-learn,wlamond/scikit-learn,ishanic/scikit-learn,DonBeo/scikit-learn,davidgbe/scikit-learn,kjung/scikit-learn,vermouthmjl/scikit-learn,stylianos-kampakis/scikit-learn,andrewnc/scikit-learn,joshloyal/scikit-learn,aminert/scikit-learn,jm-begon/scikit-learn,mhue/scikit-learn,etkirsch/scikit-learn,lbishal/scikit-learn,jayflo/scikit-learn,luo66/scikit-learn,carrillo/scikit-learn,rvraghav93/scikit-learn,huobaowangxi/scikit-learn,PrashntS/scikit-learn,pypot/scikit-learn,arahuja/scikit-learn,smartscheduling/scikit-learn-categorical-tree,pv/scikit-learn,thientu/scikit-learn,AlexanderFabisch/scikit-learn,hlin117/scikit-learn,krez13/scikit-learn,AnasGhrab/scikit-learn,f3r/scikit-learn,Fireblend/scikit-learn,victorbergelin/scikit-learn,yanlend/scikit-learn,ZenDevelopmentSystems/scikit-learn,untom/scikit-learn,mikebenfield/scikit-learn,zihua/scikit-learn,ahoyosid/scikit-learn,AlexanderFabisch/scikit-learn,CVML/scikit-learn,glennq/scikit-learn,thilbern/scikit-learn,Clyde-fare/scikit-learn,PrashntS/scikit-learn,hsuantien/scikit-learn,tomlof/scikit-learn,Srisai85/scikit-learn,mxjl620/scikit-learn,ndingwall/scikit-learn,aewhatley/scikit-learn,zuku1985/scikit-learn,mlyundin/scikit-learn,espg/scikit-learn,michigraber/scikit-learn,dsullivan7/scikit-learn,manashmndl/scikit-learn,Achuth17/scikit-learn,sergeyf/scikit-learn,rexshihaoren/scikit-learn,r-mart/scikit-learn,kjung/sciki
t-learn,TomDLT/scikit-learn,aabadie/scikit-learn,Adai0808/scikit-learn,jorik041/scikit-learn,yyjiang/scikit-learn,herilalaina/scikit-learn,pompiduskus/scikit-learn,HolgerPeters/scikit-learn,rajat1994/scikit-learn,AlexanderFabisch/scikit-learn,eg-zhang/scikit-learn,mrshu/scikit-learn,ElDeveloper/scikit-learn,DonBeo/scikit-learn,jseabold/scikit-learn,akionakamura/scikit-learn,kmike/scikit-learn,ephes/scikit-learn,henridwyer/scikit-learn,liangz0707/scikit-learn,hugobowne/scikit-learn,raghavrv/scikit-learn,loli/semisupervisedforests,simon-pepin/scikit-learn,AIML/scikit-learn,jjx02230808/project0223,dsquareindia/scikit-learn,0x0all/scikit-learn,olologin/scikit-learn,kagayakidan/scikit-learn,sarahgrogan/scikit-learn,pratapvardhan/scikit-learn,sgenoud/scikit-learn,cwu2011/scikit-learn,ngoix/OCRF,shangwuhencc/scikit-learn,ZenDevelopmentSystems/scikit-learn,ssaeger/scikit-learn,ngoix/OCRF,rohanp/scikit-learn,yask123/scikit-learn,loli/semisupervisedforests,rahul-c1/scikit-learn,tawsifkhan/scikit-learn,pratapvardhan/scikit-learn,evgchz/scikit-learn,rsivapr/scikit-learn,Djabbz/scikit-learn,qifeigit/scikit-learn,jereze/scikit-learn,hsiaoyi0504/scikit-learn,chrisburr/scikit-learn,untom/scikit-learn,andaag/scikit-learn,rohanp/scikit-learn,shenzebang/scikit-learn,jjx02230808/project0223,ankurankan/scikit-learn,pv/scikit-learn,lucidfrontier45/scikit-learn,deepesch/scikit-learn,cwu2011/scikit-learn,hdmetor/scikit-learn,jseabold/scikit-learn,pnedunuri/scikit-learn,mattilyra/scikit-learn,B3AU/waveTree,justincassidy/scikit-learn,NunoEdgarGub1/scikit-learn,ElDeveloper/scikit-learn,zorojean/scikit-learn,rahul-c1/scikit-learn,justincassidy/scikit-learn,abhishekgahlot/scikit-learn,zorroblue/scikit-learn,idlead/scikit-learn,tdhopper/scikit-learn,zhenv5/scikit-learn,dsquareindia/scikit-learn,mrshu/scikit-learn,JPFrancoia/scikit-learn,IndraVikas/scikit-learn,vinayak-mehta/scikit-learn,cl4rke/scikit-learn,xubenben/scikit-learn,mayblue9/scikit-learn,mhdella/scikit-learn,maheshakya/scikit-learn,4
66152112/scikit-learn,fabioticconi/scikit-learn,anntzer/scikit-learn,jereze/scikit-learn,Barmaley-exe/scikit-learn,CforED/Machine-Learning,NunoEdgarGub1/scikit-learn,fzalkow/scikit-learn,abhishekgahlot/scikit-learn,liyu1990/sklearn,jzt5132/scikit-learn,rahuldhote/scikit-learn,nhejazi/scikit-learn,cybernet14/scikit-learn,waterponey/scikit-learn,tosolveit/scikit-learn,cainiaocome/scikit-learn,potash/scikit-learn,fzalkow/scikit-learn,lesteve/scikit-learn,cainiaocome/scikit-learn,OshynSong/scikit-learn,ankurankan/scikit-learn,chrsrds/scikit-learn,sinhrks/scikit-learn,rishikksh20/scikit-learn,B3AU/waveTree,mrshu/scikit-learn,vybstat/scikit-learn,djgagne/scikit-learn,sinhrks/scikit-learn,mhue/scikit-learn,equialgo/scikit-learn,pompiduskus/scikit-learn,poryfly/scikit-learn,f3r/scikit-learn,depet/scikit-learn,YinongLong/scikit-learn,ssaeger/scikit-learn,lazywei/scikit-learn,billy-inn/scikit-learn,aewhatley/scikit-learn,RachitKansal/scikit-learn,loli/sklearn-ensembletrees,siutanwong/scikit-learn,eickenberg/scikit-learn,themrmax/scikit-learn,ElDeveloper/scikit-learn,appapantula/scikit-learn,quheng/scikit-learn,carrillo/scikit-learn,abimannans/scikit-learn,B3AU/waveTree,mayblue9/scikit-learn,xwolf12/scikit-learn,pianomania/scikit-learn,kjung/scikit-learn,Obus/scikit-learn,elkingtonmcb/scikit-learn,mjgrav2001/scikit-learn,betatim/scikit-learn,jblackburne/scikit-learn,tomlof/scikit-learn,jkarnows/scikit-learn,xyguo/scikit-learn,NelisVerhoef/scikit-learn,kaichogami/scikit-learn,murali-munna/scikit-learn,AlexandreAbraham/scikit-learn,alvarofierroclavero/scikit-learn,pratapvardhan/scikit-learn,lin-credible/scikit-learn,MartinDelzant/scikit-learn,jereze/scikit-learn,anurag313/scikit-learn,jseabold/scikit-learn,ishanic/scikit-learn,B3AU/waveTree,roxyboy/scikit-learn,procoder317/scikit-learn,xavierwu/scikit-learn,fabianp/scikit-learn,evgchz/scikit-learn,anntzer/scikit-learn,ycaihua/scikit-learn,wanggang3333/scikit-learn,mattilyra/scikit-learn,lucidfrontier45/scikit-learn,jaidevd/sciki
t-learn,billy-inn/scikit-learn,mwv/scikit-learn,depet/scikit-learn,shahankhatch/scikit-learn,manashmndl/scikit-learn,loli/sklearn-ensembletrees,mjudsp/Tsallis,phdowling/scikit-learn,cl4rke/scikit-learn,gclenaghan/scikit-learn,robin-lai/scikit-learn,JsNoNo/scikit-learn,rajat1994/scikit-learn,0x0all/scikit-learn,robin-lai/scikit-learn,glemaitre/scikit-learn,tosolveit/scikit-learn,dingocuster/scikit-learn,kevin-intel/scikit-learn,wlamond/scikit-learn,abhishekgahlot/scikit-learn,arjoly/scikit-learn,marcocaccin/scikit-learn,q1ang/scikit-learn,Vimos/scikit-learn,MartinSavc/scikit-learn,HolgerPeters/scikit-learn,tmhm/scikit-learn,depet/scikit-learn,kylerbrown/scikit-learn,huzq/scikit-learn,Akshay0724/scikit-learn,meduz/scikit-learn,ChanderG/scikit-learn,vigilv/scikit-learn,Barmaley-exe/scikit-learn,RayMick/scikit-learn,krez13/scikit-learn,amueller/scikit-learn,RachitKansal/scikit-learn,ivannz/scikit-learn,ky822/scikit-learn,ahoyosid/scikit-learn,huobaowangxi/scikit-learn,RachitKansal/scikit-learn,DSLituiev/scikit-learn,jayflo/scikit-learn,heli522/scikit-learn,bhargav/scikit-learn,ankurankan/scikit-learn,pompiduskus/scikit-learn,lin-credible/scikit-learn,ogrisel/scikit-learn,rexshihaoren/scikit-learn,dhruv13J/scikit-learn,frank-tancf/scikit-learn,rrohan/scikit-learn,liyu1990/sklearn,florian-f/sklearn,fabioticconi/scikit-learn,pianomania/scikit-learn,glemaitre/scikit-learn,PatrickChrist/scikit-learn,ilyes14/scikit-learn,mattilyra/scikit-learn,TomDLT/scikit-learn,anntzer/scikit-learn,MartinDelzant/scikit-learn,voxlol/scikit-learn,quheng/scikit-learn,nhejazi/scikit-learn,shusenl/scikit-learn,JPFrancoia/scikit-learn,bhargav/scikit-learn,NelisVerhoef/scikit-learn,ahoyosid/scikit-learn,samzhang111/scikit-learn,nvoron23/scikit-learn,pypot/scikit-learn,spallavolu/scikit-learn,amueller/scikit-learn,zuku1985/scikit-learn,mfjb/scikit-learn,treycausey/scikit-learn,murali-munna/scikit-learn,xavierwu/scikit-learn,billy-inn/scikit-learn,RayMick/scikit-learn,loli/sklearn-ensembletrees,mehd
idc/scikit-learn,spallavolu/scikit-learn,jpautom/scikit-learn,liberatorqjw/scikit-learn,lesteve/scikit-learn,ivannz/scikit-learn,mfjb/scikit-learn,voxlol/scikit-learn,raghavrv/scikit-learn,djgagne/scikit-learn,fredhusser/scikit-learn,liangz0707/scikit-learn,loli/sklearn-ensembletrees,espg/scikit-learn,smartscheduling/scikit-learn-categorical-tree,marcocaccin/scikit-learn,fengzhyuan/scikit-learn,jm-begon/scikit-learn,pnedunuri/scikit-learn,devanshdalal/scikit-learn,manhhomienbienthuy/scikit-learn,f3r/scikit-learn,kylerbrown/scikit-learn,Fireblend/scikit-learn,mblondel/scikit-learn,harshaneelhg/scikit-learn,Vimos/scikit-learn,huzq/scikit-learn,macks22/scikit-learn,heli522/scikit-learn,shenzebang/scikit-learn,vibhorag/scikit-learn,glennq/scikit-learn,mwv/scikit-learn,fyffyt/scikit-learn,cdegroc/scikit-learn,ilyes14/scikit-learn,frank-tancf/scikit-learn,russel1237/scikit-learn,MartinDelzant/scikit-learn,fbagirov/scikit-learn,rishikksh20/scikit-learn,joshloyal/scikit-learn,nrhine1/scikit-learn,Garrett-R/scikit-learn,r-mart/scikit-learn,jakirkham/scikit-learn,ldirer/scikit-learn,Sentient07/scikit-learn,kmike/scikit-learn,RomainBrault/scikit-learn,deepesch/scikit-learn,JosmanPS/scikit-learn,fengzhyuan/scikit-learn,dsullivan7/scikit-learn,nesterione/scikit-learn,amueller/scikit-learn,joernhees/scikit-learn,sumspr/scikit-learn,depet/scikit-learn,vermouthmjl/scikit-learn,AlexandreAbraham/scikit-learn,maheshakya/scikit-learn,belltailjp/scikit-learn,yyjiang/scikit-learn,eickenberg/scikit-learn,sarahgrogan/scikit-learn,nmayorov/scikit-learn,belltailjp/scikit-learn,cybernet14/scikit-learn,espg/scikit-learn,ycaihua/scikit-learn,imaculate/scikit-learn,evgchz/scikit-learn,ZENGXH/scikit-learn,abhishekkrthakur/scikit-learn,Myasuka/scikit-learn,mattilyra/scikit-learn,huzq/scikit-learn,huzq/scikit-learn,jmschrei/scikit-learn,meduz/scikit-learn,thientu/scikit-learn,dsquareindia/scikit-learn,olologin/scikit-learn,schets/scikit-learn,jmetzen/scikit-learn,dsullivan7/scikit-learn,pompiduskus
/scikit-learn,vortex-ape/scikit-learn,ChanChiChoi/scikit-learn,Lawrence-Liu/scikit-learn,pythonvietnam/scikit-learn,YinongLong/scikit-learn,MartinDelzant/scikit-learn,jpautom/scikit-learn,aflaxman/scikit-learn,JPFrancoia/scikit-learn,anurag313/scikit-learn,manashmndl/scikit-learn,sonnyhu/scikit-learn,hitszxp/scikit-learn,jseabold/scikit-learn,hitszxp/scikit-learn,yunfeilu/scikit-learn,kevin-intel/scikit-learn,jayflo/scikit-learn,yask123/scikit-learn,eg-zhang/scikit-learn,Jimmy-Morzaria/scikit-learn,sonnyhu/scikit-learn,nomadcube/scikit-learn,jm-begon/scikit-learn,betatim/scikit-learn,deepesch/scikit-learn,hrjn/scikit-learn,ltiao/scikit-learn,yyjiang/scikit-learn,kagayakidan/scikit-learn,mrshu/scikit-learn,pypot/scikit-learn,davidgbe/scikit-learn,shikhardb/scikit-learn,nmayorov/scikit-learn,lucidfrontier45/scikit-learn,ahoyosid/scikit-learn,tomlof/scikit-learn,saiwing-yeung/scikit-learn,sinhrks/scikit-learn,eickenberg/scikit-learn,jakobworldpeace/scikit-learn,wanggang3333/scikit-learn,jjx02230808/project0223,poryfly/scikit-learn,andaag/scikit-learn,mattgiguere/scikit-learn,trankmichael/scikit-learn,nelson-liu/scikit-learn,fyffyt/scikit-learn,OshynSong/scikit-learn,rsivapr/scikit-learn,kaichogami/scikit-learn,hdmetor/scikit-learn,ngoix/OCRF,trankmichael/scikit-learn,henrykironde/scikit-learn,elkingtonmcb/scikit-learn,RPGOne/scikit-learn,rahuldhote/scikit-learn,Fireblend/scikit-learn,idlead/scikit-learn,iismd17/scikit-learn,liberatorqjw/scikit-learn,lazywei/scikit-learn,procoder317/scikit-learn,DSLituiev/scikit-learn,jmschrei/scikit-learn,RomainBrault/scikit-learn,bthirion/scikit-learn,mfjb/scikit-learn,macks22/scikit-learn,shyamalschandra/scikit-learn,dingocuster/scikit-learn,walterreade/scikit-learn,samzhang111/scikit-learn,aetilley/scikit-learn,mojoboss/scikit-learn,stylianos-kampakis/scikit-learn,bthirion/scikit-learn,pv/scikit-learn,phdowling/scikit-learn,thientu/scikit-learn,CVML/scikit-learn,chrisburr/scikit-learn,shenzebang/scikit-learn,poryfly/scikit-learn,nes
terione/scikit-learn,xuewei4d/scikit-learn,liberatorqjw/scikit-learn,massmutual/scikit-learn,thilbern/scikit-learn,florian-f/sklearn,rajat1994/scikit-learn,akionakamura/scikit-learn,anirudhjayaraman/scikit-learn,LohithBlaze/scikit-learn,liangz0707/scikit-learn,mhdella/scikit-learn,Titan-C/scikit-learn,nelson-liu/scikit-learn,trankmichael/scikit-learn,shikhardb/scikit-learn,iismd17/scikit-learn,kevin-intel/scikit-learn,BiaDarkia/scikit-learn,jorik041/scikit-learn,jkarnows/scikit-learn,ClimbsRocks/scikit-learn,xwolf12/scikit-learn,pianomania/scikit-learn,zhenv5/scikit-learn,kagayakidan/scikit-learn,btabibian/scikit-learn,mikebenfield/scikit-learn,xavierwu/scikit-learn,pythonvietnam/scikit-learn,h2educ/scikit-learn,ivannz/scikit-learn,anurag313/scikit-learn,robbymeals/scikit-learn,jorge2703/scikit-learn,theoryno3/scikit-learn,jmschrei/scikit-learn,fabianp/scikit-learn,ltiao/scikit-learn,jzt5132/scikit-learn,hrjn/scikit-learn,ClimbsRocks/scikit-learn,RPGOne/scikit-learn,petosegan/scikit-learn,florian-f/sklearn,saiwing-yeung/scikit-learn,Myasuka/scikit-learn,OshynSong/scikit-learn,rohanp/scikit-learn,toastedcornflakes/scikit-learn,Nyker510/scikit-learn,AlexRobson/scikit-learn,JeanKossaifi/scikit-learn,vshtanko/scikit-learn,hsiaoyi0504/scikit-learn,RayMick/scikit-learn,quheng/scikit-learn,AlexanderFabisch/scikit-learn,jakobworldpeace/scikit-learn,krez13/scikit-learn,lucidfrontier45/scikit-learn,MohammedWasim/scikit-learn,hdmetor/scikit-learn,elkingtonmcb/scikit-learn,imaculate/scikit-learn,ndingwall/scikit-learn,hitszxp/scikit-learn,MartinSavc/scikit-learn,mwv/scikit-learn,PatrickOReilly/scikit-learn,macks22/scikit-learn,zorroblue/scikit-learn,fyffyt/scikit-learn,jakirkham/scikit-learn,rexshihaoren/scikit-learn,scikit-learn/scikit-learn,Adai0808/scikit-learn,murali-munna/scikit-learn,robbymeals/scikit-learn,manashmndl/scikit-learn,mikebenfield/scikit-learn,altairpearl/scikit-learn,petosegan/scikit-learn,ZENGXH/scikit-learn,ishanic/scikit-learn,alexsavio/scikit-learn,rajat
1994/scikit-learn,Obus/scikit-learn,arjoly/scikit-learn,Nyker510/scikit-learn,ycaihua/scikit-learn,vshtanko/scikit-learn,yanlend/scikit-learn,russel1237/scikit-learn,abhishekkrthakur/scikit-learn,ilyes14/scikit-learn,xzh86/scikit-learn,kaichogami/scikit-learn,appapantula/scikit-learn,mjudsp/Tsallis,imaculate/scikit-learn,hlin117/scikit-learn,petosegan/scikit-learn,zihua/scikit-learn,LiaoPan/scikit-learn,aetilley/scikit-learn,q1ang/scikit-learn,sumspr/scikit-learn,0asa/scikit-learn,moutai/scikit-learn,giorgiop/scikit-learn,vivekmishra1991/scikit-learn,bigdataelephants/scikit-learn,iismd17/scikit-learn,LohithBlaze/scikit-learn,nikitasingh981/scikit-learn,vigilv/scikit-learn,amueller/scikit-learn,mattilyra/scikit-learn,clemkoa/scikit-learn,ominux/scikit-learn,PrashntS/scikit-learn,mojoboss/scikit-learn,vortex-ape/scikit-learn,aminert/scikit-learn,Obus/scikit-learn,jpautom/scikit-learn,scikit-learn/scikit-learn,chrsrds/scikit-learn,mayblue9/scikit-learn,saiwing-yeung/scikit-learn,vortex-ape/scikit-learn,rishikksh20/scikit-learn,raghavrv/scikit-learn,IshankGulati/scikit-learn,treycausey/scikit-learn,3manuek/scikit-learn,bigdataelephants/scikit-learn,nesterione/scikit-learn,roxyboy/scikit-learn,q1ang/scikit-learn,andrewnc/scikit-learn,wzbozon/scikit-learn,jmetzen/scikit-learn,clemkoa/scikit-learn,kylerbrown/scikit-learn,icdishb/scikit-learn,robbymeals/scikit-learn,sgenoud/scikit-learn,Barmaley-exe/scikit-learn,PatrickOReilly/scikit-learn,tawsifkhan/scikit-learn,florian-f/sklearn,hainm/scikit-learn,luo66/scikit-learn,henridwyer/scikit-learn,PatrickChrist/scikit-learn,sergeyf/scikit-learn,lesteve/scikit-learn,raghavrv/scikit-learn,andaag/scikit-learn,stylianos-kampakis/scikit-learn,procoder317/scikit-learn,nikitasingh981/scikit-learn,IssamLaradji/scikit-learn,maheshakya/scikit-learn,Fireblend/scikit-learn,bnaul/scikit-learn,khkaminska/scikit-learn,equialgo/scikit-learn,ky822/scikit-learn,yanlend/scikit-learn,tawsifkhan/scikit-learn,bnaul/scikit-learn,jmetzen/scikit-learn,To
mDLT/scikit-learn,mhue/scikit-learn,clemkoa/scikit-learn,marcocaccin/scikit-learn,mjgrav2001/scikit-learn,arabenjamin/scikit-learn,kashif/scikit-learn,qifeigit/scikit-learn,adamgreenhall/scikit-learn,siutanwong/scikit-learn,shyamalschandra/scikit-learn,ssaeger/scikit-learn,heli522/scikit-learn,RomainBrault/scikit-learn,vybstat/scikit-learn,Vimos/scikit-learn,Titan-C/scikit-learn,vigilv/scikit-learn,arabenjamin/scikit-learn,madjelan/scikit-learn,khkaminska/scikit-learn,alvarofierroclavero/scikit-learn,fabianp/scikit-learn,stylianos-kampakis/scikit-learn,bikong2/scikit-learn,terkkila/scikit-learn,ankurankan/scikit-learn,aminert/scikit-learn,ClimbsRocks/scikit-learn,aflaxman/scikit-learn,bnaul/scikit-learn,vivekmishra1991/scikit-learn,BiaDarkia/scikit-learn,larsmans/scikit-learn,zaxtax/scikit-learn,ogrisel/scikit-learn,ogrisel/scikit-learn,MechCoder/scikit-learn,samzhang111/scikit-learn,pythonvietnam/scikit-learn,fabianp/scikit-learn,yunfeilu/scikit-learn,nhejazi/scikit-learn,cainiaocome/scikit-learn,moutai/scikit-learn,cdegroc/scikit-learn,PrashntS/scikit-learn,jaidevd/scikit-learn,CforED/Machine-Learning,mehdidc/scikit-learn,pratapvardhan/scikit-learn,ldirer/scikit-learn,clemkoa/scikit-learn,ivannz/scikit-learn,yask123/scikit-learn,bikong2/scikit-learn,fredhusser/scikit-learn,wlamond/scikit-learn,toastedcornflakes/scikit-learn,potash/scikit-learn,ltiao/scikit-learn,Nyker510/scikit-learn,vermouthmjl/scikit-learn,f3r/scikit-learn,PatrickChrist/scikit-learn,simon-pepin/scikit-learn,rahuldhote/scikit-learn,schets/scikit-learn,plissonf/scikit-learn,pypot/scikit-learn,liyu1990/sklearn,glouppe/scikit-learn,equialgo/scikit-learn,IshankGulati/scikit-learn,kjung/scikit-learn,rahul-c1/scikit-learn,btabibian/scikit-learn,herilalaina/scikit-learn,3manuek/scikit-learn,harshaneelhg/scikit-learn,joshloyal/scikit-learn,Myasuka/scikit-learn,siutanwong/scikit-learn,aabadie/scikit-learn,ningchi/scikit-learn,arabenjamin/scikit-learn,jorik041/scikit-learn,yanlend/scikit-learn,Clyde-fare/s
cikit-learn,vivekmishra1991/scikit-learn,shangwuhencc/scikit-learn,fabioticconi/scikit-learn,eickenberg/scikit-learn,NunoEdgarGub1/scikit-learn,beepee14/scikit-learn,giorgiop/scikit-learn,wazeerzulfikar/scikit-learn,AnasGhrab/scikit-learn,mojoboss/scikit-learn,gotomypc/scikit-learn,djgagne/scikit-learn,hrjn/scikit-learn,jlegendary/scikit-learn,gclenaghan/scikit-learn,IshankGulati/scikit-learn,hsiaoyi0504/scikit-learn,glouppe/scikit-learn,hitszxp/scikit-learn,MatthieuBizien/scikit-learn,schets/scikit-learn,MohammedWasim/scikit-learn,xuewei4d/scikit-learn,kagayakidan/scikit-learn,xuewei4d/scikit-learn,pianomania/scikit-learn,Garrett-R/scikit-learn,maheshakya/scikit-learn,ilo10/scikit-learn,adamgreenhall/scikit-learn,gotomypc/scikit-learn,cwu2011/scikit-learn,lenovor/scikit-learn,vivekmishra1991/scikit-learn,cybernet14/scikit-learn,zhenv5/scikit-learn,AlexandreAbraham/scikit-learn,treycausey/scikit-learn,belltailjp/scikit-learn,marcocaccin/scikit-learn,phdowling/scikit-learn,lbishal/scikit-learn,untom/scikit-learn,0x0all/scikit-learn,toastedcornflakes/scikit-learn,tdhopper/scikit-learn,saiwing-yeung/scikit-learn,shusenl/scikit-learn,jjx02230808/project0223,michigraber/scikit-learn,moutai/scikit-learn,IndraVikas/scikit-learn,pkruskal/scikit-learn,0x0all/scikit-learn,bigdataelephants/scikit-learn,appapantula/scikit-learn,abimannans/scikit-learn,xzh86/scikit-learn,elkingtonmcb/scikit-learn,khkaminska/scikit-learn,wanggang3333/scikit-learn,jblackburne/scikit-learn,herilalaina/scikit-learn,MohammedWasim/scikit-learn,LohithBlaze/scikit-learn,russel1237/scikit-learn,terkkila/scikit-learn,r-mart/scikit-learn,Djabbz/scikit-learn,jakirkham/scikit-learn,wazeerzulfikar/scikit-learn,kmike/scikit-learn,MechCoder/scikit-learn,MohammedWasim/scikit-learn,ngoix/OCRF,dhruv13J/scikit-learn,HolgerPeters/scikit-learn,bikong2/scikit-learn,cl4rke/scikit-learn,liyu1990/sklearn,cdegroc/scikit-learn,tmhm/scikit-learn,plissonf/scikit-learn,JeanKossaifi/scikit-learn,nikitasingh981/scikit-learn,ilo
10/scikit-learn,fbagirov/scikit-learn,Obus/scikit-learn,larsmans/scikit-learn,davidgbe/scikit-learn,abhishekgahlot/scikit-learn,bigdataelephants/scikit-learn,olologin/scikit-learn,glemaitre/scikit-learn,Garrett-R/scikit-learn,ycaihua/scikit-learn,themrmax/scikit-learn,Nyker510/scikit-learn,mlyundin/scikit-learn,maheshakya/scikit-learn,harshaneelhg/scikit-learn,OshynSong/scikit-learn,ishanic/scikit-learn,tdhopper/scikit-learn,treycausey/scikit-learn,abhishekgahlot/scikit-learn,waterponey/scikit-learn,ElDeveloper/scikit-learn,shyamalschandra/scikit-learn,davidgbe/scikit-learn,eg-zhang/scikit-learn,spallavolu/scikit-learn,devanshdalal/scikit-learn,RPGOne/scikit-learn,hsuantien/scikit-learn,jlegendary/scikit-learn,mjgrav2001/scikit-learn,larsmans/scikit-learn,rahul-c1/scikit-learn,jblackburne/scikit-learn,JPFrancoia/scikit-learn,vermouthmjl/scikit-learn,mehdidc/scikit-learn,Djabbz/scikit-learn,rvraghav93/scikit-learn,ilo10/scikit-learn,equialgo/scikit-learn,mjudsp/Tsallis,cl4rke/scikit-learn,robbymeals/scikit-learn,trungnt13/scikit-learn,plissonf/scikit-learn,luo66/scikit-learn,zuku1985/scikit-learn,JeanKossaifi/scikit-learn,jorik041/scikit-learn,abimannans/scikit-learn,DonBeo/scikit-learn,shahankhatch/scikit-learn,sgenoud/scikit-learn,zaxtax/scikit-learn,AlexRobson/scikit-learn,lbishal/scikit-learn,h2educ/scikit-learn,khkaminska/scikit-learn,Sentient07/scikit-learn,glouppe/scikit-learn,ephes/scikit-learn,kashif/scikit-learn,jpautom/scikit-learn,lin-credible/scikit-learn,MatthieuBizien/scikit-learn,mblondel/scikit-learn,massmutual/scikit-learn,nvoron23/scikit-learn,sergeyf/scikit-learn,alexeyum/scikit-learn,rrohan/scikit-learn,shikhardb/scikit-learn,mattgiguere/scikit-learn,alexsavio/scikit-learn,madjelan/scikit-learn,loli/semisupervisedforests,ephes/scikit-learn,xyguo/scikit-learn,trungnt13/scikit-learn,CforED/Machine-Learning,sanketloke/scikit-learn,fengzhyuan/scikit-learn,etkirsch/scikit-learn,dhruv13J/scikit-learn,huobaowangxi/scikit-learn,carrillo/scikit-learn,nvor
on23/scikit-learn,MatthieuBizien/scikit-learn,AnasGhrab/scikit-learn,evgchz/scikit-learn,aetilley/scikit-learn,betatim/scikit-learn,arahuja/scikit-learn,andrewnc/scikit-learn,fengzhyuan/scikit-learn,Lawrence-Liu/scikit-learn,jzt5132/scikit-learn,bthirion/scikit-learn,ltiao/scikit-learn,sanketloke/scikit-learn,hugobowne/scikit-learn,untom/scikit-learn,tomlof/scikit-learn,themrmax/scikit-learn,florian-f/sklearn,MechCoder/scikit-learn,theoryno3/scikit-learn,ominux/scikit-learn,rrohan/scikit-learn,ilo10/scikit-learn,waterponey/scikit-learn,mehdidc/scikit-learn,etkirsch/scikit-learn,fredhusser/scikit-learn,jaidevd/scikit-learn,yyjiang/scikit-learn,xubenben/scikit-learn,vinayak-mehta/scikit-learn,madjelan/scikit-learn,roxyboy/scikit-learn,abhishekkrthakur/scikit-learn,nelson-liu/scikit-learn,pkruskal/scikit-learn,gclenaghan/scikit-learn,yunfeilu/scikit-learn,0x0all/scikit-learn,samuel1208/scikit-learn,cdegroc/scikit-learn,mwv/scikit-learn,3manuek/scikit-learn,kevin-intel/scikit-learn,xubenben/scikit-learn,nvoron23/scikit-learn,petosegan/scikit-learn,fredhusser/scikit-learn,RayMick/scikit-learn,jakirkham/scikit-learn,devanshdalal/scikit-learn,joernhees/scikit-learn,simon-pepin/scikit-learn,pnedunuri/scikit-learn,vinayak-mehta/scikit-learn,luo66/scikit-learn,qifeigit/scikit-learn,ZENGXH/scikit-learn,zorroblue/scikit-learn,pnedunuri/scikit-learn,JosmanPS/scikit-learn,vshtanko/scikit-learn,nmayorov/scikit-learn,andrewnc/scikit-learn,aewhatley/scikit-learn,hitszxp/scikit-learn,LohithBlaze/scikit-learn,jkarnows/scikit-learn,icdishb/scikit-learn,murali-munna/scikit-learn,simon-pepin/scikit-learn,rohanp/scikit-learn,tosolveit/scikit-learn,Clyde-fare/scikit-learn,LiaoPan/scikit-learn,nomadcube/scikit-learn,0asa/scikit-learn,espg/scikit-learn,jkarnows/scikit-learn,samuel1208/scikit-learn,aabadie/scikit-learn,mxjl620/scikit-learn,rishikksh20/scikit-learn,joernhees/scikit-learn,chrsrds/scikit-learn,AIML/scikit-learn,rahuldhote/scikit-learn,aflaxman/scikit-learn,nrhine1/scikit-learn,v
shtanko/scikit-learn,lazywei/scikit-learn,gclenaghan/scikit-learn,yask123/scikit-learn,schets/scikit-learn,huobaowangxi/scikit-learn,wzbozon/scikit-learn,adamgreenhall/scikit-learn,loli/sklearn-ensembletrees,zorojean/scikit-learn,altairpearl/scikit-learn,ilyes14/scikit-learn,Garrett-R/scikit-learn,tdhopper/scikit-learn,icdishb/scikit-learn,ndingwall/scikit-learn,joshloyal/scikit-learn,liberatorqjw/scikit-learn,btabibian/scikit-learn,IssamLaradji/scikit-learn,anurag313/scikit-learn,samuel1208/scikit-learn,xyguo/scikit-learn,Aasmi/scikit-learn,IshankGulati/scikit-learn,ky822/scikit-learn,ashhher3/scikit-learn,terkkila/scikit-learn,zorojean/scikit-learn,BiaDarkia/scikit-learn,phdowling/scikit-learn,hainm/scikit-learn,eickenberg/scikit-learn,cybernet14/scikit-learn,mlyundin/scikit-learn,plissonf/scikit-learn,Akshay0724/scikit-learn,meduz/scikit-learn,themrmax/scikit-learn,ClimbsRocks/scikit-learn,lenovor/scikit-learn,MechCoder/scikit-learn,mhue/scikit-learn,UNR-AERIAL/scikit-learn,xubenben/scikit-learn,ldirer/scikit-learn,h2educ/scikit-learn,JosmanPS/scikit-learn,shahankhatch/scikit-learn,tosolveit/scikit-learn,jorge2703/scikit-learn,mugizico/scikit-learn,xiaoxiamii/scikit-learn,shusenl/scikit-learn,IndraVikas/scikit-learn,Sentient07/scikit-learn,Aasmi/scikit-learn,ngoix/OCRF,mxjl620/scikit-learn,arahuja/scikit-learn,eg-zhang/scikit-learn,costypetrisor/scikit-learn,zorojean/scikit-learn,alvarofierroclavero/scikit-learn,dingocuster/scikit-learn,rsivapr/scikit-learn,frank-tancf/scikit-learn,NunoEdgarGub1/scikit-learn,ZenDevelopmentSystems/scikit-learn,glennq/scikit-learn,CVML/scikit-learn,lbishal/scikit-learn,nelson-liu/scikit-learn,zorroblue/scikit-learn,ashhher3/scikit-learn,imaculate/scikit-learn,larsmans/scikit-learn,kashif/scikit-learn,costypetrisor/scikit-learn,jakobworldpeace/scikit-learn,alvarofierroclavero/scikit-learn,ningchi/scikit-learn,gotomypc/scikit-learn,costypetrisor/scikit-learn,toastedcornflakes/scikit-learn,giorgiop/scikit-learn,Jimmy-Morzaria/scikit-l
earn,tawsifkhan/scikit-learn,theoryno3/scikit-learn,appapantula/scikit-learn,xiaoxiamii/scikit-learn,ominux/scikit-learn,YinongLong/scikit-learn,ankurankan/scikit-learn,hlin117/scikit-learn,sanketloke/scikit-learn,mugizico/scikit-learn,ogrisel/scikit-learn,rsivapr/scikit-learn,PatrickChrist/scikit-learn,ngoix/OCRF,terkkila/scikit-learn,sarahgrogan/scikit-learn,nmayorov/scikit-learn,sumspr/scikit-learn,r-mart/scikit-learn,fbagirov/scikit-learn,beepee14/scikit-learn,siutanwong/scikit-learn,cauchycui/scikit-learn,hsuantien/scikit-learn,theoryno3/scikit-learn,CVML/scikit-learn,jlegendary/scikit-learn,manhhomienbienthuy/scikit-learn,fabioticconi/scikit-learn,ningchi/scikit-learn,procoder317/scikit-learn,UNR-AERIAL/scikit-learn,roxyboy/scikit-learn,alexeyum/scikit-learn,aabadie/scikit-learn,smartscheduling/scikit-learn-categorical-tree,MartinSavc/scikit-learn,mlyundin/scikit-learn,cauchycui/scikit-learn,robin-lai/scikit-learn,shangwuhencc/scikit-learn,jblackburne/scikit-learn,harshaneelhg/scikit-learn,arjoly/scikit-learn,alexeyum/scikit-learn,Titan-C/scikit-learn,beepee14/scikit-learn,ChanderG/scikit-learn,dingocuster/scikit-learn,Jimmy-Morzaria/scikit-learn,NelisVerhoef/scikit-learn,wzbozon/scikit-learn,kaichogami/scikit-learn,sinhrks/scikit-learn,466152112/scikit-learn,justincassidy/scikit-learn,glemaitre/scikit-learn,heli522/scikit-learn,pkruskal/scikit-learn,ZENGXH/scikit-learn,wzbozon/scikit-learn,mugizico/scikit-learn,thilbern/scikit-learn,Djabbz/scikit-learn,Akshay0724/scikit-learn,mojoboss/scikit-learn,poryfly/scikit-learn,arabenjamin/scikit-learn,IssamLaradji/scikit-learn,akionakamura/scikit-learn,djgagne/scikit-learn,mjudsp/Tsallis,adamgreenhall/scikit-learn,TomDLT/scikit-learn,walterreade/scikit-learn,jereze/scikit-learn,carrillo/scikit-learn,henrykironde/scikit-learn,justincassidy/scikit-learn,massmutual/scikit-learn,JsNoNo/scikit-learn,belltailjp/scikit-learn,potash/scikit-learn,mikebenfield/scikit-learn,jlegendary/scikit-learn,samzhang111/scikit-learn,yongle
hou/scikit-learn,madjelan/scikit-learn,hlin117/scikit-learn,manhhomienbienthuy/scikit-learn,sumspr/scikit-learn,yunfeilu/scikit-learn,ldirer/scikit-learn,bthirion/scikit-learn,smartscheduling/scikit-learn-categorical-tree,Jimmy-Morzaria/scikit-learn,mblondel/scikit-learn,billy-inn/scikit-learn,lenovor/scikit-learn,lesteve/scikit-learn,xzh86/scikit-learn,robin-lai/scikit-learn,Garrett-R/scikit-learn,jorge2703/scikit-learn,dhruv13J/scikit-learn,jmetzen/scikit-learn,nrhine1/scikit-learn,idlead/scikit-learn,HolgerPeters/scikit-learn,shusenl/scikit-learn,mblondel/scikit-learn,massmutual/scikit-learn,yonglehou/scikit-learn,JeanKossaifi/scikit-learn,nrhine1/scikit-learn,alexsavio/scikit-learn,rrohan/scikit-learn,russel1237/scikit-learn,olologin/scikit-learn,vibhorag/scikit-learn,qifeigit/scikit-learn,henridwyer/scikit-learn,devanshdalal/scikit-learn,trankmichael/scikit-learn,jzt5132/scikit-learn,waterponey/scikit-learn,spallavolu/scikit-learn,michigraber/scikit-learn,aewhatley/scikit-learn,rexshihaoren/scikit-learn,jorge2703/scikit-learn,voxlol/scikit-learn,zaxtax/scikit-learn,vibhorag/scikit-learn,depet/scikit-learn,vibhorag/scikit-learn,rsivapr/scikit-learn,shenzebang/scikit-learn,IssamLaradji/scikit-learn,Titan-C/scikit-learn,bikong2/scikit-learn,zihua/scikit-learn,rvraghav93/scikit-learn,q1ang/scikit-learn,kmike/scikit-learn,mjudsp/Tsallis,AIML/scikit-learn,kmike/scikit-learn,sergeyf/scikit-learn,RomainBrault/scikit-learn,walterreade/scikit-learn,ndingwall/scikit-learn,xzh86/scikit-learn,Srisai85/scikit-learn,chrsrds/scikit-learn,ky822/scikit-learn,gotomypc/scikit-learn,NelisVerhoef/scikit-learn,alexeyum/scikit-learn,lucidfrontier45/scikit-learn,mugizico/scikit-learn,Adai0808/scikit-learn,shikhardb/scikit-learn,pkruskal/scikit-learn,aminert/scikit-learn,bnaul/scikit-learn,Srisai85/scikit-learn,sgenoud/scikit-learn,h2educ/scikit-learn,nhejazi/scikit-learn,cauchycui/scikit-learn,ssaeger/scikit-learn,wlamond/scikit-learn | scikits/learn/__init__.py | 
scikits/learn/__init__.py | """
Machine Learning module in python
=================================
scikits.learn is a Python module integrating classique machine
learning algorithms in the tightly-nit world of scientific Python
packages (numpy, scipy, matplotlib).
It aims to provide simple and efficient solutions to learning problems
that are accessible to everybody and reusable in various contexts:
machine-learning as a versatile tool for science and engineering.
See http://scikit-learn.sourceforge.net for complete documentation.
"""
from .base import clone
try:
    from numpy.testing import nosetester

    class NoseTester(nosetester.NoseTester):
        """ Subclass numpy's NoseTester to add doctests by default
        """
        # NOTE: the mutable default for extra_argv is shared across calls;
        # harmless here because the list is never mutated.
        def test(self, label='fast', verbose=1, extra_argv=['--exe'],
                 doctests=True, coverage=False):
            # Identical to numpy's NoseTester.test except that doctests
            # default to True.
            return super(NoseTester, self).test(label=label, verbose=verbose,
                                                extra_argv=extra_argv,
                                                doctests=doctests, coverage=coverage)

    test = NoseTester().test
    del nosetester  # keep the package namespace clean
except:
    # Deliberate best-effort: if numpy.testing/nose is unavailable the
    # package still imports, just without the ``test`` entry point.
    pass
__all__ = ['cross_val', 'ball_tree', 'cluster', 'covariance', 'datasets',
'fastica', 'feature_extraction', 'feature_selection',
'gaussian_process', 'grid_search', 'hmm', 'lda', 'linear_model',
'metrics', 'mixture', 'naive_bayes', 'neighbors',
'pca', 'pipeline', 'preprocessing', 'qda', 'svm', 'test',
'clone', 'pls']
__version__ = '0.7.git'
| """
Machine Learning module in python
=================================
scikits.learn is a Python module integrating classique machine
learning algorithms in the tightly-nit world of scientific Python
packages (numpy, scipy, matplotlib).
It aims to provide simple and efficient solutions to learning problems
that are accessible to everybody and reusable in various contexts:
machine-learning as a versatile tool for science and engineering.
See http://scikit-learn.sourceforge.net for complete documentation.
"""
from .base import clone
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['cross_val', 'ball_tree', 'cluster', 'covariance', 'datasets',
'fastica', 'feature_extraction', 'feature_selection',
'gaussian_process', 'grid_search', 'hmm', 'lda', 'linear_model',
'metrics', 'mixture', 'naive_bayes', 'neighbors',
'pca', 'pipeline', 'preprocessing', 'qda', 'svm', 'test',
'clone']
__version__ = '0.7.git'
| bsd-3-clause | Python |
3d430b5505ce8307633eda3d01e92713a5abba32 | remove more word count stuff | nprapps/graeae,nprapps/graeae,nprapps/graeae,nprapps/graeae | scrapers/seamus/models.py | scrapers/seamus/models.py | from collections import OrderedDict
from dateutil import parser
from itertools import groupby
from pytz import timezone
from pyquery import PyQuery
from scrapers.homepage.models import ApiEntry
import os
class Story(ApiEntry):
    """
    Represents a story in the Seamus API
    """
    def __init__(self, element, run_time):
        # element: the story's XML node (PyQuery element)
        # run_time: timestamp of the scraper run that fetched this story
        self.element = element
        self.run_time = run_time

    def serialize(self):
        """
        Flatten the story into an OrderedDict of scalar fields.

        Field order is significant for downstream row/CSV writers.
        ``has_lead_art`` and the ``lead_art_*`` fields are inherited from
        ApiEntry.
        """
        return OrderedDict([
            ('run_time', self.run_time),
            ('id', self.id),
            ('title', self.title),
            ('publication_date', self.publication_date),
            ('story_date', self.story_date),
            ('last_modified_date', self.last_modified_date),
            ('canonical_url', self.canonical_url),
            ('has_lead_art', self.has_lead_art),
            ('lead_art_provider', self.lead_art_provider),
            ('lead_art_url', self.lead_art_url),
        ])

    def _parse_date(self, date_string):
        # Parse an API date string and normalize it to a *naive* UTC
        # datetime so stored values are comparable regardless of the
        # source timezone.
        parsed = parser.parse(date_string)
        adjusted = parsed.astimezone(timezone('UTC')).replace(tzinfo=None)
        return adjusted

    @property
    def id(self):
        """
        Get the story ID
        """
        return self.element.attr('id')

    @property
    def title(self):
        """
        Get the title
        """
        return self.element.children('title').text()

    @property
    def publication_date(self):
        """
        Get the publication date (naive UTC datetime)
        """
        return self._parse_date(self.element.children('pubDate').text())

    @property
    def story_date(self):
        """
        Get the story date (naive UTC datetime)
        """
        return self._parse_date(self.element.children('storyDate').text())

    @property
    def last_modified_date(self):
        """
        Get the last modified date (naive UTC datetime)
        """
        return self._parse_date(self.element.children('lastModifiedDate').text())

    @property
    def canonical_url(self):
        """
        Get the canonical URL, with any query string stripped so the same
        story always maps to a single URL.
        """
        url = self.element.children('link[type="html"]').text()
        if url.find('?') > -1:
            return url[:url.find('?')]
        else:
            return url
| from collections import OrderedDict
from dateutil import parser
from itertools import groupby
from pytz import timezone
from pyquery import PyQuery
from scrapers.homepage.models import ApiEntry
import os
class Story(ApiEntry):
"""
Represents a story in the Seamus API
"""
def __init__(self, element, run_time):
self.element = element
self.run_time = run_time
def serialize(self):
return OrderedDict([
('run_time', self.run_time),
('id', self.id),
('title', self.title),
('publication_date', self.publication_date),
('story_date', self.story_date),
('last_modified_date', self.last_modified_date),
('canonical_url', self.canonical_url),
('has_lead_art', self.has_lead_art),
('lead_art_provider', self.lead_art_provider),
('lead_art_url', self.lead_art_url),
('word_count', self.word_count),
])
def _parse_date(self, date_string):
parsed = parser.parse(date_string)
adjusted = parsed.astimezone(timezone('UTC')).replace(tzinfo=None)
return adjusted
@property
def id(self):
"""
Get the story ID
"""
return self.element.attr('id')
@property
def title(self):
"""
Get the title
"""
return self.element.children('title').text()
@property
def publication_date(self):
"""
Get the publication date
"""
return self._parse_date(self.element.children('pubDate').text())
@property
def story_date(self):
"""
Get the story date
"""
return self._parse_date(self.element.children('storyDate').text())
@property
def last_modified_date(self):
"""
Get the last modified date
"""
return self._parse_date(self.element.children('lastModifiedDate').text())
@property
def canonical_url(self):
"""
Get the canonical URL
"""
url = self.element.children('link[type="html"]').text()
if url.find('?') > -1:
return url[:url.find('?')]
else:
return url
@property
def word_count(self):
html = self.element.find('textWithHtml').text()
text_parts = PyQuery(html).text().split(' ')
return len(text_parts)
| mit | Python |
6876ce61a2f324c8aa36ba7084b23e5180c14c2a | fix pica conversion thanks to Paul McNett | makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile | reportlab/lib/units.py | reportlab/lib/units.py | #!/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/units.py
__version__=''' $Id$ '''
inch = 72.0          # points per inch (PostScript point)
cm = inch / 2.54     # points per centimetre
mm = cm * 0.1        # points per millimetre
pica = 12.0          # points per pica

def toLength(s):
    '''Convert a string such as '2cm', '1in', '3pica' or '36' to a length
    in points.  A bare number is taken to be points already.

    Raises ValueError if the string cannot be parsed.
    '''
    try:
        if s[-2:]=='cm': return float(s[:-2])*cm
        if s[-2:]=='in': return float(s[:-2])*inch
        if s[-2:]=='pt': return float(s[:-2])
        if s[-1:]=='i': return float(s[:-1])*inch
        if s[-2:]=='mm': return float(s[:-2])*mm
        if s[-4:]=='pica': return float(s[:-4])*pica
        return float(s)
    except Exception:
        # Catch only real errors (a bare ``except:`` would also swallow
        # KeyboardInterrupt/SystemExit) and re-raise with a uniform
        # message.  The call form of ``raise`` works under both Python 2
        # and Python 3, unlike the old ``raise ValueError, msg`` syntax.
        raise ValueError("Can't convert '%s' to length" % s)
| #!/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/units.py
__version__=''' $Id$ '''
inch = 72.0          # points per inch (PostScript point)
cm = inch / 2.54     # points per centimetre
mm = cm * 0.1        # points per millimetre
pica = 12.0          # points per pica

def toLength(s):
    '''Convert a string such as '2cm', '1in', '3pica' or '36' to a length
    in points.  A bare number is taken to be points already.

    Raises ValueError if the string cannot be parsed.
    '''
    try:
        if s[-2:]=='cm': return float(s[:-2])*cm
        if s[-2:]=='in': return float(s[:-2])*inch
        if s[-2:]=='pt': return float(s[:-2])
        if s[-1:]=='i': return float(s[:-1])*inch
        if s[-2:]=='mm': return float(s[:-2])*mm
        # BUG FIX: the 'pica' suffix is 4 characters long, so the numeric
        # part is s[:-4]; the old s[:-2] left "pi" in the string and the
        # float() call always failed.
        if s[-4:]=='pica': return float(s[:-4])*pica
        return float(s)
    except Exception:
        # Catch only real errors (a bare ``except:`` would also swallow
        # KeyboardInterrupt/SystemExit) and re-raise with a uniform
        # message.  The call form of ``raise`` works under both Python 2
        # and Python 3, unlike the old ``raise ValueError, msg`` syntax.
        raise ValueError("Can't convert '%s' to length" % s)
cddccbdf7eb8f329888e58d470f6ad5303b60429 | fix line parks | evenly-epic-mule/Monocle,ZeChrales/Monocle,evenly-epic-mule/Monocle,ZeChrales/Monocle,evenly-epic-mule/Monocle,ZeChrales/Monocle | raidex.py | raidex.py | #!/usr/bin/env python3
from datetime import datetime
from pkg_resources import resource_filename
try:
from ujson import dumps
from flask import json as flask_json
flask_json.dumps = lambda obj, **kwargs: dumps(obj, double_precision=6)
except ImportError:
from json import dumps
from flask import Flask, jsonify, Markup, render_template, request
from monocle import db, sanitized as conf
from monocle.web_utils import *
from monocle.bounds import area, center
from shapely.geometry import Polygon, Point, LineString
app = Flask(__name__, template_folder=resource_filename('monocle', 'templates'), static_folder=resource_filename('monocle', 'static'))
def render_map():
template = app.jinja_env.get_template('raidex.html')
return template.render(
area_name=conf.AREA_NAME,
map_center=center,
map_provider_url=conf.MAP_PROVIDER_URL,
map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION
)
@app.route('/')
def fullmap(map_html=render_map()):
return map_html
@app.route('/gym_data')
def gym_data():
    """Return (as JSON) all gym markers that fall inside a park.

    A park with exactly two coordinates is treated as a line segment,
    one with three or more as a polygon.
    """
    gyms = []
    parks = get_all_parks()
    for g in get_gym_markers():
        point = Point(g['lat'], g['lon'])
        for p in parks:
            coords = p['coords']
            if len(coords) == 2:
                # Fixed: the original tested LineString.within(Point),
                # which is geometrically always False; the intended test
                # is whether the gym point lies on the segment.
                hit = point.intersects(LineString(coords))
            elif len(coords) > 2:  # fixed: ``else if`` is a SyntaxError
                hit = Polygon(coords).contains(point)
            else:
                hit = False
            if hit:
                gyms.append(g)
                break  # avoid appending the same gym once per matching park
    return jsonify(gyms)
@app.route('/parks')
def parks():
return jsonify(get_all_parks())
@app.route('/cells')
def cells():
return jsonify(get_s2_cells())
@app.route('/scan_coords')
def scan_coords():
return jsonify(get_scan_coords())
def main():
args = get_args()
app.run(debug=args.debug, threaded=True, host=args.host, port=args.port)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
from datetime import datetime
from pkg_resources import resource_filename
try:
from ujson import dumps
from flask import json as flask_json
flask_json.dumps = lambda obj, **kwargs: dumps(obj, double_precision=6)
except ImportError:
from json import dumps
from flask import Flask, jsonify, Markup, render_template, request
from monocle import db, sanitized as conf
from monocle.web_utils import *
from monocle.bounds import area, center
from shapely.geometry import Polygon, Point
app = Flask(__name__, template_folder=resource_filename('monocle', 'templates'), static_folder=resource_filename('monocle', 'static'))
def render_map():
template = app.jinja_env.get_template('raidex.html')
return template.render(
area_name=conf.AREA_NAME,
map_center=center,
map_provider_url=conf.MAP_PROVIDER_URL,
map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION
)
@app.route('/')
def fullmap(map_html=render_map()):
return map_html
@app.route('/gym_data')
def gym_data():
gyms = []
parks = get_all_parks()
for g in get_gym_markers():
for p in parks:
if Polygon(p['coords']).contains(Point(g['lat'], g['lon'])):
gyms.append(g)
return jsonify(gyms)
@app.route('/parks')
def parks():
return jsonify(get_all_parks())
@app.route('/cells')
def cells():
return jsonify(get_s2_cells())
@app.route('/scan_coords')
def scan_coords():
return jsonify(get_scan_coords())
def main():
args = get_args()
app.run(debug=args.debug, threaded=True, host=args.host, port=args.port)
if __name__ == '__main__':
main()
| mit | Python |
9e7fe9e2a1ac8d8b7651d4ce9859427844c3c988 | Revert "Add support for setting shard_index instead of shard_num" | berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud | solr/run_solr_shard.py | solr/run_solr_shard.py | #!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import run_solr_shard
if __name__ == "__main__":
    # Command-line entry point: parse shard/cluster options and launch
    # one Solr shard connected to the given ZooKeeper.
    parser = argparse.ArgumentParser(description="Install Solr and start a shard.",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # NOTE(review): shard numbering is 1-based; presumably shard_num must
    # not exceed shard_count -- run_solr_shard() is assumed to validate.
    parser.add_argument("-n", "--shard_num", type=int, required=True,
                        help="Shard to start.")
    parser.add_argument("-c", "--shard_count", type=int, required=True,
                        help="Number of shards across the whole cluster.")
    parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_CLUSTER_ZOOKEEPER_HOST,
                        help="ZooKeeper host to connect to.")
    parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_CLUSTER_ZOOKEEPER_PORT,
                        help="ZooKeeper port to connect to.")
    parser.add_argument("-mx", "--jvm_heap_size", type=str, required=False, default=MC_SOLR_CLUSTER_JVM_HEAP_SIZE,
                        help="JVM heap size (-Xmx).")
    args = parser.parse_args()

    run_solr_shard(shard_num=args.shard_num,
                   shard_count=args.shard_count,
                   zookeeper_host=args.zookeeper_host,
                   zookeeper_port=args.zookeeper_port,
                   jvm_heap_size=args.jvm_heap_size)
| #!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import run_solr_shard
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Install Solr and start a shard.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
shard_group = parser.add_mutually_exclusive_group(required=True)
# Shard number for humans (1, 2, 3, ...)
shard_group.add_argument("-n", "--shard_num", type=int, help="Shard number (starts with 1).")
# Shard index for Supervisor (0, 1, 2, ...)
shard_group.add_argument("-i", "--shard_index", type=int, help="Shard index (starts with 0).")
parser.add_argument("-c", "--shard_count", type=int, required=True,
help="Number of shards across the whole cluster.")
parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_CLUSTER_ZOOKEEPER_HOST,
help="ZooKeeper host to connect to.")
parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_CLUSTER_ZOOKEEPER_PORT,
help="ZooKeeper port to connect to.")
parser.add_argument("-mx", "--jvm_heap_size", type=str, required=False, default=MC_SOLR_CLUSTER_JVM_HEAP_SIZE,
help="JVM heap size (-Xmx).")
args = parser.parse_args()
shard_num = args.shard_num
if shard_num is None:
shard_num = args.shard_index + 1
run_solr_shard(shard_num=shard_num,
shard_count=args.shard_count,
zookeeper_host=args.zookeeper_host,
zookeeper_port=args.zookeeper_port,
jvm_heap_size=args.jvm_heap_size)
| agpl-3.0 | Python |
48e682e267f45a731e90a60ce9798272f7a9c24c | Add supportted Mbed platform | 0xc0170/pyOCD,0xc0170/pyOCD,mbedmicro/pyOCD,tgarc/pyOCD,wjzhang/pyOCD,c1728p9/pyOCD,flit/pyOCD,matthewelse/pyOCD,molejar/pyOCD,tgarc/pyOCD,devanlai/pyOCD,devanlai/pyOCD,oliviermartin/pyOCD,geky/pyOCD,mesheven/pyOCD,devanlai/pyOCD,adamgreen/pyOCD,NordicSemiconductor/pyOCD,geky/pyOCDgdb,tgarc/pyOCD,flit/pyOCD,bridadan/pyOCD,0xc0170/pyOCD,pyocd/pyOCD,c1728p9/pyOCD,adamgreen/pyOCD,mesheven/pyOCD,oliviermartin/pyOCD,c1728p9/pyOCD,molejar/pyOCD,oliviermartin/pyOCD,pyocd/pyOCD,wjzhang/pyOCD,wjzhang/pyOCD,bridadan/pyOCD,molejar/pyOCD,mbedmicro/pyOCD,matthewelse/pyOCD,geky/pyOCD,bridadan/pyOCD,mbedmicro/pyOCD,adamgreen/pyOCD,geky/pyDAPLink,matthewelse/pyOCD,mesheven/pyOCD | test/gdb_server.py | test/gdb_server.py | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import traceback
import pyOCD.board.mbed_board
from pyOCD.gdbserver import GDBServer
from pyOCD.board import MbedBoard
from optparse import OptionParser
from optparse import OptionGroup
LEVELS={'debug':logging.DEBUG,
'info':logging.INFO,
'warning':logging.WARNING,
'error':logging.ERROR,
'critical':logging.CRITICAL
}
print "Welcome to the PyOCD GDB Server Beta Version "
supportted_list = ''
for (k,v) in pyOCD.board.mbed_board.TARGET_TYPE.items():
supportted_list += v + ' '
parser = OptionParser()
group = OptionGroup(parser, "Supportted Mbed Platform",supportted_list )
parser.add_option_group(group)
parser.add_option("-p", "--port", dest = "port_number", default = 3333, help = "Write the port number that GDB server will open")
parser.add_option("-b", "--board", dest = "board_id", default = None, help = "Write the board id you want to connect")
parser.add_option("-l", "--list", action = "store_true", dest = "list_all", default = False, help = "List all the connected board")
parser.add_option("-d", "--debug", dest = "debug_level", default = 'info', help = "Set the level of system logging output, the available value for DEBUG_LEVEL: debug, info, warning, error, critical" )
(option, args) = parser.parse_args()
gdb = None
level = LEVELS.get(option.debug_level, logging.NOTSET)
logging.basicConfig(level=level)
if option.list_all == True:
MbedBoard.listConnectedBoards()
else:
try:
board_selected = MbedBoard.chooseBoard(board_id = option.board_id)
if board_selected != None:
try:
gdb = GDBServer(board_selected, int(option.port_number))
while gdb.isAlive():
gdb.join(timeout = 0.5)
except ValueError:
logging.error("Port number error!")
except KeyboardInterrupt:
if gdb != None:
gdb.stop()
except Exception as e:
print "uncaught exception: %s" % e
traceback.print_exc()
if gdb != None:
gdb.stop()
| """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import traceback
from pyOCD.gdbserver import GDBServer
from pyOCD.board import MbedBoard
from optparse import OptionParser
LEVELS={'debug':logging.DEBUG,
'info':logging.INFO,
'warning':logging.WARNING,
'error':logging.ERROR,
'critical':logging.CRITICAL
}
print "Welcome to the PyOCD GDB Server Beta Version "
parser = OptionParser()
parser.add_option("-p", "--port", dest = "port_number", default = 3333, help = "Write the port number that GDB server will open")
parser.add_option("-b", "--board", dest = "board_id", default = None, help = "Write the board id you want to connect")
parser.add_option("-l", "--list", action = "store_true", dest = "list_all", default = False, help = "List all the connected board")
parser.add_option("-d", "--debug", dest = "debug_level", default = 'info', help = "Set the level of system logging output, the available value for DEBUG_LEVEL: debug, info, warning, error, critical" )
(option, args) = parser.parse_args()
gdb = None
level = LEVELS.get(option.debug_level, logging.NOTSET)
logging.basicConfig(level=level)
if option.list_all == True:
MbedBoard.listConnectedBoards()
else:
try:
board_selected = MbedBoard.chooseBoard(board_id = option.board_id)
if board_selected != None:
try:
gdb = GDBServer(board_selected, int(option.port_number))
while gdb.isAlive():
gdb.join(timeout = 0.5)
except ValueError:
logging.error("Port number error!")
except KeyboardInterrupt:
if gdb != None:
gdb.stop()
except Exception as e:
print "uncaught exception: %s" % e
traceback.print_exc()
if gdb != None:
gdb.stop()
| apache-2.0 | Python |
5feebab1580ef511cefd149dc841d2f5802d06cb | add possibility of get context from terminal | buxx/synergine | synergine/core/connection/Terminal.py | synergine/core/connection/Terminal.py | from synergine.core.exception.NotFoundError import NotFoundError
class Terminal():
"""
Obj who receive synergine data at each cycle
"""
_name = None
@classmethod
def get_name(cls):
if not cls._name:
raise Exception("Terminal must be named")
return cls._name
def __init__(self, config, context, synergy_manager):
"""
:param config: ConfigurationManager
:return: void
"""
self._encapsuled_run = False
self._config = config
self._context = context
self._synergy_manager = synergy_manager
def _get_config(self, config_name, default=None):
try:
return self._config.get('terminal.'+self.get_name()+'.'+config_name)
except NotFoundError:
pass
try:
return self._config.get('terminal.__default__.'+config_name)
except NotFoundError:
pass
try:
return self._config.get(config_name)
except NotFoundError:
pass
if default is not None:
return default
raise NotFoundError("Can't found config ", config_name)
def encapsulate_run(self, run_function):
self._encapsuled_run = True
def initialize(self):
pass
def have_encapsulated_run(self):
return self._encapsuled_run
def need_to_run_core(self):
return False
def start_of_cycle(self):
pass
def end_of_cycle(self):
pass
def initialize_screen(self, screen):
pass
def receive(self, actions_done):
pass
def terminate(self):
pass
def get_context(self):
return self._context
| from synergine.core.exception.NotFoundError import NotFoundError
class Terminal():
"""
Obj who receive synergine data at each cycle
"""
_name = None
@classmethod
def get_name(cls):
if not cls._name:
raise Exception("Terminal must be named")
return cls._name
def __init__(self, config, context, synergy_manager):
"""
:param config: ConfigurationManager
:return: void
"""
self._encapsuled_run = False
self._config = config
self._context = context
self._synergy_manager = synergy_manager
def _get_config(self, config_name, default=None):
try:
return self._config.get('terminal.'+self.get_name()+'.'+config_name)
except NotFoundError:
pass
try:
return self._config.get('terminal.__default__.'+config_name)
except NotFoundError:
pass
try:
return self._config.get(config_name)
except NotFoundError:
pass
if default is not None:
return default
raise NotFoundError("Can't found config ", config_name)
def encapsulate_run(self, run_function):
self._encapsuled_run = True
def initialize(self):
pass
def have_encapsulated_run(self):
return self._encapsuled_run
def need_to_run_core(self):
return False
def start_of_cycle(self):
pass
def end_of_cycle(self):
pass
def initialize_screen(self, screen):
pass
def receive(self, actions_done):
pass
def terminate(self):
pass | apache-2.0 | Python |
1469bba5c87c6bd6c66eb87a695579f032806353 | Update data_import.py | anapophenic/knb | data_import.py | data_import.py | import scipy.io
import numpy as np
from collections import Counter
# NOTE: the parameter name ``format`` shadows the builtin; kept for
# interface compatibility.
def data_prep(filename,format='explicit'):
    """
    Main function for importing triples from raw INTACT DNA methylation data

    Inputs:
        filename: path to a .mat file with INTACT DNA methylation data
            (arrays 'h' = per-site coverage, 'mc' = methylated counts)
        format: whether to return the data formatted for explicit feature
            map ('explicit') or for kernel feature map (any other value)
    Outputs:
        N: maximum coverage value observed in the file
        X_importance_weighted (or X): for 'explicit', a Counter mapping
            each consecutive encoded triple (x_1, x_2, x_3) to its number
            of occurrences (importance weight); otherwise an array built
            from the three shifted sequences
        a: correction term \E[1/(n+2)] used in explicit feature map
    """
    mat = scipy.io.loadmat(filename)

    #print mat
    #print mat['mc']
    #print mat['h']
    #print mat['bins']

    coverage = mat['h']
    methylated = mat['mc']
    N = np.amax(coverage)
    print np.shape(coverage)

    # merging all the contexts with cytosine(C) at this point:
    coverage = np.sum(coverage, axis=1);
    methylated = np.sum(methylated, axis=1);

    # preparing data
    l = np.shape(coverage)[0]
    print 'l = '
    print l

    # Encode each position as a single integer state:
    # coverage * (N+1) + methylated, so pairs map injectively to ints.
    X0 = coverage * (N+1) + methylated

    # compute E[1/(n+2)]
    a = sum(1.0 / (coverage+2)) / l

    #X0 = np.zeros(l)
    #for i in range(l):
    #    X0[i] = coverage[i]*(N+1) + methylated[i]

    if format=='explicit':
        # Count occurrences of each consecutive triple -> importance weights.
        X_zipped = zip(X0[0:l-2], X0[1:l-1], X0[2:l])
        X_importance_weighted = Counter( X_zipped )
        return N, X_importance_weighted, a
    else:
        # NOTE(review): np.hstack on 1-D arrays concatenates them into one
        # flat length-3*(l-2) vector rather than forming per-row triples;
        # confirm the kernel feature map expects this layout
        # (np.column_stack would give an (l-2, 3) array instead).
        X = np.hstack((X0[0:l-2],X0[1:l-1],X0[2:l]))
        return N, X, a
if __name__ == '__main__':
filename = 'Data_Intact/cndd/emukamel/HMM/Data/Binned/allc_AM_E1_chrY_binsize100.mat'
N, X, a = data_prep(filename);
print N
print X
print a
| import scipy.io
import numpy as np
from collections import Counter
def data_prep(filename):
"""
Main function for importing triples from raw INTACT DNA methylation data
Inputs:
filename: filename for INTACT DNA methylation data
Otputs:
N: number of maximum number of coverage
X_importance_weighted: a dictionary of
key: triples (x_1, x_2, x_3)
value: total number of cooccurrence of (x_1, x_2, x_3) (importance weight)
a: correction term \E[1/(n+2)] used in explicit feature map
"""
mat = scipy.io.loadmat(filename)
#print mat
#print mat['mc']
#print mat['h']
#print mat['bins']
coverage = mat['h']
methylated = mat['mc']
N = np.amax(coverage)
print np.shape(coverage)
# merging all the contexts with cytosine(C) at this point:
coverage = np.sum(coverage, axis=1);
methylated = np.sum(methylated, axis=1);
# preparing data
l = np.shape(coverage)[0]
print 'l = '
print l
X0 = coverage * (N+1) + methylated
# compute E[1/(n+2)]
a = sum(1.0 / (coverage+2)) / l
#X0 = np.zeros(l)
#for i in range(l):
# X0[i] = coverage[i]*(N+1) + methylated[i]
X_zipped = zip(X0[0:l-2], X0[1:l-1], X0[2:l])
X_importance_weighted = Counter( X_zipped )
return N, X_importance_weighted, a
if __name__ == '__main__':
filename = 'Data_Intact/cndd/emukamel/HMM/Data/Binned/allc_AM_E1_chrY_binsize100.mat'
N, X, a = data_prep(filename);
print N
print X
print a
| cc0-1.0 | Python |
bff5fc6f8266d4a788cd2514de806564c95d9440 | add removal of empty directories | dan-blanchard/conda-build,sandhujasmine/conda-build,mwcraig/conda-build,shastings517/conda-build,frol/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,ilastik/conda-build,rmcgibbo/conda-build,shastings517/conda-build,ilastik/conda-build,sandhujasmine/conda-build,ilastik/conda-build,sandhujasmine/conda-build,frol/conda-build,rmcgibbo/conda-build,shastings517/conda-build,dan-blanchard/conda-build,frol/conda-build,mwcraig/conda-build,rmcgibbo/conda-build | conda_build/noarch.py | conda_build/noarch.py | import os
from os.path import dirname, isdir, join
from conda_build.config import config
BASH_HEAD = '''\
#!/bin/bash
SP_DIR=$($PREFIX/bin/python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
#echo "SP_DIR='$SP_DIR'"
'''
def handle_file(f):
    """Post-process one packaged file path (relative to the build prefix).

    * ``.egg-info`` and ``.pyc`` files are deleted.
    * Files under any ``site-packages`` directory are moved into the
      top-level ``site-packages`` directory of the build prefix.

    Returns the new prefix-relative path for moved files, None for
    deleted files, and (implicitly) None for files left untouched.
    """
    path = join(config.build_prefix, f)
    if f.endswith(('.egg-info', '.pyc')):
        os.unlink(path)
        return None
    if 'site-packages' in f:
        nsp = join(config.build_prefix, 'site-packages')
        if not isdir(nsp):
            os.mkdir(nsp)
        # Keep only the path from 'site-packages' onward.
        g = f[f.find('site-packages'):]
        dst = join(config.build_prefix, g)
        dst_dir = dirname(dst)
        # NOTE(review): os.mkdir creates a single level only -- this
        # presumably assumes site-packages content is at most one
        # directory deep; confirm, or switch to os.makedirs.
        if not isdir(dst_dir):
            os.mkdir(dst_dir)
        os.rename(path, dst)
        return g
def transform(m, files):
    """Write the pre-link/pre-unlink shell scripts for a noarch package.

    The pre-link script hard-links every site-packages file into the
    target environment's real site-packages directory; the pre-unlink
    script removes those links and then the (now empty) directories,
    deepest first.

    m: package metadata object (must provide name())
    files: prefix-relative paths of the packaged files
    """
    f1 = open(join(config.build_prefix,
                   'bin/.%s-pre-link.sh' % m.name()), 'w')
    # The pre-link script first copies the pre-unlink script into the
    # target prefix so it is available at removal time.
    f1.write('''
cp $SOURCE_DIR/bin/.%s-pre-unlink.sh $PREFIX/bin
''' % m.name())
    f1.write(BASH_HEAD)
    f2 = open(join(config.build_prefix,
                   'bin/.%s-pre-unlink.sh' % m.name()), 'w')
    f2.write(BASH_HEAD)
    dirs = set()
    for f in files:
        g = handle_file(f)
        if g is None:
            continue
        if g.startswith('site-packages/'):
            g = g[14:]  # strip the 'site-packages/' prefix (14 chars)
        dirs.add(dirname(g))
        f1.write('''
mkdir -p $SP_DIR/%s
rm -f $SP_DIR/%s
ln $SOURCE_DIR/site-packages/%s $SP_DIR/%s
''' % (dirname(g), g, g, g))
        f2.write('rm -f $SP_DIR/%s*\n' % g)
    f1.close()
    # Remove deepest directories first so parents are already empty when
    # their turn comes.
    for d in sorted(dirs, key=len, reverse=True):
        f2.write('rmdir $SP_DIR/%s\n' % d)
    f2.close()
| import os
from os.path import dirname, isdir, join
from conda_build.config import config
BASH_HEAD = '''\
#!/bin/bash
SP_DIR=$($PREFIX/bin/python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
#echo "SP_DIR='$SP_DIR'"
'''
def handle_file(f):
path = join(config.build_prefix, f)
if f.endswith(('.egg-info', '.pyc')):
os.unlink(path)
return None
if 'site-packages' in f:
nsp = join(config.build_prefix, 'site-packages')
if not isdir(nsp):
os.mkdir(nsp)
g = f[f.find('site-packages'):]
dst = join(config.build_prefix, g)
dst_dir = dirname(dst)
if not isdir(dst_dir):
os.mkdir(dst_dir)
os.rename(path, dst)
return g
def transform(m, files):
f1 = open(join(config.build_prefix,
'bin/.%s-pre-link.sh' % m.name()), 'w')
f1.write('''
cp $SOURCE_DIR/bin/.%s-pre-unlink.sh $PREFIX/bin
''' % m.name())
f1.write(BASH_HEAD)
f2 = open(join(config.build_prefix,
'bin/.%s-pre-unlink.sh' % m.name()), 'w')
f2.write(BASH_HEAD)
for f in files:
print f
g = handle_file(f)
if g is None:
continue
if g.startswith('site-packages/'):
g = g[14:]
f1.write('''
mkdir -p $SP_DIR/%s
rm -f $SP_DIR/%s
ln $SOURCE_DIR/site-packages/%s $SP_DIR/%s
''' % (dirname(g), g, g, g))
f2.write('''\
rm -f $SP_DIR/%s*
''' % g)
f1.close()
f2.close()
| bsd-3-clause | Python |
667955e3376357ac987d3fbf40de6151ba0a980c | implement client.init | mlsteele/one-time-chat,mlsteele/one-time-chat,mlsteele/one-time-chat | client/client.py | client/client.py | import requests
import sys
class OTC_Client(object):
    """One-time-pad chat client (work in progress).

    Encryption consumes pad bytes from the start of the pad and
    decryption from the end, so the two directions never reuse pad
    material.  Most methods are still unimplemented stubs.
    """

    def __init__(self, server_address, pad):
        self.encrypt_index = 0            # next pad position for encryption
        self.pad = pad
        self.decrypt_index = len(pad)     # decryption consumes pad from the end
        self.connect(server_address)
        raise NotImplementedError("TODO: write a client")

    def send(self, message, target):
        """Send ``message`` to ``target`` through the server."""
        payload = {'message': message, 'target': target}
        # Fixed: was misspelled ``requets.post`` -- a NameError at runtime.
        r = requests.post(self.server_address, data=payload)
        raise NotImplementedError("TODO: write send method")

    def recieve(self):
        # NOTE(review): method name is misspelled ("recieve") but kept for
        # interface compatibility with existing callers.
        raise NotImplementedError("TODO: write recieve")

    def decrypt(self, message, pad_index):
        ## TODO: give ability of default index
        raise NotImplementedError("TODO: clients need to decrypt messages")

    def encrypt(self, encrypt, pad_index):
        raise NotImplementedError("TODO: clients need to encrypt messages")

    def connect(self, server_address):
        # Records the address, then fails: actual connection logic is TODO.
        self.server_address = server_address
        raise NotImplementedError("TODO:clients need to be able to connect to server")
if __name__ == "__main__":
if (len(sys.argv)<3):
print "ERROR: correct usage is client.py [server_address] [pad_file]"
server_address = sys.argv[1]
pad_file = sys.argv[2]
client = OTC_Client(server_address,pad_file)
| class OTC_Client(object):
def __init__(self):
raise NotImplementedError("TODO: write a client")
def send(self,message):
payload = {'message':message}
r = requets.post(self.server_address,data=payload)
raise NotImplementedError("TODO: write send method")
def recieve(self):
raise NotImplementedError("TODO: write recieve")
def decrypt(self,message, pad_index=current_index):
raise NotImplementedError("TODO: clients need to decrypt messages")
def encrypt(self,encrypt):
raise NotImplementedError("TODO: clients need to encrypt messages")
def connect(self,server_address):
self.server_address = server_address
raise NotImplementedError("TODO:clients need to be able to connect to server")
if __name__ == "__main__":
client = OTC_Client()
| mit | Python |
b88dbecd04072f2215347d10ee5135428cce552c | add pymatgen local_env strategy for all bonds below cutoff | materialsvirtuallab/megnet,materialsvirtuallab/megnet,materialsvirtuallab/megnet,materialsvirtuallab/megnet,materialsvirtuallab/megnet | megnet/data/local_env.py | megnet/data/local_env.py | from pymatgen.analysis.local_env import MinimumDistanceNN
class MinimumDistanceNNAll(MinimumDistanceNN):
"""
Determine bonded sites by fixed cutoff
Args:.
cutoff (float): cutoff radius in Angstrom to look for trial
near-neighbor sites (default: 4.0).
"""
def __init__(self, cutoff=4.0):
self.cutoff = cutoff
def get_nn_info(self, structure, n):
"""
Get all near-neighbor sites as well as the associated image locations
and weights of the site with index n using the closest neighbor
distance-based method.
Args:
structure (Structure): input structure.
n (integer): index of site for which to determine near
neighbors.
Returns:
siw (list of tuples (Site, array, float)): tuples, each one
of which represents a neighbor site, its image location,
and its weight.
"""
site = structure[n]
neighs_dists = structure.get_neighbors(site, self.cutoff)
siw = []
for s, dist in neighs_dists:
w = dist
siw.append({'site': s,
'image': self._get_image(structure, s),
'weight': w,
'site_index': self._get_original_site(structure, s)})
return siw
| bsd-3-clause | Python | |
f9a081af27c45c13b1763d25b9da32f32c4d3b00 | Add checks to verify that functions imported from saved model contain the attribute "tf._original_func_name". | Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow | tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/control_flow_upgrade_legacy_v1.py | tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/control_flow_upgrade_legacy_v1.py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/control_flow_upgrade_legacy_v1 | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
import tensorflow.compat.v1 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common_v1
from tensorflow.python.ops import control_flow_ops
# Tests V1 control flow is functionalized.
# CHECK-NOT: tf_executor.Switch
# CHECK-NOT: tf_executor.Merge
# CHECK: "tf.If"
# CHECK-SAME: else_branch = @"key/[[else:[a-zA-Z_0-9]+]]"
# CHECK-SAME: then_branch = @"key/[[then:[a-zA-Z_0-9]+]]"
# CHECK: func private @"key/[[else]]"(
# CHECK-SAME: tf._original_func_name
# CHECK: func private @"key/[[then]]"(
# CHECK-SAME: tf._original_func_name
def Test():
    """Build a graph using V1 Switch/Merge control flow.

    Returns the (signature_def_map, init_op, assets) triple consumed by
    common_v1.do_test -- presumably; confirm against common_v1.do_test's
    signature. The FileCheck patterns above verify that functionalization
    rewrites the switch/merge pair into a single tf.If.
    """
    data = tf.constant([1, 2, 3, 4, 5, 6])
    # Create placeholders to prevent constant folding.
    x_op = tf.placeholder(dtype=tf.int32)
    y_op = tf.placeholder(dtype=tf.int32)
    less_op = tf.less(x_op, y_op)
    # Legacy V1 conditional built directly from switch/merge primitives.
    switch_op = control_flow_ops.switch(data, less_op)
    merge_op = control_flow_ops.merge(switch_op)[0]
    result = tf.transpose(merge_op)
    tensor_info_result = tf.compat.v1.saved_model.utils.build_tensor_info(result)
    signature_def = tf.saved_model.signature_def_utils.build_signature_def(
        inputs=None,
        outputs={'result': tensor_info_result},
        method_name='some_function')
    return {'key': signature_def}, None, None


if __name__ == '__main__':
    common_v1.set_tf_options()
    common_v1.do_test(Test)
| # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/control_flow_upgrade_legacy_v1 | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
import tensorflow.compat.v1 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common_v1
from tensorflow.python.ops import control_flow_ops
# Tests V1 control flow is functionalized.
# CHECK-NOT: tf_executor.Switch
# CHECK-NOT: tf_executor.Merge
# CHECK: "tf.If"
# CHECK-SAME: else_branch = @"key/[[else:[a-zA-Z_0-9]+]]"
# CHECK-SAME: then_branch = @"key/[[then:[a-zA-Z_0-9]+]]"
# CHECK: func private @"key/[[else]]"(
# CHECK: func private @"key/[[then]]"(
def Test():
    """Build a graph using V1 Switch/Merge control flow (pre-fix copy).

    Returns the (signature_def_map, init_op, assets) triple consumed by
    common_v1.do_test -- presumably; confirm against common_v1.do_test's
    signature.
    """
    data = tf.constant([1, 2, 3, 4, 5, 6])
    # Create placeholders to prevent constant folding.
    x_op = tf.placeholder(dtype=tf.int32)
    y_op = tf.placeholder(dtype=tf.int32)
    less_op = tf.less(x_op, y_op)
    # Legacy V1 conditional built directly from switch/merge primitives.
    switch_op = control_flow_ops.switch(data, less_op)
    merge_op = control_flow_ops.merge(switch_op)[0]
    result = tf.transpose(merge_op)
    tensor_info_result = tf.compat.v1.saved_model.utils.build_tensor_info(result)
    signature_def = tf.saved_model.signature_def_utils.build_signature_def(
        inputs=None,
        outputs={'result': tensor_info_result},
        method_name='some_function')
    return {'key': signature_def}, None, None


if __name__ == '__main__':
    common_v1.set_tf_options()
    common_v1.do_test(Test)
| apache-2.0 | Python |
837b63918c29c1cd45a2a0daf8e6ff6e3b28bfb7 | Fix typo breaking the TS6 feature. | merc-devel/merc | merc/features/ts6/sid.py | merc/features/ts6/sid.py | from merc import errors
from merc import feature
from merc import message
from merc import util
class SidFeature(feature.Feature):
    """Feature module that registers the TS6 SID server command."""
    NAME = __name__


# Module-level hook used by the feature loader to install this feature.
install = SidFeature.install
@SidFeature.register_server_command
class Sid(message.Command):
    """Server-to-server SID command: introduces a remote server.

    Carries the server's name, hop count, server ID (SID) and a free-form
    description; any extra trailing parameters are accepted and ignored.
    """
    NAME = "SID"
    MIN_ARITY = 4

    def __init__(self, server_name, hopcount, sid, description, *args):
        # Keep the four meaningful fields; *args absorbs any extras.
        self.server_name, self.hopcount = server_name, hopcount
        self.sid, self.description = sid, description

    def as_command_params(self):
        # Wire order mirrors the constructor's parameter order.
        params = [self.server_name, self.hopcount]
        params.extend([self.sid, self.description])
        return params

    def handle_for(self, app, server, prefix):
        # TODO: handle me!
        pass
@SidFeature.hook("network.burst.sid")
def burst_sids(app, server):
    """Burst one SID message per known network link to a linking server."""
    for origin, remote in app.network.all_links():
        cmd = Sid(remote.name, "1", remote.sid, remote.description)
        server.send(origin.sid, cmd)
| from merc import errors
from merc import feature
from merc import message
from merc import util
class SidFeature(feature.Feature):
    """Feature module that registers the TS6 SID server command."""
    NAME = __name__


# Module-level hook used by the feature loader to install this feature.
install = SidFeature.install
@SidFeature.register_server_command
class Sid(message.Command):
    """Server-to-server SID command: introduces a remote server."""
    NAME = "SID"
    MIN_ARITY = 4

    def __init__(self, server_name, hopcount, sid, description, *args):
        self.server_name = server_name
        self.hopcount = hopcount
        self.sid = sid
        self.description = description

    def as_command_params(self):
        # Wire order mirrors the constructor's parameter order.
        return [self.server_name, self.hopcount, self.sid, self.description]

    def handle_for(self, app, server, prefix):
        # TODO: handle me!
        # BUG FIX: the body previously consisted only of the comment above,
        # which is a syntax error and prevented the whole module (and with
        # it the TS6 feature) from importing. A no-op body is required.
        pass
@SidFeature.hook("network.burst.sid")
def burst_sids(app, server):
    """Burst one SID message per known network link to a linking server."""
    for source, target in app.network.all_links():
        server.send(source.sid, Sid(target.name, "1", target.sid,
                                    target.description))
| mit | Python |
6340c57f3676e0fe05795315d4321c15a61c8533 | Update version.py | VUIIS/dax,VUIIS/dax | dax/version.py | dax/version.py | VERSION = '1.0.0b3'
| VERSION = '1.0.0b2'
| mit | Python |
107e8103ddfefe7a801509a1079642ae8fa1221c | fix names of network and subnetworks | CiscoSystems/tempest,cisco-openstack/tempest,cisco-openstack/tempest,CiscoSystems/tempest | tempest/scenario/test_network_ipv6.py | tempest/scenario/test_network_ipv6.py | # Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario import manager
from tempest import test
class TestNetworkIPv6(manager.NetworkScenarioTest):
"""This smoke test suite has the same assumption as TestNetworkBasicOps
In addition, here we assume that network part of the cloud is configured
either in IPv6 mode or in dual-stack mode.
"""
_ip_version = 6
# Create no shared network resources up front; the test builds its own.
network_resources = {'network': False, 'router': False, 'subnet': False,
'dhcp': False}
@test.services('network')
def test_large_prefix(self):
# Local import: netaddr is only needed by this test method.
import netaddr
net = self ._create_network(tenant_id=self.tenant_id,
namestart='net-125-126')
# Exercise the two largest-prefix (smallest pool) IPv6 subnet sizes.
for bits in [125, 126]:
sub = self._create_subnet(network=net,
namestart='subnet-{0}'.format(bits),
net_max_bits=bits)
start = netaddr.IPAddress(sub.allocation_pools[0]['start'])
end = netaddr.IPAddress(sub.allocation_pools[0]['end'])
# Inclusive pool size; compared below against 2**(128-bits) - 3
# (three addresses reserved -- confirm against Neutron's rules).
n_addresses = end.value - start.value + 1
self.assertEqual(expected=pow(2, 128 - bits)-3,
observed=n_addresses) | # Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario import manager
from tempest import test
class TestNetworkIPv6(manager.NetworkScenarioTest):
"""This smoke test suite has the same assumption as TestNetworkBasicOps
In addition, here we assume that network part of the cloud is configured
either in IPv6 mode or in dual-stack mode.
"""
_ip_version = 6
# Create no shared network resources up front; the test builds its own.
network_resources = {'network': False, 'router': False, 'subnet': False,
'dhcp': False}
@test.services('network')
def test_large_prefix(self):
# Local import: netaddr is only needed by this test method.
import netaddr
# NOTE(review): names 'net-125'/'subnet-125' don't reflect the 126-bit
# iteration below (renamed upstream).
net = self ._create_network(tenant_id=self.tenant_id,
namestart='net-125')
for bits in [125, 126]:
sub = self._create_subnet(network=net, namestart='subnet-125',
net_max_bits=bits)
start = netaddr.IPAddress(sub.allocation_pools[0]['start'])
end = netaddr.IPAddress(sub.allocation_pools[0]['end'])
# Inclusive pool size; compared below against 2**(128-bits) - 3.
n_addresses = end.value - start.value + 1
self.assertEqual(expected=pow(2, 128 - bits)-3,
observed=n_addresses) | apache-2.0 | Python |
927b851b061c4fcbb6a1798bec27c38f6fc14550 | Add todo | robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph | client/update_grub.py | client/update_grub.py | #!/usr/bin/python3
import os
import re
import string
import subprocess
import tempfile
class GrubUpdater(object):
    """Regenerates grub.cfg with one loopback menu entry per ISO image.

    Scans `image_dir` (which must live under `boot_dir`) for *.iso files
    and writes a grub config that boots each image via loopback, with the
    default entry taken from the `current` symlink in the image directory.
    """

    # Matches the "Volume id:" line of `isoinfo -d` output (bytes, per-line).
    _VOLUME_ID_REGEX = re.compile(b'^Volume id: (?P<volume_id>.+)$', re.MULTILINE)
    # One menu hotkey per image, assigned in sorted listing order.
    _HOTKEYS = string.digits + string.ascii_letters

    def __init__(self, image_dir, boot_dir):
        self._image_dir = image_dir
        self._boot_dir = boot_dir
        assert self._image_dir.startswith(self._boot_dir)
        # Image directory path as seen from the boot filesystem root.
        self._image_path = '/' + os.path.relpath(self._image_dir, self._boot_dir)

    def _GetVolumeID(self, path):
        """Return the ISO9660 volume id of the image at `path` (via isoinfo)."""
        isoinfo = subprocess.check_output([
            'isoinfo',
            '-d',
            '-i', path,
        ])
        match = self._VOLUME_ID_REGEX.search(isoinfo)
        return match.group('volume_id').decode('ascii')

    def Update(self):
        """Atomically rewrite <boot_dir>/grub/grub.cfg.

        The config is written to a temp file in the same directory and
        renamed into place; on any failure the temp file is removed
        (resolves the previous "clean up if we fail here" TODO).
        """
        grub_dir = os.path.join(self._boot_dir, 'grub')
        fh = tempfile.NamedTemporaryFile('w', dir=grub_dir, delete=False)
        try:
            current = os.readlink(os.path.join(self._image_dir, 'current'))
            fh.write("""
set timeout=5
set default=%(default_image_filename)s
""" % {
                'default_image_filename': os.path.basename(current),
            })
            files = [f for f in os.listdir(self._image_dir)
                     if f.endswith('.iso')]
            # Newest images first (reverse-sorted filenames).
            for i, filename in enumerate(sorted(files, reverse=True)):
                fh.write("""
menuentry "%(image_filename)s (%(volume_id)s)" --hotkey=%(hotkey)s {
search --no-floppy --file --set=root %(image_path)s/%(image_filename)s
iso_path="%(image_path)s/%(image_filename)s"
export iso_path
loopback loop "%(image_path)s/%(image_filename)s"
set root=(loop)
configfile /boot/grub/loopback.cfg
}
""" % {
                    'image_filename': filename,
                    'image_path': self._image_path,
                    'hotkey': self._HOTKEYS[i],
                    'volume_id': self._GetVolumeID(
                        os.path.join(self._image_dir, filename)),
                })
            fh.flush()
            fh.close()
            os.rename(fh.name, os.path.join(grub_dir, 'grub.cfg'))
        except BaseException:
            # Best-effort removal of the partially written temp file.
            fh.close()
            try:
                os.unlink(fh.name)
            except OSError:
                pass
            raise
| #!/usr/bin/python3
import os
import re
import string
import subprocess
import tempfile
class GrubUpdater(object):
    """Regenerates grub.cfg with one loopback menu entry per ISO image.

    Scans `image_dir` (which must live under `boot_dir`) for *.iso files
    and writes a grub config that boots each image via loopback, with the
    default entry taken from the `current` symlink in the image directory.
    """

    # Matches the "Volume id:" line of `isoinfo -d` output (bytes, per-line).
    _VOLUME_ID_REGEX = re.compile(b'^Volume id: (?P<volume_id>.+)$', re.MULTILINE)
    # One menu hotkey per image, assigned in sorted listing order.
    _HOTKEYS = string.digits + string.ascii_letters

    def __init__(self, image_dir, boot_dir):
        self._image_dir = image_dir
        self._boot_dir = boot_dir
        assert self._image_dir.startswith(self._boot_dir)
        # Image directory path as seen from the boot filesystem root.
        self._image_path = '/' + os.path.relpath(self._image_dir, self._boot_dir)

    def _GetVolumeID(self, path):
        """Return the ISO9660 volume id of the image at `path` (via isoinfo)."""
        isoinfo = subprocess.check_output([
            'isoinfo',
            '-d',
            '-i', path,
        ])
        match = self._VOLUME_ID_REGEX.search(isoinfo)
        return match.group('volume_id').decode('ascii')

    def Update(self):
        """Rewrite <boot_dir>/grub/grub.cfg via temp file + rename.

        NOTE(review): the temp file (delete=False) is leaked if any write
        below raises -- addressed by a TODO/cleanup in the newer revision.
        """
        grub_dir = os.path.join(self._boot_dir, 'grub')
        with tempfile.NamedTemporaryFile('w', dir=grub_dir, delete=False) as fh:
            current = os.readlink(os.path.join(self._image_dir, 'current'))
            fh.write("""
set timeout=5
set default=%(default_image_filename)s
""" % {
                'default_image_filename': os.path.basename(current),
            })
            files = []
            for filename in os.listdir(self._image_dir):
                if not filename.endswith('.iso'):
                    continue
                files.append(filename)
            # Newest images first (reverse-sorted filenames).
            for i, filename in enumerate(sorted(files, reverse=True)):
                fh.write("""
menuentry "%(image_filename)s (%(volume_id)s)" --hotkey=%(hotkey)s {
search --no-floppy --file --set=root %(image_path)s/%(image_filename)s
iso_path="%(image_path)s/%(image_filename)s"
export iso_path
loopback loop "%(image_path)s/%(image_filename)s"
set root=(loop)
configfile /boot/grub/loopback.cfg
}
""" % {
                    'image_filename': filename,
                    'image_path': self._image_path,
                    'hotkey': self._HOTKEYS[i],
                    'volume_id': self._GetVolumeID(os.path.join(self._image_dir, filename)),
                })
            fh.flush()
            os.rename(fh.name, os.path.join(grub_dir, 'grub.cfg'))
| apache-2.0 | Python |
73e36f40213a6737da015057568bfd5589c9673e | Refactor day04 to allow arbitrary hash starts | mpirnat/adventofcode | day04/day04.py | day04/day04.py | #!/usr/bin/env python
"""
Solve day 4 of Advent of Code.
http://adventofcode.com/day/4
"""
import hashlib
def find_integer(key, hash_start='0'*5):
    """
    Find the smallest integer i such that md5(key + str(i)) produces a
    hex digest beginning with `hash_start` (default: five zeroes).

    Mirrors the original edge case: an empty `hash_start` matches before
    any hashing happens, so 0 is returned.
    """
    if ''.startswith(hash_start):
        return 0
    candidate = 0
    while True:
        candidate += 1
        digest = hashlib.md5((key + str(candidate)).encode('ascii')).hexdigest()
        if digest.startswith(hash_start):
            return candidate
if __name__ == '__main__':
    # Puzzle input for adventofcode.com day 4.
    key = 'yzbqklnj'
    print('Part 1:', find_integer(key))
    # Part 2 requires a six-zero prefix (a much longer search).
    print('Part 2:', find_integer(key, hash_start='0'*6))
| #!/usr/bin/env python
"""
Solve day 4 of Advent of Code.
http://adventofcode.com/day/4
"""
import hashlib
def find_integer(key, zeroes=5):
    """
    Find the smallest integer i such that md5(key + str(i)) produces a
    hex digest beginning with `zeroes` zero characters (default 5).

    With zeroes=0 the empty prefix matches immediately and 0 is returned,
    matching the original behaviour.
    """
    prefix = '0' * zeroes
    if not prefix:
        return 0
    candidate = 0
    while True:
        candidate += 1
        digest = hashlib.md5((key + str(candidate)).encode('ascii')).hexdigest()
        if digest.startswith(prefix):
            return candidate
if __name__ == '__main__':
    # Puzzle input for adventofcode.com day 4.
    key = 'yzbqklnj'
    print('Part 1:', find_integer(key))
    # Part 2 requires a six-zero prefix (a much longer search).
    print('Part 2:', find_integer(key, zeroes=6))
| mit | Python |
27165c0dfcf34a794c168ae29e371ac53843c6ec | disable Selenium tests due to reliably problem with Firefox 17 | MiltosD/CEFELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,zeehio/META-SHARE,JuliBakagianni/META-SHARE,MiltosD/CEFELRC,MiltosD/CEFELRC,zeehio/META-SHARE,JuliBakagianni/META-SHARE,JuliBakagianni/META-SHARE,JuliBakagianni/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE,MiltosD/CEFELRC,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,zeehio/META-SHARE,MiltosD/CEF-ELRC,MiltosD/CEFELRC,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,MiltosD/CEF-ELRC,MiltosD/CEF-ELRC,MiltosD/CEF-ELRC,JuliBakagianni/CEF-ELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC | metashare/test_runner.py | metashare/test_runner.py | import logging
from django_selenium.selenium_runner import SeleniumTestRunner
from django.core.management import call_command
from metashare import settings
from metashare.haystack_routers import MetashareRouter
# set up logging support
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(settings.LOG_HANDLER)
def _run_custom_test_db_setup():
    """
    Runs the custom test DB setup logic which is required in META-SHARE.

    Called from setup_databases() before Django creates the test DBs.
    """
    # from now on, redirect any search index access to the test index
    MetashareRouter.in_test_mode = True
    # clear the test index
    call_command('clear_index', interactive=False,
                 using=settings.TEST_MODE_NAME)
class MetashareTestRunner(SeleniumTestRunner):
    """
    A custom Django test runner which inherits from `SeleniumTestRunner`
    which in turn inherits from `DjangoTestSuiteRunner`.
    The added value of this test runner on top of the default functionality
    provided by Django/Selenium is that the runner automatically sets up
    Haystack so that it uses a dedicated search backend for testing.
    """
    def setup_databases(self, **kwargs):
        # Redirect Haystack to the test index before any DB/test setup.
        _run_custom_test_db_setup()
        # run the normal Django test setup
        return super(MetashareTestRunner, self).setup_databases(**kwargs)
# if we're in a Jenkins test environment, then we also create a test runner for
# Jenkins
try:
    from django_jenkins.runner import CITestSuiteRunner

    class MetashareJenkinsTestRunner(CITestSuiteRunner):
        # TODO: reenable Selenium tests again as soon as there is a new Selenium
        # release which fixes the randomly occurring bug at
        # <http://code.google.com/p/selenium/issues/detail?id=4814>:
        # from django_selenium.jenkins_runner import JenkinsTestRunner
        # class MetashareJenkinsTestRunner(JenkinsTestRunner):
        """
        A custom Django Jenkins test runner which currently inherits
        directly from django_jenkins' `CITestSuiteRunner` (Selenium's
        `JenkinsTestRunner` is temporarily disabled, see TODO above).
        The added value of this test runner on top of the default functionality
        provided by Django/Selenium Jenkins is that the runner automatically
        sets up Haystack so that it uses a dedicated search backend for testing.
        """
        def setup_databases(self, **kwargs):
            # Redirect Haystack to the test index before any DB/test setup.
            _run_custom_test_db_setup()
            # run the normal Django test setup
            return super(MetashareJenkinsTestRunner, self).setup_databases(
                **kwargs)
except ImportError:
    # django_jenkins is not installed; skip the Jenkins-specific runner.
    pass
| import logging
from django_selenium.selenium_runner import SeleniumTestRunner
from django.core.management import call_command
from metashare import settings
from metashare.haystack_routers import MetashareRouter
# set up logging support
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(settings.LOG_HANDLER)
def _run_custom_test_db_setup():
    """
    Runs the custom test DB setup logic which is required in META-SHARE.

    Called from setup_databases() before Django creates the test DBs.
    """
    # from now on, redirect any search index access to the test index
    MetashareRouter.in_test_mode = True
    # clear the test index
    call_command('clear_index', interactive=False,
                 using=settings.TEST_MODE_NAME)
class MetashareTestRunner(SeleniumTestRunner):
    """
    A custom Django test runner which inherits from `SeleniumTestRunner`
    which in turn inherits from `DjangoTestSuiteRunner`.
    The added value of this test runner on top of the default functionality
    provided by Django/Selenium is that the runner automatically sets up
    Haystack so that it uses a dedicated search backend for testing.
    """
    def setup_databases(self, **kwargs):
        # Redirect Haystack to the test index before any DB/test setup.
        _run_custom_test_db_setup()
        # run the normal Django test setup
        return super(MetashareTestRunner, self).setup_databases(**kwargs)
# if we're in a Jenkins test environment, then we also create a test runner for
# Jenkins
try:
    from django_selenium.jenkins_runner import JenkinsTestRunner

    class MetashareJenkinsTestRunner(JenkinsTestRunner):
        """
        A custom Django Jenkins test runner which inherits from Selenium's
        `JenkinsTestRunner` which in turn inherits from `CITestSuiteRunner`.
        The added value of this test runner on top of the default functionality
        provided by Django/Selenium Jenkins is that the runner automatically
        sets up Haystack so that it uses a dedicated search backend for testing.
        """
        def setup_databases(self, **kwargs):
            # Redirect Haystack to the test index before any DB/test setup.
            _run_custom_test_db_setup()
            # run the normal Django test setup
            return super(MetashareJenkinsTestRunner, self).setup_databases(
                **kwargs)
except ImportError:
    # django_selenium's Jenkins runner is unavailable; skip this runner.
    pass
| bsd-3-clause | Python |
eb2746a808efa6317e2d65a4605979ddc7507e6e | fix default connection string | nrempel/rucksack-api | config/base.py | config/base.py | # -*- coding: utf-8 -*-
import os
# Absolute directory containing this config module.
basedir = os.path.abspath(os.path.dirname(__file__))

# Database connection string; the DATABASE_URL env var overrides the
# local Postgres default.
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'postgres://localhost/')
# Directory holding the sqlalchemy-migrate repository.
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db')
| # -*- coding: utf-8 -*-
import os
# Absolute directory containing this config module.
basedir = os.path.abspath(os.path.dirname(__file__))

# NOTE(review): 'localhost' is not a scheme-qualified SQLAlchemy URL;
# presumably this default was meant to be a full connection string
# (changed to 'postgres://localhost/' in a later revision).
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'localhost')
# Directory holding the sqlalchemy-migrate repository.
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db')
| mit | Python |
8633d0e786d50942bba308a66a5f85793d935577 | Allow extra ignore strings to be passed to pyflakes testing. | damnfine/mezzanine,viaregio/mezzanine,stephenmcd/mezzanine,frankier/mezzanine,SoLoHiC/mezzanine,scarcry/snm-mezzanine,AlexHill/mezzanine,dsanders11/mezzanine,dsanders11/mezzanine,geodesign/mezzanine,sjdines/mezzanine,saintbird/mezzanine,dovydas/mezzanine,dsanders11/mezzanine,geodesign/mezzanine,gradel/mezzanine,mush42/mezzanine,christianwgd/mezzanine,ZeroXn/mezzanine,dekomote/mezzanine-modeltranslation-backport,jjz/mezzanine,jerivas/mezzanine,Cajoline/mezzanine,scarcry/snm-mezzanine,theclanks/mezzanine,webounty/mezzanine,Skytorn86/mezzanine,adrian-the-git/mezzanine,AlexHill/mezzanine,biomassives/mezzanine,sjdines/mezzanine,frankchin/mezzanine,dustinrb/mezzanine,gbosh/mezzanine,wyzex/mezzanine,dovydas/mezzanine,PegasusWang/mezzanine,vladir/mezzanine,webounty/mezzanine,dekomote/mezzanine-modeltranslation-backport,gradel/mezzanine,fusionbox/mezzanine,jjz/mezzanine,jerivas/mezzanine,Skytorn86/mezzanine,frankier/mezzanine,Cicero-Zhao/mezzanine,spookylukey/mezzanine,jjz/mezzanine,readevalprint/mezzanine,Cicero-Zhao/mezzanine,Kniyl/mezzanine,industrydive/mezzanine,orlenko/sfpirg,spookylukey/mezzanine,stephenmcd/mezzanine,damnfine/mezzanine,nikolas/mezzanine,viaregio/mezzanine,wyzex/mezzanine,agepoly/mezzanine,dustinrb/mezzanine,Skytorn86/mezzanine,wrwrwr/mezzanine,mush42/mezzanine,douglaskastle/mezzanine,spookylukey/mezzanine,christianwgd/mezzanine,wbtuomela/mezzanine,tuxinhang1989/mezzanine,dekomote/mezzanine-modeltranslation-backport,orlenko/sfpirg,promil23/mezzanine,viaregio/mezzanine,guibernardino/mezzanine,Cajoline/mezzanine,nikolas/mezzanine,dustinrb/mezzanine,biomassives/mezzanine,sjuxax/mezzanine,webounty/mezzanine,joshcartme/mezzanine,eino-makitalo/mezzanine,industrydive/mezzanine,promil23/mezzanine,wbtuomela/mezzanine,vladir/mezzanine,nikolas/mezzanine,ryneeverett/mezzanine,douglaskastle/mezzanine,dovydas/mezzanine,wbtuomela/mezzanine,Kniyl/mezzanine,tuxinhan
g1989/mezzanine,emile2016/mezzanine,stbarnabas/mezzanine,sjdines/mezzanine,Cajoline/mezzanine,frankier/mezzanine,cccs-web/mezzanine,orlenko/plei,fusionbox/mezzanine,theclanks/mezzanine,molokov/mezzanine,wrwrwr/mezzanine,agepoly/mezzanine,cccs-web/mezzanine,ZeroXn/mezzanine,vladir/mezzanine,molokov/mezzanine,stephenmcd/mezzanine,adrian-the-git/mezzanine,saintbird/mezzanine,Kniyl/mezzanine,ryneeverett/mezzanine,eino-makitalo/mezzanine,SoLoHiC/mezzanine,ryneeverett/mezzanine,joshcartme/mezzanine,gradel/mezzanine,gbosh/mezzanine,ZeroXn/mezzanine,orlenko/sfpirg,orlenko/plei,PegasusWang/mezzanine,tuxinhang1989/mezzanine,molokov/mezzanine,promil23/mezzanine,geodesign/mezzanine,douglaskastle/mezzanine,emile2016/mezzanine,agepoly/mezzanine,frankchin/mezzanine,joshcartme/mezzanine,biomassives/mezzanine,guibernardino/mezzanine,orlenko/plei,sjuxax/mezzanine,eino-makitalo/mezzanine,PegasusWang/mezzanine,sjuxax/mezzanine,gbosh/mezzanine,emile2016/mezzanine,batpad/mezzanine,christianwgd/mezzanine,saintbird/mezzanine,frankchin/mezzanine,wyzex/mezzanine,scarcry/snm-mezzanine,damnfine/mezzanine,industrydive/mezzanine,SoLoHiC/mezzanine,adrian-the-git/mezzanine,jerivas/mezzanine,readevalprint/mezzanine,mush42/mezzanine,theclanks/mezzanine,batpad/mezzanine,readevalprint/mezzanine,stbarnabas/mezzanine | mezzanine/utils/tests.py | mezzanine/utils/tests.py |
from __future__ import with_statement
from compiler import parse
import os
from mezzanine.utils.path import path_for_import
# Ignore these warnings in pyflakes.
PYFLAKES_IGNORE = (
"import *' used",
"'memcache' imported but unused",
"'cmemcache' imported but unused",
)
def run_pyflakes_for_package(package_name, extra_ignore=None):
"""
If pyflakes is installed, run it across the given package name
returning any warnings found.
"""
ignore_strings = PYFLAKES_IGNORE
if extra_ignore:
ignore_strings += extra_ignore
try:
from pyflakes.checker import Checker
except ImportError:
return []
warnings = []
for (root, dirs, files) in os.walk(path_for_import(package_name)):
for f in files:
# Ignore migrations.
directory = root.split(os.sep)[-1]
if not f.endswith(".py") or directory == "migrations":
continue
path = os.path.join(root, f)
with open(path, "U") as source_file:
source = source_file.read()
try:
compile(source, f, "exec")
except (SyntaxError, IndentationError), value:
info = (path, value.lineno, value.args[0])
warnings.append("Invalid syntax in %s:%d: %s" % info)
result = Checker(parse(source), path)
for warning in result.messages:
message = unicode(warning)
for ignore in ignore_strings:
if ignore in message:
break
else:
warnings.append(message)
return warnings
|
from __future__ import with_statement
from compiler import parse
import os
from mezzanine.utils.path import path_for_import
# Ignore these warnings in pyflakes.
PYFLAKES_IGNORE = (
"'from django.conf.urls.defaults import *' used",
"'from local_settings import *' used",
"'memcache' imported but unused",
"'cmemcache' imported but unused",
)
def run_pyflakes_for_package(package_name):
    """
    If pyflakes is installed, run it across the given package name
    returning any warnings found.
    """
    try:
        from pyflakes.checker import Checker
    except ImportError:
        # pyflakes is an optional dev dependency; silently skip the check.
        return []
    warnings = []
    for (root, dirs, files) in os.walk(path_for_import(package_name)):
        for f in files:
            # Ignore migrations.
            directory = root.split(os.sep)[-1]
            if not f.endswith(".py") or directory == "migrations":
                continue
            path = os.path.join(root, f)
            with open(path, "U") as source_file:
                source = source_file.read()
            try:
                compile(source, f, "exec")
            except (SyntaxError, IndentationError), value:
                info = (path, value.lineno, value.args[0])
                warnings.append("Invalid syntax in %s:%d: %s" % info)
            # NOTE(review): a file that failed to compile still falls
            # through to parse() below, which re-raises the syntax error
            # uncaught -- a `continue` in the except block would fix it.
            result = Checker(parse(source), path)
            for warning in result.messages:
                message = unicode(warning)
                for ignore in PYFLAKES_IGNORE:
                    if ignore in message:
                        break
                else:
                    warnings.append(message)
    return warnings
| bsd-2-clause | Python |
125a8b81b4b0c580862b91cbf812b723c5c30afd | fix GAE xmpp forwarding | melmothx/jsonbot,melmothx/jsonbot,melmothx/jsonbot | gozerlib/gae/xmpp/bot.py | gozerlib/gae/xmpp/bot.py | # gozerlib/gae/xmpp/bot.py
#
#
""" XMPP bot. """
## gozerlib imports
from gozerlib.botbase import BotBase
from gozerlib.socklib.xmpp.presence import Presence
from gozerlib.utils.generic import strippedtxt
## basic imports
import types
import logging
## classes
class XMPPBot(BotBase):

    """ XMPP bot running on Google App Engine. """

    def __init__(self, cfg=None, users=None, plugs=None, botname="gae-xmpp", *args, **kwargs):
        BotBase.__init__(self, cfg, users, plugs, botname, *args, **kwargs)
        self.jid = "jsonbot@appspot.com"
        if self.cfg:
            self.cfg['type'] = 'xmpp'
        self.isgae = True
        self.type = "xmpp"

    def out(self, jids, txt, how="msg", event=None, origin=None, groupchat=None, *args, **kwargs):
        """ output xmpp message and echo it to the output monitor. """
        # accept a single jid or a list of jids
        if type(jids) != types.ListType: jids = [jids, ]
        logging.warn("%s - OUT - %s" % (self.name, jids))
        self.outnocb(jids, txt)
        for jid in jids:
            self.outmonitor(self.nick, jid, txt)

    def outnocb(self, jids, txt, from_jid=None, message_type=None, raw_xml=False, event=None, origin=None, groupchat=None, *args, **kwargs):
        """ output xmpp message without triggering the output monitor. """
        from google.appengine.api import xmpp
        if not message_type: message_type = xmpp.MESSAGE_TYPE_CHAT
        if type(jids) != types.ListType: jids = [jids, ]
        txt = self.normalize(txt)
        xmpp.send_message(jids, txt, from_jid=from_jid, message_type=message_type, raw_xml=raw_xml)

    def invite(self, jid):
        """ send an XMPP chat invite to jid. """
        from google.appengine.api import xmpp
        xmpp.send_invite(jid)

    def normalize(self, what):
        """ strip control codes and bold/italic markup from outgoing text. """
        what = strippedtxt(unicode(what))
        # BUG FIX: each tag was previously replaced twice (8 calls for 4
        # tags); a single pass per tag yields the same result.
        for tag in ("<b>", "</b>", "<i>", "</i>"):
            what = what.replace(tag, "")
        return what
| # gozerlib/gae/xmpp/bot.py
#
#
""" XMPP bot. """
## gozerlib imports
from gozerlib.botbase import BotBase
from gozerlib.socklib.xmpp.presence import Presence
## basic imports
import types
import logging
## classes
class XMPPBot(BotBase):

    """ XMPPBot just inherits from BotBase for now. """

    def __init__(self, cfg=None, users=None, plugs=None, botname="gae-xmpp", *args, **kwargs):
        BotBase.__init__(self, cfg, users, plugs, botname, *args, **kwargs)
        self.jid = "jsonbot@appspot.com"
        if self.cfg:
            self.cfg['type'] = 'xmpp'
        self.isgae = True
        self.type = "xmpp"

    def out(self, jids, txt, how="msg", event=None, origin=None, groupchat=None, *args, **kwargs):
        """ output xmpp message. """
        # NOTE(review): jids is iterated below without first being
        # normalized to a list, so a single jid string would be iterated
        # character by character (fixed in a later revision).
        self.outnocb(jids, txt)
        for jid in jids:
            self.outmonitor(self.nick, jid, txt)

    def outnocb(self, jids, txt, from_jid=None, message_type=None, raw_xml=False, event=None, origin=None, groupchat=None, *args, **kwargs):
        """ output xmpp message. """
        from google.appengine.api import xmpp
        if not message_type: message_type = xmpp.MESSAGE_TYPE_CHAT
        # only plain str is listified; unicode jids would slip through --
        # a later revision checks against ListType instead.
        if type(jids) == types.StringType: jids = [jids, ]
        xmpp.send_message(jids, txt, from_jid=from_jid, message_type=message_type, raw_xml=raw_xml)

    def invite(self, jid):
        """ send an XMPP chat invite to jid. """
        from google.appengine.api import xmpp
        xmpp.send_invite(jid)

    def normalize(self, what):
        """ strip markup from outgoing text.

        NOTE(review): strippedtxt is not imported in this revision of the
        module, so this call raises NameError; each tag below is also
        replaced twice.
        """
        #what = re.sub("\s+", " ", unicode(what))
        what = strippedtxt(unicode(what))
        what = what.replace("<b>", "")
        what = what.replace("</b>", "")
        what = what.replace("<b>", "")
        what = what.replace("</b>", "")
        what = what.replace("<i>", "")
        what = what.replace("</i>", "")
        what = what.replace("<i>", "")
        what = what.replace("</i>", "")
        return what
| mit | Python |
3d59bd8b328f2c25ba6f6932ad00dd9abd4038e9 | Increment to version 0.4.3 | dgwartney-io/import-io-api-python,dgwartney-io/import-io-api-python | importio2/version.py | importio2/version.py | #
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Package version (PEP 440 pre-release); bump on each release.
__version__ = '0.4.3'
| #
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Package version (PEP 440 pre-release); bump on each release.
__version__ = '0.4.2'
| apache-2.0 | Python |
954f8c2bc62c614724f53dd7d0fdee3a368fc208 | Update fastq-to-fasta.py | ged-lab/khmer,F1000Research/khmer,souravsingh/khmer,souravsingh/khmer,souravsingh/khmer,F1000Research/khmer,F1000Research/khmer,ged-lab/khmer,ged-lab/khmer | scripts/fastq-to-fasta.py | scripts/fastq-to-fasta.py | #! /usr/bin/env python2
#
# This file is part of khmer, http://github.com/ged-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2014. It is licensed under
# the three-clause BSD license; see doc/LICENSE.txt.
# Contact: khmer-project@idyll.org
#
# pylint: disable=invalid-name,missing-docstring
"""
Convert FASTQ files to FASTA format.
% python scripts/fastq-to-fasta.py [ -n -o ] <fastq_name>
Use '-h' for parameter help.
"""
import sys
import argparse
import screed
def get_parser():
    """Build the command-line parser for the FASTQ-to-FASTA converter."""
    parser = argparse.ArgumentParser(
        description='Converts FASTQ format (.fq) files to FASTA format (.fa).',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Required positional: the FASTQ file to convert.
    parser.add_argument(
        'input_sequence',
        help='The name of the input FASTQ sequence file.')
    # Optional output target; defaults to standard output.
    parser.add_argument(
        '-o', '--output', metavar="filename",
        type=argparse.FileType('w'), default=sys.stdout,
        help='The name of the output FASTA sequence file.')
    # Flag: keep reads containing ambiguous 'N' bases instead of dropping them.
    parser.add_argument(
        '-n', '--n_keep', action='store_true', default=False,
        help="Option to keep reads containing 'N's in "
             "input_sequence file. Default is to drop reads")
    return parser
def main():
    """Stream records from the input FASTQ file, writing FASTA to output.

    Reads containing 'N' are dropped (and counted) unless -n/--n_keep is
    given. Progress and a summary are reported on stderr.
    """
    args = get_parser().parse_args()
    print >> sys.stderr, ('fastq from ', args.input_sequence)
    n_count = 0
    for n, record in enumerate(screed.open(args.input_sequence)):
        if n % 10000 == 0:
            # periodic progress indicator
            print>>sys.stderr, '...', n
        sequence = record['sequence']
        name = record['name']
        if 'N' in sequence:
            if not args.n_keep:
                n_count += 1
                continue
        # emit one FASTA record: header line then the sequence
        args.output.write('>' + name + '\n')
        args.output.write(sequence + '\n')
    print >> sys.stderr, '\n' + 'lines from ' + args.input_sequence
    if not args.n_keep:
        print >> sys.stderr, str(n_count) + ' lines dropped.'
    else:
        print >> sys.stderr, 'No lines dropped from file.'
    print >> sys.stderr, 'Wrote output to', args.output


if __name__ == '__main__':
    main()
| #! /usr/bin/env python2
#
# This file is part of khmer, http://github.com/ged-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2014. It is licensed under
# the three-clause BSD license; see doc/LICENSE.txt.
# Contact: khmer-project@idyll.org
#
# pylint: disable=invalid-name,missing-docstring
"""
Convert FASTQ files to FASTA format.
% python scripts/fastq-to-fasta.py [ -n -o ] <fastq_name>
Use '-h' for parameter help.
"""
import sys
import argparse
import screed
def get_parser():
    """Build the command-line parser for the FASTQ-to-FASTA converter."""
    parser = argparse.ArgumentParser(
        description='Converts FASTQ format (.fq) files to FASTA format (.fa).',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('input_sequence', help='The name of the input'
                        ' FASTQ sequence file.')
    parser.add_argument('-o', '--output', metavar="filename",
                        help='The name of the output'
                        ' FASTA sequence file.',
                        type=argparse.FileType('w'),
                        default=sys.stdout)
    # BUG FIX: this flag *keeps* reads containing 'N' (main() drops such reads
    # only when n_keep is False); the previous help text said "drop", which
    # described the opposite of the flag's effect.
    parser.add_argument('-n', '--n_keep', default=False, action='store_true',
                        help='Option to keep reads containing \'N\'s in ' +
                        'input_sequence file. Default is to drop reads')
    return parser
def main():
    """Convert the FASTQ file named on the command line to FASTA on the output stream."""
    args = get_parser().parse_args()
    # Progress and summary messages go to stderr; FASTA records go to args.output.
    print >> sys.stderr, ('fastq from ', args.input_sequence)
    # Count of reads dropped because they contained an 'N' base.
    n_count = 0
    for n, record in enumerate(screed.open(args.input_sequence)):
        if n % 10000 == 0:
            # Progress heartbeat every 10k records.
            print>>sys.stderr, '...', n
        sequence = record['sequence']
        name = record['name']
        if 'N' in sequence:
            if not args.n_keep:
                # Default behaviour: drop reads with ambiguous bases.
                n_count += 1
                continue
        # Two-line FASTA record: ">name" then the sequence.
        args.output.write('>' + name + '\n')
        args.output.write(sequence + '\n')
    print >> sys.stderr, '\n' + 'lines from ' + args.input_sequence
    if not args.n_keep:
        print >> sys.stderr, str(n_count) + ' lines dropped.'
    else:
        print >> sys.stderr, 'No lines dropped from file.'
    # NOTE(review): prints the file object's repr rather than a filename.
    print >> sys.stderr, 'Wrote output to', args.output
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
62ea8f97b5ccffabb70c8c931aa83d0773d74cf8 | Test statistics print formatting | samuelsh/pyFstress,samuelsh/pyFstress | server/collector.py | server/collector.py | """
Collector service provides methods for collection of test runtime results results and storing results
2017 - samuels(c)
"""
import time
from logger import server_logger
class Collector:
    """Periodically logs runtime statistics of a running test until stopped."""

    def __init__(self, test_stats, stop_event):
        # test_stats: shared mapping {'total': n, 'success': {...}, 'failed': {...}};
        # stop_event: Event-like flag that terminates the logging loop when set.
        self.logger = server_logger.StatsLogger('__Collector__').logger
        self.test_stats = test_stats
        self.stop_event = stop_event

    def run(self):
        """Emit a right-aligned stats snapshot every 10 seconds until stopped."""
        time.sleep(10)
        log = self.logger.info
        while not self.stop_event.is_set():
            stats = self.test_stats
            log("{0:^80}".format("#### Test Runtime Stats ####"))
            log("{0:>40}".format("Total file operations executed {0}".format(stats['total'])))
            log("{0:>40}".format("Total file operations succeeded {0}".format(stats['success']['total'])))
            log("{0:>40}".format("Total file operations failed {0}".format(stats['failed']['total'])))
            # Per-operation breakdown for each of the two result sections.
            for header, section in (("=== Successful operations stats ===", 'success'),
                                    ("=== Failed operations stats ===", 'failed')):
                log("{0:>40}".format(header))
                for op_name, count in stats[section].items():
                    log("{0:>40}".format("{0}: {1}".format(op_name, count)))
            time.sleep(10)
| """
Collector service provides methods for collection of test runtime results results and storing results
2017 - samuels(c)
"""
import time
from logger import server_logger
class Collector:
    """Periodically logs runtime statistics of a running test until stopped."""
    def __init__(self, test_stats, stop_event):
        # test_stats: shared mapping {'total': n, 'success': {...}, 'failed': {...}};
        # stop_event: Event-like flag that terminates the logging loop when set.
        self.logger = server_logger.StatsLogger('__Collector__').logger
        self.test_stats = test_stats
        self.stop_event = stop_event
    def run(self):
        # Emit a centred (width 80) stats snapshot every 10 seconds until stopped.
        time.sleep(10)
        while not self.stop_event.is_set():
            self.logger.info("{0:^80}".format("#### Test Runtime Stats ####"))
            self.logger.info("{0:^80}".format("Total file operations executed {0}".format(self.test_stats['total'])))
            self.logger.info("{0:^80}".format("Total file operations succeeded {0}".format(self.test_stats['success']['total'])))
            self.logger.info("{0:^80}".format("Total file operations failed {0}".format(self.test_stats['failed']['total'])))
            # Per-operation breakdown of successful calls.
            self.logger.info("{0:^80}".format("=== Successful operations stats ==="))
            for k, v in self.test_stats['success'].items():
                self.logger.info("{0:^80}".format("{0} : {1}".format(k, v)))
            # Per-operation breakdown of failed calls.
            self.logger.info("{0:^80}".format("=== Failed operations stats ==="))
            for k, v in self.test_stats['failed'].items():
                self.logger.info("{0:^80}".format("{0} : {1}".format(k, v)))
            time.sleep(10)
| mit | Python |
7c7ffc87887d921db970255de9a5b4052600566a | Add a test for an obsid with an uncommanded slot | sot/mica,sot/mica | mica/vv/tests/test_vv.py | mica/vv/tests/test_vv.py | import os
import numpy as np
from .. import vv
from .. import process
from ... import common
def test_get_vv_dir():
    # V&V products live under MICA_ARCHIVE/vv/<obsid // 1000>/<obsid>_v<version>.
    obsdir = vv.get_vv_dir(16504)
    assert obsdir == os.path.abspath(os.path.join(common.MICA_ARCHIVE, 'vv/16/16504_v01'))
def test_get_vv_files():
    # Last file (sorted) for this obsid is the pickled report.
    obsfiles = vv.get_vv_files(16504)
    assert sorted(obsfiles)[-1] == os.path.abspath(os.path.join(common.MICA_ARCHIVE,
                                                                'vv/16/16504_v01/vv_report.pkl'))
def test_get_rms_data():
    # Expected values here are tied to the contents of the local mica archive.
    data = vv.get_rms_data()
    dz_rms = data[(data['obsid'] == 16505) & (data['slot'] == 4) & (data['isdefault'] == 1)]['dz_rms'][0]
    assert np.allclose(dz_rms, 0.047886185719034906)
def test_get_vv():
    obs = vv.get_vv(16504)
    assert np.allclose(obs['slots']['7']['dz_rms'], 0.11610256063309182)
def test_run_vv():
    obi = process.get_arch_vv(2121)
    assert np.allclose(obi.info()['sim']['max_d_dy'], 0.002197265625)
def test_run_vv_omitted_slot():
    # This test run on obsid with omitted slot is just testing for unhandled exceptions
    process.get_arch_vv(19991, version='last')
def test_run_vv_multi_interval():
    # This test run on obsid with multiple intervals is just testing for unhandled exceptions
    process.get_arch_vv(18980, version='last')
def test_run_vv_omitted_fid():
    # Smoke test: obsid observed with an omitted fid light.
    process.get_arch_vv(18978, version='last')
def test_run_vv_7_track_slots():
    # Run on an obsid with only 7 slots *commanded* during Kalman
    process.get_arch_vv(19847, version='last')
| import os
import numpy as np
from .. import vv
from .. import process
from ... import common
def test_get_vv_dir():
    # V&V products live under MICA_ARCHIVE/vv/<obsid // 1000>/<obsid>_v<version>.
    obsdir = vv.get_vv_dir(16504)
    assert obsdir == os.path.abspath(os.path.join(common.MICA_ARCHIVE, 'vv/16/16504_v01'))
def test_get_vv_files():
    # Last file (sorted) for this obsid is the pickled report.
    obsfiles = vv.get_vv_files(16504)
    assert sorted(obsfiles)[-1] == os.path.abspath(os.path.join(common.MICA_ARCHIVE,
                                                                'vv/16/16504_v01/vv_report.pkl'))
def test_get_rms_data():
    # Expected values here are tied to the contents of the local mica archive.
    data = vv.get_rms_data()
    dz_rms = data[(data['obsid'] == 16505) & (data['slot'] == 4) & (data['isdefault'] == 1)]['dz_rms'][0]
    assert np.allclose(dz_rms, 0.047886185719034906)
def test_get_vv():
    obs = vv.get_vv(16504)
    assert np.allclose(obs['slots']['7']['dz_rms'], 0.11610256063309182)
def test_run_vv():
    obi = process.get_arch_vv(2121)
    assert np.allclose(obi.info()['sim']['max_d_dy'], 0.002197265625)
def test_run_vv_omitted_slot():
    # This test run on obsid with omitted slot is just testing for unhandled exceptions
    process.get_arch_vv(19991, version='last')
def test_run_vv_multi_interval():
    # This test run on obsid with multiple intervals is just testing for unhandled exceptions
    process.get_arch_vv(18980, version='last')
def test_run_vv_omitted_fid():
    # Smoke test: obsid observed with an omitted fid light.
    process.get_arch_vv(18978, version='last')
cad48e91776ded810b23c336380797c88dd456c0 | Fix Netflix in light of the new scope selection system | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | services/netflix.py | services/netflix.py | import urlparse
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
    """OAuth1 provider definition for the Netflix API."""
    # General info about the provider
    provider_url = 'https://www.netflix.com/'
    docs_url = 'http://developer.netflix.com/docs'
    # URLs to interact with the API
    request_token_url = 'http://api.netflix.com/oauth/request_token'
    authorize_url = 'https://api-user.netflix.com/oauth/login'
    access_token_url = 'http://api.netflix.com/oauth/access_token'
    api_domains = ['api-public.netflix.com', 'api.netflix.com']
    available_permissions = [
        (None, 'read and manage your queue'),
    ]
    # Netflix endpoints are plain HTTP, signed with query-string parameters.
    https = False
    signature_type = SIGNATURE_TYPE_QUERY
    def get_authorize_params(self, *args, **kwargs):
        # Netflix additionally requires the consumer key in the authorize URL,
        # on top of whatever the base OAuth1 provider supplies.
        params = super(Netflix, self).get_authorize_params(*args, **kwargs)
        params['oauth_consumer_key'] = self.client_id
        return params
    def get_user_id(self, key):
        # /users/current returns a resource whose link points at the real user
        # record; follow that link and read the user_id from the second response.
        # NOTE(review): relies on old requests behaviour where ``.json`` is a
        # property, not a method -- confirm against the pinned requests version.
        r = self.api(key, self.api_domains[0], u'/users/current',
                     params={'output': 'json'})
        redirect = r.json[u'resource'][u'link'][u'href']
        parts = urlparse.urlparse(redirect)
        r = self.api(key, parts.netloc, parts.path,
                     params={'output': 'json'})
        return r.json[u'user'][u'user_id']
| import urlparse
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
    """OAuth1 provider definition for the Netflix API."""
    # General info about the provider
    provider_url = 'https://www.netflix.com/'
    docs_url = 'http://developer.netflix.com/docs'
    # URLs to interact with the API
    request_token_url = 'http://api.netflix.com/oauth/request_token'
    authorize_url = 'https://api-user.netflix.com/oauth/login'
    access_token_url = 'http://api.netflix.com/oauth/access_token'
    api_domains = ['api-public.netflix.com', 'api.netflix.com']
    available_permissions = [
        (None, 'read and manage your queue'),
    ]
    # Netflix endpoints are plain HTTP, signed with query-string parameters.
    https = False
    signature_type = SIGNATURE_TYPE_QUERY
    def get_authorize_params(self, redirect_uri):
        # Netflix additionally requires the consumer key in the authorize URL,
        # on top of whatever the base OAuth1 provider supplies.
        params = super(Netflix, self).get_authorize_params(redirect_uri)
        params['oauth_consumer_key'] = self.client_id
        return params
    def get_user_id(self, key):
        # /users/current returns a resource whose link points at the real user
        # record; follow that link and read the user_id from the second response.
        # NOTE(review): relies on old requests behaviour where ``.json`` is a
        # property, not a method -- confirm against the pinned requests version.
        r = self.api(key, self.api_domains[0], u'/users/current',
                     params={'output': 'json'})
        redirect = r.json[u'resource'][u'link'][u'href']
        parts = urlparse.urlparse(redirect)
        r = self.api(key, parts.netloc, parts.path,
                     params={'output': 'json'})
        return r.json[u'user'][u'user_id']
| bsd-3-clause | Python |
8c353a5596cbcd620eefb3520c86d8117e1dde80 | Change version to v1.2.0a for development purposes. | Rapptz/discord.py,Harmon758/discord.py,imayhaveborkedit/discord.py,rapptz/discord.py,Harmon758/discord.py,khazhyk/discord.py | discord/__init__.py | discord/__init__.py | # -*- coding: utf-8 -*-
"""
Discord API Wrapper
~~~~~~~~~~~~~~~~~~~
A basic wrapper for the Discord API.
:copyright: (c) 2015-2019 Rapptz
:license: MIT, see LICENSE for more details.
"""
__title__ = 'discord'
__author__ = 'Rapptz'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-2019 Rapptz'
__version__ = '1.2.0a'
from collections import namedtuple
import logging
from .client import Client
from .appinfo import AppInfo
from .user import User, ClientUser, Profile
from .emoji import Emoji, PartialEmoji
from .activity import *
from .channel import *
from .guild import Guild
from .relationship import Relationship
from .member import Member, VoiceState
from .message import Message, Attachment
from .asset import Asset
from .errors import *
from .calls import CallMessage, GroupCall
from .permissions import Permissions, PermissionOverwrite
from .role import Role
from .file import File
from .colour import Color, Colour
from .invite import Invite, PartialInviteChannel, PartialInviteGuild
from .widget import Widget, WidgetMember, WidgetChannel
from .object import Object
from .reaction import Reaction
from . import utils, opus, abc
from .enums import *
from .embeds import Embed
from .shard import AutoShardedClient
from .player import *
from .webhook import *
from .voice_client import VoiceClient
from .audit_logs import AuditLogChanges, AuditLogEntry, AuditLogDiff
from .raw_models import *
VersionInfo = namedtuple('VersionInfo', 'major minor micro releaselevel serial')
version_info = VersionInfo(major=1, minor=2, micro=0, releaselevel='alpha', serial=0)
try:
    from logging import NullHandler
except ImportError:
    # Fallback for Python < 2.7, where logging.NullHandler does not exist.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
# Silence "no handler" warnings for library users who don't configure logging.
logging.getLogger(__name__).addHandler(NullHandler())
| # -*- coding: utf-8 -*-
"""
Discord API Wrapper
~~~~~~~~~~~~~~~~~~~
A basic wrapper for the Discord API.
:copyright: (c) 2015-2019 Rapptz
:license: MIT, see LICENSE for more details.
"""
__title__ = 'discord'
__author__ = 'Rapptz'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-2019 Rapptz'
__version__ = '1.1.1'
from collections import namedtuple
import logging
from .client import Client
from .appinfo import AppInfo
from .user import User, ClientUser, Profile
from .emoji import Emoji, PartialEmoji
from .activity import *
from .channel import *
from .guild import Guild
from .relationship import Relationship
from .member import Member, VoiceState
from .message import Message, Attachment
from .asset import Asset
from .errors import *
from .calls import CallMessage, GroupCall
from .permissions import Permissions, PermissionOverwrite
from .role import Role
from .file import File
from .colour import Color, Colour
from .invite import Invite, PartialInviteChannel, PartialInviteGuild
from .widget import Widget, WidgetMember, WidgetChannel
from .object import Object
from .reaction import Reaction
from . import utils, opus, abc
from .enums import *
from .embeds import Embed
from .shard import AutoShardedClient
from .player import *
from .webhook import *
from .voice_client import VoiceClient
from .audit_logs import AuditLogChanges, AuditLogEntry, AuditLogDiff
from .raw_models import *
VersionInfo = namedtuple('VersionInfo', 'major minor micro releaselevel serial')
version_info = VersionInfo(major=1, minor=1, micro=1, releaselevel='final', serial=0)
try:
    from logging import NullHandler
except ImportError:
    # Fallback for Python < 2.7, where logging.NullHandler does not exist.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
# Silence "no handler" warnings for library users who don't configure logging.
logging.getLogger(__name__).addHandler(NullHandler())
| mit | Python |
be0410a0a674a1f5b5ba4094bde6e3dc8fba600a | Rework time evolution example. | ezekial4/atomic_neu,ezekial4/atomic_neu | examples/rate_equations.py | examples/rate_equations.py | import numpy as np
import matplotlib.pyplot as plt
import atomic
# Solve carbon ionisation-balance rate equations over a log-spaced
# temperature grid at fixed electron density, then plot the results.
ad = atomic.element('carbon')
temperature = np.logspace(0, 3, 50)
density = 1e19
# Normalisation time such that n_e * tau = 1e19 m^-3 s.
tau = 1e19 / density
t_normalized = np.logspace(-7, 0, 50)
t_normalized -= t_normalized[0]
times = t_normalized * tau
rt = atomic.RateEquations(ad)
yy = rt.solve(times, temperature, density)
# time evolution of ionisation states at a certain temperature
y_fixed_temperature = yy.at_temperature(38)
# steady state time
tau_ss = yy.steady_state_time()
# Figure 1: charge-state fractions vs time at the fixed temperature.
fig = plt.figure(1); plt.clf()
ax = fig.add_subplot(111)
lines_ref = ax.semilogx(times/tau, y_fixed_temperature)
ax.set_xlabel(r'$t\ [\mathrm{s}]$')
ax.set_ylim(ymin=0)
ax.set_xlim(xmin=0)
plt.draw()
# Figure 2: n_e * steady-state time vs temperature, coloured per charge state.
fig = plt.figure(2); fig.clf()
ax = fig.add_subplot(111)
line, = ax.loglog(temperature, tau_ss * density, visible=False)
yy[-1].replot_colored(line, lines_ref)
ax.set_xlabel(r'$T_\mathrm{e}\ [\mathrm{eV}]$')
ax.set_ylabel(r'$n_\mathrm{e} \tau_\mathrm{ss}\ [\mathrm{m^{-3} s}]$')
plt.draw()
# Figure 3: ensemble-averaged fractions for three residence times n_e * tau.
fig = plt.figure(3); fig.clf()
tau = np.array([ 1e18, 1e15, 1e14]) / density
log_netau = np.log10(density * tau)
ybar = yy.ensemble_average()
for iax, y in enumerate(ybar.select_times(tau)):
    ax = fig.add_subplot(3,1, iax + 1)
    lines = ax.loglog(temperature, y.y.T, '-')
    y.annotate_ionisation_stages(lines)
    ax.set_ylim(0.04, 1.4)
    s = r'$n_\mathrm{e} \tau = 10^{%d}\ \mathrm{m^3 s}$' % log_netau[iax]
    ax.text(0.95, 0.95, s, transform=ax.transAxes, va='top',
            ha='right')
    ax.label_outer()
fig.subplots_adjust(hspace=0)
fig.axes[-1].set_xlabel(r'$T_\mathrm{e}\ [\mathrm{eV}]$')
plt.draw()
plt.show()
| import numpy as np
import matplotlib.pyplot as plt
import atomic
# Solve carbon ionisation-balance rate equations over a log-spaced
# temperature grid at fixed electron density, then plot the results.
ad = atomic.element('carbon')
temperature = np.logspace(0, 3, 50)
density = 1e19
# Normalisation time such that n_e * tau = 1e19 m^-3 s.
tau = 1e19 / density
t_normalized = np.logspace(-4, 0, 50)
t_normalized -= t_normalized[0]
times = t_normalized * tau
rt = atomic.RateEquations(ad)
yy = rt.solve(times, temperature, density)
# solution after the last timestep
y_final = yy[-1]
# time evolution of ionisation states at a certain temperature
y_fixed_temperature = yy.at_temperature(4)
# steady state time
tau_ss = yy.steady_state_time()
# Figure 2: final fractions vs temperature, coronal equilibrium dashed.
plt.figure(2); plt.clf()
y_final.plot_vs_temperature()
lines_ref = yy.y_coronal.plot_vs_temperature(ls='--')
plt.draw()
# Figure 3: charge-state fractions vs time at the fixed temperature.
fig = plt.figure(3); plt.clf()
ax = fig.add_subplot(111)
ax.semilogx(times/tau, y_fixed_temperature)
plt.draw()
# Figure 4: n_e * steady-state time vs temperature, coloured per charge state.
fig = plt.figure(4); fig.clf()
ax = fig.add_subplot(111)
line, = ax.loglog(temperature, tau_ss * density, visible=False)
y_final.replot_colored(line, lines_ref)
ax.set_xlabel(r'$T_\mathrm{e}\ [\mathrm{eV}]$')
ax.set_ylabel(r'$n_\mathrm{e} \tau_\mathrm{ss}\ [\mathrm{m^{-3} s}]$')
plt.draw()
plt.show()
| mit | Python |
eb6fde636cda8967f5094a325b12988c51a92133 | Bump version | animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening | selvbetjening/__init__.py | selvbetjening/__init__.py | __version__ = '8.2'
| __version__ = '8.1'
| mit | Python |
602954fc1157bad28888135255e6169a7cbf59a7 | bump version to 0.7.2 | hovel/django-phonenumber-field,thenewguy/django-phonenumber-field,thenewguy/django-phonenumber-field,invalid-access/django-phonenumber-field,ellmetha/django-phonenumber-field,hovel/django-phonenumber-field,hwkns/django-phonenumber-field,bramd/django-phonenumber-field,ellmetha/django-phonenumber-field,bramd/django-phonenumber-field,hwkns/django-phonenumber-field,stefanfoulis/django-phonenumber-field,thenewguy/django-phonenumber-field,invalid-access/django-phonenumber-field | phonenumber_field/__init__.py | phonenumber_field/__init__.py | # -*- coding: utf-8 -*-
__version__ = '0.7.2'
| # -*- coding: utf-8 -*-
__version__ = '0.7.1'
| mit | Python |
86fe21eae50f95c1d6996a630ae95a2fe7a5301c | Rename test class for cached task | tkf/buildlet | buildlet/tests/test_cachedtask.py | buildlet/tests/test_cachedtask.py | import unittest
from ..task import BaseSimpleTask
from ..task.cachedtask import BaseCachedTask
from ..runner import simple
from ..datastore.inmemory import DataStoreNestableInMemory
class ImMemoryCachedTask(BaseCachedTask, BaseSimpleTask):
    # NOTE(review): name looks like a typo for "InMemoryCachedTask"; renaming
    # would touch every user of this helper, so it is only flagged here.
    # Counts how many times run() actually executed; caching should keep it at 1.
    num_run = 0
    def run(self):
        self.num_run += 1
    def get_taskvalue(self):
        # Value the caching layer hashes to decide whether the task is stale.
        return self.taskvalue
class TestCachedTask(unittest.TestCase):
    # Runner module used to execute the task graph.
    runner = simple
    """Runner module."""
    class Task(ImMemoryCachedTask):
        # Root task with three cached parents, each on its own nested substore.
        num_parents = 3
        def generate_parents(self):
            return [
                ImMemoryCachedTask(datastore=self.datastore.get_substore(i),
                                   taskvalue=())
                for i in range(self.num_parents)]
    def assert_run_once(self):
        # Caching should ensure the root task and every parent ran exactly once.
        self.assertEqual(self.task.num_run, 1)
        for p in self.task.get_parents():
            self.assertEqual(p.num_run, 1)
    def test_simple_run(self):
        self.ds = DataStoreNestableInMemory()
        self.task = self.Task(taskvalue=(), datastore=self.ds)
        self.runner.run(self.task)
        self.assert_run_once()
    def test_cached_run(self):
        # Re-running an unchanged graph must hit the cache (no extra runs).
        self.test_simple_run()
        for i in range(2):
            self.runner.run(self.task)
        self.assert_run_once()
    def test_invalidate_parent(self):
        # Clearing one parent's hash store forces that parent and the root
        # (but not the other parents) to run again.
        self.test_simple_run()
        # Invalidate 0-th parent node cache
        ptask = self.task.get_parents()[0]
        pths = ptask.get_taskhashstore()
        pths.clear()
        self.runner.run(self.task)
        self.assertRaises(AssertionError, self.assert_run_once)
        self.assertEqual(self.task.num_run, 2)
        self.assertEqual(ptask.num_run, 2)
        for other in self.task.get_parents()[1:]:
            self.assertEqual(other.num_run, 1)
| import unittest
from ..task import BaseSimpleTask
from ..task.cachedtask import BaseCachedTask
from ..runner import simple
from ..datastore.inmemory import DataStoreNestableInMemory
class ImMemoryCachedTask(BaseCachedTask, BaseSimpleTask):
    # NOTE(review): name looks like a typo for "InMemoryCachedTask".
    # Counts how many times run() actually executed; caching should keep it at 1.
    num_run = 0
    def run(self):
        self.num_run += 1
    def get_taskvalue(self):
        # Value the caching layer hashes to decide whether the task is stale.
        return self.taskvalue
class TestSimpleRunner(unittest.TestCase):
    # Runner module used to execute the task graph.
    runner = simple
    """Runner module."""
    class Task(ImMemoryCachedTask):
        # Root task with three cached parents, each on its own nested substore.
        num_parents = 3
        def generate_parents(self):
            return [
                ImMemoryCachedTask(datastore=self.datastore.get_substore(i),
                                   taskvalue=())
                for i in range(self.num_parents)]
    def assert_run_once(self):
        # Caching should ensure the root task and every parent ran exactly once.
        self.assertEqual(self.task.num_run, 1)
        for p in self.task.get_parents():
            self.assertEqual(p.num_run, 1)
    def test_simple_run(self):
        self.ds = DataStoreNestableInMemory()
        self.task = self.Task(taskvalue=(), datastore=self.ds)
        self.runner.run(self.task)
        self.assert_run_once()
    def test_cached_run(self):
        # Re-running an unchanged graph must hit the cache (no extra runs).
        self.test_simple_run()
        for i in range(2):
            self.runner.run(self.task)
        self.assert_run_once()
    def test_invalidate_parent(self):
        # Clearing one parent's hash store forces that parent and the root
        # (but not the other parents) to run again.
        self.test_simple_run()
        # Invalidate 0-th parent node cache
        ptask = self.task.get_parents()[0]
        pths = ptask.get_taskhashstore()
        pths.clear()
        self.runner.run(self.task)
        self.assertRaises(AssertionError, self.assert_run_once)
        self.assertEqual(self.task.num_run, 2)
        self.assertEqual(ptask.num_run, 2)
        for other in self.task.get_parents()[1:]:
            self.assertEqual(other.num_run, 1)
| bsd-3-clause | Python |
24c2f348e2a91af82759f393c1757693b0e4d1ac | fix import | stamparm/maltrail,stamparm/maltrail,stamparm/maltrail,stamparm/maltrail | core/ignore.py | core/ignore.py | #!/usr/bin/env python
"""
simple ignore rule configured by file misc/ignore_event.txt
example:
#sintax:
#src_ip;src_port;dst_ip;dst_port
#
# '*' is use for any
#
# ignore all events from source ip 192.168.0.3
# 192.168.0.3;*;*;*
#
# ignore all events to ssh port 22
# *;*;*;22
"""
import csv
import gzip
import os
import re
import sqlite3
import StringIO
import subprocess
import urllib2
import zipfile
import zlib
from core.settings import config
from core.settings import INGORE_EVENTS
def ignore_event(event_tuple):
    """Return True if the event matches any rule in INGORE_EVENTS.

    Each rule is (src_ip, src_port, dst_ip, dst_port); '*' matches any value.
    """
    # NOTE(review): INGORE_EVENTS is a (pre-existing) typo for IGNORE_EVENTS,
    # defined in core.settings; renaming must happen there first.
    retval = False
    _, _, src_ip, src_port, dst_ip, dst_port, _, _, _, _, _ = event_tuple
    event = (src_ip, src_port, dst_ip, dst_port)
    for ignore_src_ip, ignore_src_port, ignore_dst_ip, ignore_dst_port in INGORE_EVENTS:
        rule = (ignore_src_ip, ignore_src_port, ignore_dst_ip, ignore_dst_port)
        # A rule matches when every field is either the wildcard or equal.
        if all(pattern == "*" or pattern == value
               for pattern, value in zip(rule, event)):
            retval = True
            break
    if config.SHOW_DEBUG:
        print("[i] ignore_event src_ip=%s, src_port=%s, dst_ip=%s, dst_port=%s, retval=%s" % (src_ip, src_port, dst_ip, dst_port, retval))
    return retval
| #!/usr/bin/env python
"""
simple ignore rule configured by file misc/ignore_event.txt
example:
#sintax:
#src_ip;src_port;dst_ip;dst_port
#
# '*' is use for any
#
# ignore all events from source ip 192.168.0.3
# 192.168.0.3;*;*;*
#
# ignore all events to ssh port 22
# *;*;*;22
"""
import csv
import gzip
import os
import re
import sqlite3
import StringIO
import subprocess
import urllib2
import zipfile
import zlib
from core.settings import INGORE_EVENTS
def ignore_event(event_tuple):
    """Return True if the event matches any rule in INGORE_EVENTS.

    Each rule is (src_ip, src_port, dst_ip, dst_port); '*' matches any value.
    """
    # BUG FIX: ``config`` is referenced below but was never imported anywhere
    # in this module, so reaching the debug branch raised NameError.  Import
    # it locally to keep the fix self-contained.
    from core.settings import config
    retval = False
    _, _, src_ip, src_port, dst_ip, dst_port, _, _, _, _, _ = event_tuple
    for ignore_src_ip, ignore_src_port, ignore_dst_ip, ignore_dst_port in INGORE_EVENTS:
        if ignore_src_ip != "*" and ignore_src_ip != src_ip:
            continue
        if ignore_src_port != "*" and ignore_src_port != src_port:
            continue
        if ignore_dst_ip != "*" and ignore_dst_ip != dst_ip:
            continue
        if ignore_dst_port != "*" and ignore_dst_port != dst_port:
            continue
        # All four fields matched (or were wildcards): ignore this event.
        retval = True
        break
    if config.SHOW_DEBUG:
        print("[i] ignore_event src_ip=%s, src_port=%s, dst_ip=%s, dst_port=%s, retval=%s" % (src_ip, src_port, dst_ip, dst_port, retval))
    return retval
| mit | Python |
8d286613dc1cd51fe90338de429f086ed4a264e7 | Comment out a test that wasn't passing that I don't really need | LogicalDash/LiSE,LogicalDash/LiSE | allegedb/allegedb/tests/test_load.py | allegedb/allegedb/tests/test_load.py | import pytest
import os
from allegedb import ORM
import networkx as nx
# Seed graph handed to scale_free_graph below; it carries the name the ORM
# will store the graph under.
scalefreestart = nx.MultiDiGraph(name='scale_free_graph_5')
scalefreestart.add_edges_from([(0, 1), (1, 2), (2, 0)])
# Graphs of each networkx flavour to round-trip through the database.
testgraphs = [
    nx.chvatal_graph(),
    nx.scale_free_graph(5, create_using=scalefreestart),
    # nx.chordal_cycle_graph(5, create_using=nx.MultiGraph(name='chordal_cycle_graph_5')),
    # The standard networkx edges iterator decides to flip some edges about in arbitrary-seeming
    # ways that I haven't been able to replicate and it doesn't seem worth it.
]
# have to name it after creation because it clears the create_using
path_graph_9 = nx.path_graph(9)
path_graph_9.name = 'path_graph_9'
testgraphs.append(path_graph_9)
@pytest.fixture
def db():
    """Fresh on-disk ORM preloaded with every graph in ``testgraphs``.

    The graphs are written in one ORM session, then the database is reopened
    so the test exercises a genuine reload from disk; the file is removed
    after the test finishes.
    """
    name = 'allegedb_load_test.db'
    if os.path.exists(name):
        os.remove(name)
    with ORM('sqlite:///' + name) as orm:
        for graph in testgraphs:
            # Dispatch on the concrete graph type to the matching constructor.
            {
                nx.Graph: orm.new_graph,
                nx.DiGraph: orm.new_digraph,
                nx.MultiGraph: orm.new_multigraph,
                nx.MultiDiGraph: orm.new_multidigraph
            }[type(graph)](graph.name, graph)
            # Sanity-check that storing the graph did not mutate it.
            assert set(graph.node.keys()) == set(orm.graph[graph.name].node.keys()), \
                "{}'s nodes changed during instantiation".format(graph.name)
            assert set(graph.edges) == set(orm.graph[graph.name].edges), \
                "{}'s edges changed during instantiation".format(graph.name)
    with ORM('sqlite:///' + name) as orm:
        yield orm
    os.remove(name)
def test_basic_load(db):
    """Reopened DB must contain exactly the nodes of each source graph."""
    for graph in testgraphs:
        alleged = db.graph[graph.name]
        assert set(graph.node.keys()) == set(alleged.node.keys()), "{}'s nodes are not the same after load".format(
            graph.name
        )
assert set(graph.edges) == set(alleged.edges), "{}'s edges are not the same after load".format(graph.name) | import pytest
import os
from allegedb import ORM
import networkx as nx
# Seed graph handed to scale_free_graph below; it carries the name the ORM
# will store the graph under.
scalefreestart = nx.MultiDiGraph(name='scale_free_graph_5')
scalefreestart.add_edges_from([(0, 1), (1, 2), (2, 0)])
# Graphs of each networkx flavour to round-trip through the database.
testgraphs = [
    nx.chvatal_graph(),
    nx.scale_free_graph(5, create_using=scalefreestart),
    nx.chordal_cycle_graph(5, create_using=nx.MultiGraph(name='chordal_cycle_graph_5')),
]
# have to name it after creation because it clears the create_using
path_graph_9 = nx.path_graph(9)
path_graph_9.name = 'path_graph_9'
testgraphs.append(path_graph_9)
@pytest.fixture
def db():
    """Fresh on-disk ORM preloaded with every graph in ``testgraphs``.

    Written in one session, reopened for the test, file removed afterwards.
    """
    name = 'allegedb_load_test.db'
    if os.path.exists(name):
        os.remove(name)
    with ORM('sqlite:///' + name) as orm:
        for graph in testgraphs:
            # Dispatch on the concrete graph type to the matching constructor.
            {
                nx.Graph: orm.new_graph,
                nx.DiGraph: orm.new_digraph,
                nx.MultiGraph: orm.new_multigraph,
                nx.MultiDiGraph: orm.new_multidigraph
            }[type(graph)](graph.name, graph)
            # Sanity-check that storing the graph did not mutate it.
            assert set(graph.node.keys()) == set(orm.graph[graph.name].node.keys()), \
                "{}'s nodes changed during instantiation".format(graph.name)
            assert set(graph.edges) == set(orm.graph[graph.name].edges), \
                "{}'s edges changed during instantiation".format(graph.name)
    with ORM('sqlite:///' + name) as orm:
        yield orm
    os.remove(name)
def test_basic_load(db):
    """Reopened DB must contain exactly the nodes of each source graph."""
    for graph in testgraphs:
        alleged = db.graph[graph.name]
        assert set(graph.node.keys()) == set(alleged.node.keys()), "{}'s nodes are not the same after load".format(
            graph.name
        )
assert set(graph.edges) == set(alleged.edges), "{}'s edges are not the same after load".format(graph.name) | agpl-3.0 | Python |
83ec77807d3688236f421555e1f74006a0308208 | Add missing encoding info to alltheitems.py | wurstmineberg/alltheitems.wurstmineberg.de,wurstmineberg/alltheitems.wurstmineberg.de | alltheitems.py | alltheitems.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Wurstmineberg: All The Items
"""
import bottle
application = bottle.Bottle()
# Static files are served straight from the deployed site directory.
document_root = '/var/www/alltheitems.wurstmineberg.de'
@application.route('/')
def show_index():
    """The index page."""
    return bottle.static_file('index.html', root=document_root)
@application.route('/alltheitems.png')
def image_alltheitems():
    """The “Craft ALL the items!” image."""
    return bottle.static_file('alltheitems.png', root=document_root)
if __name__ == '__main__':
    # Standalone development server; in production WSGI uses ``application``.
    bottle.run(app=application, host='0.0.0.0', port=8081)
| #!/usr/bin/env python3
"""
Wurstmineberg: All The Items
"""
import bottle
application = bottle.Bottle()
# Static files are served straight from the deployed site directory.
document_root = '/var/www/alltheitems.wurstmineberg.de'
@application.route('/')
def show_index():
    """The index page."""
    return bottle.static_file('index.html', root=document_root)
@application.route('/alltheitems.png')
def image_alltheitems():
    """The “Craft ALL the items!” image."""
    return bottle.static_file('alltheitems.png', root=document_root)
if __name__ == '__main__':
    # Standalone development server; in production WSGI uses ``application``.
    bottle.run(app=application, host='0.0.0.0', port=8081)
| mit | Python |
772840642e828ee2ad14b2d0ab937f23c2adaac9 | Bump version to 0.1.0rc2 | nioinnovation/safepickle | safepickle/__init__.py | safepickle/__init__.py | from .safepickle import load, loads, dump, dumps
__title__ = 'safepickle'
__version__ = '0.1.0rc2'
__author__ = 'n.io'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 n.io'
| from .safepickle import load, loads, dump, dumps
__title__ = 'safepickle'
__version__ = '0.1.0rc1'
__author__ = 'n.io'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 n.io'
| apache-2.0 | Python |
deedcff26e18e6edfff6680697ccbe213565e5c7 | add to join_session_command | TeamRemote/remote-sublime,TeamRemote/remote-sublime | remote.py | remote.py | import sublime, sublime_plugin
import Session
import socket
import sys
class DiffListener(sublime_plugin.EventListener):
    """Listens for modifications to the view and gets the diffs using
    Operational Transformation"""

    # Sessions being synchronized.  Class-level so command classes can append
    # to it (StartSessionCommand does ``DiffListener.sessions.append``).
    # BUG FIX: the original defined ``__init___`` (typo'd name, never called
    # by Python) and its methods referenced an undefined global ``sessions``;
    # ``on_modified_async`` also contained invalid ``for ... if ...:`` syntax.
    sessions = []

    def on_modified_async(self, view):
        """Send the full buffer of a modified, session-bound view to its session."""
        for session in self.sessions:
            # NOTE(review): assumes each Session exposes the view it tracks as
            # ``session.view`` -- confirm against the Session class.
            if view == session.view:
                current_buffer = view.substr(sublime.Region(0, view.size()))
                session.send_diffs(current_buffer)

    def on_close(self, view):
        """Check to see if views I care about are closed, and if they are,
        drop them from my watched sessions."""
        DiffListener.sessions = [s for s in self.sessions if s.view != view]
class StartSessionCommand(sublime_plugin.TextCommand):
    """Command to start a new RemoteCollab session for the current view"""

    @staticmethod
    def get_buffer(view):
        """Return the entire text of *view*'s buffer."""
        return view.substr(sublime.Region(0, view.size()))

    def run(self):
        # TODO(review): unfinished.  Still needs to connect to the remote
        # server (address from the settings file), wait for the server to
        # create the session, and show the user the access token.
        # BUG FIX: the original body did not compile (assignment to a call
        # expression) and referenced undefined names (``is_true``,
        # ``session_id``).  Session's exact signature must be confirmed.
        session = Session(self.view)
        DiffListener.sessions.append(session)
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
    """Command to connect to an external RemoteCollab session."""
    # this will have to connect to the remote server (configured in settings file),
    # send the session token, make a new view containing the contents of the remote
    # session, and then start listening for modifications to that view and synchronizing
    def run(self):
        # BUG FIX: the original ``run`` had no body at all, which is a
        # SyntaxError at import time.  TODO: implement per the comment above.
        pass
| import sublime, sublime_plugin
import Session
import socket
import sys
class DiffListener(sublime_plugin.EventListener):
    """Listens for modifications to the view and gets the diffs using
    Operational Transformation"""

    # Maps each Sublime view bound to a remote-collab session to its Session.
    # Class-level so command classes can register views (StartSessionCommand
    # writes ``DiffListener.watched_views[view] = session``).
    # BUG FIX: the original defined ``__init___`` (typo'd name, never called
    # by Python) and its methods referenced an undefined global ``sessions``;
    # ``on_modified_async`` also contained invalid ``for ... if ...:`` syntax.
    watched_views = {}

    def on_modified_async(self, view):
        """Send the full buffer of a modified, watched view to its session."""
        session = self.watched_views.get(view)
        if session is not None:
            # get the body text of the whole buffer
            current_buffer = view.substr(sublime.Region(0, view.size()))
            session.send_diffs(current_buffer)

    def on_close(self, view):
        """Check to see if views I care about are closed, and if they are,
        drop them from my watched-views"""
        self.watched_views.pop(view, None)
class StartSessionCommand(sublime_plugin.TextCommand):
    """Command to start a new RemoteCollab session for the current view"""

    @staticmethod
    def get_buffer(view):
        """Return the entire text of *view*'s buffer."""
        return view.substr(sublime.Region(0, view.size()))

    def run(self):
        # TODO(review): unfinished.  Still needs to connect to the remote
        # server (address from the settings file), obtain a session id via
        # something like get_id_from_server(), and show the user the token.
        # BUG FIX: the original referenced undefined names (``session_id``,
        # ``is_true``, bare ``get_buffer``) and would raise at runtime.
        session_id = None  # placeholder until get_id_from_server() exists
        DiffListener.watched_views[self.view] = Session(session_id, self.get_buffer(self.view))
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
    """Command to connect to an external RemoteCollab session."""
    # this will have to connect to the remote server (configured in settings file),
    # send the session token, make a new view containing the contents of the remote
    # session, and then start listening for modifications to that view and synchronizing
    def run(self):
        # BUG FIX: the original ``run`` had no body at all, which is a
        # SyntaxError at import time.  TODO: implement per the comment above.
        pass
| mit | Python |
6eae9369cc122b23577715951d6b0f59991b0f65 | Update choices descriptions in FileTypes | mociepka/saleor,mociepka/saleor,mociepka/saleor | saleor/csv/__init__.py | saleor/csv/__init__.py | class ExportEvents:
"""The different csv events types."""
EXPORT_PENDING = "export_pending"
EXPORT_SUCCESS = "export_success"
EXPORT_FAILED = "export_failed"
EXPORT_DELETED = "export_deleted"
EXPORTED_FILE_SENT = "exported_file_sent"
CHOICES = [
(EXPORT_PENDING, "Data export was started."),
(EXPORT_SUCCESS, "Data export was completed successfully."),
(EXPORT_FAILED, "Data export failed."),
(EXPORT_DELETED, "Export file was started."),
(
EXPORTED_FILE_SENT,
"Email with link to download csv file was sent to the customer.",
),
]
class FileTypes:
CSV = "csv"
XLSX = "xlsx"
CHOICES = [
(CSV, "Plain CSV file."),
(XLSX, "Excel XLSX file."),
]
| class ExportEvents:
"""The different csv events types."""
EXPORT_PENDING = "export_pending"
EXPORT_SUCCESS = "export_success"
EXPORT_FAILED = "export_failed"
EXPORT_DELETED = "export_deleted"
EXPORTED_FILE_SENT = "exported_file_sent"
CHOICES = [
(EXPORT_PENDING, "Data export was started."),
(EXPORT_SUCCESS, "Data export was completed successfully."),
(EXPORT_FAILED, "Data export failed."),
(EXPORT_DELETED, "Export file was started."),
(
EXPORTED_FILE_SENT,
"Email with link to download csv file was sent to the customer.",
),
]
class FileTypes:
CSV = "csv"
XLSX = "xlsx"
CHOICES = [
(CSV, "Plain csv file."),
(XLSX, "Excel .xlsx file."),
]
| bsd-3-clause | Python |
81269d44b12527fc9059498ef1e9443756aee48c | add email test | gregstiehl/chatparse | cornertests.py | cornertests.py | #!/usr/bin/env python3
import chatparse
import unittest
class CornerTests(unittest.TestCase):
def test_email(self):
'''test email to make sure it is not a mention'''
msg = "user@example.com"
expect = {}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
def test_userurl(self):
'''test for user@ in url'''
msg = "http://user@example.com"
expect = {
"links": [
{
"url": msg,
"title": None,
}
]
}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
def test_nonexisturl(self):
'''do not include links to unknown URLs'''
msg = "http://site-does-not-exist.com"
expect = {}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
def test_toolongemoticon(self):
'''test for emoticon bigger that 15 chars'''
msg = "(1234567890123456)"
expect = {}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
def test_longemoticon(self):
'''test for emoticon bigger that 15 chars'''
msg = "(123456789012345)"
expect = {
"emoticons": [ "123456789012345" ]
}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python3
import chatparse
import unittest
class CornerTests(unittest.TestCase):
def test_userurl(self):
'''test for @user in url'''
msg = "http://user@example.com"
expect = {
"links": [
{
"url": msg,
"title": None,
}
]
}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
def test_toolongemoticon(self):
'''test for emoticon bigger that 15 chars'''
msg = "(1234567890123456)"
expect = {}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
def test_longemoticon(self):
'''test for emoticon bigger that 15 chars'''
msg = "(123456789012345)"
expect = {
"emoticons": [ "123456789012345" ]
}
result = chatparse.parse(msg)
self.assertEqual(expect, result)
if __name__ == '__main__':
unittest.main()
| mit | Python |
bd1cea280a87818753ec1511207b4561fa01356a | Make help command suggest you can also use it to get more info on commands; minor other improvements | Didero/DideRobot | commands/help.py | commands/help.py | from CommandTemplate import CommandTemplate
import GlobalStore
class Command(CommandTemplate):
triggers = ['help', 'helpfull']
helptext = "Shows the explanation of the provided command, or the list of commands if there aren't any arguments. {commandPrefix}helpfull shows all command aliases as well"
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
#First get all the existing triggers, since we either need to list them or check if the provided one exists
triggerlist = {}
shortTriggerlist = {}
for commandname, command in GlobalStore.commandhandler.commands.iteritems():
if command.showInCommandList and GlobalStore.commandhandler.isCommandAllowedForBot(bot, commandname) and len(command.triggers) > 0:
shortTriggerlist[command.triggers[0]] = command
for trigger in command.triggers:
triggerlist[trigger] = command
replytext = u""
#Check if a command has been passed as argument
command = msgWithoutFirstWord.lower()
#Remove the command prefix if it's there, because the lookup doesn't have it
if command.startswith(bot.factory.commandPrefix):
command = command[bot.factory.commandPrefixLength:]
#check if the provided argument exists
if command in triggerlist:
#'!help, !helpfull: '
replytext = u"{commandPrefix}{triggers}".format(triggers=", {commandPrefix}".join(triggerlist[command].triggers), commandPrefix="{commandPrefix}")
#some commands can only be used by people in the admins list. Inform users of that
if triggerlist[command].adminOnly:
replytext += u" [admin-only]"
replytext += u": {helptext}".format(helptext=triggerlist[command].helptext)
#If the provided command can't be found (either because of misspelling or because they didn't provide one),
# show a list of available commands
else:
#If a command was provided but not found, apologize even though it's not our fault
if command != u"":
replytext = u"I don't know that command, sorry. "
commandslist = ""
if triggerInMsg == u'helpfull':
commandslist = u", ".join(sorted(triggerlist.keys()))
else:
commandslist = ", ".join(sorted(shortTriggerlist.keys()))
replytext += u"Commands loaded: {commandslist}. Type '{prefix}help [commandname] for info on how to use that command".format(commandslist=commandslist, prefix=bot.factory.commandPrefix)
replytext = replytext.format(commandPrefix=bot.factory.commandPrefix)
bot.say(target, replytext)
| from CommandTemplate import CommandTemplate
import GlobalStore
class Command(CommandTemplate):
triggers = ['help', 'helpfull']
helptext = "Shows the explanation of a command, or the list of commands if there aren't any arguments. {commandPrefix}helpfull shows all command aliases as well"
def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength):
#First get all the existing triggers, since we either need to list them or check if the provided one exists
triggerlist = {}
shortTriggerlist = {}
for commandname, command in GlobalStore.commandhandler.commands.iteritems():
if command.showInCommandList and GlobalStore.commandhandler.isCommandAllowedForBot(bot, commandname) and len(command.triggers) > 0:
shortTriggerlist[command.triggers[0]] = command
for trigger in command.triggers:
triggerlist[trigger] = command
replytext = u""
command = u""
#Check if a command has been passed as argument
if msgPartsLength > 1:
command = msgParts[1].lower()
#Remove the command prefix if it's there, because the lookup doesn't have it
if command.startswith(bot.factory.commandPrefix):
command = command[bot.factory.commandPrefixLength:]
#check if the provided argument exists
if command in triggerlist:
#'!help, !helpfull: '
replytext = u"{commandPrefix}{triggers}".format(triggers=", {commandPrefix}".join(triggerlist[command].triggers), commandPrefix="{commandPrefix}")
#some commands can only be used by people in the admins list. Inform users of that
if triggerlist[command].adminOnly:
replytext += u" [admin-only]"
replytext += u": {helptext}".format(helptext=triggerlist[command].helptext)
#If the provided command can't be found (either because of misspelling or because they didn't provide one),
# show a list of available commands
else:
#If a command was provided but not found, apologize even though it's not our fault
if command != "":
replytext = u"I don't know that command, sorry. "
commandslist = ""
if triggerInMsg == 'helpfull':
commandslist = ", ".join(sorted(triggerlist.keys()))
else:
commandslist = ", ".join(sorted(shortTriggerlist.keys()))
replytext += u"Commands loaded: {commandslist}".format(commandslist=commandslist)
replytext = replytext.format(commandPrefix=bot.factory.commandPrefix)
bot.say(target, replytext)
| mit | Python |
59a7c4d059bbd86bdf2dc1572c601aa973698496 | use << and >> in the tests | python-parsy/parsy,jneen/parsy,python-parsy/parsy | test/parsy_test.py | test/parsy_test.py | from parsy import string, regex, chain
import re
import pdb
import readline
whitespace = regex(r'\s+', re.MULTILINE)
comment = regex(r';.*')
ignore = (whitespace | comment).many()
lexeme = lambda p: p << ignore
lparen = lexeme(string('('))
rparen = lexeme(string(')'))
number = lexeme(regex(r'\d+')).map(int)
symbol = lexeme(regex(r'[\d\w_-]+'))
true = lexeme(string('#t')).result(True)
false = lexeme(string('#f')).result(False)
atom = true | false | number | symbol
@chain
def form():
yield lparen
els = yield expr.many()
yield rparen
return els
@chain
def quote():
yield string("'")
e = yield expr
return ['quote', e]
expr = form | quote | atom
program = ignore >> expr.many()
def test_form():
result = program.parse('(1 2 3)')
assert result == [[1, 2, 3]]
def test_quote():
result = program.parse("'foo '(bar baz)")
assert result == [['quote', 'foo'], ['quote', ['bar', 'baz']]]
def test_boolean():
result = program.parse('#t #f')
assert result == [True, False]
def test_comments():
result = program.parse(
"""
; a program with a comment
( foo ; that's a foo
bar )
; some comments at the end
"""
)
assert result == [['foo', 'bar']]
| from parsy import string, regex, chain
import re
import pdb
import readline
whitespace = regex(r'\s+', re.MULTILINE)
comment = regex(r';.*')
ignore = (whitespace | comment).many()
lexeme = lambda p: p.skip(ignore)
lparen = lexeme(string('('))
rparen = lexeme(string(')'))
number = lexeme(regex(r'\d+')).map(int)
symbol = lexeme(regex(r'[\d\w_-]+'))
true = lexeme(string('#t')).result(True)
false = lexeme(string('#f')).result(False)
atom = true | false | number | symbol
@chain
def form():
yield lparen
els = yield expr.many()
yield rparen
return els
@chain
def quote():
yield string("'")
e = yield expr
return ['quote', e]
expr = form | quote | atom
program = ignore.then(expr.many())
def test_form():
result = program.parse('(1 2 3)')
assert result == [[1, 2, 3]]
def test_quote():
result = program.parse("'foo '(bar baz)")
assert result == [['quote', 'foo'], ['quote', ['bar', 'baz']]]
def test_boolean():
result = program.parse('#t #f')
assert result == [True, False]
def test_comments():
result = program.parse(
"""
; a program with a comment
( foo ; that's a foo
bar )
; some comments at the end
"""
)
assert result == [['foo', 'bar']]
| mit | Python |
f106175ed3eb8ec32202b9197cd68f33f57361b3 | Define 'updated' property setter and getters | pipex/gitbot,pipex/gitbot,pipex/gitbot | app/models.py | app/models.py | from app import slack, redis, app
from app.redis import RedisModel
class Channel(RedisModel):
__prefix__ = '#'
@staticmethod
def load_from_slack():
"""Update channel list from slack"""
slack_response = slack.channels.list()
if not slack_response.successful:
app.logger.error('Error loading channel list. Server returned %s' % slack_response.error)
return False
# Add channel to list and save
for channel in slack_response.body.get('channels', []):
name = channel.get('name')
entity = Channel(channel.get('name'))
entity.slack_id = channel.get('id')
return True
class User(RedisModel):
__prefix__ = '@'
@property
def updated(self):
if 'updated' in self:
return datetime.strptime(self['updated'], "%Y-%m-%dT%H:%M:%S.%fZ")
return None
@updated.setter
def updated(self, value):
self['updated'] = datetime.strftime(value, "%Y-%m-%dT%H:%M:%S.%fZ")
@staticmethod
def load_from_slack(include_bots=False, include_deleted=False):
"""Update user list from slack"""
slack_response = slack.users.list()
if not slack_response.successful:
app.logger.error('Error loading user list. Server returned %s' % slack_response.error)
return False
# Add channel to list and save
for user in slack_response.body.get('members', []):
if user.get('is_bot') and not include_bots:
continue
if user.get('deleted') and not include_deleted:
continue
entity = User(user.get('name'))
entity.slack_id = user.get('id')
return True
def load_data_from_slack():
"""Load data from slack.
To be called on application start"""
Channel.load_from_slack()
User.load_from_slack()
| from app import slack, redis, app
from app.redis import RedisModel
class Channel(RedisModel):
__prefix__ = '#'
@staticmethod
def load_from_slack():
"""Update channel list from slack"""
slack_response = slack.channels.list()
if not slack_response.successful:
app.logger.error('Error loading channel list. Server returned %s' % slack_response.error)
return False
# Add channel to list and save
for channel in slack_response.body.get('channels', []):
name = channel.get('name')
entity = Channel(channel.get('name'))
entity.slack_id = channel.get('id')
return True
class User(RedisModel):
__prefix__ = '@'
@staticmethod
def load_from_slack(include_bots=False, include_deleted=False):
"""Update user list from slack"""
slack_response = slack.users.list()
if not slack_response.successful:
app.logger.error('Error loading user list. Server returned %s' % slack_response.error)
return False
# Add channel to list and save
for user in slack_response.body.get('members', []):
if user.get('is_bot') and not include_bots:
continue
if user.get('deleted') and not include_deleted:
continue
entity = User(user.get('name'))
entity.slack_id = user.get('id')
return True
def load_data_from_slack():
"""Load data from slack.
To be called on application start"""
Channel.load_from_slack()
User.load_from_slack()
| apache-2.0 | Python |
06493206e99b8e63027898fe9bcf73d18264154e | Add initial solution | CubicComet/exercism-python-solutions | space-age/space_age.py | space-age/space_age.py | class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
| class SpaceAge(object):
def __init__(self):
pass
| agpl-3.0 | Python |
3bc508799cff4da89b592b7c97d162ee19d38a3a | Fix __init__ formatting | arista-eosplus/pyeapi | pyeapi/__init__.py | pyeapi/__init__.py | #
# Copyright (c) 2014, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__version__ = 'develop'
__author__ = 'Arista EOS+'
from .client import load_config, connect, connect_to, config_for
__all__ = ['load_config', 'connect', 'connect_to', 'config_for']
| #
# Copyright (c) 2014, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__version__ = 'develop'
__author__ = 'Arista EOS+'
from .client import load_config, connect, connect_to, config_for
__all__ = ['load_config', 'connect', 'connect_to', 'config_for']
| bsd-3-clause | Python |
1c795df23bd9d554eabcc419dfe830270e26fede | Add some info to the printouts | enjin/contracts | solidity/python/FormulaTestPurchase.py | solidity/python/FormulaTestPurchase.py | from sys import argv
from decimal import Decimal
from random import randrange
from Formula import calculatePurchaseReturn
def formulaTest(_supply, _reserveBalance, _reserveRatio, _amount):
fixed = calculatePurchaseReturn(_supply, _reserveBalance, _reserveRatio, _amount)
real = Decimal(_supply)*((1+Decimal(_amount)/Decimal(_reserveBalance))**(Decimal(_reserveRatio)/100)-1)
if fixed > real:
error = []
error.append('error occurred on:')
error.append('_supply = {}'.format(_supply))
error.append('_reserveBalance = {}'.format(_reserveBalance))
error.append('_reserveRatio = {}'.format(_reserveRatio))
error.append('_amount = {}'.format(_amount))
error.append('fixed result = {}'.format(fixed))
error.append('real result = {}'.format(real))
raise BaseException('\n'.join(error))
return float(fixed / real)
size = int(argv[1]) if len(argv) > 1 else 0
if size == 0:
size = input('How many test-cases would you like to execute? ')
n = 0
worstAccuracy = 1
numOfFailures = 0
while n < size: # avoid creating a large range in memory
_supply = randrange(1,10**26)
_reserveBalance = randrange(1,10**23)
_reserveRatio = randrange(1,99)
_amount = randrange(1,_supply)
try:
accuracy = formulaTest(_supply, _reserveBalance, _reserveRatio, _amount)
worstAccuracy = min(worstAccuracy,accuracy)
except Exception,error:
accuracy = 0
numOfFailures += 1
except BaseException,error:
print error
break
print 'Test #{}: accuracy = {:.12f}, worst accuracy = {:.12f}, num of failures = {}'.format(n,accuracy,worstAccuracy,numOfFailures)
n += 1
| from sys import argv
from decimal import Decimal
from random import randrange
from Formula import calculatePurchaseReturn
def formulaTest(_supply, _reserveBalance, _reserveRatio, _amount):
fixed = calculatePurchaseReturn(_supply, _reserveBalance, _reserveRatio, _amount)
real = Decimal(_supply)*((1+Decimal(_amount)/Decimal(_reserveBalance))**(Decimal(_reserveRatio)/100)-1)
if fixed > real:
error = []
error.append('error occurred on:')
error.append('_supply = {}'.format(_supply))
error.append('_reserveBalance = {}'.format(_reserveBalance))
error.append('_reserveRatio = {}'.format(_reserveRatio))
error.append('_amount = {}'.format(_amount))
error.append('fixed result = {}'.format(fixed))
error.append('real result = {}'.format(real))
raise BaseException('\n'.join(error))
return float(fixed / real)
size = int(argv[1]) if len(argv) > 1 else 0
if size == 0:
size = input('How many test-cases would you like to execute? ')
n = 0
worstAccuracy = 1
while n < size: # avoid creating a large range in memory
_supply = randrange(1,10**26)
_reserveBalance = randrange(1,10**23)
_reserveRatio = randrange(1,99)
_amount = randrange(1,_supply)
try:
accuracy = formulaTest(_supply, _reserveBalance, _reserveRatio, _amount)
if worstAccuracy > accuracy:
worstAccuracy = accuracy
print 'accuracy = {:.12f}, worst accuracy = {:.12f}'.format(accuracy,worstAccuracy)
n += 1
except Exception,error:
pass
except BaseException,error:
print error
break
| apache-2.0 | Python |
c6da29f07c30beefef53bfca4b67c4c27f85d69a | fix record category filtering | curaloucura/money-forecast,curaloucura/money-forecast,curaloucura/money-forecast | moneyforecast/records/admin.py | moneyforecast/records/admin.py | import logging
from django.contrib import admin
from records.models import Category, Record, Budget, SYSTEM_CATEGORIES
logger = logging.getLogger(__name__)
class CurrentUserAdmin(admin.ModelAdmin):
readonly_fields = ('user',)
def get_queryset(self, request):
qs = super(CurrentUserAdmin, self).get_queryset(request)
# make sure all users, even superusers, see only their own objects
# to see all categories a superuser need to add a hidden option
# ?all=True to the querystring eg.: /admin/categories/?all=True
if request.user.is_superuser and request.GET.get("all"):
return qs
return qs.filter(user=request.user)
def save_model(self, request, obj, form, change):
if getattr(obj, 'user', None) is None:
obj.user = request.user
obj.save()
class CategoryAdmin(CurrentUserAdmin):
list_display = ('name', 'type_category')
list_filter = ('type_category', )
prepopulated_fields = {"slug": ("name",)}
def get_queryset(self, request):
qs = super(CategoryAdmin, self).get_queryset(request)
return qs.exclude(type_category=SYSTEM_CATEGORIES)
class RecordAdmin(CurrentUserAdmin):
list_display = (
'description', 'category', 'amount', 'start_date',
'day_of_month', 'end_date', 'is_paid_out')
list_filter = ('start_date', 'end_date', 'is_paid_out', 'category')
list_display_links = ('description', 'category')
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "category":
kwargs["queryset"] = Category.objects.filter(user=request.user)
return super(RecordAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Record, RecordAdmin)
admin.site.register(Budget)
| from django.contrib import admin
from records.models import Category, Record, Budget, SYSTEM_CATEGORIES
class CurrentUserAdmin(admin.ModelAdmin):
readonly_fields = ('user',)
def get_queryset(self, request):
qs = super(CurrentUserAdmin, self).get_queryset(request)
# make sure all users, even superusers, see only their own objects
return qs.filter(user=request.user)
def save_model(self, request, obj, form, change):
if getattr(obj, 'user', None) is None:
obj.user = request.user
obj.save()
class CategoryAdmin(CurrentUserAdmin):
list_display = ('name', 'type_category')
list_filter = ('type_category',)
prepopulated_fields = {"slug": ("name",)}
# TODO: block changing the slug for internal categories
def get_queryset(self, request):
qs = super(CurrentUserAdmin, self).queryset(request)
# make sure all users, even superusers, see only their own objects
return qs.exclude(type_category=SYSTEM_CATEGORIES)
# TODO: Hide also extra income and others
class RecordAdmin(CurrentUserAdmin):
list_display = (
'description', 'category', 'amount', 'start_date',
'day_of_month', 'end_date', 'is_paid_out')
list_filter = ('start_date', 'end_date', 'is_paid_out', 'category')
list_display_links = ('description', 'category')
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "category":
kwargs["queryset"] = Category.objects.filter(user=request.user)
return super(RecordAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Record, RecordAdmin)
admin.site.register(Budget)
| unlicense | Python |
3e708149534926f976b73ec4e0a8a6ec3123ef40 | update tests | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/util/tests/test_sentry.py | corehq/util/tests/test_sentry.py | # coding=utf-8
import uuid
from django.test import SimpleTestCase, override_settings
from corehq.util.sentry import sanitize_system_passwords
class HQSentryTest(SimpleTestCase):
def test_couch_password(self):
couch_pw = uuid.uuid4().hex
couch_pw2 = uuid.uuid4().hex
overridden_dbs = {
'db{}'.format(i): {
'COUCH_HTTPS': False,
'COUCH_SERVER_ROOT': '127.0.0.1:5984',
'COUCH_USERNAME': 'commcarehq',
'COUCH_PASSWORD': pw,
'COUCH_DATABASE_NAME': 'commcarehq',
}
for i, pw in enumerate([couch_pw, couch_pw2])
}
subs = {
'pw': couch_pw,
'pw2': couch_pw2,
}
masks = {
'pw': '********',
'pw2': '********',
}
with override_settings(COUCH_DATABASES=overridden_dbs):
for test in [
'{pw}',
'http://username:{pw}@example.com',
'p1: {pw}, p2: {pw2}',
'no secrets here',
'in दिल्ली we say {pw}'
]:
formatted_test = test.format(**subs)
expected_result = test.format(**masks)
self.assertEqual(expected_result, sanitize_system_passwords.sanitize('key', formatted_test))
for edge_case in [
(None, None),
({'foo': 'bar'}, {'foo': 'bar'}),
]:
self.assertEqual(edge_case[1], sanitize_system_passwords.sanitize('key', edge_case[0]))
| # coding=utf-8
import uuid
from django.test import SimpleTestCase, override_settings
from corehq.util.sentry import HQSanitzeSystemPasswordsProcessor
class HQSentryTest(SimpleTestCase):
def test_couch_password(self):
couch_pw = uuid.uuid4().hex
couch_pw2 = uuid.uuid4().hex
overridden_dbs = {
'db{}'.format(i): {
'COUCH_HTTPS': False,
'COUCH_SERVER_ROOT': '127.0.0.1:5984',
'COUCH_USERNAME': 'commcarehq',
'COUCH_PASSWORD': pw,
'COUCH_DATABASE_NAME': 'commcarehq',
}
for i, pw in enumerate([couch_pw, couch_pw2])
}
subs = {
'pw': couch_pw,
'pw2': couch_pw2,
}
masks = {
'pw': '********',
'pw2': '********',
}
with override_settings(COUCH_DATABASES=overridden_dbs):
processor = HQSanitzeSystemPasswordsProcessor(client=None)
for test in [
'{pw}',
'http://username:{pw}@example.com',
'p1: {pw}, p2: {pw2}',
'no secrets here',
'in दिल्ली we say {pw}'
]:
formatted_test = test.format(**subs)
expected_result = test.format(**masks)
self.assertEqual(expected_result, processor.sanitize('key', formatted_test))
for edge_case in [
(None, None),
({'foo': 'bar'}, {'foo': 'bar'}),
]:
self.assertEqual(edge_case[1], processor.sanitize('key', edge_case[0]))
| bsd-3-clause | Python |
dcff103f954c9315011e0c8602cfcbd4b2cc076f | fix typo | Aluriak/24hducode2016,Aluriak/24hducode2016 | src/source_weather/source_weather.py | src/source_weather/source_weather.py | """
Definition of a source than add dumb data
"""
from src.source import Source
from src import default
from . import weather
class SourceWeather(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_LATITUDE in data_dict:
lat = data_dict[default.FIELD_LATITUDE]
lon = data_dict[default.FIELD_LONGITUDE]
data_dict[default.FIELD_WEATHER_NOW] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
| """
Definition of a source than add dumb data
"""
from src.source import Source
from src import default
from . import weather
class SourceWeather(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_LATITUDE in data_dict:
lat = data_dict[default.FIELD_LATITUDE]
lon = data_dict[default.FIELD_LONGITUDE]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
| unlicense | Python |
33e65176172da48b3b75e77aa20b68bbd4d95069 | Check pywin32 is installed before running | visio2img/sphinxcontrib-visio | sphinxcontrib/visio.py | sphinxcontrib/visio.py | # -*- coding: utf-8 -*-
# Copyright 2014 Yassu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pkg_resources
from hashlib import sha1
from docutils.parsers.rst import directives
from visio2img.visio2img import (
VisioFile, filter_pages, is_pywin32_available
)
from sphinxcontrib.imagehelper import (
ImageConverter, add_image_type, add_image_directive, add_figure_directive
)
class VisioConverter(ImageConverter):
option_spec = {
'page': directives.nonnegative_int,
'sheet': directives.unchanged,
}
def get_filename_for(self, node):
options = dict(uri=node['uri'], page=node.get('page'), name=node.get('sheet'))
hashed = sha1(str(options).encode('utf-8')).hexdigest()
return "visio-%s.png" % hashed
def convert(self, node, filename, to):
if not is_pywin32_available():
self.app.env.warn_node('Fail to convert visio image: win32com not installed',
node)
return False
try:
with VisioFile.Open(filename) as visio:
pages = filter_pages(visio.pages, node.get('page'), node.get('sheet'))
if len(pages) > 1:
msg = ('visio file [%s] contains multiple pages. '
'specify :page: or :name: option.')
self.app.warn(msg % node['uri'])
return False
pages[0].Export(to)
return True
except Exception as exc:
self.app.warn('Fail to convert visio image: %s' % exc)
return False
def setup(app):
add_image_type(app, 'visio', ('vsd', 'vsdx'), VisioConverter)
add_image_directive(app, 'visio', VisioConverter.option_spec)
add_figure_directive(app, 'visio', VisioConverter.option_spec)
return {
'version': pkg_resources.require('sphinxcontrib-visio')[0].version,
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| # -*- coding: utf-8 -*-
# Copyright 2014 Yassu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pkg_resources
from hashlib import sha1
from docutils.parsers.rst import directives
from visio2img.visio2img import VisioFile, filter_pages
from sphinxcontrib.imagehelper import (
ImageConverter, add_image_type, add_image_directive, add_figure_directive
)
class VisioConverter(ImageConverter):
option_spec = {
'page': directives.nonnegative_int,
'sheet': directives.unchanged,
}
def get_filename_for(self, node):
options = dict(uri=node['uri'], page=node.get('page'), name=node.get('sheet'))
hashed = sha1(str(options).encode('utf-8')).hexdigest()
return "visio-%s.png" % hashed
def convert(self, node, filename, to):
try:
with VisioFile.Open(filename) as visio:
pages = filter_pages(visio.pages, node.get('page'), node.get('sheet'))
if len(pages) > 1:
msg = ('visio file [%s] contains multiple pages. '
'specify :page: or :name: option.')
self.app.warn(msg % node['uri'])
return False
pages[0].Export(to)
return True
except Exception as exc:
self.app.warn('Fail to convert visio image: %s' % exc)
return False
def setup(app):
add_image_type(app, 'visio', ('vsd', 'vsdx'), VisioConverter)
add_image_directive(app, 'visio', VisioConverter.option_spec)
add_figure_directive(app, 'visio', VisioConverter.option_spec)
return {
'version': pkg_resources.require('sphinxcontrib-visio')[0].version,
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| apache-2.0 | Python |
808d727de4360311fe4bb134b567b96628a1f7f9 | bump version number | ephes/django-indieweb,ephes/django-indieweb | indieweb/__init__.py | indieweb/__init__.py | __version__ = '0.0.4'
| __version__ = '0.0.3'
| bsd-3-clause | Python |
d0f43e157f5e51fdb2b4a22eff046c7edb75d2c6 | bump version | daler/sphinxleash,daler/sphinxleash | sphinxleash/version.py | sphinxleash/version.py | __version__ = "0.2"
| __version__ = "0.1"
| mit | Python |
b6f04f32556fc8251566212c56159dcfff7bf596 | Add unexpected character bug fix | the-raspberry-pi-guy/lidar | pi_approach/Distance_Pi/distance.py | pi_approach/Distance_Pi/distance.py | # Lidar Project Distance Subsystem
import serial
import socket
import time
import sys
sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries")
import serverxclient as cli
arduino_dist = serial.Serial('/dev/ttyUSB0',9600)
client = cli.Client()
class distance_controller(object):
"""An all-powerful distance-finding controller"""
def get_distance(self):
distance = arduino_dist.readline()
return distance
def setup_handshake(self):
connected = False
while not connected:
try:
client.socket_connection()
connected = True
except:
print "Failure"
time.sleep(2)
received_communication = client.receive_data()
if received_communication == "VERIFY?":
hand_shake = "DISTANCE!"
client.send_data(hand_shake)
else:
print "Unidentified communication"
def active_listen(self):
received_communication = client.receive_data()
if received_communication == "FIRE":
result = self.get_distance()
try:
test_int = int(result)
print result
client.send_data(result)
except:
print "Unexpected character"
client.send_data("0")
def main(self):
self.setup_handshake()
while True:
self.active_listen()
distance = distance_controller()
distance.main()
| # Lidar Project Distance Subsystem
import serial
import socket
import time
import sys
sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries")
import serverxclient as cli
arduino_dist = serial.Serial('/dev/ttyUSB0',9600)
client = cli.Client()
class distance_controller(object):
"""An all-powerful distance-finding controller"""
def get_distance(self):
distance = arduino_dist.readline()
return distance
def setup_handshake(self):
connected = False
while not connected:
try:
client.socket_connection()
connected = True
except:
print "Failure"
time.sleep(2)
received_communication = client.receive_data()
if received_communication == "VERIFY?":
hand_shake = "DISTANCE!"
client.send_data(hand_shake)
else:
print "Unidentified communication"
def active_listen(self):
received_communication = client.receive_data()
if received_communication == "FIRE":
result = self.get_distance()
try:
test_int = int(result)
print result
client.send_data(result)
except:
print "Unexpected character"
def main(self):
self.setup_handshake()
while True:
self.active_listen()
distance = distance_controller()
distance.main()
| mit | Python |
940e5d5446a04d6228ea5fad2ac1f9ed4cf109a7 | fix doxygen-link.py by skipping invalid entries in libnl.dict | cfriedt/libnl,greearb/libnl-ct,claudelee/libnl,baloo/libnl,baloo/libnl,claudelee/libnl,honsys/libnl,congwang/libnl,tgraf/libnl,UnicronNL/libnl3,dsahern/libnl,mshirley/libnl,HolyShitMan/libnl-1,HolyShitMan/libnl-1,vyos/libnl3,cyclops8456/libnl3,mshirley/libnl,honsys/libnl,vyos/libnl3,kolyshkin/libnl,tklauser/libnl,honsys/libnl,tgraf/libnl,congwang/libnl,vyos/libnl3,kolyshkin/libnl,greearb/libnl-ct,HolyShitMan/libnl-1,thom311/libnl,cfriedt/libnl,cyclops8456/libnl3,houstar/libnl,cyclops8456/libnl3,HolyShitMan/libnl-1,tklauser/libnl,vyos/libnl3,greearb/libnl-ct,tklauser/libnl,dsahern/libnl,cyclops8456/libnl3,mshirley/libnl,cfriedt/libnl,baloo/libnl,dsahern/libnl,UnicronNL/libnl3,houstar/libnl,tgraf/libnl,kolyshkin/libnl,tklauser/libnl,tgraf/libnl,congwang/libnl,thom311/libnl,cfriedt/libnl,mshirley/libnl,honsys/libnl,UnicronNL/libnl3,houstar/libnl,houstar/libnl,thom311/libnl,UnicronNL/libnl3,dsahern/libnl,congwang/libnl,claudelee/libnl,claudelee/libnl,kolyshkin/libnl,baloo/libnl | doc/doxygen-link.py | doc/doxygen-link.py | #!/usr/bin/env python
from __future__ import print_function
import fileinput
import re
import sys
rc_script = re.compile(r'\s*(.*\S)?\s*')
def parse_dict(filename):
links = {}
for line in open(filename, 'r'):
m = re.match('^([^=]+)=([^\n]+)$', line);
if not m:
continue
name = m.group(1)
value = m.group(2)
# strip leading and trailing whitespace
m = rc_script.match(name)
if m:
name = m.group(1)
# skip special names
if name == '':
continue
if name == '\\':
continue
links[name] = "<a href=\"" + value + "\" class=\"dg\">" + name + "</a>"
return links
links = parse_dict(sys.argv[1])
def translate(match):
return links[match.group(1)]
# match for all names, with word boundaries \b
rc = re.compile(r'\b(' + '|'.join(map(re.escape, sorted(links, reverse=True))) + r')\b')
for line in open(sys.argv[2], 'r'):
print(rc.sub(translate, line), end='')
| #!/usr/bin/env python
import fileinput
import re
import sys
links = {}
for line in open(sys.argv[1], 'r'):
m = re.match('^([^=]+)=([^\n]+)$', line);
if m:
link = "<a href=\"" + m.group(2) + "\" class=\"dg\">" + m.group(1) + "</a>"
links[m.group(1)] = link
def translate(match):
return links[match.group(0)]
rc = re.compile('|'.join(map(re.escape, sorted(links, reverse=True))))
for line in open(sys.argv[2], 'r'):
print(rc.sub(translate, line))
| lgpl-2.1 | Python |
aeb4592f6cde29c237e10452c518006913ab9ecb | Mark the revoke kvs backend deprecated, for removal in Kilo | MaheshIBM/keystone,maestro-hybrid-cloud/keystone,MaheshIBM/keystone,ilay09/keystone,cernops/keystone,rushiagr/keystone,promptworks/keystone,mahak/keystone,roopali8/keystone,ilay09/keystone,ajayaa/keystone,UTSA-ICS/keystone-kerberos,maestro-hybrid-cloud/keystone,klmitch/keystone,dims/keystone,openstack/keystone,himanshu-setia/keystone,blueboxgroup/keystone,nuxeh/keystone,jumpstarter-io/keystone,ajayaa/keystone,jumpstarter-io/keystone,jamielennox/keystone,openstack/keystone,vivekdhayaal/keystone,dstanek/keystone,jonnary/keystone,rushiagr/keystone,rajalokan/keystone,ging/keystone,rajalokan/keystone,idjaw/keystone,ging/keystone,openstack/keystone,roopali8/keystone,klmitch/keystone,cernops/keystone,takeshineshiro/keystone,dstanek/keystone,idjaw/keystone,promptworks/keystone,vivekdhayaal/keystone,jonnary/keystone,promptworks/keystone,dstanek/keystone,jumpstarter-io/keystone,vivekdhayaal/keystone,UTSA-ICS/keystone-kerberos,ilay09/keystone,rushiagr/keystone,himanshu-setia/keystone,rajalokan/keystone,nuxeh/keystone,takeshineshiro/keystone,mahak/keystone,jamielennox/keystone,mahak/keystone,nuxeh/keystone,dims/keystone,blueboxgroup/keystone | keystone/contrib/revoke/backends/kvs.py | keystone/contrib/revoke/backends/kvs.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo.utils import timeutils
from keystone.common import kvs
from keystone import config
from keystone.contrib import revoke
from keystone import exception
from keystone.openstack.common import versionutils
CONF = config.CONF
_EVENT_KEY = 'os-revoke-events'
_KVS_BACKEND = 'openstack.kvs.Memory'
class Revoke(revoke.Driver):
@versionutils.deprecated(
versionutils.deprecated.JUNO,
in_favor_of='keystone.contrib.revoke.backends.sql',
remove_in=+1,
what='keystone.contrib.revoke.backends.kvs')
def __init__(self, **kwargs):
super(Revoke, self).__init__()
self._store = kvs.get_key_value_store('os-revoke-driver')
self._store.configure(backing_store=_KVS_BACKEND, **kwargs)
def _get_event(self):
try:
return self._store.get(_EVENT_KEY)
except exception.NotFound:
return []
def _prune_expired_events_and_get(self, last_fetch=None, new_event=None):
pruned = []
results = []
expire_delta = datetime.timedelta(seconds=CONF.token.expiration)
oldest = timeutils.utcnow() - expire_delta
# TODO(ayoung): Store the time of the oldest event so that the
# prune process can be skipped if none of the events have timed out.
with self._store.get_lock(_EVENT_KEY) as lock:
events = self._get_event()
if new_event is not None:
events.append(new_event)
for event in events:
revoked_at = event.revoked_at
if revoked_at > oldest:
pruned.append(event)
if last_fetch is None or revoked_at > last_fetch:
results.append(event)
self._store.set(_EVENT_KEY, pruned, lock)
return results
def get_events(self, last_fetch=None):
return self._prune_expired_events_and_get(last_fetch=last_fetch)
def revoke(self, event):
self._prune_expired_events_and_get(new_event=event)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo.utils import timeutils
from keystone.common import kvs
from keystone import config
from keystone.contrib import revoke
from keystone import exception
CONF = config.CONF
_EVENT_KEY = 'os-revoke-events'
_KVS_BACKEND = 'openstack.kvs.Memory'
class Revoke(revoke.Driver):
def __init__(self, **kwargs):
super(Revoke, self).__init__()
self._store = kvs.get_key_value_store('os-revoke-driver')
self._store.configure(backing_store=_KVS_BACKEND, **kwargs)
def _get_event(self):
try:
return self._store.get(_EVENT_KEY)
except exception.NotFound:
return []
def _prune_expired_events_and_get(self, last_fetch=None, new_event=None):
pruned = []
results = []
expire_delta = datetime.timedelta(seconds=CONF.token.expiration)
oldest = timeutils.utcnow() - expire_delta
# TODO(ayoung): Store the time of the oldest event so that the
# prune process can be skipped if none of the events have timed out.
with self._store.get_lock(_EVENT_KEY) as lock:
events = self._get_event()
if new_event is not None:
events.append(new_event)
for event in events:
revoked_at = event.revoked_at
if revoked_at > oldest:
pruned.append(event)
if last_fetch is None or revoked_at > last_fetch:
results.append(event)
self._store.set(_EVENT_KEY, pruned, lock)
return results
def get_events(self, last_fetch=None):
return self._prune_expired_events_and_get(last_fetch=last_fetch)
def revoke(self, event):
self._prune_expired_events_and_get(new_event=event)
| apache-2.0 | Python |
40151dd39dc6aa51f50bb311dc81381eeb4cf282 | fix precision on lmsensors | Ssawa/Diamond,actmd/Diamond,jaingaurav/Diamond,Netuitive/Diamond,tusharmakkar08/Diamond,hamelg/Diamond,Netuitive/Diamond,anandbhoraskar/Diamond,bmhatfield/Diamond,hamelg/Diamond,actmd/Diamond,tusharmakkar08/Diamond,jaingaurav/Diamond,Netuitive/netuitive-diamond,anandbhoraskar/Diamond,russss/Diamond,Ssawa/Diamond,Ensighten/Diamond,gg7/diamond,hamelg/Diamond,skbkontur/Diamond,Clever/Diamond,bmhatfield/Diamond,Netuitive/Diamond,MichaelDoyle/Diamond,actmd/Diamond,tusharmakkar08/Diamond,MichaelDoyle/Diamond,russss/Diamond,MichaelDoyle/Diamond,gg7/diamond,russss/Diamond,python-diamond/Diamond,MichaelDoyle/Diamond,skbkontur/Diamond,Clever/Diamond,Ensighten/Diamond,anandbhoraskar/Diamond,Netuitive/netuitive-diamond,Ssawa/Diamond,skbkontur/Diamond,jaingaurav/Diamond,gg7/diamond,jaingaurav/Diamond,bmhatfield/Diamond,tusharmakkar08/Diamond,Clever/Diamond,bmhatfield/Diamond,Ensighten/Diamond,python-diamond/Diamond,Netuitive/Diamond,russss/Diamond,anandbhoraskar/Diamond,actmd/Diamond,Clever/Diamond,Ssawa/Diamond,Ensighten/Diamond,skbkontur/Diamond,gg7/diamond,python-diamond/Diamond,hamelg/Diamond,Netuitive/netuitive-diamond,Netuitive/netuitive-diamond | src/collectors/lmsensors/lmsensors.py | src/collectors/lmsensors/lmsensors.py | # coding=utf-8
"""
This class collects data from libsensors. It should work against libsensors 2.x
and 3.x, pending support within the PySensors Ctypes binding:
[http://pypi.python.org/pypi/PySensors/](http://pypi.python.org/pypi/PySensors/)
Requires: 'sensors' to be installed, configured, and the relevant kernel
modules to be loaded. Requires: PySensors requires Python 2.6+
If you're having issues, check your version of 'sensors'. This collector
written against: sensors version 3.1.2 with libsensors version 3.1.2
#### Dependencies
* [PySensors](http://pypi.python.org/pypi/PySensors/)
"""
import diamond.collector
from diamond.collector import str_to_bool
try:
import sensors
sensors # workaround for pyflakes issue #13
except ImportError:
sensors = None
class LMSensorsCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(LMSensorsCollector, self).get_default_config_help()
config_help.update({
'send_zero': 'Send sensor data even when there is no value'
})
return config_help
def get_default_config(self):
"""
Returns default collector settings.
"""
config = super(LMSensorsCollector, self).get_default_config()
config.update({
'path': 'sensors',
'send_zero': False
})
return config
def collect(self):
if sensors is None:
self.log.error('Unable to import module sensors')
return {}
sensors.init()
try:
for chip in sensors.iter_detected_chips():
for feature in chip:
label = feature.label.replace(' ', '-')
value = None
try:
value = feature.get_value()
except Exception:
if str_to_bool(self.config['send_zero']):
value = 0
if value is not None:
self.publish(".".join([str(chip), label]),
value,
precision=2)
finally:
sensors.cleanup()
| # coding=utf-8
"""
This class collects data from libsensors. It should work against libsensors 2.x
and 3.x, pending support within the PySensors Ctypes binding:
[http://pypi.python.org/pypi/PySensors/](http://pypi.python.org/pypi/PySensors/)
Requires: 'sensors' to be installed, configured, and the relevant kernel
modules to be loaded. Requires: PySensors requires Python 2.6+
If you're having issues, check your version of 'sensors'. This collector
written against: sensors version 3.1.2 with libsensors version 3.1.2
#### Dependencies
* [PySensors](http://pypi.python.org/pypi/PySensors/)
"""
import diamond.collector
from diamond.collector import str_to_bool
try:
import sensors
sensors # workaround for pyflakes issue #13
except ImportError:
sensors = None
class LMSensorsCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(LMSensorsCollector, self).get_default_config_help()
config_help.update({
'send_zero': 'Send sensor data even when there is no value'
})
return config_help
def get_default_config(self):
"""
Returns default collector settings.
"""
config = super(LMSensorsCollector, self).get_default_config()
config.update({
'path': 'sensors',
'send_zero': False
})
return config
def collect(self):
if sensors is None:
self.log.error('Unable to import module sensors')
return {}
sensors.init()
try:
for chip in sensors.iter_detected_chips():
for feature in chip:
label = feature.label.replace(' ', '-')
value = None
try:
value = feature.get_value()
except Exception:
if str_to_bool(self.config['send_zero']):
value = 0
if value is not None:
self.publish(".".join([str(chip), label]), value)
finally:
sensors.cleanup()
| mit | Python |
4f7f4b53d00bf13360526c7b51b72ab862469e3d | Update `test_dcos_command` to work with new CoreOS | dcos/shakedown | tests/acceptance/test_dcos_command.py | tests/acceptance/test_dcos_command.py | import json
from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
assert 'CoreOS' in output
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
# Get all IPs associated with the 'jenkins' task running in the 'marathon' service
service_ips = get_service_ips('marathon', 'jenkins')
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
| import json
from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
assert output.startswith('Core')
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
# Get all IPs associated with the 'jenkins' task running in the 'marathon' service
service_ips = get_service_ips('marathon', 'jenkins')
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
| apache-2.0 | Python |
6c8e0a41adf7d9edc075c60281d7273c09b98b65 | write out json | lacker/chess,lacker/chess,lacker/chess | Chess/build.py | Chess/build.py | #!/usr/bin/python
import json
import shutil
import os
base_dir = os.path.join(*os.path.split(__file__)[:-1])
images = base_dir + "/images"
for image in os.listdir(images):
# Strip off the .png
name = image.split(".")[0]
# The source picture
source = os.path.join(images, image)
# Check if the destination dir exists
target_dir = "%s/iOS/Images.xcassets/%s.imageset" % (base_dir, name)
if os.path.exists(target_dir):
print target_dir, "already exists"
continue
print target_dir, "does not exist. creating it..."
os.mkdir(target_dir)
images_data = []
for suffix, scale in (("", "1x"), ("@2x", "2x"), ("@3x", "3x")):
target_fname = name + suffix + ".png"
images_data.append({
"idiom": "universal",
"scale": scale,
"filename": target_fname,
})
target = target_dir + "/" + target_fname
shutil.copyfile(source, target)
# Create json registry
contents = {
"images": images_data,
"info": {
"version": 2,
"author": "build.py",
},
}
# Create Contents.json
f = open(target_dir + "/Contents.json", "w")
f.write(json.dumps(contents))
f.close()
| #!/usr/bin/python
import json
import shutil
import os
base_dir = os.path.join(*os.path.split(__file__)[:-1])
images = base_dir + "/images"
for image in os.listdir(images):
# Strip off the .png
name = image.split(".")[0]
# The source picture
source = os.path.join(images, image)
# Check if the destination dir exists
target_dir = "%s/iOS/Images.xcassets/%s.imageset" % (base_dir, name)
if os.path.exists(target_dir):
print target_dir, "already exists"
continue
print target_dir, "does not exist. creating it..."
os.mkdir(target_dir)
images_data = []
for suffix, scale in (("", "1x"), ("@2x", "2x"), ("@3x", "3x")):
target_fname = name + suffix + ".png"
images_data.append({
"idiom": "universal",
"scale": scale,
"filename": target_fname,
})
target = target_dir + "/" + target_fname
shutil.copyfile(source, target)
# Create json registry
contents = {
"images": images_data,
"info": {
"version": 2,
"author": "build.py",
},
}
# TODO: actually create Contents.json
print "contents:", json.dumps(contents)
| mit | Python |
cddaa0a097ace8eaf671800727ca291112a64bce | Update test.py | jcfromsiberia/RedZone,jcfromsiberia/RedZone,jcfromsiberia/RedZone | cython/test.py | cython/test.py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
__author__ = 'jc'
from RedZone import *
context = Context({
"items": [
{"text": "Hello World!", "active": True},
{"text": "Foo", "active": True},
{"text": "Bar", "active": False}
],
"numbers": {
"first": 5,
"second": 11,
"third": True
}
})
tpl = FileTemplate('test.tpl')
print tpl.render(context)
| #!/usr/bin/env python
#-*- coding: utf-8 -*-
__author__ = 'jc'
from RedZone import *
from RedZone import *
context = Context({
"items": [
{"text": "Hello World!", "active": True},
{"text": "Foo", "active": True},
{"text": "Bar", "active": False}
],
"numbers": {
"first": 5,
"second": 11,
"third": True
}
})
tpl = FileTemplate('test.tpl')
print tpl.render(context)
| bsd-2-clause | Python |
f6a044e95c1a1bdf69bb57730876c37b37f0cbe3 | Add KeplerLIghtCurveFile class | gully/PyKE,christinahedges/PyKE | pyke/lightcurve.py | pyke/lightcurve.py | import numpy as np
from astropy.io import fits
__all__ = ['LightCurve', 'KeplerLightCurveFile']
class LightCurve(object):
"""
Implements a basic time-series class for a generic lightcurve.
Parameters
----------
time : numpy array-like
Time-line.
flux : numpy array-like
Data flux for every time point.
"""
def __init__(self, time, flux, flux_err):
self.time = time
self.flux = flux
self.flux_err = flux_err
def detrend(self, method='arclength', **kwargs):
"""
"""
if method == 'arclength':
return ArcLengthDetrender().detrend(self.time, self.flux, **kwargs)
else:
return FirstDifferenceDetrender().detrend(self.time, self.flux, **kwargs)
def draw(self):
raise NotImplementedError("Should we implement a LightCurveDrawer class?")
def to_csv(self):
raise NotImplementedError()
def to_fits(self):
raise NotImplementedError()
class KeplerLightCurveFile(LightCurve):
def __init__(self, path, **kwargs):
self.hdu = fits.open(path, **kwargs)
self.time = self.hdu[1].data['TIME']
self.flux = self.get_flux('PDCSAP_FLUX')
def flux_types(self):
"""Returns a list of available flux types for this light curve file"""
return [n for n in self.hdu[1].data.columns.names if 'FLUX' in n]
def get_flux(self, flux_type):
if flux_type is in self._flux_types():
return self.hdu[1].data[flux_type]
else:
raise KeyError("{} is not a valid flux type. Available types are: {}".
format(flux_type, self._flux_types))
def set_flux_type(self, flux_type):
self.flux = self.get_flux(flux_type)
class Detrender(object):
"""
"""
def detrend(self):
"""
Returns a LightCurve object
"""
pass
class FirstDifferenceDetrender(Detrender):
"""
First difference detrending
"""
def detrend(time, flux):
return LightCurve(time, flux - np.append(0, flux[1:]))
class LinearDetrender(Detrender):
"""
"""
@staticmethod
def detrend(time, flux):
pass
class ArcLengthDetrender(Detrender):
def detrend(time, flux):
pass
class EMDDetrender(Detrender):
"""
Empirical Mode Decomposition Detrender
"""
def detrend(time, flux):
pass
class PolynomialDetrender(Detrender):
"""
"""
def detrend(time, flux):
pass
| import numpy as np
__all__ = ['LightCurve']
class LightCurve(object):
"""
Implements a basic time-series class for a generic lightcurve.
Parameters
----------
time : numpy array-like
Time-line.
flux : numpy array-like
Data flux for every time point.
"""
def __init__(self, time, flux):
self.time = time
self.flux = flux
def detrend(self, method='arclength', **kwargs):
"""
"""
if method == 'arclength':
return ArcLengthDetrender().detrend(self.time, self.flux, **kwargs)
else:
return FirstDifferenceDetrender().detrend(self.time, self.flux, **kwargs)
def to_pandas(self):
raise NotImplementedError("@gully")
class Detrender(object):
"""
"""
def detrend(self):
"""
Returns a LightCurve object
"""
pass
class FirstDifferenceDetrender(Detrender):
"""
First difference detrending
"""
def detrend(time, flux):
return LightCurve(time, flux - np.append(0, flux[1:]))
class LinearDetrender(Detrender):
"""
"""
@staticmethod
def detrend(time, flux):
pass
class ArcLengthDetrender(Detrender):
def detrend(time, flux):
pass
class EMDDetrender(Detrender):
"""
Empirical Mode Decomposition Detrender
"""
def detrend(time, flux):
pass
class PolynomialDetrender(Detrender):
"""
"""
def detrend(time, flux):
pass
| mit | Python |
f0a20866fa6d85459b9e09a00d62f0e193edb497 | Remove rogue print statement | RedHatInsights/insights-core,RedHatInsights/insights-core | insights/settings.py | insights/settings.py | import sys
import os
import yaml
import pkgutil
INSTALL_DIR = os.path.dirname(os.path.abspath(__file__))
NAME = "insights.yaml"
DEFAULTS_NAME = "defaults.yaml"
def load_and_read(path):
if os.path.exists(path):
with open(path) as fp:
return fp.read()
CONFIGS = [
pkgutil.get_data('insights', 'defaults.yaml'),
load_and_read(os.path.join("/etc", NAME)), # System-wide config
load_and_read(os.path.join(os.path.expanduser("~/.local"), NAME)), # User-specific config
load_and_read("." + NAME) # Directory-specific config
]
config = {}
for c in CONFIGS:
try:
y = yaml.load(c)
for name, section in y.iteritems():
if name in config:
config[name].update(section)
else:
config[name] = section
except:
pass
# The defaults section is for keys that belong in every section and can be
# overridden in particular sections if desired. This adds the default values
# to each section if they aren't already there.
for k in config["defaults"]:
for section in {s for s in config if s != "defaults"}:
if k not in config[section]:
config[section][k] = config["defaults"][k]
# Flatten the attribute hierarchy a bit by cutting out "config".
for name, section in config.iteritems():
setattr(sys.modules[__name__], name, section)
| import sys
import os
import yaml
import pkgutil
INSTALL_DIR = os.path.dirname(os.path.abspath(__file__))
print INSTALL_DIR
NAME = "insights.yaml"
DEFAULTS_NAME = "defaults.yaml"
def load_and_read(path):
if os.path.exists(path):
with open(path) as fp:
return fp.read()
CONFIGS = [
pkgutil.get_data('insights', 'defaults.yaml'),
load_and_read(os.path.join("/etc", NAME)), # System-wide config
load_and_read(os.path.join(os.path.expanduser("~/.local"), NAME)), # User-specific config
load_and_read("." + NAME) # Directory-specific config
]
config = {}
for c in CONFIGS:
try:
y = yaml.load(c)
for name, section in y.iteritems():
if name in config:
config[name].update(section)
else:
config[name] = section
except:
pass
# The defaults section is for keys that belong in every section and can be
# overridden in particular sections if desired. This adds the default values
# to each section if they aren't already there.
for k in config["defaults"]:
for section in {s for s in config if s != "defaults"}:
if k not in config[section]:
config[section][k] = config["defaults"][k]
# Flatten the attribute hierarchy a bit by cutting out "config".
for name, section in config.iteritems():
setattr(sys.modules[__name__], name, section)
| apache-2.0 | Python |
49fa606664f7fb28799678574b34328d2f16567e | use the queue config for message passing not cache | thenetcircle/dino,thenetcircle/dino,thenetcircle/dino,thenetcircle/dino | dino/server.py | dino/server.py | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from flask import Flask
from flask_socketio import SocketIO
from werkzeug.contrib.fixers import ProxyFix
from dino import environ
from dino.config import ConfigKeys
__author__ = 'Oscar Eriksson <oscar@gmail.com>'
logger = logging.getLogger(__name__)
logging.getLogger('amqp').setLevel(logging.INFO)
logging.getLogger('kafka.conn').setLevel(logging.INFO)
def create_app():
_app = Flask(__name__)
# used for encrypting cookies for handling sessions
_app.config['SECRET_KEY'] = 'abc492ee-9739-11e6-a174-07f6b92d4a4b'
message_queue_type = environ.env.config.get(ConfigKeys.TYPE, domain=ConfigKeys.QUEUE, default=None)
if message_queue_type is None and not (len(environ.env.config) == 0 or environ.env.config.get(ConfigKeys.TESTING)):
raise RuntimeError('no message queue type specified')
message_queue = 'redis://%s' % environ.env.config.get(ConfigKeys.HOST, domain=ConfigKeys.QUEUE, default='')
message_channel = 'dino_%s' % environ.env.config.get(ConfigKeys.ENVIRONMENT, default='test')
logger.info('message_queue: %s' % message_queue)
_socketio = SocketIO(
_app,
logger=logger,
engineio_logger=os.environ.get('DINO_DEBUG', '0') == '1',
async_mode='eventlet',
message_queue=message_queue,
channel=message_channel)
# preferably "emit" should be set during env creation, but the socketio object is not created until after env is
environ.env.out_of_scope_emit = _socketio.emit
_app.wsgi_app = ProxyFix(_app.wsgi_app)
return _app, _socketio
app, socketio = create_app()
import dino.endpoint.sockets
| #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from flask import Flask
from flask_socketio import SocketIO
from werkzeug.contrib.fixers import ProxyFix
from dino import environ
from dino.config import ConfigKeys
__author__ = 'Oscar Eriksson <oscar@gmail.com>'
logger = logging.getLogger(__name__)
logging.getLogger('amqp').setLevel(logging.INFO)
logging.getLogger('kafka.conn').setLevel(logging.INFO)
def create_app():
_app = Flask(__name__)
# used for encrypting cookies for handling sessions
_app.config['SECRET_KEY'] = 'abc492ee-9739-11e6-a174-07f6b92d4a4b'
message_queue_type = environ.env.config.get(ConfigKeys.TYPE, domain=ConfigKeys.QUEUE, default=None)
if message_queue_type is None and not (len(environ.env.config) == 0 or environ.env.config.get(ConfigKeys.TESTING)):
raise RuntimeError('no message queue type specified')
message_queue = 'redis://%s' % environ.env.config.get(ConfigKeys.HOST, domain=ConfigKeys.CACHE_SERVICE, default='')
message_channel = 'dino_%s' % environ.env.config.get(ConfigKeys.ENVIRONMENT, default='test')
logger.info('message_queue: %s' % message_queue)
_socketio = SocketIO(
_app,
logger=logger,
engineio_logger=os.environ.get('DINO_DEBUG', '0') == '1',
async_mode='eventlet',
message_queue=message_queue,
channel=message_channel)
# preferably "emit" should be set during env creation, but the socketio object is not created until after env is
environ.env.out_of_scope_emit = _socketio.emit
_app.wsgi_app = ProxyFix(_app.wsgi_app)
return _app, _socketio
app, socketio = create_app()
import dino.endpoint.sockets
| apache-2.0 | Python |
6b8f6a2f5a2b7b10b31832ddad85e27c81e51eb9 | Add 'section' to IRC colour stylesheet | xtaran/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot | DebianDevelChangesBot/utils/irc_colours.py | DebianDevelChangesBot/utils/irc_colours.py | # -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def colourise(s):
tags = {
'reset': chr(15),
'b': chr(2),
'/b': chr(2),
'u': chr(31),
'/u': chr(31),
'black': chr(3) + "01",
'darkblue': chr(3) + "02",
'darkgreen': chr(3) + "03",
'brightred': chr(3) + "04",
'darkred': chr(3) + "05",
'magenta': chr(3) + "06",
'darkyellow': chr(3) + "07",
'brightyellow': chr(3) + "08",
'lightgreen': chr(3) + "09",
'darkcyan': chr(3) + "10",
'lightcyan': chr(3) + "11",
'lightblue': chr(3) + "12",
'pink': chr(3) + "13",
'grey': chr(3) + "14",
'white': chr(3) + "00",
'nostyle': '',
}
# Stylesheet
tags.update({
'by': tags['lightcyan'],
'package': tags['darkgreen'],
'version': tags['brightyellow'],
'distribution': tags['lightblue'],
'security': tags['brightred'],
'severity': tags['brightred'],
'urgency': tags['brightred'],
'new': tags['brightred'],
'section': tags['grey'],
'url': tags['nostyle'],
'/url': tags['nostyle'],
'bug': tags['b'],
'/bug': tags['/b'],
'title': tags['nostyle'],
})
s = s + '[reset]'
for k, v in tags.iteritems():
s = s.replace('[%s]' % k, v)
return s
| # -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def colourise(s):
tags = {
'reset': chr(15),
'b': chr(2),
'/b': chr(2),
'u': chr(31),
'/u': chr(31),
'black': chr(3) + "01",
'darkblue': chr(3) + "02",
'darkgreen': chr(3) + "03",
'brightred': chr(3) + "04",
'darkred': chr(3) + "05",
'magenta': chr(3) + "06",
'darkyellow': chr(3) + "07",
'brightyellow': chr(3) + "08",
'lightgreen': chr(3) + "09",
'darkcyan': chr(3) + "10",
'lightcyan': chr(3) + "11",
'lightblue': chr(3) + "12",
'pink': chr(3) + "13",
'grey': chr(3) + "14",
'white': chr(3) + "00",
'nostyle': '',
}
# Stylesheet
tags.update({
'by': tags['lightcyan'],
'package': tags['darkgreen'],
'version': tags['brightyellow'],
'distribution': tags['lightblue'],
'security': tags['brightred'],
'severity': tags['brightred'],
'urgency': tags['brightred'],
'new': tags['brightred'],
'url': tags['nostyle'],
'/url': tags['nostyle'],
'bug': tags['b'],
'/bug': tags['/b'],
'title': tags['nostyle'],
})
s = s + '[reset]'
for k, v in tags.iteritems():
s = s.replace('[%s]' % k, v)
return s
| agpl-3.0 | Python |
5ef8c450dcfb063b25de74d1ea49b9d880f8e58f | debug checkup | jasuka/pyBot,jasuka/pyBot | modules/logger_daemon.py | modules/logger_daemon.py | ##Logger daemon version 1
from time import gmtime, strftime
import os
def logger_daemon ( self ):
if os.path.exists(self.config["log-path"]) == True:
if len(self.msg) >= 4:
if "353" in self.msg or "366" in self.msg or "412" in self.msg:
return
else:
brackets = self.config["TimestampBrackets"].split(",")
usertxt = ""
chan = self.msg[2]
for i in range(3, len(self.msg)):
usertxt += self.msg[i] +" "
if chan[0] == "#":
log = self.config["log-path"]+chan+".log"
logline = brackets[0]+strftime(self.config["timeformat"])+brackets[1] + " " + self.get_nick() + " @ " + chan + " " + usertxt
with open(log, "a") as log:
log.write(logline)
log.flush()
else:
os.mkdir(self.config["log-path"])
if self.config["debug"] == "true":
print("Cannot find existing folder for logs, creating: "+self.config["log-path"])
#if os.path.exists(self.config["log-path"]) == True:
# seendb = self.config["log-path"]+"seendb.txt"
# with open(seendb, "w"
# Jäi kesken, tuli muuta.
| ##Logger daemon version 1
from time import gmtime, strftime
import os
def logger_daemon ( self ):
if os.path.exists(self.config["log-path"]) == True:
if len(self.msg) >= 4:
if "353" in self.msg or "366" in self.msg or "412" in self.msg:
return
else:
brackets = self.config["TimestampBrackets"].split(",")
usertxt = ""
chan = self.msg[2]
for i in range(3, len(self.msg)):
usertxt += self.msg[i] +" "
if chan[0] == "#":
log = self.config["log-path"]+chan+".log"
logline = brackets[0]+strftime(self.config["timeformat"])+brackets[1] + " " + self.get_nick() + " @ " + chan + " " + usertxt
with open(log, "a") as log:
log.write(logline)
log.flush()
else:
print("Cannot find existing folder for logs, creating: "+self.config["log-path"])
os.mkdir(self.config["log-path"])
#if os.path.exists(self.config["log-path"]) == True:
# seendb = self.config["log-path"]+"seendb.txt"
# with open(seendb, "w"
# Jäi kesken, tuli muuta.
| mit | Python |
c0e6327c8bc5b984969728c64f41648e355a14f2 | add deploy step | NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio | ci_scripts/test_osx-gui_wallet.py | ci_scripts/test_osx-gui_wallet.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import multiprocessing as mp
import neblio_ci_libs as nci
working_dir = os.getcwd()
build_dir = "build"
nci.mkdir_p(build_dir)
os.chdir(build_dir)
nci.call_with_err_code('brew update')
nci.call_with_err_code('brew outdated qt || brew upgrade qt')
nci.call_with_err_code('brew outdated berkeley-db@4 || brew upgrade berkeley-db@4')
nci.call_with_err_code('brew outdated boost@1.60 || brew upgrade boost@1.60')
nci.call_with_err_code('brew outdated miniupnpc || brew upgrade miniupnpc')
nci.call_with_err_code('brew outdated curl || brew upgrade curl')
nci.call_with_err_code('brew outdated numpy || brew upgrade numpy')
nci.call_with_err_code('brew outdated python || brew upgrade python')
nci.call_with_err_code('brew outdated openssl || brew upgrade openssl')
nci.call_with_err_code('brew outdated qrencode || brew upgrade qrencode')
nci.call_with_err_code('brew install qt --force')
nci.call_with_err_code('brew install berkeley-db@4 --force')
nci.call_with_err_code('brew install boost@1.60 --force')
nci.call_with_err_code('brew install miniupnpc --force')
nci.call_with_err_code('brew install curl --force')
nci.call_with_err_code('brew install python --force')
nci.call_with_err_code('brew install openssl --force')
nci.call_with_err_code('brew install qrencode --force')
nci.call_with_err_code('brew unlink qt && brew link --force --overwrite qt')
nci.call_with_err_code('brew unlink berkeley-db@4 && brew link --force --overwrite berkeley-db@4')
nci.call_with_err_code('brew unlink boost@1.60 && brew link --force --overwrite boost@1.60')
nci.call_with_err_code('brew unlink miniupnpc && brew link --force --overwrite miniupnpc')
nci.call_with_err_code('brew unlink curl && brew link --force --overwrite curl')
nci.call_with_err_code('brew unlink python && brew link --force --overwrite python')
nci.call_with_err_code('brew unlink openssl && brew link --force --overwrite openssl')
nci.call_with_err_code('brew unlink qrencode && brew link --force --overwrite qrencode')
nci.call_with_err_code('qmake "USE_UPNP=1" "USE_QRCODE=1" "RELEASE=1" "NEBLIO_CONFIG += Tests" ../neblio-wallet.pro')
nci.call_with_err_code("make -j" + str(mp.cpu_count()))
nci.call_with_err_code('../contrib/macdeploy/macdeployqtplus neblio-Qt.app -add-qt-tr da,de,es,hu,ru,uk,zh_CN,zh_TW -dmg -fancy ../contrib/macdeploy/fancy.plist -verbose 3')
print("")
print("")
print("Building finished successfully.")
print("")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import multiprocessing as mp
import neblio_ci_libs as nci
working_dir = os.getcwd()
build_dir = "build"
nci.mkdir_p(build_dir)
os.chdir(build_dir)
nci.call_with_err_code('brew update')
nci.call_with_err_code('brew outdated qt || brew upgrade qt')
nci.call_with_err_code('brew outdated berkeley-db@4 || brew upgrade berkeley-db@4')
nci.call_with_err_code('brew outdated boost@1.60 || brew upgrade boost@1.60')
nci.call_with_err_code('brew outdated miniupnpc || brew upgrade miniupnpc')
nci.call_with_err_code('brew outdated curl || brew upgrade curl')
nci.call_with_err_code('brew outdated numpy || brew upgrade numpy')
nci.call_with_err_code('brew outdated python || brew upgrade python')
nci.call_with_err_code('brew outdated openssl || brew upgrade openssl')
nci.call_with_err_code('brew outdated qrencode || brew upgrade qrencode')
nci.call_with_err_code('brew install qt --force')
nci.call_with_err_code('brew install berkeley-db@4 --force')
nci.call_with_err_code('brew install boost@1.60 --force')
nci.call_with_err_code('brew install miniupnpc --force')
nci.call_with_err_code('brew install curl --force')
nci.call_with_err_code('brew install python --force')
nci.call_with_err_code('brew install openssl --force')
nci.call_with_err_code('brew install qrencode --force')
nci.call_with_err_code('brew unlink qt && brew link --force --overwrite qt')
nci.call_with_err_code('brew unlink berkeley-db@4 && brew link --force --overwrite berkeley-db@4')
nci.call_with_err_code('brew unlink boost@1.60 && brew link --force --overwrite boost@1.60')
nci.call_with_err_code('brew unlink miniupnpc && brew link --force --overwrite miniupnpc')
nci.call_with_err_code('brew unlink curl && brew link --force --overwrite curl')
nci.call_with_err_code('brew unlink python && brew link --force --overwrite python')
nci.call_with_err_code('brew unlink openssl && brew link --force --overwrite openssl')
nci.call_with_err_code('brew unlink qrencode && brew link --force --overwrite qrencode')
nci.call_with_err_code('qmake "USE_UPNP=1" "USE_QRCODE=1" "RELEASE=1" "NEBLIO_CONFIG += Tests" ../neblio-wallet.pro')
nci.call_with_err_code("make -j" + str(mp.cpu_count()))
print("")
print("")
print("Building finished successfully.")
print("")
| mit | Python |
1f3a404b1e1fbd53d52d93ca83e8339147327f74 | bump version to 0.5.0 | ivelum/cub-python | cub/version.py | cub/version.py | version = '0.5.0'
| version = '0.4.1'
| mit | Python |
20d28be2ca57687b6072f9cee043f5dcbe16b1e2 | Add ability to parse package extras, like this: 'raven[flask]' | dveselov/rparse | rparse.py | rparse.py | #!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
from re import sub
from plyplus import Grammar, STransformer, \
ParseError, TokenizeError
try:
# Python 2.x and pypy
from itertools import imap as map
from itertools import ifilter as filter
except ImportError:
# Python 3.x already have lazy map
pass
__all__ = [
"parse"
]
grammar = Grammar(r"""
@start : package ;
package : name extras? specs?;
name : string ;
specs : comparison version (',' comparison version)* ;
comparison : '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' ;
version : string ;
extras : '\[' (extra (',' extra)*)? '\]' ;
extra : string ;
@string : '[-A-Za-z0-9_\.]+' ;
SPACES: '[ \t\n]+' (%ignore) (%newline);
""")
class Requirement(object):
def __init__(self, name=None, extras=None, specs=None):
self.name = name
self.extras = extras
self.specs = specs
def __str__(self):
return "<{0}(name='{1}'>".format(self.__class__.__name__, self.name)
class RTransformer(STransformer):
def package(self, node):
requirement = Requirement()
for key, value in node.tail:
setattr(requirement, key, value)
return requirement
def name(self, node):
return ("name", node.tail[0])
def specs(self, node):
comparisons, versions = node.tail[0::2], node.tail[1::2]
return ("specs", list(zip(comparisons, versions)))
def comparison(self, node):
return node.tail[0]
def version(self, node):
return node.tail[0]
def extras(self, node):
return ("extras", [name for name in node.tail])
def extra(self, node):
return node.tail[0]
def _parse(requirement, g=grammar):
requirement = sub(r"#.*", "", requirement)
try:
if requirement:
return g.parse(requirement)
else:
return None
except (ParseError, TokenizeError):
message = "Invalid requirements line: '{0}'" \
.format(requirement.strip())
raise ValueError(message)
def parse(requirements):
"""
Parses given requirements line-by-line.
"""
transformer = RTransformer()
return map(transformer.transform, filter(None, map(_parse, requirements.splitlines())))
| #!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
from re import sub
from plyplus import Grammar, STransformer, \
ParseError, TokenizeError
try:
# Python 2.x and pypy
from itertools import imap as map
from itertools import ifilter as filter
except ImportError:
# Python 3.x already have lazy map
pass
__all__ = [
"parse"
]
grammar = Grammar(r"""
@start : package ;
package : name vspec? ;
name : string ;
vspec : comparison version (',' comparison version)* ;
comparison : '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' ;
version : string ;
@string : '[-A-Za-z0-9_\.]+' ;
SPACES: '[ \t\n]+' (%ignore) (%newline);
""")
class RTransformer(STransformer):
def package(self, node):
if len(node.tail) == 2:
name, vspec = node.tail
else:
name, vspec = node.tail[0], None
return name, vspec
def name(self, node):
return node.tail[0]
def vspec(self, node):
comparisons, versions = node.tail[0::2], node.tail[1::2]
return list(zip(comparisons, versions))
def comparison(self, node):
return node.tail[0]
def version(self, node):
return node.tail[0]
def _parse(requirement, g=grammar):
requirement = sub(r"#.*", "", requirement)
try:
if requirement:
return g.parse(requirement)
else:
return None
except (ParseError, TokenizeError):
message = "Invalid requirements line: '{0}'" \
.format(requirement.strip())
raise ValueError(message)
def parse(requirements):
"""
Parses given requirements line-by-line.
"""
transformer = RTransformer()
return map(transformer.transform, filter(None, map(_parse, requirements.splitlines())))
| mit | Python |
dc43f25c72f1f6779c8e9cc52605d6f99ba8a77a | Set Flask's debug=True. | ajdavis/zero-to-app,ajdavis/zero-to-app | server.py | server.py | import urllib
from bson import json_util
from flask import Flask, redirect, url_for, Response
from flask import render_template
from flask import request
from pymongo import MongoClient
from werkzeug.routing import NumberConverter
db = MongoClient().test
app = Flask(__name__, static_path='/zero-to-app/static')
# Accept more 'float' numbers than Werkzeug does by default: also accept
# numbers beginning with minus, or with no trailing digits.
# From https://gist.github.com/akhenakh/3376839
class NegativeFloatConverter(NumberConverter):
regex = r'\-?\d+(\.\d+)?'
num_convert = float
def __init__(self, mapping, minimum=None, maximum=None):
NumberConverter.__init__(self, mapping, 0, minimum, maximum)
app.url_map.converters['float'] = NegativeFloatConverter
def address_to_lat_lon(addr):
url = 'http://maps.google.com/?q=' + urllib.quote(addr) + '&output=js'
# Get XML location.
xml = urllib.urlopen(url).read()
if '<error>' in xml:
raise Exception('%s\n' % url)
else:
# Strip lat/long coordinates from XML.
center = xml[xml.find('{center')+9:xml.find('}', xml.find('{center'))]
center = center.replace('lat:', '').replace('lng:', '')
lat, lng = center.split(',')
return float(lat), float(lng)
@app.route('/zero-to-app/near/<float:lat>/<float:lon>')
def near(lat, lon):
return render_template('near.html', results=results, lat=lat, lon=lon)
@app.route('/zero-to-app/results/json', methods=['POST'])
def results():
request_data = request.get_json()
num = int(request_data['num'])
lat = float(request_data['lat'])
lon = float(request_data['lon'])
# NOTE: lon, lat order!!
result = db.command(
'geoNear', 'cafes',
near={'type': 'Point', 'coordinates': [lon, lat]},
spherical=True,
num=num)
return Response(json_util.dumps(result), mimetype='application/json')
@app.route('/zero-to-app/address', methods=['POST'])
def address():
lat, lon = address_to_lat_lon(request.form.get('address'))
return redirect(url_for('near', lat=lat, lon=lon))
@app.route('/zero-to-app')
def main():
n_cafes = db.cafes.count()
return render_template('main.html', n_cafes=n_cafes)
if __name__ == '__main__':
print('Go visit http://localhost:5000/zero-to-app')
app.run(host='0.0.0.0', debug=True)
| import urllib
from bson import json_util
from flask import Flask, redirect, url_for, Response
from flask import render_template
from flask import request
from pymongo import MongoClient
from werkzeug.routing import NumberConverter
db = MongoClient().test
app = Flask(__name__, static_path='/zero-to-app/static')
# Accept more 'float' numbers than Werkzeug does by default: also accept
# numbers beginning with minus, or with no trailing digits.
# From https://gist.github.com/akhenakh/3376839
class NegativeFloatConverter(NumberConverter):
regex = r'\-?\d+(\.\d+)?'
num_convert = float
def __init__(self, mapping, minimum=None, maximum=None):
NumberConverter.__init__(self, mapping, 0, minimum, maximum)
app.url_map.converters['float'] = NegativeFloatConverter
def address_to_lat_lon(addr):
url = 'http://maps.google.com/?q=' + urllib.quote(addr) + '&output=js'
# Get XML location.
xml = urllib.urlopen(url).read()
if '<error>' in xml:
raise Exception('%s\n' % url)
else:
# Strip lat/long coordinates from XML.
center = xml[xml.find('{center')+9:xml.find('}', xml.find('{center'))]
center = center.replace('lat:', '').replace('lng:', '')
lat, lng = center.split(',')
return float(lat), float(lng)
@app.route('/zero-to-app/near/<float:lat>/<float:lon>')
def near(lat, lon):
return render_template('near.html', results=results, lat=lat, lon=lon)
@app.route('/zero-to-app/results/json', methods=['POST'])
def results():
request_data = request.get_json()
num = int(request_data['num'])
lat = float(request_data['lat'])
lon = float(request_data['lon'])
# NOTE: lon, lat order!!
result = db.command(
'geoNear', 'cafes',
near={'type': 'Point', 'coordinates': [lon, lat]},
spherical=True,
num=num)
return Response(json_util.dumps(result), mimetype='application/json')
@app.route('/zero-to-app/address', methods=['POST'])
def address():
lat, lon = address_to_lat_lon(request.form.get('address'))
return redirect(url_for('near', lat=lat, lon=lon))
@app.route('/zero-to-app')
def main():
n_cafes = db.cafes.count()
return render_template('main.html', n_cafes=n_cafes)
if __name__ == '__main__':
print('Go visit http://localhost:5000/zero-to-app')
app.run(host='0.0.0.0')
| apache-2.0 | Python |
069e98f036c77f635a955ea2c48580709089e702 | Set default values for `tags` and `availability` | PyconUK/ConferenceScheduler | src/conference_scheduler/resources.py | src/conference_scheduler/resources.py | from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime
class Slot(NamedTuple):
venue: str
starts_at: datetime
duration: int
capacity: int
session: str
class BaseEvent(NamedTuple):
name: str
duration: int
demand: int
tags: List[str]
unavailability: List
class Event(BaseEvent):
__slots__ = ()
def __new__(cls, name, duration, demand, tags=None, unavailability=None):
if tags is None:
tags = []
if unavailability is None:
unavailability = []
return super().__new__(
cls, name, duration, demand, tags, unavailability
)
class ScheduledItem(NamedTuple):
event: Event
slot: Slot
class ChangedEventScheduledItem(NamedTuple):
event: Event
old_slot: Slot = None
new_slot: Slot = None
class ChangedSlotScheduledItem(NamedTuple):
slot: Slot
old_event: Event = None
new_event: Event = None
class Shape(NamedTuple):
"""Represents the shape of a 2 dimensional array of events and slots"""
events: int
slots: int
class Constraint(NamedTuple):
label: str
condition: bool
| from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime
class Slot(NamedTuple):
venue: str
starts_at: datetime
duration: int
capacity: int
session: str
class Event(NamedTuple):
name: str
duration: int
demand: int
tags: List[str] = []
unavailability: List = []
class ScheduledItem(NamedTuple):
event: Event
slot: Slot
class ChangedEventScheduledItem(NamedTuple):
event: Event
old_slot: Slot = None
new_slot: Slot = None
class ChangedSlotScheduledItem(NamedTuple):
slot: Slot
old_event: Event = None
new_event: Event = None
class Shape(NamedTuple):
"""Represents the shape of a 2 dimensional array of events and slots"""
events: int
slots: int
class Constraint(NamedTuple):
label: str
condition: bool
| mit | Python |
ac1bd603ad066eade7dc89ed2f40528e5c9139ca | make sure /proc isn't a symlink to itself | sipb/homeworld,sipb/homeworld,sipb/homeworld,sipb/homeworld | platform/debian/clean_fakechroot.py | platform/debian/clean_fakechroot.py | #!/usr/bin/env python3
"""cleans up any symbolic links pointing with absolute paths to the build directory itself"""
import os
import sys
if len(sys.argv) != 2:
print("usage:", sys.argv[0], "<ROOTFS>", file=sys.stderr)
sys.exit(1)
rootfs = os.path.abspath(sys.argv[1])
for root, dirs, files in os.walk(rootfs):
for f in files:
path = os.path.join(root, f)
if not os.path.islink(path):
continue
full_link = os.readlink(path)
if not os.path.isabs(full_link):
continue
rootrel = os.path.relpath(full_link, rootfs)
if rootrel.split("/")[0] == "..":
# doesn't point within the rootfs; nothing to do
continue
os.remove(path)
os.symlink(os.path.join("/", rootrel), path)
# We have a Jenkins-only bug where /proc is a symlink to itself in the flannel
# container, which breaks a lot of things. We don't know why, exactly, this is
# happening -- but we need to mitigate it.
if os.path.islink(os.path.join(rootfs, "proc")):
os.unlink(os.path.join(rootfs, "proc"))
os.mkdir(os.path.join(rootfs, "proc"))
| #!/usr/bin/env python3
"""cleans up any symbolic links pointing with absolute paths to the build directory itself"""
import os
import sys
if len(sys.argv) != 2:
print("usage:", sys.argv[0], "<ROOTFS>", file=sys.stderr)
sys.exit(1)
rootfs = os.path.abspath(sys.argv[1])
for root, dirs, files in os.walk(rootfs):
for f in files:
path = os.path.join(root, f)
if not os.path.islink(path):
continue
full_link = os.readlink(path)
if not os.path.isabs(full_link):
continue
rootrel = os.path.relpath(full_link, rootfs)
if rootrel.split("/")[0] == "..":
# doesn't point within the rootfs; nothing to do
continue
os.remove(path)
os.symlink(os.path.join("/", rootrel), path)
| mit | Python |
f918ad734d6ac9edf176f3c4b596da8e551db7cb | support different scaling for each axis | olivierverdier/sfepy,rc/sfepy,lokik/sfepy,olivierverdier/sfepy,lokik/sfepy,lokik/sfepy,RexFuzzle/sfepy,RexFuzzle/sfepy,lokik/sfepy,sfepy/sfepy,sfepy/sfepy,BubuLK/sfepy,vlukes/sfepy,BubuLK/sfepy,vlukes/sfepy,sfepy/sfepy,rc/sfepy,RexFuzzle/sfepy,vlukes/sfepy,RexFuzzle/sfepy,olivierverdier/sfepy,rc/sfepy,BubuLK/sfepy | script/convert_mesh.py | script/convert_mesh.py | #!/usr/bin/env python
import sys
sys.path.append('.')
from optparse import OptionParser
import numpy as nm
from sfepy.fem import Mesh
usage = """%prog [options] filename_in filename_out
Convert a mesh file from one SfePy-supported format to another.
Examples:
$script/convert_mesh.py database/simple.mesh new.vtk
$script/convert_mesh.py database/simple.mesh new.vtk -s2.5
$script/convert_mesh.py database/simple.mesh new.vtk -s0.5,2,1
"""
help = {
'scale' : 'scale factor [default: %default]',
}
def main():
parser = OptionParser(usage=usage)
parser.add_option("-s", "--scale", metavar='scale',
action="store", dest="scale",
default=None, help=help['scale'])
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
sys.exit(1)
scale = options.scale
if scale is not None:
try:
scale = float(scale)
except ValueError:
scale = [float(ii) for ii in scale.split(',')]
scale = nm.array(scale, dtype=nm.float64, ndmin=1)
filename_in, filename_out = args
mesh = Mesh.from_file(filename_in)
if scale is not None:
if len(scale) == 1:
tr = nm.eye(mesh.dim, dtype=nm.float64) * scale
elif len(scale) == mesh.dim:
tr = nm.diag(scale)
mesh.transform_coords(tr)
mesh.write(filename_out, io='auto')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import sys
sys.path.append('.')
from optparse import OptionParser
import numpy as nm
from sfepy.fem import Mesh
usage = """%prog [options] filename_in filename_out
Convert a mesh file from one SfePy-supported format to another.
"""
help = {
'scale' : 'scale factor [default: %default]',
}
def main():
parser = OptionParser(usage=usage)
parser.add_option("-s", "--scale", type=int, metavar='scale',
action="store", dest="scale",
default=None, help=help['scale'])
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
sys.exit(1)
filename_in, filename_out = args
mesh = Mesh.from_file(filename_in)
if options.scale is not None:
tr = nm.eye(mesh.dim, dtype=nm.float64) * options.scale
mesh.transform_coords(tr)
mesh.write(filename_out, io='auto')
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
c42b3f733c43f06050d59fc85ba6c9756ba2488d | bump version 0 0.3.0 | iancze/Starfish | Starfish/__init__.py | Starfish/__init__.py | # We first need to detect if we're being called as part of the setup
# procedure itself in a reliable manner.
try:
__STARFISH_SETUP__
except NameError:
__STARFISH_SETUP__ = False
__version__ = "0.3.0"
if not __STARFISH_SETUP__:
from .spectrum import Spectrum
__all__ = [
"constants",
"emulator",
"grid_tools",
"models",
"samplers",
"spectrum",
"Spectrum",
"transforms",
"utils",
]
| # We first need to detect if we're being called as part of the setup
# procedure itself in a reliable manner.
try:
__STARFISH_SETUP__
except NameError:
__STARFISH_SETUP__ = False
__version__ = "0.3.0-dev"
if not __STARFISH_SETUP__:
from .spectrum import Spectrum
__all__ = [
"constants",
"emulator",
"grid_tools",
"models",
"samplers",
"spectrum",
"Spectrum",
"transforms",
"utils",
]
| bsd-3-clause | Python |
a918d12a2081d0b6da8074b3478c5f2c5e380bcd | Fix syntax errors in test resulting from recent changes to command_line program in r4305 or 6; now fails due to assertion errors rather than type errors (progress of a kind) N.B. seems shift was 0,0,0... | dials/dials,dials/dials,dials/dials,dials/dials,dials/dials | test/command_line/tst_discover_better_experimental_model.py | test/command_line/tst_discover_better_experimental_model.py | from __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
from libtbx.test_utils import approx_equal
from libtbx.test_utils import open_tmp_directory
from scitbx import matrix
# apply a random seed to avoid this randomly crashing... I hope
import random
random.seed(12345)
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise():
if not have_dials_regression:
print "Skipping exercise(): dials_regression not available."
return
data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
args = ["dials.discover_better_experimental_model",
datablock_path1,
datablock_path2,
pickle_path1,
pickle_path2]
command = " ".join(args)
print command
cwd = os.path.abspath(os.curdir)
tmp_dir = open_tmp_directory()
os.chdir(tmp_dir)
result = easy_run.fully_buffered(command=command).raise_if_errors()
assert os.path.exists('optimized_datablock.json')
from dxtbx.serialize import load
datablocks = load.datablock(datablock_path1, check_format=False)
original_imageset = datablocks[0].extract_imagesets()[0]
optimized_datablock = load.datablock('optimized_datablock.json',
check_format=False)
detector_1 = original_imageset.get_detector()
detector_2 = optimized_datablock[0].unique_detectors()[0]
shift = (matrix.col(detector_1[0].get_origin()) -
matrix.col(detector_2[0].get_origin()))
print shift.elems
print (-0.178, -0.041, 0.0)
assert approx_equal(shift.elems, (-0.178, -0.041, 0.0), eps=1e-2)
os.chdir(cwd)
def run():
exercise()
print "OK"
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
| from __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
from libtbx.test_utils import approx_equal
from libtbx.test_utils import open_tmp_directory
from scitbx import matrix
# apply a random seed to avoid this randomly crashing... I hope
import random
random.seed(12345)
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise():
if not have_dials_regression:
print "Skipping exercise(): dials_regression not available."
return
data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
args = ["dials.discover_better_experimental_model",
datablock_path1,
datablock_path2,
pickle_path1,
pickle_path2]
command = " ".join(args)
print command
cwd = os.path.abspath(os.curdir)
tmp_dir = open_tmp_directory()
os.chdir(tmp_dir)
result = easy_run.fully_buffered(command=command).raise_if_errors()
assert os.path.exists('optimized_imageset.json')
from dxtbx.serialize import load
datablocks = load.datablock(datablock_path1, check_format=False)
original_imageset = datablocks[0].extract_imagesets()[0]
optimized_imageset = load.imageset('optimized_imageset.json')
detector_1 = original_imageset.get_detector()
detector_2 = optimized_imageset.get_detector()
shift = (matrix.col(detector_1[0].get_origin()) -
matrix.col(detector_2[0].get_origin()))
assert approx_equal(shift.elems, (-0.178, -0.041, 0.0), eps=1e-2)
os.chdir(cwd)
def run():
exercise()
print "OK"
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
| bsd-3-clause | Python |
2024506ca76d5e7bae3016f1c74800bb02a13e1e | Remove default run behavior for if calling as __name__ == '__main__'. | UMIACS/qav,raushan802/qav | qav/listpack.py | qav/listpack.py |
class ListPack(object):
BOLD = '\033[1m'
OFF = '\033[0m'
def __init__(self, lp, sep=": ", padding=" ", indentation=0, width=79):
self.sep = sep
self.padding = padding
self.indentation = indentation
self.width = width
self._lp = lp
self.new_line = '' + (' ' * self.indentation)
def calc(self, t):
s1, s2 = t
return len(str(s1)) + len(self.sep) + len(str(s2)) + len(self.padding)
def bold(self, t):
s1, s2 = t
return '%s%s%s%s%s%s' % (self.BOLD, str(s1), self.OFF, self.sep,
str(s2), self.padding)
def append_item(self, item):
self._lp.append(item)
def prepend_item(self, item):
self._lp.insert(0, item)
def __str__(self):
_str = ''
line = self.new_line
line_length = len(line)
for i in self._lp:
if line_length + self.calc(i) > self.width:
if _str != '':
_str = _str + '\n' + line
else:
_str = line
line = self.new_line + self.bold(i)
line_length = len(self.new_line) + self.calc(i)
else:
line += self.bold(i)
line_length += self.calc(i)
_str = _str + '\n' + line
return _str
| import random
import string
class ListPack(object):
BOLD = '\033[1m'
OFF = '\033[0m'
def __init__(self, lp, sep=": ", padding=" ", indentation=0, width=79):
self.sep = sep
self.padding = padding
self.indentation = indentation
self.width = width
self._lp = lp
self.new_line = '' + (' ' * self.indentation)
def calc(self, t):
s1, s2 = t
return len(str(s1)) + len(self.sep) + len(str(s2)) + len(self.padding)
def bold(self, t):
s1, s2 = t
return '%s%s%s%s%s%s' % (self.BOLD, str(s1), self.OFF, self.sep,
str(s2), self.padding)
def append_item(self, item):
self._lp.append(item)
def prepend_item(self, item):
self._lp.insert(0, item)
def __str__(self):
_str = ''
line = self.new_line
line_length = len(line)
for i in self._lp:
if line_length + self.calc(i) > self.width:
if _str != '':
_str = _str + '\n' + line
else:
_str = line
line = self.new_line + self.bold(i)
line_length = len(self.new_line) + self.calc(i)
else:
line += self.bold(i)
line_length += self.calc(i)
_str = _str + '\n' + line
return _str
def id_generator(size=6, chars=None):
if chars is None:
chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
letter = random.choice(string.ascii_lowercase)
return letter + ''.join(random.choice(chars) for x in range(size))
def lp_generator(size=20):
lp = []
for i in range(size):
lp.append((id_generator(size=random.randint(4, 15)),
id_generator(size=random.randint(6, 60))))
return lp
if __name__ == "__main__":
lp = ListPack([('hostname', 'novelty.umiacs.umd.edu'),
('architecture', 'x86_64'),
('ipaddress', '128.8.120.234'),
('netmask', '255.255.255.0'),
('gateway', '128.8.120.1'),
('macaddress', 'aa:bb:cc:dd:ee:ff'),
('cr', None)])
print lp
for x in range(10):
print ListPack(lp_generator())
print "--------------------------------------------------"
| lgpl-2.1 | Python |
e0f8555788bd198c7841326b169983672e6e4079 | raise a 404 if a section does not exist | mthornhill/django-pressroom,mthornhill/django-pressroom | src/pressroom/views.py | src/pressroom/views.py | from datetime import datetime
from django.template.context import RequestContext
from django.views.generic import list_detail
from django.shortcuts import render_to_response, get_object_or_404
from pressroom.models import Article, Section
def index(request):
    """Render the pressroom landing page with the latest content."""
    # Five most recently published articles.
    articles = Article.objects.get_published()[:5]
    try:
        # photologue is an optional dependency; show a few galleries
        # when it is installed.  The original bare ``except: pass`` hid
        # every error and could leave ``galleries`` undefined.
        from photologue.models import Gallery
        galleries = Gallery.objects.all()[:3]
    except ImportError:
        # Template falsiness is the same for None as for a missing name.
        galleries = None
    return render_to_response('pressroom/index.html', locals(),
                              context_instance=RequestContext(request))
def view_section(request, slug, page=1):
    """Paginated listing of a section's published articles.

    Raises Http404 when no section matches ``slug``.
    """
    section = get_object_or_404(Section, slug=slug)
    # Only articles that are published and not post-dated.
    articles = section.articles.filter(publish=True, pub_date__lte=datetime.now())
    try:
        from photologue.models import Gallery
        galleries = Gallery.objects.all()[:3]
    except ImportError:
        galleries = None
    return list_detail.object_list(request,
        queryset=articles,
        paginate_by=5,
        page=page,
        allow_empty=True,
        template_name='pressroom/view_section.html',
        # Bug fix: the previous code put ``Gallery.objects.all()[:3]``
        # here directly, raising NameError whenever photologue was
        # missing and defeating the try/except above (it also issued a
        # redundant second query).  Use the guarded value instead.
        extra_context={'section': section,
                       'galleries': galleries})
def article_list(request, page=0):
    """Paginated list of every published article, five per page."""
    published = Article.objects.get_published()
    return list_detail.object_list(
        request=request,
        queryset=published,
        paginate_by=5,
        allow_empty=True,
        page=page,
    )
| from datetime import datetime
from django.template.context import RequestContext
from django.views.generic import list_detail
from django.shortcuts import render_to_response
from pressroom.models import Article, Section
def index(request):
articles = Article.objects.get_published()[:5]
try:
from photologue.models import Gallery
galleries = Gallery.objects.all()[:3]
except:
pass
return render_to_response('pressroom/index.html', locals(),
context_instance=RequestContext(request))
def view_section(request, slug, page=1):
section = Section.objects.get(slug__exact=slug)
articles = section.articles.filter(publish=True, pub_date__lte=datetime.now())
try:
from photologue.models import Gallery
galleries = Gallery.objects.all()[:3]
except:
galleries = None
return list_detail.object_list(request,
queryset=articles,
paginate_by=5,
page=page,
allow_empty=True,
template_name='pressroom/view_section.html',
extra_context={'section': section,
'galleries': Gallery.objects.all()[:3]})
def article_list(request, page=0):
return list_detail.object_list(request=request,
queryset=Article.objects.get_published(),
allow_empty=True,
paginate_by=5,
page=page) | bsd-3-clause | Python |
4934d3488321126fb73d236f00f37fe152f05476 | Add test database and some notes | notapresent/rbm2m,notapresent/rbm2m | rbm2m/config.py | rbm2m/config.py | # -*- coding: utf-8 -*-
import os
def get_app_env():
    """Name of the active configuration, read from the RBM2M_ENV
    environment variable; falls back to 'Production' when unset."""
    env = os.environ.get('RBM2M_ENV')
    return 'Production' if env is None else env
class Config(object):
    # Base configuration shared by every environment; subclasses
    # override individual attributes.
    APP_ENV = get_app_env()  # value of RBM2M_ENV, default 'Production'
    DEBUG = False
    TESTING = False
    # TODO: ?charset=utf8
    # NOTE(review): local default credentials are hard-coded here;
    # production overrides them from the environment below.
    DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m'
    REDIS_URI = 'redis://@localhost:6379/0'
class ProductionConfig(Config):
    # Production reads its connection strings from the environment;
    # these evaluate to None when the variables are unset.
    DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
    REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
    # Development keeps the base URIs and enables debugging.
    DEBUG = True
class TestingConfig(Config):
    TESTING = True
    # Dedicated database so the test suite never touches dev data.
    DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
| # -*- coding: utf-8 -*-
import os
def get_app_env():
return os.environ.get('RBM2M_ENV', 'Production')
class Config(object):
APP_ENV = get_app_env()
DEBUG = False
TESTING = False
DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/dbm2m'
REDIS_URI = 'redis://@localhost:6379/0'
class ProductionConfig(Config):
DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
DEBUG = True
class TestingConfig(Config):
TESTING = True
| apache-2.0 | Python |
9a382008eba526a57bbc9be3abfe0d058b1c63af | update wsgi.py | CooloiStudio/Django_deskxd.com,CooloiStudio/Django_deskxd.com,CooloiStudio/Django_deskxd.com,CooloiStudio/Django_deskxd.com | deskxd_com/wsgi.py | deskxd_com/wsgi.py | """
WSGI config for deskxd_com project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os, sys
from django.core.wsgi import get_wsgi_application
# Ensure the project checkout is importable even when the WSGI server
# does not add it to sys.path itself.
path = '/home/active/Django_deskxd.com'
if path not in sys.path:
    # Diagnostic so server logs show when the path had to be added.
    print "====== Not in! ======"
    sys.path.append(path)

# setdefault keeps an explicitly configured settings module if one is
# already present in the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "deskxd_com.settings")

# Module-level WSGI callable picked up by the application server.
application = get_wsgi_application()
| """
WSGI config for deskxd_com project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "deskxd_com.settings")
application = get_wsgi_application()
| mit | Python |
7d3b4f149a1028bb68d917fec2bc3a89a0f170ee | set ALLOWED_HOSTS for production | riffschelder/reviewshub | reviewsHub/reviewsHub/settings/production.py | reviewsHub/reviewsHub/settings/production.py | DEBUG = False
TEMPLATE_DEBUG = False

# NOTE(review): a live secret key is committed to version control here;
# it should be loaded from the environment and rotated -- confirm.
SECRET_KEY = '$d416i%#@eqim_ms34y42jy%7-+(ml7*7iz8l!w7*7h%et!i3l'

# Leading dot matches subdomains; the trailing-dot variant accepts
# fully-qualified (absolute) host headers.
ALLOWED_HOSTS = ['.schelder.com', '.schelder.com.']

# PostgreSQL connection; credentials are blank here and presumably
# filled in at deploy time -- verify.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': '',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}
| DEBUG = False
TEMPLATE_DEBUG = False
SECRET_KEY = '$d416i%#@eqim_ms34y42jy%7-+(ml7*7iz8l!w7*7h%et!i3l'
ALLOWED_HOSTS = []
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
| mit | Python |
2c5fc14bf9019ecf03d977860520ae2e4d5ec896 | add v3.55.0 (#27558) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-progressbar2/package.py | var/spack/repos/builtin/packages/py-progressbar2/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyProgressbar2(PythonPackage):
    """A progress bar for Python 2 and Python 3"""

    homepage = "https://github.com/WoLpH/python-progressbar"
    pypi = "progressbar2/progressbar2-3.50.1.tar.gz"

    # sha256 digests of the corresponding PyPI source tarballs.
    version('3.55.0', sha256='86835d1f1a9317ab41aeb1da5e4184975e2306586839d66daf63067c102f8f04')
    version('3.50.1', sha256='2c21c14482016162852c8265da03886c2b4dea6f84e5a817ad9b39f6bd82a772')
    version('3.39.3', sha256='8e5b5419e04193bb7c3fea71579937bbbcd64c26472b929718c2fe7ec420fe39')

    # setuptools builds the package; six and python-utils are needed at
    # both build and run time.
    depends_on('py-setuptools', type='build')
    depends_on('py-six', type=('build', 'run'))
    depends_on('py-python-utils@2.3.0:', type=('build', 'run'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyProgressbar2(PythonPackage):
"""A progress bar for Python 2 and Python 3"""
homepage = "https://github.com/WoLpH/python-progressbar"
pypi = "progressbar2/progressbar2-3.50.1.tar.gz"
version('3.50.1', sha256='2c21c14482016162852c8265da03886c2b4dea6f84e5a817ad9b39f6bd82a772')
version('3.39.3', sha256='8e5b5419e04193bb7c3fea71579937bbbcd64c26472b929718c2fe7ec420fe39')
depends_on('py-setuptools', type='build')
depends_on('py-six', type=('build', 'run'))
depends_on('py-python-utils@2.3.0:', type=('build', 'run'))
| lgpl-2.1 | Python |
a2275fc2a2414e0e456aa1fb99bcdfc1aa716781 | Allow flake8 to take --snippet option | adrianmoisey/lint-review,zoidbergwill/lint-review,markstory/lint-review,zoidbergwill/lint-review,zoidbergwill/lint-review,adrianmoisey/lint-review,markstory/lint-review,markstory/lint-review | lintreview/tools/flake8.py | lintreview/tools/flake8.py | import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Flake8(Tool):
    """Lint Python files by shelling out to the ``flake8`` tool."""

    name = 'flake8'

    # Options forwarded verbatim to the flake8 command line.
    # see: http://flake8.readthedocs.org/en/latest/config.html
    PYFLAKE_OPTIONS = [
        'exclude',
        'filename',
        'select',
        'ignore',
        'max-line-length',
        'format',
        'max-complexity',
        'snippet',
    ]

    def check_dependencies(self):
        """
        See if flake8 is on the PATH
        """
        return in_path('flake8')

    def match_file(self, filename):
        """Process only files with a ``.py`` extension."""
        base = os.path.basename(filename)
        # Only the extension matters; the stem is intentionally discarded.
        _, ext = os.path.splitext(base)
        return ext == '.py'

    def process_files(self, files):
        """
        Run code checks with flake8.

        Only a single process is made for all files
        to save resources.  Returns False when flake8 reports nothing.
        """
        log.debug('Processing %s files with %s', files, self.name)
        command = ['flake8']
        for option in self.PYFLAKE_OPTIONS:
            # Single lookup instead of the previous double options.get().
            value = self.options.get(option)
            if value:
                # NOTE(review): non-string values (e.g. ints from config)
                # are passed through as-is -- confirm run_command
                # stringifies its arguments.
                command.extend(['--' + option, value])
        command += files
        output = run_command(command, split=True, ignore_error=True)
        if not output:
            log.debug('No flake8 errors found.')
            return False
        for result_line in output:
            # Avoid shadowing the iterated line with the parsed line number.
            filename, line_number, error = self._parse_line(result_line)
            self.problems.add(filename, line_number, error)

    def _parse_line(self, line):
        """
        flake8 only generates results as stdout.
        Parse the output for real data.

        Returns a ``(filename, line_number, message)`` tuple.
        """
        parts = line.split(':', 3)
        if len(parts) == 3:
            message = parts[2].strip()
        else:
            message = parts[3].strip()
        return (parts[0], int(parts[1]), message)
| import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Flake8(Tool):
name = 'flake8'
# see: http://flake8.readthedocs.org/en/latest/config.html
PYFLAKE_OPTIONS = [
'exclude',
'filename',
'select',
'ignore',
'max-line-length',
'format',
'max-complexity',
]
def check_dependencies(self):
"""
See if flake8 is on the PATH
"""
return in_path('flake8')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext == '.py'
def process_files(self, files):
"""
Run code checks with flake8.
Only a single process is made for all files
to save resources.
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['flake8']
for option in self.PYFLAKE_OPTIONS:
if self.options.get(option):
command.extend(
['--%(option)s' % {'option': option},
self.options.get(option)])
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No flake8 errors found.')
return False
for line in output:
filename, line, error = self._parse_line(line)
self.problems.add(filename, line, error)
def _parse_line(self, line):
"""
flake8 only generates results as stdout.
Parse the output for real data.
"""
parts = line.split(':', 3)
if len(parts) == 3:
message = parts[2].strip()
else:
message = parts[3].strip()
return (parts[0], int(parts[1]), message)
| mit | Python |
2519cf06b3cfbdf37f0e79b6222f55f7688ec291 | fix import | ENCODE-DCC/pipeline-container,ENCODE-DCC/pipeline-container,ENCODE-DCC/pipeline-container,ENCODE-DCC/pipeline-container | src/test_encode_map.py | src/test_encode_map.py | import unittest
import encode_map
class TestEncodeMap(unittest.TestCase):
    """Placeholder suite for the ``encode_map`` module.

    Every case below is still a stub with no assertions; the method
    names record what needs coverage.
    """

    def setUp(self):
        # No shared fixtures needed yet.
        pass

    def test_strip_extensions(self):
        # TODO: stub -- no assertions yet.
        pass

    def test_resolve_reference(self):
        # TODO: stub -- no assertions yet.
        pass

    def test_crop(self):
        # TODO: stub -- no assertions yet.
        pass

    def test_process(self):
        # TODO: stub -- no assertions yet.
        pass

if __name__ == "__main__":
    unittest.main()
| import unittest
import mock
import encode_map
class TestEncodeMap(unittest.TestCase):
def setUp(self):
pass
def test_strip_extensions(self):
pass
def test_resolve_reference(self):
pass
def test_crop(self):
pass
def test_process(self):
pass
if __name__ == "__main__":
unittest.main()
| mit | Python |
ac3ba70ed97fb026a24376e1ba9dbfec659fb83e | Update global_defaults_to_system_settings.py | indictranstech/trufil-erpnext,gangadharkadam/v4_erp,indictranstech/focal-erpnext,meisterkleister/erpnext,pombredanne/erpnext,gangadhar-kadam/verve_erp,indictranstech/osmosis-erpnext,suyashphadtare/vestasi-update-erp,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp,hernad/erpnext,4commerce-technologies-AG/erpnext,Tejal011089/trufil-erpnext,indictranstech/erpnext,Tejal011089/paypal_erpnext,suyashphadtare/gd-erp,ShashaQin/erpnext,tmimori/erpnext,mbauskar/helpdesk-erpnext,gangadharkadam/saloon_erp_install,dieface/erpnext,gsnbng/erpnext,susuchina/ERPNEXT,pawaranand/phrerp,Tejal011089/digitales_erpnext,shitolepriya/test-erp,indictranstech/tele-erpnext,shitolepriya/test-erp,rohitwaghchaure/digitales_erpnext,gangadharkadam/letzerp,mbauskar/sapphire-erpnext,Tejal011089/fbd_erpnext,gangadharkadam/saloon_erp,indictranstech/reciphergroup-erpnext,indictranstech/internal-erpnext,gangadharkadam/johnerp,mbauskar/helpdesk-erpnext,rohitwaghchaure/erpnext_smart,mbauskar/alec_frappe5_erpnext,mbauskar/sapphire-erpnext,suyashphadtare/sajil-final-erp,gangadharkadam/letzerp,Tejal011089/paypal_erpnext,hatwar/focal-erpnext,Tejal011089/huntercamp_erpnext,hatwar/focal-erpnext,Tejal011089/fbd_erpnext,hatwar/buyback-erpnext,indictranstech/fbd_erpnext,indictranstech/erpnext,rohitwaghchaure/erpnext-receipher,rohitwaghchaure/digitales_erpnext,indictranstech/biggift-erpnext,BhupeshGupta/erpnext,hanselke/erpnext-1,gangadharkadam/v6_erp,gangadhar-kadam/laganerp,rohitwaghchaure/New_Theme_Erp,Tejal011089/digitales_erpnext,indictranstech/trufil-erpnext,indictranstech/focal-erpnext,Tejal011089/trufil-erpnext,rohitwaghchaure/erpnext-receipher,geekroot/erpnext,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/office_erp,aruizramon/alec_erpnext,gangadharkadam/office_erp,dieface/erpnext,indictranstech/biggift-erpnext,Tejal011089/osmosis_erpnext,dieface/erpnext,gangadhar-kadam/laganerp,gangadhar-kadam/verve_test_erp,shea
fferusa/erpnext,rohitwaghchaure/erpnext_smart,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,fuhongliang/erpnext,rohitwaghchaure/digitales_erpnext,gangadhar-kadam/smrterp,tmimori/erpnext,geekroot/erpnext,meisterkleister/erpnext,hanselke/erpnext-1,hatwar/focal-erpnext,hatwar/Das_erpnext,gangadharkadam/sterp,indictranstech/focal-erpnext,Tejal011089/osmosis_erpnext,indictranstech/tele-erpnext,gangadhar-kadam/verve_test_erp,indictranstech/erpnext,SPKian/Testing,indictranstech/internal-erpnext,indictranstech/vestasi-erpnext,gangadharkadam/sher,Tejal011089/fbd_erpnext,gangadharkadam/saloon_erp_install,Tejal011089/osmosis_erpnext,aruizramon/alec_erpnext,Tejal011089/huntercamp_erpnext,gangadharkadam/verveerp,gangadhar-kadam/verve_live_erp,treejames/erpnext,hernad/erpnext,suyashphadtare/sajil-erp,MartinEnder/erpnext-de,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/v4_erp,mbauskar/helpdesk-erpnext,suyashphadtare/gd-erp,gangadharkadam/saloon_erp,hatwar/focal-erpnext,indictranstech/biggift-erpnext,indictranstech/reciphergroup-erpnext,ThiagoGarciaAlves/erpnext,suyashphadtare/vestasi-erp-1,gangadhar-kadam/helpdesk-erpnext,indictranstech/phrerp,gangadharkadam/v5_erp,mahabuber/erpnext,netfirms/erpnext,SPKian/Testing2,susuchina/ERPNEXT,mbauskar/omnitech-erpnext,gangadhar-kadam/verve-erp,hanselke/erpnext-1,hatwar/Das_erpnext,suyashphadtare/vestasi-erp-jan-end,mbauskar/omnitech-erpnext,gangadharkadam/v4_erp,hernad/erpnext,mbauskar/helpdesk-erpnext,hanselke/erpnext-1,gangadharkadam/smrterp,mbauskar/phrerp,gangadharkadam/verveerp,sheafferusa/erpnext,indictranstech/Das_Erpnext,fuhongliang/erpnext,gangadhar-kadam/verve_test_erp,indictranstech/trufil-erpnext,suyashphadtare/vestasi-erp-final,MartinEnder/erpnext-de,gangadharkadam/vlinkerp,treejames/erpnext,suyashphadtare/test,indictranstech/Das_Erpnext,ThiagoGarciaAlves/erpnext,pawaranand/phrerp,4commerce-technologies-AG/erpnext,mbauskar/Das_Erpnext,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/v5_erp,indictranstech/erpnext,sagar3
0051991/ozsmart-erp,saurabh6790/test-erp,gangadharkadam/contributionerp,anandpdoshi/erpnext,Suninus/erpnext,suyashphadtare/vestasi-erp-jan-end,gangadharkadam/smrterp,rohitwaghchaure/New_Theme_Erp,gangadharkadam/vlinkerp,meisterkleister/erpnext,gangadhar-kadam/laganerp,gangadhar-kadam/latestchurcherp,gangadharkadam/v6_erp,gangadharkadam/letzerp,gangadhar-kadam/verve_live_erp,Tejal011089/digitales_erpnext,netfirms/erpnext,gangadhar-kadam/verve_live_erp,mbauskar/omnitech-demo-erpnext,Aptitudetech/ERPNext,gangadharkadam/v5_erp,indictranstech/phrerp,Tejal011089/huntercamp_erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,mahabuber/erpnext,gangadharkadam/vlinkerp,njmube/erpnext,gangadharkadam/v5_erp,rohitwaghchaure/GenieManager-erpnext,geekroot/erpnext,suyashphadtare/sajil-erp,gsnbng/erpnext,gangadharkadam/saloon_erp_install,indictranstech/vestasi-erpnext,mahabuber/erpnext,gangadhar-kadam/smrterp,indictranstech/phrerp,mbauskar/omnitech-demo-erpnext,suyashphadtare/gd-erp,susuchina/ERPNEXT,SPKian/Testing2,gangadharkadam/tailorerp,anandpdoshi/erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/osmosis-erpnext,mbauskar/phrerp,suyashphadtare/vestasi-erp-final,suyashphadtare/sajil-erp,Drooids/erpnext,indictranstech/buyback-erp,fuhongliang/erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/sher,Tejal011089/trufil-erpnext,gangadhar-kadam/verve_erp,gmarke/erpnext,indictranstech/osmosis-erpnext,rohitwaghchaure/erpnext-receipher,hatwar/Das_erpnext,mbauskar/phrerp,indictranstech/internal-erpnext,rohitwaghchaure/GenieManager-erpnext,tmimori/erpnext,aruizramon/alec_erpnext,gangadhar-kadam/latestchurcherp,pombredanne/erpnext,shft117/SteckerApp,mbauskar/phrerp,indictranstech/buyback-erp,suyashphadtare/test,suyashphadtare/vestasi-update-erp,Drooids/erpnext,treejames/erpnext,saurabh6790/test-erp,BhupeshGupta/erpnext,indictranstech/internal-erpnext,indictranstech/biggift-erpnext,Tejal011089/paypal_erpnext,shft117/SteckerApp,mbauskar/omnitech-demo-erpnext,Tejal011089/digitales_
erpnext,indictranstech/fbd_erpnext,hatwar/buyback-erpnext,mbauskar/omnitech-erpnext,gangadharkadam/verveerp,mbauskar/Das_Erpnext,indictranstech/reciphergroup-erpnext,indictranstech/tele-erpnext,gangadharkadam/letzerp,mahabuber/erpnext,suyashphadtare/vestasi-erp-final,sagar30051991/ozsmart-erp,mbauskar/omnitech-erpnext,gangadhar-kadam/verve_erp,gangadhar-kadam/verve_live_erp,sagar30051991/ozsmart-erp,indictranstech/buyback-erp,MartinEnder/erpnext-de,gsnbng/erpnext,indictranstech/focal-erpnext,SPKian/Testing2,meisterkleister/erpnext,4commerce-technologies-AG/erpnext,gangadhar-kadam/verve_erp,njmube/erpnext,gmarke/erpnext,BhupeshGupta/erpnext,SPKian/Testing,anandpdoshi/erpnext,geekroot/erpnext,suyashphadtare/vestasi-erp-1,netfirms/erpnext,indictranstech/fbd_erpnext,rohitwaghchaure/digitales_erpnext,suyashphadtare/sajil-final-erp,susuchina/ERPNEXT,Tejal011089/trufil-erpnext,Tejal011089/fbd_erpnext,gangadharkadam/v6_erp,gangadharkadam/tailorerp,gmarke/erpnext,ThiagoGarciaAlves/erpnext,tmimori/erpnext,shft117/SteckerApp,gangadhar-kadam/latestchurcherp,Suninus/erpnext,gangadhar-kadam/verve-erp,netfirms/erpnext,dieface/erpnext,mbauskar/alec_frappe5_erpnext,mbauskar/Das_Erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/sterp,pawaranand/phrerp,ThiagoGarciaAlves/erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/osmosis_erpnext,indictranstech/osmosis-erpnext,treejames/erpnext,gangadhar-kadam/verve_test_erp,Suninus/erpnext,gsnbng/erpnext,mbauskar/omnitech-demo-erpnext,ShashaQin/erpnext,gangadhar-kadam/helpdesk-erpnext,sheafferusa/erpnext,SPKian/Testing2,gangadharkadam/office_erp,suyashphadtare/vestasi-erp-jan-end,saurabh6790/test-erp,gangadharkadam/contributionerp,saurabh6790/test-erp,njmube/erpnext,gangadharkadam/vlinkerp,Suninus/erpnext,anandpdoshi/erpnext,suyashphadtare/gd-erp,SPKian/Testing,gangadhar-kadam/verve-erp,indictranstech/trufil-erpnext,fuhongliang/erpnext,sagar30051991/ozsmart-erp,shitolepriya/test-erp,shitolepriya/test-erp,Drooids/erpnext,rohitwaghchaure/New_T
heme_Erp,gangadharkadam/contributionerp,shft117/SteckerApp,njmube/erpnext,Drooids/erpnext,mbauskar/Das_Erpnext,gangadharkadam/v4_erp,ShashaQin/erpnext,hatwar/Das_erpnext,indictranstech/fbd_erpnext,suyashphadtare/vestasi-erp-1,indictranstech/vestasi-erpnext,indictranstech/Das_Erpnext,indictranstech/vestasi-erpnext,sheafferusa/erpnext,BhupeshGupta/erpnext,hernad/erpnext,suyashphadtare/sajil-final-erp,indictranstech/buyback-erp,gangadharkadam/v6_erp,gangadharkadam/verveerp,aruizramon/alec_erpnext,gangadharkadam/johnerp,gmarke/erpnext,MartinEnder/erpnext-de,SPKian/Testing,Tejal011089/paypal_erpnext,indictranstech/phrerp,hatwar/buyback-erpnext,suyashphadtare/test,gangadhar-kadam/latestchurcherp,mbauskar/sapphire-erpnext,rohitwaghchaure/erpnext-receipher,ShashaQin/erpnext,suyashphadtare/vestasi-erp-jan-end,pawaranand/phrerp,gangadharkadam/contributionerp,pombredanne/erpnext,suyashphadtare/vestasi-update-erp,indictranstech/reciphergroup-erpnext,rohitwaghchaure/New_Theme_Erp,gangadharkadam/saloon_erp_install,mbauskar/alec_frappe5_erpnext,indictranstech/tele-erpnext | erpnext/patches/v4_0/global_defaults_to_system_settings.py | erpnext/patches/v4_0/global_defaults_to_system_settings.py | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from collections import Counter
from frappe.core.doctype.user.user import STANDARD_USERS
def execute():
    """Migrate legacy Global Defaults values into the System Settings
    singleton and derive a default language from existing users."""
    system_settings = frappe.get_doc("System Settings")

    # set values from global_defaults
    global_defaults = frappe.db.get_value("Global Defaults", None,
        ["time_zone", "date_format", "number_format", "float_precision", "session_expiry"], as_dict=True)
    if global_defaults:
        for key, val in global_defaults.items():
            # Only copy fields System Settings has not set already.
            if not system_settings.get(key):
                system_settings[key] = val

    # language
    if not system_settings.get("language"):
        # find most common language among non-standard users; empty
        # values and those starting with "Loading" are skipped.
        lang = frappe.db.sql_list("""select language from `tabUser`
            where ifnull(language, '')!='' and language not like "Loading%%" and name not in ({standard_users})""".format(
            standard_users=", ".join(["%s"]*len(STANDARD_USERS))), tuple(STANDARD_USERS))
        lang = Counter(lang).most_common(1)
        # most_common(1) yields [(language, count)] or []; fall back to
        # "english" when no user has a language set.
        lang = (len(lang) > 0) and lang[0][0] or "english"
        system_settings.language = lang

    # presumably because the migrated document may be missing mandatory
    # fields at this point -- verify before removing.
    system_settings.ignore_mandatory = True
    system_settings.save()
| # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from collections import Counter
from frappe.core.doctype.user.user import STANDARD_USERS
def execute():
system_settings = frappe.get_doc("System Settings")
# set values from global_defauls
global_defaults = frappe.db.get_value("Global Defaults", None,
["time_zone", "date_format", "number_format", "float_precision", "session_expiry"], as_dict=True)
if global_defauls:
for key, val in global_defaults.items():
if not system_settings.get(key):
system_settings[key] = val
# language
if not system_settings.get("language"):
# find most common language
lang = frappe.db.sql_list("""select language from `tabUser`
where ifnull(language, '')!='' and language not like "Loading%%" and name not in ({standard_users})""".format(
standard_users=", ".join(["%s"]*len(STANDARD_USERS))), tuple(STANDARD_USERS))
lang = Counter(lang).most_common(1)
lang = (len(lang) > 0) and lang[0][0] or "english"
system_settings.language = lang
system_settings.ignore_mandatory = True
system_settings.save()
| agpl-3.0 | Python |
8be0c31e2319eb3718ecb3476d22d5b7ce5101e7 | Update 07_Weather_Station.py | userdw/RaspberryPi_3_Starter_Kit | 07_Weather_Station/07_Weather_Station/07_Weather_Station.py | 07_Weather_Station/07_Weather_Station/07_Weather_Station.py | import MCP3202
import time
import datetime
import os
from time import strftime
# Main loop: continuously sample the ADC and display the temperature
# until the user interrupts with CTRL+C.
try:
    while 1:
        os.system("clear")
        # Read channel 0 of the MCP3202 ADC (divisor 4096 implies a
        # 12-bit reading, 0..4095 counts).
        value1= MCP3202.readADC(0)
        # Convert counts to millivolts against a 5000 mV reference.
        voltage = round(float(value1 * 5000 / 4096), 2)
        # assumes a sensor with a 550 mV offset and 10 mV/degC slope
        # (TMP36-like) -- TODO confirm the actual sensor.
        temperature = (voltage - 550) / 10
        tampil = round(float(temperature), 2)
        print("Weather Station")
        print("Curent Temperature : ", tampil, u"\xb0", "C")
        print("")
        print("Press CTRL+C to exit")
        # Refresh roughly 13 times per second.
        time.sleep(0.075)
except KeyboardInterrupt:
    print("exit")
| import MCP3202
import time
import datetime
import os
from time import strftime
try:
while 1:
os.system('clear')
value1= MCP3202.readADC(0) # range data 0 - vref (volt)
voltage=round(float(value1*5000/4096),2)
temperature = (voltage-550)/10
tampil= round(float(temperature),2)
print "Weather Station"
print "Curent Temperature : ",tampil,u"\u2103"
print ""
print "Press CTRL+C to exit"
time.sleep(0.075)
except KeyboardInterrupt:
print "exit"
| mit | Python |
73fe441f081a4d1b327cf411025e5f7b29ba13d7 | improve model recognition | illwieckz/Urcheon,illwieckz/grtoolbox | pak_profiles/common.py | pak_profiles/common.py | #! /usr/bin/env python3
#-*- coding: UTF-8 -*-
### Legal
#
# Author: Thomas DEBESSE <dev@illwieckz.net>
# License: ISC
#
# File-classification rules: each dict pairs match criteria (file_base,
# file_ext, dir_ancestor_name) with a human-readable description and the
# action to take ("copy" or "ignore").  "inherit" names another rule
# whose keys are presumably merged in by the profile loader -- confirm.

file_common_deps = {
    "file_base": "DEPS",
    "description": "Package DEPS file",
    "action": "copy",
}

# Source files of external image editors are never shipped.
file_common_external_editor = {
    "file_ext": [
        "xcf",
        "psd",
        "ora",
    ],
    "description": "External Editor File",
    "action": "ignore",
}

# NOTE(review): "metada" looks like a typo for "metadata"; renaming the
# variable would require updating any external references.
file_common_metada_sidecar = {
    "file_ext": [
        "vorbiscomment",
    ],
    "description": "Metadata Sidecar",
    "action": "ignore",
}

file_common_texture = {
    "file_ext": [
        "jpg",
        "jpeg",
        "png",
        "tga",
        "bmp",
        "webp",
        "crn",
        "dds",
    ],
    "description": "Texture",
    "action": "copy",
}

file_common_sound = {
    "file_ext": [
        "wav",
        "flac",
        "ogg",
        "opus",
    ],
    "description": "Sound File",
    "action": "copy",
}

# Scripts only match when somewhere under a "scripts" directory.
file_common_script = {
    "file_ext": [
        "shader",
        "particle",
        "trail",
    ],
    "dir_ancestor_name": "scripts",
    "description": "Common Script",
    "action": "copy",
}

file_common_model = {
    "file_ext": [
        "ase",
        "iqm",
        "md3",
        "md5anim",
        "md5mesh",
        "qc",
    ],
    "description": "Common Model File",
    "action": "copy",
}

file_common_text = {
    "file_ext": [
        "txt",
        "md",
    ],
    "description": "Common Text file",
    "action": "copy",
}

# Specialisation of file_common_text for README files.
file_common_readme = {
    "inherit": "file_common_text",
    "file_base": "README",
    "description": "Common ReadMe file",
}

# Specialisation of file_common_sound for the placeholder null.wav.
file_common_nullwav = {
    "inherit": "file_common_sound",
    "file_ext": "wav",
    "file_base": "null",
    "description": "Common NULL Sound File",
    "action": "copy",
}
| #! /usr/bin/env python3
#-*- coding: UTF-8 -*-
### Legal
#
# Author: Thomas DEBESSE <dev@illwieckz.net>
# License: ISC
#
file_common_deps = {
"file_base": "DEPS",
"description": "Package DEPS file",
"action": "copy",
}
file_common_external_editor = {
"file_ext": [
"xcf",
"psd",
"ora",
],
"description": "External Editor File",
"action": "ignore",
}
file_common_metada_sidecar = {
"file_ext": [
"vorbiscomment",
],
"description": "Metadata Sidecar",
"action": "ignore",
}
file_common_texture = {
"file_ext": [
"jpg",
"jpeg",
"png",
"tga",
"bmp",
"webp",
"crn",
"dds",
],
"description": "Texture",
"action": "copy",
}
file_common_sound = {
"file_ext": [
"wav",
"flac",
"ogg",
"opus",
],
"description": "Sound File",
"action": "copy",
}
file_common_script = {
"file_ext": [
"shader",
"particle",
"trail",
],
"dir_ancestor_name": "scripts",
"description": "Common Script",
"action": "copy",
}
file_common_model = {
"file_ext": [
"qc",
"ase",
"md3",
],
"description": "Common Model File",
"action": "copy",
}
file_common_text = {
"file_ext": [
"txt",
"md",
],
"description": "Common Text file",
"action": "copy",
}
file_common_readme = {
"inherit": "file_common_text",
"file_base": "README",
"description": "Common ReadMe file",
}
file_common_nullwav = {
"inherit": "file_common_sound",
"file_ext": "wav",
"file_base": "null",
"description": "Common NULL Sound File",
"action": "copy",
}
| isc | Python |
8be9a250c190d9dd8615da4616cdfe401ad6e0af | Bump version number for 1.7 alpha 1. | seocam/django,daniponi/django,manhhomienbienthuy/django,SujaySKumar/django,kaedroho/django,unaizalakain/django,liavkoren/djangoDev,epandurski/django,fpy171/django,mrfuxi/django,syphar/django,mojeto/django,techdragon/django,gunchleoc/django,django/django,barbuza/django,andreif/django,TridevGuha/django,jdelight/django,edevil/django,myang321/django,ajaali/django,auvipy/django,pauloxnet/django,dhruvagarwal/django,SoftwareMaven/django,supriyantomaftuh/django,manhhomienbienthuy/django,AndrewGrossman/django,treyhunner/django,denis-pitul/django,jmcarp/django,rhertzog/django,alilotfi/django,taaviteska/django,hassanabidpk/django,sephii/django,memtoko/django,jallohm/django,sadaf2605/django,schinckel/django,ifduyue/django,delinhabit/django,RevelSystems/django,mcella/django,Vixionar/django,mttr/django,bak1an/django,dudepare/django,alrifqi/django,filias/django,sjlehtin/django,GhostThrone/django,fpy171/django,georgemarshall/django,supriyantomaftuh/django,guettli/django,vitan/django,pasqualguerrero/django,riteshshrv/django,rmboggs/django,hassanabidpk/django,sbellem/django,Leila20/django,ironbox360/django,WillGuan105/django,follow99/django,koordinates/django,mathspace/django,claudep/django,arun6582/django,jeezybrick/django,whs/django,gengue/django,hynekcer/django,ironbox360/django,erikr/django,koniiiik/django,davgibbs/django,karyon/django,Vixionar/django,zanderle/django,rrrene/django,ojake/django,twz915/django,z0by/django,doismellburning/django,roselleebarle04/django,litchfield/django,mbox/django,indevgr/django,aerophile/django,codepantry/django,jscn/django,seocam/django,syphar/django,takis/django,sarvex/django,jnovinger/django,baylee/django,treyhunner/django,indevgr/django,lunafeng/django,ivandevp/django,felixxm/django,denis-pitul/django,aidanlister/django,dbaxa/django,ivandevp/django,akintoey/django,elijah513/django,eugena/django,MarcJoan/django,krisys/django,nielsvanoch/django,leekchan/django_test,a
pocquet/django,risicle/django,dwightgunning/django,tbeadle/django,double-y/django,lunafeng/django,etos/django,reinout/django,z0by/django,yakky/django,techdragon/django,yograterol/django,YangSongzhou/django,NullSoldier/django,rsalmaso/django,epandurski/django,raphaelmerx/django,bikong2/django,beck/django,seocam/django,delhivery/django,mcella/django,jenalgit/django,gunchleoc/django,frdb194/django,jejimenez/django,takeshineshiro/django,twz915/django,ryanahall/django,adamchainz/django,kholidfu/django,AltSchool/django,hcsturix74/django,jenalgit/django,djbaldey/django,mathspace/django,memtoko/django,DrMeers/django,davidharrigan/django,etos/django,matiasb/django,labcodes/django,kholidfu/django,carljm/django,kswiat/django,kcpawan/django,gcd0318/django,deployed/django,jhoos/django,schinckel/django,marckuz/django,dracos/django,liuliwork/django,elky/django,waytai/django,JorgeCoock/django,varunnaganathan/django,Nepherhotep/django,jmcarp/django,varunnaganathan/django,nhippenmeyer/django,gdub/django,frePPLe/django,AltSchool/django,claudep/django,programadorjc/django,akaariai/django,krisys/django,WSDC-NITWarangal/django,etos/django,extremewaysback/django,theo-l/django,vitaly4uk/django,kangfend/django,mrfuxi/django,yograterol/django,hkchenhongyi/django,Matt-Deacalion/django,peterlauri/django,davidharrigan/django,tysonclugg/django,alrifqi/django,evansd/django,tbeadle/django,kcpawan/django,dbaxa/django,frePPLe/django,hynekcer/django,sadaf2605/django,evansd/django,blighj/django,xrmx/django,SujaySKumar/django,akshatharaj/django,pasqualguerrero/django,arun6582/django,spisneha25/django,quamilek/django,erikr/django,harisibrahimkv/django,eugena/django,ArnossArnossi/django,abomyi/django,oinopion/django,himleyb85/django,DONIKAN/django,alexmorozov/django,dhruvagarwal/django,hcsturix74/django,megaumi/django,zerc/django,MarkusH/django,mrbox/django,quxiaolong1504/django,ryangallen/django,rlugojr/django,Yong-Lee/django,sdcooke/django,dpetzold/django,mcardillo55/django,sarvex/django,knifenomad/dja
ngo,PetrDlouhy/django,alimony/django,elky/django,kosz85/django,daniponi/django,Argon-Zhou/django,syaiful6/django,GhostThrone/django,Nepherhotep/django,megaumi/django,scorphus/django,hottwaj/django,vmarkovtsev/django,GitAngel/django,abomyi/django,frePPLe/django,ckirby/django,NullSoldier/django,mjtamlyn/django,craynot/django,hunter007/django,ar45/django,wkschwartz/django,oberlin/django,shtouff/django,Korkki/django,risicle/django,blueyed/django,joakim-hove/django,nemesisdesign/django,googleinterns/django,gengue/django,sdcooke/django,myang321/django,scorphus/django,hybrideagle/django,rsalmaso/django,rizumu/django,TimYi/django,savoirfairelinux/django,willhardy/django,tysonclugg/django,mjtamlyn/django,MatthewWilkes/django,x111ong/django,alilotfi/django,aspidites/django,b-me/django,jscn/django,Adnn/django,riteshshrv/django,dfunckt/django,hobarrera/django,jyotsna1820/django,Anonymous-X6/django,taaviteska/django,sgzsh269/django,elena/django,wweiradio/django,shaistaansari/django,tomchristie/django,eugena/django,mathspace/django,davidharrigan/django,blueyed/django,zanderle/django,jasonwzhy/django,baylee/django,aroche/django,yigitguler/django,tragiclifestories/django,caotianwei/django,gdi2290/django,Mixser/django,erikr/django,divio/django,ivandevp/django,h4r5h1t/django-hauthy,Korkki/django,andela-ooladayo/django,ar45/django,ericfc/django,vincepandolfo/django,HonzaKral/django,drjeep/django,moreati/django,shacker/django,charettes/django,HonzaKral/django,caotianwei/django,Adnn/django,jn7163/django,mjtamlyn/django,sam-tsai/django,adelton/django,jmcarp/django,bikong2/django,BMJHayward/django,hunter007/django,craynot/django,kholidfu/django,ptoraskar/django,ghedsouza/django,marctc/django,delhivery/django,DasIch/django,PolicyStat/django,poiati/django,dracos/django,jrrembert/django,hobarrera/django,x111ong/django,ptoraskar/django,unaizalakain/django,djbaldey/django,darjeeling/django,aroche/django,mathspace/django,pquentin/django,zsiciarz/django,apollo13/django,wweiradio/django,charettes
/django,dfdx2/django,hobarrera/django,eyohansa/django,Balachan27/django,jasonwzhy/django,salamer/django,tuhangdi/django,ArnossArnossi/django,curtisstpierre/django,bitcity/django,jeezybrick/django,MikeAmy/django,ulope/django,wweiradio/django,SoftwareMaven/django,BlindHunter/django,stewartpark/django,ifduyue/django,TridevGuha/django,rlugojr/django,curtisstpierre/django,jpic/django,KokareIITP/django,haxoza/django,arun6582/django,dwightgunning/django,BrotherPhil/django,BlindHunter/django,dfdx2/django,neiudemo1/django,shtouff/django,rsvip/Django,sbellem/django,jhoos/django,pasqualguerrero/django,jsoref/django,django/django,pipermerriam/django,jarshwah/django,asser/django,marckuz/django,anant-dev/django,frankvdp/django,daniponi/django,dpetzold/django,blighj/django,programadorjc/django,GhostThrone/django,mmardini/django,devops2014/djangosite,YangSongzhou/django,ryanahall/django,jylaxp/django,abomyi/django,mrfuxi/django,oscaro/django,beni55/django,himleyb85/django,ghedsouza/django,leeon/annotated-django,MarcJoan/django,vincepandolfo/django,runekaagaard/django-contrib-locking,kevintaw/django,liuliwork/django,tbeadle/django,alimony/django,pquentin/django,jpic/django,dursk/django,HonzaKral/django,mcella/django,duqiao/django,alrifqi/django,rmboggs/django,benjaminjkraft/django,kamyu104/django,synasius/django,davgibbs/django,daniponi/django,dgladkov/django,asser/django,sdcooke/django,frankvdp/django,denis-pitul/django,yigitguler/django,areski/django,mitya57/django,marissazhou/django,hottwaj/django,helenst/django,adamchainz/django,evansd/django,payeldillip/django,akintoey/django,jdelight/django,avanov/django,webgeodatavore/django,TimBuckley/effective_django,akintoey/django,techdragon/django,piquadrat/django,pauloxnet/django,mitya57/django,dbaxa/django,wweiradio/django,Y3K/django,indevgr/django,dfunckt/django,jejimenez/django,harisibrahimkv/django,TridevGuha/django,beckastar/django,zhaodelong/django,chyeh727/django,roselleebarle04/django,tcwicklund/django,stevenewey/django,shaib/dj
ango,hnakamur/django,MikeAmy/django,marissazhou/django,poiati/django,ghedsouza/django,mcardillo55/django,ryanahall/django,duqiao/django,hkchenhongyi/django,MatthewWilkes/django,jyotsna1820/django,vmarkovtsev/django,gdub/django,jsoref/django,beckastar/django,PetrDlouhy/django,gitaarik/django,zhaodelong/django,haxoza/django,DONIKAN/django,petecummings/django,bikong2/django,drjeep/django,joakim-hove/django,alexallah/django,liu602348184/django,jn7163/django,ataylor32/django,jdelight/django,salamer/django,sopier/django,filias/django,bspink/django,GitAngel/django,yewang15215/django,nju520/django,djbaldey/django,mewtaylor/django,megaumi/django,nhippenmeyer/django,uranusjr/django,BlindHunter/django,delhivery/django,jgoclawski/django,gohin/django,zanderle/django,delhivery/django,alexallah/django,asser/django,rizumu/django,MarcJoan/django,shaistaansari/django,coldmind/django,drjeep/django,weiawe/django,ajoaoff/django,labcodes/django,Y3K/django,archen/django,kcpawan/django,koniiiik/django,sbellem/django,rockneurotiko/django,mcrowson/django,Adnn/django,robhudson/django,zhaodelong/django,synasius/django,sarthakmeh03/django,Anonymous-X6/django,vitan/django,loic/django,charettes/django,archen/django,jnovinger/django,ghickman/django,RossBrunton/django,eugena/django,yewang15215/django,Vixionar/django,mcardillo55/django,feroda/django,sarthakmeh03/django,dracos/django,sephii/django,willhardy/django,jrrembert/django,hackerbot/DjangoDev,jyotsna1820/django,archen/django,tomchristie/django,gitaarik/django,BMJHayward/django,nemesisdesign/django,AndrewGrossman/django,marcelocure/django,follow99/django,ajaali/django,jasonwzhy/django,rwillmer/django,felixxm/django,gchp/django,fafaman/django,syaiful6/django,uranusjr/django,b-me/django,stewartpark/django,programadorjc/django,xadahiya/django,knifenomad/django,eyohansa/django,takeshineshiro/django,yamila-moreno/django,mcrowson/django,theo-l/django,JavML/django,Leila20/django,tysonclugg/django,zulip/django,fpy171/django,follow99/django,kangfend/dj
ango,GaussDing/django,gcd0318/django,MatthewWilkes/django,barbuza/django,RevelSystems/django,yakky/django,tuhangdi/django,sadaf2605/django,sam-tsai/django,ebar0n/django,baylee/django,payeldillip/django,dsanders11/django,b-me/django,ckirby/django,NullSoldier/django,simonw/django,arun6582/django,rockneurotiko/django,filias/django,jenalgit/django,zulip/django,himleyb85/django,digimarc/django,rizumu/django,piquadrat/django,codepantry/django,sjlehtin/django,monetate/django,asser/django,twz915/django,JorgeCoock/django,avanov/django,rrrene/django,akaariai/django,mcardillo55/django,kosz85/django,szopu/django,huang4fstudio/django,katrid/django,xadahiya/django,moreati/django,edmorley/django,peterlauri/django,apocquet/django,darkryder/django,blighj/django,adelton/django,EliotBerriot/django,frankvdp/django,crazy-canux/django,druuu/django,syphar/django,frishberg/django,KokareIITP/django,nemesisdesign/django,DasIch/django,WSDC-NITWarangal/django,darjeeling/django,jrrembert/django,PolicyStat/django,dpetzold/django,lmorchard/django,vitaly4uk/django,loic/django,sopier/django,auready/django,huang4fstudio/django,dursk/django,shownomercy/django,jpic/django,eyohansa/django,digimarc/django,filias/django,xrmx/django,jgoclawski/django,sgzsh269/django,gdub/django,spisneha25/django,atul-bhouraskar/django,hottwaj/django,carljm/django,ojengwa/django-1,quxiaolong1504/django,piquadrat/django,avanov/django,fpy171/django,marissazhou/django,aisipos/django,mitya57/django,dgladkov/django,tanmaythakur/django,takeshineshiro/django,Nepherhotep/django,kosz85/django,rajsadho/django,felixxm/django,xwolf12/django,rtindru/django,YYWen0o0/python-frame-django,tayfun/django,googleinterns/django,mewtaylor/django,elijah513/django,irwinlove/django,lwiecek/django,ajaali/django,spisneha25/django,yask123/django,kevintaw/django,maxsocl/django,elena/django,MikeAmy/django,MatthewWilkes/django,oscaro/django,supriyantomaftuh/django,DrMeers/django,TimYi/django,mmardini/django,kaedroho/django,anant-dev/django,jallohm/django
,vitaly4uk/django,tuhangdi/django,litchfield/django,beck/django,tuhangdi/django,roselleebarle04/django,blindroot/django,codepantry/django,aerophile/django,yamila-moreno/django,extremewaysback/django,EliotBerriot/django,Matt-Deacalion/django,gchp/django,gohin/django,guettli/django,anant-dev/django,jarshwah/django,etos/django,elky/django,savoirfairelinux/django,alilotfi/django,mshafiq9/django,beckastar/django,rhertzog/django,kisna72/django,saydulk/django,z0by/django,RossBrunton/django,mmardini/django,myang321/django,ziima/django,Argon-Zhou/django,xwolf12/django,solarissmoke/django,ojake/django,liuliwork/django,marqueedev/django,rmboggs/django,SujaySKumar/django,wkschwartz/django,ojengwa/django-1,leeon/annotated-django,caotianwei/django,jaywreddy/django,lsqtongxin/django,aerophile/django,tanmaythakur/django,yograterol/django,auvipy/django,duqiao/django,jpic/django,sarvex/django,dydek/django,rajsadho/django,avneesh91/django,takeshineshiro/django,intgr/django,pipermerriam/django,shacker/django,lmorchard/django,mitya57/django,elkingtonmcb/django,PetrDlouhy/django,ghedsouza/django,marckuz/django,nhippenmeyer/django,craynot/django,barbuza/django,adelton/django,hackerbot/DjangoDev,leeon/annotated-django,apollo13/django,katrid/django,olasitarska/django,charettes/django,anant-dev/django,willharris/django,yakky/django,henryfjordan/django,takis/django,tragiclifestories/django,YangSongzhou/django,sgzsh269/django,marissazhou/django,extremewaysback/django,shownomercy/django,MarcJoan/django,waytai/django,jgoclawski/django,yask123/django,vmarkovtsev/django,knifenomad/django,HousekeepLtd/django,syphar/django,aspidites/django,blueyed/django,schinckel/django,Endika/django,edevil/django,seanwestfall/django,liuliwork/django,alexmorozov/django,Adnn/django,apollo13/django,PetrDlouhy/django,salamer/django,ecederstrand/django,haxoza/django,robhudson/django,krisys/django,oscaro/django,zedr/django,ytjiang/django,ebar0n/django,benjaminjkraft/django,zhoulingjun/django,IRI-Research/django,takis/dj
ango,manhhomienbienthuy/django,ytjiang/django,rynomster/django,knifenomad/django,zhaodelong/django,jvkops/django,GitAngel/django,JavML/django,googleinterns/django,davgibbs/django,jhg/django,sbellem/django,t0in4/django,tanmaythakur/django,oinopion/django,djbaldey/django,rsalmaso/django,timgraham/django,xwolf12/django,gdi2290/django,darkryder/django,jasonbot/django,mitchelljkotler/django,GaussDing/django,GaussDing/django,frdb194/django,jnovinger/django,gcd0318/django,jn7163/django,jasonbot/django,mmardini/django,sopier/django,webgeodatavore/django,karyon/django,Endika/django,lunafeng/django,barbuza/django,yamila-moreno/django,adamchainz/django,ckirby/django,cainmatt/django,mojeto/django,zedr/django,alexallah/django,labcodes/django,JorgeCoock/django,techdragon/django,Vixionar/django,taaviteska/django,leekchan/django_test,hynekcer/django,guettli/django,deployed/django,zerc/django,mttr/django,hkchenhongyi/django,irwinlove/django,simonw/django,alexallah/django,treyhunner/django,hcsturix74/django,jvkops/django,fenginx/django,apocquet/django,maxsocl/django,dfunckt/django,dursk/django,intgr/django,1013553207/django,areski/django,blindroot/django,HousekeepLtd/django,syaiful6/django,rrrene/django,denys-duchier/django,shaistaansari/django,shownomercy/django,gunchleoc/django,wetneb/django,jejimenez/django,Sonicbids/django,claudep/django,salamer/django,marqueedev/django,supriyantomaftuh/django,ckirby/django,rrrene/django,divio/django,stevenewey/django,irwinlove/django,ecederstrand/django,coldmind/django,mattrobenolt/django,SebasSBM/django,liu602348184/django,whs/django,ar45/django,ajoaoff/django,bobcyw/django,aroche/django,kisna72/django,marctc/django,IRI-Research/django,jylaxp/django,zsiciarz/django,BrotherPhil/django,Mixser/django,YYWen0o0/python-frame-django,ziima/django,petecummings/django,hcsturix74/django,sam-tsai/django,frdb194/django,twz915/django,jscn/django,cainmatt/django,liavkoren/djangoDev,camilonova/django,nju520/django,timgraham/django,zedr/django,h4r5h1t/django-ha
uthy,akshatharaj/django,fenginx/django,dydek/django,davidharrigan/django,aspidites/django,katrid/django,harisibrahimkv/django,reinout/django,TimYi/django,nealtodd/django,jgoclawski/django,solarissmoke/django,seocam/django,TridevGuha/django,marcelocure/django,kamyu104/django,1013553207/django,joakim-hove/django,rajsadho/django,ryangallen/django,x111ong/django,ojengwa/django-1,vitan/django,takis/django,henryfjordan/django,rsvip/Django,rhertzog/django,loic/django,gunchleoc/django,SebasSBM/django,koniiiik/django,mattseymour/django,mitchelljkotler/django,errx/django,matiasb/django,Yong-Lee/django,pquentin/django,tcwicklund/django,Mixser/django,Leila20/django,Balachan27/django,IRI-Research/django,mlavin/django,irwinlove/django,koniiiik/django,follow99/django,HousekeepLtd/django,solarissmoke/django,iambibhas/django,areski/django,nju520/django,bobcyw/django,gcd0318/django,spisneha25/django,ArnossArnossi/django,1013553207/django,carljm/django,wsmith323/django,varunnaganathan/django,dhruvagarwal/django,quamilek/django,github-account-because-they-want-it/django,payeldillip/django,beck/django,fenginx/django,aidanlister/django,jvkops/django,baylee/django,atul-bhouraskar/django,yewang15215/django,neiudemo1/django,risicle/django,edevil/django,frankvdp/django,jaywreddy/django,lsqtongxin/django,mshafiq9/django,dfunckt/django,huang4fstudio/django,rlugojr/django,Beauhurst/django,jhoos/django,bspink/django,neiudemo1/django,ironbox360/django,mbox/django,gannetson/django,MikeAmy/django,ptoraskar/django,saydulk/django,kamyu104/django,rapilabs/django,rhertzog/django,jylaxp/django,petecummings/django,mewtaylor/django,loic/django,litchfield/django,kutenai/django,koordinates/django,DasIch/django,rmboggs/django,digimarc/django,weiawe/django,liu602348184/django,hybrideagle/django,joequery/django,jsoref/django,HousekeepLtd/django,pasqualguerrero/django,mattrobenolt/django,dracos/django,Korkki/django,joakim-hove/django,gannetson/django,alimony/django,andela-ooladayo/django,divio/django,benjaminjk
raft/django,weiawe/django,denys-duchier/django,feroda/django,dydek/django,bobcyw/django,MarkusH/django,taaviteska/django,georgemarshall/django,rsvip/Django,risicle/django,feroda/django,dpetzold/django,quxiaolong1504/django,Balachan27/django,waytai/django,krishna-pandey-git/django,sarthakmeh03/django,pipermerriam/django,payeldillip/django,joequery/django,andela-ifageyinbo/django,runekaagaard/django-contrib-locking,unaizalakain/django,jallohm/django,AltSchool/django,eyohansa/django,gdi2290/django,scorphus/django,rtindru/django,t0in4/django,elkingtonmcb/django,litchfield/django,wsmith323/django,gannetson/django,ytjiang/django,ASCrookes/django,elena/django,MoritzS/django,rajsadho/django,rizumu/django,jejimenez/django,edmorley/django,poiati/django,mewtaylor/django,savoirfairelinux/django,h4r5h1t/django-hauthy,RevelSystems/django,kutenai/django,xadahiya/django,solarissmoke/django,SujaySKumar/django,rapilabs/django,akshatharaj/django,bak1an/django,beck/django,yograterol/django,uranusjr/django,dudepare/django,Y3K/django,TimYi/django,kamyu104/django,sjlehtin/django,indevgr/django,donkirkby/django,mttr/django,elijah513/django,WillGuan105/django,katrid/django,drjeep/django,akshatharaj/django,unaizalakain/django,RossBrunton/django,django/django,ghickman/django,quxiaolong1504/django,wetneb/django,EliotBerriot/django,lmorchard/django,pauloxnet/django,mojeto/django,andresgz/django,ivandevp/django,mattrobenolt/django,raphaelmerx/django,gannetson/django,sergei-maertens/django,shaistaansari/django,simone/django-gb,sam-tsai/django,bspink/django,benjaminjkraft/django,yask123/django,jeezybrick/django,tomchristie/django,avneesh91/django,synasius/django,gdub/django,yamila-moreno/django,riteshshrv/django,Sonicbids/django,DrMeers/django,frishberg/django,aroche/django,Beauhurst/django,donkirkby/django,gengue/django,sgzsh269/django,dgladkov/django,digimarc/django,darkryder/django,AndrewGrossman/django,Matt-Deacalion/django,tanmaythakur/django,SebasSBM/django,hnakamur/django,scorphus/django,an
dreif/django,bak1an/django,codepantry/django,denis-pitul/django,seanwestfall/django,tayfun/django,bikong2/django,ajoaoff/django,marqueedev/django,oinopion/django,mlavin/django,ojengwa/django-1,bitcity/django,ironbox360/django,chyeh727/django,whs/django,double-y/django,jallohm/django,DONIKAN/django,github-account-because-they-want-it/django,YangSongzhou/django,rwillmer/django,mlavin/django,YYWen0o0/python-frame-django,jhg/django,aidanlister/django,django/django,georgemarshall/django,oberlin/django,gitaarik/django,liu602348184/django,monetate/django,stewartpark/django,druuu/django,kevintaw/django,andresgz/django,freakboy3742/django,andresgz/django,vitaly4uk/django,mattseymour/django,errx/django,hunter007/django,rlugojr/django,hottwaj/django,TimBuckley/effective_django,stewartpark/django,areski/django,rynomster/django,auready/django,syaiful6/django,nielsvanoch/django,dbaxa/django,maxsocl/django,georgemarshall/django,Argon-Zhou/django,shaib/django,marcelocure/django,BrotherPhil/django,alimony/django,neiudemo1/django,sarvex/django,lwiecek/django,jyotsna1820/django,akintoey/django,kisna72/django,lmorchard/django,freakboy3742/django,mcrowson/django,doismellburning/django,edmorley/django,hnakamur/django,mattrobenolt/django,tayfun/django,szopu/django,marckuz/django,ecederstrand/django,jasonbot/django,shaib/django,rsalmaso/django,caotianwei/django,delinhabit/django,ticosax/django,camilonova/django,kswiat/django,t0in4/django,andela-ifageyinbo/django,JavML/django,olasitarska/django,vincepandolfo/django,Yong-Lee/django,kaedroho/django,ABaldwinHunter/django-clone,nealtodd/django,kcpawan/django,t0in4/django,ulope/django,nju520/django,davgibbs/django,mrbox/django,KokareIITP/django,krisys/django,camilonova/django,PolicyStat/django,yakky/django,mshafiq9/django,SoftwareMaven/django,riteshshrv/django,ar45/django,zulip/django,MoritzS/django,hybrideagle/django,donkirkby/django,ecederstrand/django,andyzsf/django,dfdx2/django,Endika/django,darkryder/django,carljm/django,peterlauri/django,m
arqueedev/django,webgeodatavore/django,chyeh727/django,henryfjordan/django,mrbox/django,nemesisdesign/django,monetate/django,marcelocure/django,xadahiya/django,andresgz/django,craynot/django,runekaagaard/django-contrib-locking,ABaldwinHunter/django-clone-classic,ojake/django,frishberg/django,dwightgunning/django,jaywreddy/django,mcella/django,MarkusH/django,elijah513/django,ABaldwinHunter/django-clone-classic,theo-l/django,EmadMokhtar/Django,moreati/django,shacker/django,mshafiq9/django,elkingtonmcb/django,leekchan/django_test,aisipos/django,gitaarik/django,MoritzS/django,kswiat/django,ABaldwinHunter/django-clone,seanwestfall/django,tragiclifestories/django,tragiclifestories/django,alrifqi/django,ticosax/django,nealtodd/django,darjeeling/django,beni55/django,b-me/django,ABaldwinHunter/django-clone,wkschwartz/django,peterlauri/django,MoritzS/django,druuu/django,ericfc/django,willharris/django,dgladkov/django,ziima/django,quamilek/django,vmarkovtsev/django,helenst/django,hybrideagle/django,shacker/django,raphaelmerx/django,dursk/django,edmorley/django,jscn/django,rsvip/Django,olasitarska/django,nealtodd/django,bobcyw/django,sergei-maertens/django,mcrowson/django,joequery/django,jaywreddy/django,andela-ooladayo/django,kutenai/django,koordinates/django,fafaman/django,beckastar/django,krishna-pandey-git/django,delinhabit/django,abomyi/django,guettli/django,tcwicklund/django,BrotherPhil/django,auready/django,yigitguler/django,WSDC-NITWarangal/django,ticosax/django,joequery/django,andreif/django,myang321/django,yewang15215/django,rwillmer/django,kangfend/django,shtouff/django,whs/django,theo-l/django,megaumi/django,programadorjc/django,MounirMesselmeni/django,ptoraskar/django,jmcarp/django,camilonova/django,z0by/django,dudepare/django,hunter007/django,hackerbot/DjangoDev,MounirMesselmeni/django,x111ong/django,apocquet/django,rockneurotiko/django,sarthakmeh03/django,ericfc/django,bitcity/django,treyhunner/django,bitcity/django,GaussDing/django,rwillmer/django,ifduyue/django
,MounirMesselmeni/django,Yong-Lee/django,phalt/django,stevenewey/django,blighj/django,uranusjr/django,weiawe/django,felixxm/django,ryangallen/django,jeezybrick/django,maxsocl/django,EmadMokhtar/Django,rtindru/django,deployed/django,henryfjordan/django,tcwicklund/django,akaariai/django,AltSchool/django,hassanabidpk/django,mojeto/django,nhippenmeyer/django,beni55/django,karyon/django,ulope/django,ziima/django,tayfun/django,jhg/django,Endika/django,dudepare/django,jrrembert/django,jylaxp/django,apollo13/django,EliotBerriot/django,xrmx/django,WillGuan105/django,vitan/django,GitAngel/django,avneesh91/django,rapilabs/django,andela-ooladayo/django,atul-bhouraskar/django,dsanders11/django,matiasb/django,hynekcer/django,double-y/django,ryanahall/django,pauloxnet/django,rynomster/django,kholidfu/django,github-account-because-they-want-it/django,RossBrunton/django,manhhomienbienthuy/django,frishberg/django,blindroot/django,cainmatt/django,willhardy/django,xwolf12/django,fenginx/django,ytjiang/django,MarkusH/django,fafaman/django,lsqtongxin/django,doismellburning/django,kevintaw/django,bak1an/django,liavkoren/djangoDev,ericfc/django,delinhabit/django,waytai/django,mlavin/django,robhudson/django,BlindHunter/django,sephii/django,simone/django-gb,hackerbot/DjangoDev,karyon/django,zulip/django,gchp/django,dydek/django,darjeeling/django,devops2014/djangosite,kangfend/django,szopu/django,iambibhas/django,blueyed/django,phalt/django,rockneurotiko/django,jsoref/django,simonw/django,hkchenhongyi/django,andreif/django,elena/django,DasIch/django,dfdx2/django,willharris/django,adamchainz/django,jnovinger/django,intgr/django,atul-bhouraskar/django,oinopion/django,oberlin/django,errx/django,akaariai/django,KokareIITP/django,koordinates/django,varunnaganathan/django,hnakamur/django,reinout/django,kisna72/django,oberlin/django,donkirkby/django,elkingtonmcb/django,EmadMokhtar/Django,jvkops/django,chyeh727/django,ticosax/django,mttr/django,petecummings/django,alexmorozov/django,evansd/django,Arg
on-Zhou/django,alilotfi/django,marctc/django,Leila20/django,ABaldwinHunter/django-clone,gohin/django,jasonwzhy/django,ASCrookes/django,intgr/django,willhardy/django,dwightgunning/django,gchp/django,ajoaoff/django,reinout/django,himleyb85/django,tbeadle/django,jhoos/django,simone/django-gb,auready/django,mattseymour/django,memtoko/django,zhoulingjun/django,aspidites/django,zerc/django,mitchelljkotler/django,mattseymour/django,blindroot/django,ifduyue/django,beni55/django,Beauhurst/django,1013553207/django,erikr/django,saydulk/django,Mixser/django,jasonbot/django,aidanlister/django,zhoulingjun/django,AndrewGrossman/django,divio/django,wetneb/django,ABaldwinHunter/django-clone-classic,aisipos/django,vincepandolfo/django,roselleebarle04/django,devops2014/djangosite,crazy-canux/django,moreati/django,monetate/django,Sonicbids/django,savoirfairelinux/django,krishna-pandey-git/django,marctc/django,Balachan27/django,coldmind/django,zsiciarz/django,ajaali/django,iambibhas/django,zsiciarz/django,Korkki/django,curtisstpierre/django,Beauhurst/django,HonzaKral/django,mrbox/django,ryangallen/django,alexmorozov/django,frePPLe/django,mbox/django,matiasb/django,ataylor32/django,hobarrera/django,poiati/django,quamilek/django,h4r5h1t/django-hauthy,frdb194/django,aerophile/django,phalt/django,RevelSystems/django,rynomster/django,SebasSBM/django,ebar0n/django,ghickman/django,sergei-maertens/django,double-y/django,xrmx/django,mitchelljkotler/django,andela-ifageyinbo/django,kutenai/django,sergei-maertens/django,sjlehtin/django,BMJHayward/django,krishna-pandey-git/django,jhg/django,shtouff/django,nielsvanoch/django,ojake/django,dsanders11/django,sdcooke/django,haxoza/django,oscaro/django,tomchristie/django,robhudson/django,NullSoldier/django,JorgeCoock/django,kosz85/django,TimBuckley/effective_django,SoftwareMaven/django,ghickman/django,ebar0n/django,gengue/django,ataylor32/django,andyzsf/django,yask123/django,dsanders11/django,harisibrahimkv/django,feroda/django,gohin/django,stevenewey/dja
ngo,huang4fstudio/django,Nepherhotep/django,hassanabidpk/django,lwiecek/django,curtisstpierre/django,simonw/django,jarshwah/django,ASCrookes/django,MounirMesselmeni/django,bspink/django,rtindru/django,timgraham/django,lunafeng/django,crazy-canux/django,avanov/django,wetneb/django,willharris/django,mjtamlyn/django,WSDC-NITWarangal/django,aisipos/django,crazy-canux/django,Anonymous-X6/django,freakboy3742/django,helenst/django,webgeodatavore/django,dhruvagarwal/django,wsmith323/django,ABaldwinHunter/django-clone-classic,andyzsf/django,timgraham/django,GhostThrone/django,extremewaysback/django,lwiecek/django,zhoulingjun/django,rapilabs/django,zerc/django,jn7163/django,DONIKAN/django,jenalgit/django,jdelight/django,sadaf2605/django,seanwestfall/django,labcodes/django,epandurski/django,wkschwartz/django,tysonclugg/django,Matt-Deacalion/django,druuu/django,schinckel/django,epandurski/django,claudep/django,duqiao/django,wsmith323/django,Y3K/django,denys-duchier/django,andela-ifageyinbo/django,raphaelmerx/django,ASCrookes/django,ataylor32/django,cainmatt/django,adelton/django,denys-duchier/django,WillGuan105/django,googleinterns/django,sopier/django,Anonymous-X6/django,synasius/django,shownomercy/django,lsqtongxin/django,phalt/django,mrfuxi/django,jarshwah/django,elky/django,piquadrat/django,ArnossArnossi/django,coldmind/django,auvipy/django,saydulk/django,BMJHayward/django,JavML/django,avneesh91/django,auvipy/django,github-account-because-they-want-it/django,shaib/django,fafaman/django,zanderle/django,pipermerriam/django | django/__init__.py | django/__init__.py | VERSION = (1, 7, 0, 'alpha', 1)
def get_version(*args, **kwargs):
    """Return Django's version string.

    Thin pass-through wrapper: the real formatting logic lives in
    django.utils.version so this top-level module stays minimal.
    """
    # Import lazily so the cost is only paid when the function is called.
    from django.utils.version import get_version as _get_version
    return _get_version(*args, **kwargs)
def setup():
    """Bootstrap the framework.

    Touching the settings configures them as a side effect; then logging
    is set up and the application registry is populated.
    """
    from django.apps import apps
    from django.conf import settings
    from django.utils.log import configure_logging

    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
    apps.populate(settings.INSTALLED_APPS)
| VERSION = (1, 7, 0, 'alpha', 0)
def get_version(*args, **kwargs):
    """Proxy to django.utils.version.get_version.

    Imported on demand to keep django/__init__.py free of the
    version-formatting machinery.
    """
    from django.utils import version
    return version.get_version(*args, **kwargs)
def setup():
    """Initialise the framework: configure the settings (triggered as a
    side effect of the first settings access), set up logging, and load
    every installed application into the app registry."""
    from django.apps import apps
    from django.conf import settings
    from django.utils.log import configure_logging
    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
    apps.populate(settings.INSTALLED_APPS)
| bsd-3-clause | Python |
e494e38b28fbafc70a1e5315a780d64e315113b4 | Make the way chameleon settings are defined more generic; any Chameleon setting can now be in the chameleon config section. | morepath/more.chameleon | more/chameleon/main.py | more/chameleon/main.py | import morepath
import chameleon
class ChameleonApp(morepath.App):
    # Marker subclass: Morepath registers configuration (settings sections,
    # template engines) against an App class, so the extension needs its own
    # subclass even though it adds no behavior of its own.
    pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
    """Default values for the 'chameleon' settings section."""
    defaults = {'auto_reload': False}
    return defaults
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
    """Build a render callable for a ``.pt`` template file.

    Every attribute of the ``chameleon`` settings section is forwarded
    verbatim to ``PageTemplateFile`` as a keyword argument.
    """
    options = vars(settings.chameleon)
    template = chameleon.PageTemplateFile(path, **options)

    def render(content, request):
        # Seed the namespace with the request, then let the supplied
        # content take precedence over it (matches original ordering).
        namespace = {'request': request}
        namespace.update(content)
        return original_render(template.render(**namespace), request)

    return render
| import morepath
import chameleon
class ChameleonApp(morepath.App):
    # Empty App subclass used purely as a registration target for the
    # Morepath directives declared below.
    pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
    """Provide the default 'chameleon' settings section."""
    return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
    """Create a renderer for a Chameleon page template.

    Only ``auto_reload`` from the 'chameleon' settings section is passed
    through to ``PageTemplateFile``.
    """
    options = {'auto_reload': settings.chameleon.auto_reload}
    template = chameleon.PageTemplateFile(path, **options)

    def render(content, request):
        # 'request' is the base binding; keys in content override it.
        namespace = {'request': request}
        namespace.update(content)
        return original_render(template.render(**namespace), request)

    return render
| bsd-3-clause | Python |
b88abd98834529f1342d69e2e91b79efd68e5e8d | Add get parameter parsing for fakeshibboleth auto mode | uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi | backend/uclapi/dashboard/middleware/fake_shibboleth_middleware.py | backend/uclapi/dashboard/middleware/fake_shibboleth_middleware.py | from django.utils.deprecation import MiddlewareMixin
class FakeShibbolethMiddleWare(MiddlewareMixin):
    """Copy fake Shibboleth attributes from request parameters into
    ``request.META`` so downstream code sees them as real auth headers.

    Intended for development/testing: the client must opt in explicitly
    via the ``convert-post-headers`` / ``convert-get-headers`` flags.
    """

    def process_request(self, request):
        """Mirror opted-in POST/GET parameters into request.META.

        Returns None so Django continues normal request processing.
        """
        if request.POST.get("convert-post-headers") == "1":
            for key in request.POST:
                request.META[key] = request.POST[key]
        if request.GET.get("convert-get-headers") == "1":
            for key in request.GET:
                # Mimic WSGI header naming: upper-case, dashes replaced by
                # underscores, then the HTTP_ prefix.
                # BUG FIX: str.replace() returns a new string; the original
                # code discarded its result, so dashes were never converted.
                http_key = "HTTP_" + key.upper().replace("-", "_")
                request.META[http_key] = request.GET[key]
| from django.utils.deprecation import MiddlewareMixin
class FakeShibbolethMiddleWare(MiddlewareMixin):
    """Promote opted-in POST parameters into ``request.META`` to emulate
    Shibboleth-provided attributes during development."""

    def process_request(self, request):
        # Only act when the client explicitly asks for header conversion.
        if request.POST.get("convert-post-headers") != "1":
            return
        for name in request.POST:
            request.META[name] = request.POST[name]
| mit | Python |
7f7e3eb729be0c204b54c1fb11d92986abdf00e1 | fix version for python 3 | msune/dpdk,john-mcnamara-intel/dpdk,msune/dpdk,tsphillips/dpdk-fork,venkynv/dpdk-mirror,msune/dpdk,mixja/dpdk,john-mcnamara-intel/dpdk,john-mcnamara-intel/dpdk,tsphillips/dpdk-fork,mixja/dpdk,venkynv/dpdk-mirror,john-mcnamara-intel/dpdk,venkynv/dpdk-mirror,mixja/dpdk,msune/dpdk,venkynv/dpdk-mirror,tsphillips/dpdk-fork,tsphillips/dpdk-fork,mixja/dpdk | doc/guides/conf.py | doc/guides/conf.py | # BSD LICENSE
# Copyright(c) 2010-2015 Intel Corporation. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import subprocess

# Sphinx configuration for the DPDK guides.
project = 'DPDK'
html_show_copyright = False

# Ask the top-level build system for the release string. decode() is
# required on Python 3, where check_output() returns bytes, and strip()
# drops the trailing newline emitted by make so the version renders
# cleanly in the generated docs.
version = subprocess.check_output(
    ['make', '-sRrC', '../../', 'showversion']).decode('utf-8').strip()
release = version

master_doc = 'index'

# (startdocname, targetname, title, author, documentclass)
latex_documents = [
    ('index',
     'doc.tex',
     '',
     '',
     'manual')
]
| # BSD LICENSE
# Copyright(c) 2010-2015 Intel Corporation. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import subprocess

# Sphinx configuration for the DPDK documentation build.
project = 'DPDK'
html_show_copyright = False

# Ask the DPDK build system for the version string.  check_output()
# returns bytes on Python 3, so decode it before handing it to Sphinx
# (otherwise the version shows up as "b'...'" in the rendered docs).
version = subprocess.check_output(
    ['make', '-sRrC', '../../', 'showversion']).decode('utf-8')
release = version

master_doc = 'index'
latex_documents = [
    ('index',
     'doc.tex',
     '',
     '',
     'manual')
]
| mit | Python |
063d3b0b531dd6e59181094eccc3f88085fc988a | Add option to update newsletter subscription. | geotagx/pybossa,jean/pybossa,stefanhahmann/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,Scifabric/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,OpenNewsLabs/pybossa,inteligencia-coletiva-lsd/pybossa,PyBossa/pybossa,Scifabric/pybossa,stefanhahmann/pybossa | pybossa/newsletter/__init__.py | pybossa/newsletter/__init__.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""PyBossa module for subscribing users to Mailchimp lists."""
import mailchimp
class Newsletter(object):
    """Newsletter class to handle mailchimp subscriptions."""

    def __init__(self, app=None):
        """Init method for flask extensions."""
        self.app = app
        if app is not None:  # pragma: no cover
            self.init_app(app)

    def init_app(self, app):
        """Configure newsletter Mailchimp client from the app config."""
        self.app = app
        self.client = mailchimp.Mailchimp(app.config.get('MAILCHIMP_API_KEY'))
        self.list_id = app.config.get('MAILCHIMP_LIST_ID')

    def subscribe_user(self, user, list_id=None, old_email=None):
        """Subscribe a user to a mailchimp list, or update an existing member.

        :param user: user object providing ``fullname`` and ``email_addr``.
        :param list_id: target list id; defaults to the configured list.
        :param old_email: previous subscription address; when given, the
            existing member is updated to the user's current address.
        """
        try:
            if list_id is None:
                list_id = self.list_id
            merge_vars = {'FNAME': user.fullname}
            if old_email:
                # Update the existing member, switching it to the new address.
                email = {'email': old_email}
                merge_vars['new-email'] = user.email_addr
                update_existing = True
            else:
                email = {'email': user.email_addr}
                merge_vars['email'] = user.email_addr
                update_existing = False
            self.client.lists.subscribe(list_id, email, merge_vars,
                                        update_existing=update_existing)
        except mailchimp.Error as e:  # was Py2-only "except Error, e" syntax
            msg = 'MAILCHIMP: An error occurred: %s - %s' % (e.__class__, e)
            self.app.logger.error(msg)
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""PyBossa module for subscribing users to Mailchimp lists."""
import mailchimp
class Newsletter(object):
    """Newsletter class to handle mailchimp subscriptions."""

    def __init__(self, app=None):
        """Init method for flask extensions."""
        self.app = app
        if app is not None:  # pragma: no cover
            self.init_app(app)

    def init_app(self, app):
        """Configure newsletter Mailchimp client from the app config."""
        self.app = app
        self.client = mailchimp.Mailchimp(app.config.get('MAILCHIMP_API_KEY'))
        self.list_id = app.config.get('MAILCHIMP_LIST_ID')

    def subscribe_user(self, user, list_id=None, old_email=None):
        """Subscribe a user to a mailchimp list, or update an existing member.

        Backward compatible: called without ``old_email`` it behaves as a
        plain subscription, as before.

        :param user: user object providing ``fullname`` and ``email_addr``.
        :param list_id: target list id; defaults to the configured list.
        :param old_email: previous subscription address; when given, the
            existing member is updated to the user's current address.
        """
        try:
            if list_id is None:
                list_id = self.list_id
            merge_vars = {'FNAME': user.fullname}
            if old_email:
                # Update the existing member, switching it to the new address.
                email = {'email': old_email}
                merge_vars['new-email'] = user.email_addr
                update_existing = True
            else:
                email = {'email': user.email_addr}
                merge_vars['email'] = user.email_addr
                update_existing = False
            self.client.lists.subscribe(list_id, email, merge_vars,
                                        update_existing=update_existing)
        except mailchimp.Error as e:  # was Py2-only "except Error, e" syntax
            msg = 'MAILCHIMP: An error occurred: %s - %s' % (e.__class__, e)
            self.app.logger.error(msg)
| agpl-3.0 | Python |
595fd1627095d1945f3826c1f7b6223458f14c21 | Add additional tests to TestCharacter | BeagleInc/PyReadableDiff | pydiff/tests/test_character.py | pydiff/tests/test_character.py | import pydiff
import utils
class TestCharacter(utils.TestBase):
    """Character-level diff tests (pydiff.CharacterDiff)."""

    def setUp(self):
        super(TestCharacter, self).setUp(pydiff.CharacterDiff)

    def test_diff_characters_general(self):
        # (old, new, expected XML) triples exercised in order.
        cases = (
            ('New Value.', 'New ValueMoreData.',
             'New Value<ins>MoreData</ins>.'),
            ('New ValueMoreData.', 'New Value.',
             'New Value<del>MoreData</del>.'),
            (' helloworld ', 'Hello, world',
             '<del> h</del><ins>H</ins>ello<ins>, </ins>world<del> </del>'),
            ('restaura', 'aurant',
             '<del>rest</del>aura<ins>nt</ins>'),
        )
        for old, new, expected in cases:
            self.check_xml(old, new, expected)

    def test_diff_characters_corner_cases(self):
        # Identical, empty, pure-delete, pure-insert and full-replace inputs.
        cases = (
            ('New Value', 'New Value', 'New Value'),
            ('', '', ''),
            ('New Value', '', '<del>New Value</del>'),
            ('', 'New Value', '<ins>New Value</ins>'),
            ('abc', 'xyz', '<del>abc</del><ins>xyz</ins>'),
        )
        for old, new, expected in cases:
            self.check_xml(old, new, expected)

    def test_diff_characters_additional(self):
        # Long inputs with single-character moves around 1000-char runs.
        self.check_changes(
            'y' + 'a' * 1000 + 'x' + 'b' * 1000,
            'a' * 1000 + 'y' + 'b' * 1000 + 'x',
            [{'count': 1, 'value': 'y', 'removed': True, 'added': None},
             {'count': 1000, 'value': 'a' * 1000},
             {'count': 1, 'value': 'x', 'removed': True, 'added': None},
             {'count': 1, 'value': 'y', 'removed': None, 'added': True},
             {'count': 1000, 'value': 'b' * 1000},
             {'count': 1, 'value': 'x', 'removed': None, 'added': True}]
        )
| import pydiff
import utils
class TestCharacter(utils.TestBase):
    """Character-level diff tests (pydiff.CharacterDiff)."""

    def setUp(self):
        super(TestCharacter, self).setUp(pydiff.CharacterDiff)

    def test_diff_characters(self):
        # (old, new, expected XML) triples: general cases first, then the
        # corner cases (identical, empty, pure insert/delete/replace).
        cases = (
            ('New Value.', 'New ValueMoreData.',
             'New Value<ins>MoreData</ins>.'),
            ('New ValueMoreData.', 'New Value.',
             'New Value<del>MoreData</del>.'),
            (' helloworld ', 'Hello, world',
             '<del> h</del><ins>H</ins>ello<ins>, </ins>world<del> </del>'),
            ('restaura', 'aurant',
             '<del>rest</del>aura<ins>nt</ins>'),
            ('New Value', 'New Value', 'New Value'),
            ('', '', ''),
            ('New Value', '', '<del>New Value</del>'),
            ('', 'New Value', '<ins>New Value</ins>'),
            ('abc', 'xyz', '<del>abc</del><ins>xyz</ins>'),
        )
        for old, new, expected in cases:
            self.check_xml(old, new, expected)
| apache-2.0 | Python |
ceae688f8428f109e8bc0ce3a9dde332caa4ef01 | Improve __unicode__ string | GotlingSystem/apnea,GotlingSystem/apnea | src/apps/dive_log/models.py | src/apps/dive_log/models.py | # coding=utf-8
from django.db import models
from django.utils.translation import ugettext as _
from discipline.models import Discipline
class Session(models.Model):
    """A dive session: the date/time under which dives are grouped."""
    #pool = models.ForeignKey(Pool)
    date = models.DateField(verbose_name=_(u'Datum'))
    time = models.TimeField(verbose_name=_(u'Tid'))
    comment = models.CharField(verbose_name=_(u'Kommentar'), max_length=512, blank=True)

    class Meta:
        verbose_name = _(u'Session')
        verbose_name_plural = _(u'Sessioner')
        ordering = ['date', 'time']

    def __unicode__(self):
        # "<date> <time>" -- %s formatting yields the same text as str().
        return "%s %s" % (self.date, self.time)
class Dive(models.Model):
    """A single dive, belonging to a Session."""
    session = models.ForeignKey(Session)
    discipline = models.ForeignKey(Discipline, verbose_name=_(u'Disciplin'), null=True, blank=True)
    rest_duration = models.DurationField(_(u'Vila'), null=True, blank=True)
    start = models.TimeField(null=True, blank=True)
    dive_duration = models.DurationField(_(u'Dyktid'), null=True, blank=True)
    distance = models.IntegerField(_(u'Distans'), help_text=_(u'i meter'), null=True)
    temperature = models.IntegerField(_(u'Temperatur'), help_text=_(u'i celsius'), null=True, blank=True)
    comment = models.CharField(_(u'Kommentar'), max_length=512, blank=True)
    # TODO: Tag migrations broken with Django 1.7.2 https://github.com/alex/django-taggit/issues/285
    #tags = TaggableManager(verbose_name=_(u'Taggar'), blank=True)

    class Meta:
        verbose_name = _(u'Dyk')
        verbose_name_plural = _(u'Dyk')
        ordering = ['id']

    def __unicode__(self):
        # The dive duration, prefixed by the discipline abbreviation
        # when a discipline is set.
        label = str(self.dive_duration)
        if self.discipline:
            label = "{} - {}".format(self.discipline.abbreviation, label)
        return label
class DataPoint(models.Model):
    """One sampled measurement within a dive."""
    dive = models.ForeignKey(Dive)
    second = models.IntegerField(verbose_name=_(u'Sekund'))
    depth = models.DecimalField(verbose_name=_(u'Djup'), decimal_places=1, max_digits=4, null=True, blank=True)
    temperature = models.DecimalField(verbose_name=_(u'Temperatur'), decimal_places=1, max_digits=3, null=True, blank=True)
    heart_rate = models.IntegerField(verbose_name=_(u'Puls'), null=True, blank=True)

    class Meta:
        verbose_name = _(u'Datapunkt')
        verbose_name_plural = _(u'Datapunkter')
        ordering = ['second']

    def __unicode__(self):
        # "<second> - <depth> m" -- %s yields the same text as str().
        return u'%s - %s m' % (self.second, self.depth)
from django.db import models
from django.utils.translation import ugettext as _
from discipline.models import Discipline
class Session(models.Model):
    """A dive session: the date/time under which dives are grouped."""
    #pool = models.ForeignKey(Pool)
    date = models.DateField(verbose_name=_(u'Datum'))
    time = models.TimeField(verbose_name=_(u'Tid'))
    comment = models.CharField(verbose_name=_(u'Kommentar'), max_length=512, blank=True)

    class Meta:
        verbose_name = _(u'Session')
        verbose_name_plural = _(u'Sessioner')
        ordering = ['date', 'time']

    def __unicode__(self):
        # Display as "<date> <time>".
        return "{} {}".format(self.date, self.time)
class Dive(models.Model):
    """A single dive, belonging to a Session."""
    session = models.ForeignKey(Session)
    discipline = models.ForeignKey(Discipline, verbose_name=_(u'Disciplin'), null=True, blank=True)
    rest_duration = models.DurationField(_(u'Vila'), null=True, blank=True)
    start = models.TimeField(null=True, blank=True)
    dive_duration = models.DurationField(_(u'Dyktid'), null=True, blank=True)
    distance = models.IntegerField(_(u'Distans'), help_text=_(u'i meter'), null=True)
    temperature = models.IntegerField(_(u'Temperatur'), help_text=_(u'i celsius'), null=True, blank=True)
    comment = models.CharField(_(u'Kommentar'), max_length=512, blank=True)
    # TODO: Tag migrations broken with Django 1.7.2 https://github.com/alex/django-taggit/issues/285
    #tags = TaggableManager(verbose_name=_(u'Taggar'), blank=True)

    class Meta:
        verbose_name = _(u'Dyk')
        verbose_name_plural = _(u'Dyk')
        ordering = ['id']

    def __unicode__(self):
        # Prefix with the discipline abbreviation when one is set.
        if self.discipline:
            return "{} - {}".format(self.discipline.abbreviation, str(self.dive_duration))
        else:
            return str(self.dive_duration)
class DataPoint(models.Model):
    """One sampled measurement within a dive."""
    dive = models.ForeignKey(Dive)
    second = models.IntegerField(verbose_name=_(u'Sekund'))
    depth = models.DecimalField(verbose_name=_(u'Djup'), decimal_places=1, max_digits=4, null=True, blank=True)
    temperature = models.DecimalField(verbose_name=_(u'Temperatur'), decimal_places=1, max_digits=3, null=True, blank=True)
    heart_rate = models.IntegerField(verbose_name=_(u'Puls'), null=True, blank=True)

    class Meta:
        verbose_name = _(u'Datapunkt')
        verbose_name_plural = _(u'Datapunkter')
        ordering = ['second']

    def __unicode__(self):
        # BUG FIX: the template string was returned without .format(), so
        # the literal "{}" placeholders were displayed instead of the values.
        return u'{} - {} m - {} C - {} slag/minut'.format(
            self.second, self.depth, self.temperature, self.heart_rate)
5c7f0b8515ca59bc464f81f36dc0f4419530c7b1 | add import_validate api url | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | molo/core/content_import/urls.py | molo/core/content_import/urls.py | from django.conf.urls import patterns, include, url
# URL routes for the content-import API, mapped to dotted view paths.
urlpatterns = patterns(
    '',
    url(r'^api-auth/', include(
        'rest_framework.urls', namespace='rest_framework')),
    url(r'^repos/$',
        'molo.core.content_import.views.get_repos',
        name='get_repos'),
    url(r'^repos/(?P<name>[\w-]+)/$',
        'molo.core.content_import.views.get_repo_languages',
        name='get_repo_languages'),
    url(r'^repos/(?P<name>[\w-]+)/import/',
        'molo.core.content_import.views.import_content',
        name='import_content'),
    url(r'^languages/$',
        'molo.core.content_import.views.get_available_languages',
        name='get_available_languages'),
    url(r'^repos/(?P<name>[\w-]+)/validate/',
        'molo.core.content_import.views.import_validate',
        name='import_validate'),
)
| from django.conf.urls import patterns, include, url
# URL routes for the content-import API, mapped to dotted view paths.
urlpatterns = patterns(
    '',
    url(r'^api-auth/', include(
        'rest_framework.urls', namespace='rest_framework')),
    url(
        r'^repos/$',
        'molo.core.content_import.views.get_repos',
        name='get_repos'
    ),
    url(
        r'^repos/(?P<name>[\w-]+)/$',
        'molo.core.content_import.views.get_repo_languages',
        name='get_repo_languages'
    ),
    url(
        r'^repos/(?P<name>[\w-]+)/import/',
        'molo.core.content_import.views.import_content',
        name='import_content'
    ),
    url(
        r'^languages/$',
        'molo.core.content_import.views.get_available_languages',
        name='get_available_languages'
    ),
    # Added: pre-import validation endpoint for a repo.
    url(
        r'^repos/(?P<name>[\w-]+)/validate/',
        'molo.core.content_import.views.import_validate',
        name='import_validate'
    ),
)
| bsd-2-clause | Python |
74650b7cad318403e0d6c75ec693962c1124fe16 | Allow user to set speeds | rzzzwilson/morse,rzzzwilson/morse | morse_trainer/test_send_morse.py | morse_trainer/test_send_morse.py | #!/bin/env python3
# -*- coding: utf-8 -*-
"""
Test the 'send_morse' module.
"""
import sys
import os
import getopt
from send_morse import SendMorse
# Derive the program name for usage messages, minus any '.py' suffix.
prog_name = sys.argv[0]
if prog_name.endswith('.py'):
    prog_name = prog_name[:-len('.py')]
def usage(msg=None):
    """Print the usage text, preceded by *msg* (if any) inside a banner."""
    if msg:
        banner = '*' * 80
        print('%s\n%s\n%s' % (banner, msg, banner))
    print("\n"
          "CLI program to send morse strings from CLI input.\n\n"
          "Usage: %s [-h] [-s c,w]\n\n"
          "where -h means print this help and stop\n"
          "      -s c,w means set char and word speeds" % prog_name)
# parse the CLI params
argv = sys.argv[1:]
try:
    # BUG FIX: getopt long-option names must NOT include the leading '--'
    # ('--speed=' would only ever match '----speed' on the command line).
    (opts, args) = getopt.getopt(argv, 'hs:', ['help', 'speed='])
except getopt.GetoptError as err:
    usage(err)
    sys.exit(1)

# Default character and word speeds (words per minute).
cwpm = 25
wpm = 15
for (opt, param) in opts:
    if opt in ['-h', '--help']:
        usage()
        sys.exit(0)
    elif opt in ['-s', '--speed']:
        speeds = param.split(',')
        if len(speeds) != 2:
            usage('-s option must be followed by two speeds, eg: -s 10,5')
            # BUG FIX: previously fell through to a ValueError on unpack.
            sys.exit(1)
        (cwpm, wpm) = speeds
        cwpm = int(cwpm)
        wpm = int(wpm)

# Configure the sender; the prompt shows the speeds actually in effect.
morse = SendMorse()
morse.set_speeds(cwpm=cwpm, wpm=wpm)
(cwpm, wpm) = morse.get_speeds()
prompt = '%d/%d> ' % (cwpm, wpm)

# Read lines until EOF/interrupt or an empty line, sending each as morse.
while True:
    try:
        code = input(prompt)
    except (EOFError, KeyboardInterrupt):
        sys.exit(0)
    if not code:
        break
    morse.send(code)
| #!/bin/env python3
# -*- coding: utf-8 -*-
"""
Test the 'send_morse' module.
"""
import sys
import os
import getopt
from send_morse import SendMorse
# get program name from sys.argv (used in the usage message), with any
# '.py' extension stripped
prog_name = sys.argv[0]
if prog_name.endswith('.py'):
    prog_name = prog_name[:-3]
def usage(msg=None):
    """Print the usage message, preceded by *msg* (if given) in a banner."""
    if msg:
        print(('*'*80 + '\n%s\n' + '*'*80) % msg)
    # Typo fix in the help text: 'priont' -> 'print'.
    print("\n"
          "CLI program to send morse strings from CLI input.\n\n"
          "Usage: %s [-h]\n\n"
          "where -h means print this help and stop" % prog_name)
# parse the CLI params
argv = sys.argv[1:]
try:
    (opts, args) = getopt.getopt(argv, 'h', ['help'])
except getopt.GetoptError as err:
    # Unknown option: show the error plus the usage text and fail.
    usage(err)
    sys.exit(1)
for (opt, param) in opts:
    if opt in ['-h', '--help']:
        usage()
        sys.exit(0)

# Fixed character/word speeds (words per minute) for this test harness;
# the prompt shows the speeds actually reported by the sender.
morse = SendMorse()
cwpm = 25
wpm = 15
morse.set_speeds(cwpm=cwpm, wpm=wpm)
(cwpm, wpm) = morse.get_speeds()
prompt = '%d/%d> ' % (cwpm, wpm)

# Read lines until EOF/interrupt or an empty line, sending each as morse.
while True:
    try:
        code = input(prompt)
    except (EOFError, KeyboardInterrupt):
        sys.exit(0)
    if not code:
        break
    morse.send(code)
| mit | Python |
89fb6886752170520a8dad800bdab37cf77daf6c | Test the handling of non-JSON payloads | python/the-knights-who-say-ni,python/the-knights-who-say-ni | ni/test/test_github.py | ni/test/test_github.py | import asyncio
import unittest
from aiohttp import hdrs, web
from .. import github
# Inheriting from web.Request is bad as the docs explicitly say not to create
# instances manually.
# http://aiohttp.readthedocs.org/en/stable/web_reference.html#aiohttp.web.Request
class FakeRequest:
    """Minimal stand-in for aiohttp's web.Request, whose docs say not to
    instantiate it manually."""

    def __init__(self, payload=None, content_type='application/json'):
        # A None sentinel instead of `payload={}` avoids the shared
        # mutable-default-argument pitfall while keeping the same default.
        self.content_type = content_type
        self._payload = {} if payload is None else payload

    async def json(self):
        """Return the decoded body, mimicking web.Request.json()."""
        return self._payload
class GitHubTests(unittest.TestCase):
    """Tests for the GitHub webhook handling in the `github` module."""

    # Pull-request events that should actually be processed; everything
    # else is acknowledged and skipped (see test_process_skipping).
    acceptable = {github.PullRequestEvent.opened,
                  github.PullRequestEvent.unlabeled,
                  github.PullRequestEvent.synchronize}

    def run_awaitable(self, coroutine):
        # Drive a coroutine to completion on a private event loop that is
        # closed again when the test finishes.
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        return loop.run_until_complete(coroutine)

    def test_bad_content_type(self):
        # Only accept 'application/json'.
        # https://developer.github.com/webhooks/creating/#content-type
        request = FakeRequest(content_type='application/x-www-form-urlencoded')
        result = self.run_awaitable(github.Host.process(request))
        self.assertIsInstance(result, web.StreamResponse)
        self.assertEqual(result.status, 415)

    def test_ping(self):
        # GitHub can ping a webhook to verify things are set up.
        # https://developer.github.com/webhooks/#ping-event
        payload = {'zen': 'something pithy'}
        result = self.run_awaitable(github.Host.process(FakeRequest(payload)))
        self.assertIsInstance(result, web.StreamResponse)
        self.assertEqual(result.status, 204)

    def test_process_skipping(self):
        # Only create a ContibHost object if the PR is opened, unlabeled, or
        # synchronized.
        for event in github.PullRequestEvent:
            if event in self.acceptable:
                continue
            payload = {'action': event.value}
            request = FakeRequest(payload)
            result = self.run_awaitable(github.Host.process(request))
            self.assertIsInstance(result, web.StreamResponse)
            self.assertEqual(result.status, 204)

    @unittest.skip('not implemented')
    def test_process_opened(self):
        ...

    @unittest.skip('not implemented')
    def test_process_unlabeled(self):
        ...
@unittest.skip('not implemented')
def test_process_synchronize(self):
... | import asyncio
import unittest
from aiohttp import hdrs, web
from .. import github
# Inheriting from web.Request is bad as the docs explicitly say not to create
# instances manually.
# http://aiohttp.readthedocs.org/en/stable/web_reference.html#aiohttp.web.Request
class FakeRequest:
    """Minimal stand-in for aiohttp's web.Request, whose docs say not to
    instantiate it manually."""

    def __init__(self, payload=None, content_type='application/json'):
        # Generalized: payload is now optional (defaults to an empty JSON
        # body) via a None sentinel, avoiding a shared mutable default.
        self.content_type = content_type
        self._payload = {} if payload is None else payload

    async def json(self):
        """Return the decoded body, mimicking web.Request.json()."""
        return self._payload
class GitHubTests(unittest.TestCase):
    """Tests for the GitHub webhook handling in the `github` module."""

    # Pull-request events that should actually be processed; everything
    # else is acknowledged and skipped (see test_process_skipping).
    acceptable = {github.PullRequestEvent.opened,
                  github.PullRequestEvent.unlabeled,
                  github.PullRequestEvent.synchronize}

    def run_coroutine(self, coroutine):
        # Drive a coroutine to completion on a private event loop that is
        # closed again when the test finishes.
        loop = asyncio.new_event_loop()
        self.addCleanup(loop.close)
        return loop.run_until_complete(coroutine)

    @unittest.skip('not implemented')
    def test_bad_content_type(self):
        # Only accept 'application/json'.
        ...

    def test_ping(self):
        # GitHub can ping a webhook to verify things are set up.
        # https://developer.github.com/webhooks/#ping-event
        payload = {'zen': 'something pithy'}
        result = self.run_coroutine(github.Host.process(FakeRequest(payload)))
        self.assertIsInstance(result, web.StreamResponse)
        self.assertEqual(result.status, 204)

    def test_process_skipping(self):
        # Only create a ContibHost object if the PR is opened, unlabeled, or
        # synchronized.
        for event in github.PullRequestEvent:
            if event in self.acceptable:
                continue
            payload = {'action': event.value}
            request = FakeRequest(payload)
            result = self.run_coroutine(github.Host.process(request))
            self.assertIsInstance(result, web.StreamResponse)
            self.assertEqual(result.status, 204)

    @unittest.skip('not implemented')
    def test_process_opened(self):
        ...

    @unittest.skip('not implemented')
    def test_process_unlabeled(self):
        ...
@unittest.skip('not implemented')
def test_process_synchronize(self):
... | apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.