commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
0e46b47a3053e63f50d6fd90b1ba810e4694c9be | Implement system configurations load from file. | 10nin/blo,10nin/blo | blo/__init__.py | blo/__init__.py | import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self.template_dir = config['TEMPLATE']['TEMPLATE_DIR']
self.db_file_path = config['DB']['DB_PATH']
# create tables
self.db_control = DBControl(self.db_file_path)
self.db_control.create_tables()
self.db_control.close_connect()
def insert_article(self, file_path):
self.db_control = DBControl(self.db_file_path)
article = BloArticle(self.template_dir)
article.load_from_file(file_path)
self.db_control.insert_article(article)
self.db_control.close_connect()
| from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, db_file_path, template_dir=""):
self.template_dir = template_dir
# create tables
self.db_file_path = db_file_path
self.db_control = DBControl(self.db_file_path)
self.db_control.create_tables()
self.db_control.close_connect()
def insert_article(self, file_path):
self.db_control = DBControl(self.db_file_path)
article = BloArticle(self.template_dir)
article.load_from_file(file_path)
self.db_control.insert_article(article)
self.db_control.close_connect()
| mit | Python |
5576ad27979a4143c2194e1ba2ab47a007e42ea9 | update utils/add_articles.py | longjj/BeeBlog,longjj/BeeBlog | utils/add_articles.py | utils/add_articles.py | #!usr/bin/python3
"""
This module is used to assist me to add articles faster.
"""
import os
import yaml
import datetime
import shutil
def modification_date(filename):
t = os.path.getmtime(filename)
return datetime.datetime.fromtimestamp(t)
def add_config(postname):
target_path = '../articles/config'
curr_posts_num = len(os.listdir(target_path))
template = {
'description': '',
'post_id': curr_posts_num + 1,
'tags': [],
'file': postname,
'keywords': [],
'lang': 'zh-cmn-Hans',
'column': -1,
# 'column': 'LeetCode Learning',
}
target = os.path.join(target_path, os.path.splitext(postname)[0]+'.yml')
# print (template)
with open(target, 'w', encoding='utf-8') as outfile:
yaml.dump(template, outfile, default_flow_style=False)
if __name__ == '__main__':
if os.path.isdir('./drafts') is not True:
print ('No drafts dir!')
os.makedirs('./drafts')
exit(1)
drafts = os.listdir('./drafts')
drafts.sort()
for x in drafts:
filepath = os.path.join('./drafts',x)
d = modification_date(filepath)
pdate_str = d.strftime('%Y-%m-%d')
# move the artcle to the articles dir
tmp = x.replace(' ', '-')
tmp = tmp.replace('_', '-')
postname = pdate_str + '-' + tmp
post_path = os.path.join('../articles/posts/', postname)
shutil.move(filepath, post_path)
# add articles config
add_config(postname)
print ('Done!')
| #!usr/bin/python3
"""
This module is used to assist me to add articles faster.
"""
import os
import yaml
import datetime
import shutil
def modification_date(filename):
t = os.path.getmtime(filename)
return datetime.datetime.fromtimestamp(t)
def add_config(postname):
target_path = '../articles/config'
curr_posts_num = len(os.listdir(target_path))
template = {
'description': '',
'post_id': curr_posts_num + 1,
'tags': [],
'file': postname,
'keywords': [],
'lang': 'zh-cmn-Hans',
'column': -1,
}
target = os.path.join(target_path, os.path.splitext(postname)[0]+'.yml')
# print (template)
with open(target, 'w', encoding='utf-8') as outfile:
yaml.dump(template, outfile, default_flow_style=False)
if __name__ == '__main__':
if os.path.isdir('./drafts') is not True:
print ('No drafts dir!')
os.makedirs('./drafts')
exit(1)
drafts = os.listdir('./drafts')
drafts.sort()
for x in drafts:
filepath = os.path.join('./drafts',x)
d = modification_date(filepath)
pdate_str = d.strftime('%Y-%m-%d')
# move the artcle to the articles dir
tmp = x.replace(' ', '-')
tmp = tmp.replace('_', '-')
postname = pdate_str + '-' + tmp
post_path = os.path.join('../articles/posts/', postname)
shutil.move(filepath, post_path)
# add articles config
add_config(postname)
print ('Done!')
| mit | Python |
d15d9c49183af21f020750475427c51e611ade4a | check if there is a token | okfn-brasil/viralata,okfn-brasil/viralata,okfn-brasil/viralata,okfn-brasil/viralata | viralata/utils.py | viralata/utils.py | #!/usr/bin/env python
# coding: utf-8
def decode_validate_token(token, sv, api):
"""This tries to be a general function to decode and validade any token.
Receives a token, a SignerVerifier and an API.
"""
if not token:
api.abort(400, "Error: No token received!")
try:
decoded = sv.decode(token)
# options={"verify_exp": False})
except sv.ExpiredSignatureError:
api.abort(400, "Error: Expired token!")
except:
# TODO: tratar erros... quais são?
raise
# Verify if token has all fields
for fields in ['username', 'type', 'exp']:
if fields not in decoded.keys():
api.abort(400, "Error: Malformed token! No: %s" % fields)
return decoded
def decode_token(token, sv, api):
"""This function tries to decode and valitade a token used by a client micro
service. A client micro service is anyone without knowlegde of revoked main
tokens. Because of this, they should only accept micro tokens."""
decoded = decode_validate_token(token, sv, api)
if decoded['type'] != 'micro':
api.abort(400, "Error: This is not a micro token!")
return decoded
| #!/usr/bin/env python
# coding: utf-8
def decode_validate_token(token, sv, api):
"""This tries to be a general function to decode and validade any token.
Receives a token, a SignerVerifier and an API.
"""
try:
decoded = sv.decode(token)
# options={"verify_exp": False})
except sv.ExpiredSignatureError:
api.abort(400, "Error: Expired token!")
except:
# TODO: tratar erros... quais são?
raise
# Verify if token has all fields
for fields in ['username', 'type', 'exp']:
if fields not in decoded.keys():
api.abort(400, "Error: Malformed token! No: %s" % fields)
return decoded
def decode_token(token, sv, api):
"""This function tries to decode and valitade a token used by a client micro
service. A client micro service is anyone without knowlegde of revoked main
tokens. Because of this, they should only accept micro tokens."""
decoded = decode_validate_token(token, sv, api)
if decoded['type'] != 'micro':
api.abort(400, "Error: This is not a micro token!")
return decoded
| agpl-3.0 | Python |
87ebf8c9c096939e52f35f0b717add165d2288ad | Add query caching for nhmmer | RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode | rnacentral/nhmmer/models.py | rnacentral/nhmmer/models.py | """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from caching.base import CachingManager, CachingMixin # django-cache-machine
from django.db import models
class Query(CachingMixin, models.Model):
id = models.CharField(max_length=36, primary_key=True)
query = models.TextField()
length = models.PositiveIntegerField()
objects = CachingManager()
class Meta:
db_table = 'nhmmer_query'
class Results(CachingMixin, models.Model):
id = models.AutoField(primary_key=True)
query_id = models.CharField(max_length=36, db_index=True)
result_id = models.PositiveIntegerField(db_index=True)
rnacentral_id = models.CharField(max_length=13, null=True)
description = models.TextField(null=True)
bias = models.FloatField(null=True)
query_start = models.PositiveIntegerField(null=True)
query_end = models.PositiveIntegerField(null=True)
target_start = models.PositiveIntegerField(null=True)
target_end = models.PositiveIntegerField(null=True)
target_length = models.PositiveIntegerField(null=True)
alignment = models.TextField(null=True)
score = models.FloatField(null=True)
e_value = models.FloatField(null=True)
objects = CachingManager()
class Meta:
db_table = 'nhmmer_results'
| """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.db import models
class Query(models.Model):
id = models.CharField(max_length=36, primary_key=True)
query = models.TextField()
length = models.PositiveIntegerField()
class Meta:
db_table = 'nhmmer_query'
class Results(models.Model):
id = models.AutoField(primary_key=True)
query_id = models.CharField(max_length=36, db_index=True)
result_id = models.PositiveIntegerField(db_index=True)
rnacentral_id = models.CharField(max_length=13, null=True)
description = models.TextField(null=True)
bias = models.FloatField(null=True)
query_start = models.PositiveIntegerField(null=True)
query_end = models.PositiveIntegerField(null=True)
target_start = models.PositiveIntegerField(null=True)
target_end = models.PositiveIntegerField(null=True)
target_length = models.PositiveIntegerField(null=True)
alignment = models.TextField(null=True)
score = models.FloatField(null=True)
e_value = models.FloatField(null=True)
class Meta:
db_table = 'nhmmer_results'
| apache-2.0 | Python |
e616538202111d823cb23941bd8e0fa0ecb4f052 | Read the docs. | morganbengtsson/mos | doc/source/conf.py | doc/source/conf.py | import sys
import os
import subprocess
#Read the docs
#read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
#if read_the_docs_build:
# subprocess.call('cd ../; make xml', shell=True)
def run_doxygen(folder):
"""Run the doxygen make command in the designated folder"""
try:
retcode = subprocess.call("cd %s; make xml" % folder, shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
except OSError as e:
sys.stderr.write("doxygen execution failed: %s" % e)
def generate_doxygen_xml(app):
"""Run the doxygen make commands if we're on the ReadTheDocs server"""
read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
if read_the_docs_build:
run_doxygen("../")
# -- General configuration ------------------------------------------------
extensions = ["breathe"]
breathe_projects = {"MOS": "../build/xml/"}
breate_default_project = "MOS"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'MOS'
copyright = '2015, Morgan Bengtsson'
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
exclude_patterns = []
pygments_style = 'sphinx'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'MOSdoc'
| import sys
import os
import subprocess
#Read the docs
#read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
#if read_the_docs_build:
# subprocess.call('cd ../; make xml', shell=True)
def run_doxygen(folder):
"""Run the doxygen make command in the designated folder"""
try:
retcode = subprocess.call("cd %s; make xml" % folder, shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
except OSError as e:
sys.stderr.write("doxygen execution failed: %s" % e)
def generate_doxygen_xml(app):
"""Run the doxygen make commands if we're on the ReadTheDocs server"""
read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
if read_the_docs_build:
run_doxygen("../../examples/doxygen")
# -- General configuration ------------------------------------------------
extensions = ["breathe"]
breathe_projects = {"MOS": "../build/xml/"}
breate_default_project = "MOS"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'MOS'
copyright = '2015, Morgan Bengtsson'
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
exclude_patterns = []
pygments_style = 'sphinx'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'MOSdoc'
| mit | Python |
91d24b3ce272ff166d1e828f0822e7b9a0124d2c | Fix broken tests after moving _autoconvert to autotype | hkbakke/zfssnap,hkbakke/zfssnap | tests/test_dataset.py | tests/test_dataset.py | import pytest
from zfssnap import autotype, Host, Dataset
import subprocess
PROPERTY_PREFIX = 'zfssnap'
class TestDataset(object):
@pytest.fixture
def fs(self):
fs_name = 'zpool/dataset'
host = Host()
return Dataset(host, fs_name)
@pytest.fixture
def ssh_fs(self):
ssh_user = 'root'
ssh_host = 'host'
fs_name = 'zpool/dataset'
host = Host(ssh_user=ssh_user, ssh_host=ssh_host)
return Dataset(host, fs_name)
def test_autotype_to_int(self):
assert isinstance(autotype('123'), int)
def test_autotype_to_str(self):
assert isinstance(autotype('12f'), str)
def test_return_local_location(self, fs):
assert fs.location == 'zpool/dataset'
def test_return_ssh_location(self, ssh_fs):
assert ssh_fs.location == 'root@host:zpool/dataset' | import pytest
from zfssnap import Host, Dataset
import subprocess
PROPERTY_PREFIX = 'zfssnap'
class TestDataset(object):
@pytest.fixture
def fs(self):
fs_name = 'zpool/dataset'
host = Host()
return Dataset(host, fs_name)
@pytest.fixture
def ssh_fs(self):
ssh_user = 'root'
ssh_host = 'host'
fs_name = 'zpool/dataset'
host = Host(ssh_user=ssh_user, ssh_host=ssh_host)
return Dataset(host, fs_name)
def test_autoconvert_to_int(self):
assert isinstance(Dataset._autoconvert('123'), int)
def test_autoconvert_to_str(self):
assert isinstance(Dataset._autoconvert('12f'), str)
def test_return_local_location(self, fs):
assert fs.location == 'zpool/dataset'
def test_return_ssh_location(self, ssh_fs):
assert ssh_fs.location == 'root@host:zpool/dataset' | mit | Python |
ff71d66c2763adf554e07fdd42ae83437eef1f75 | Add bna.__version__ | Adys/python-bna,jleclanche/python-bna | bna/__init__.py | bna/__init__.py | """
python-bna
Battle.net Authenticator routines in Python.
Specification can be found here:
* <http://bnetauth.freeportal.us/specification.html>
Note: Link likely dead. Check webarchive.
"""
import pkg_resources
from .crypto import get_restore_code, get_token
from .http import HTTPError, get_time_offset, request_new_serial, restore
from .utils import get_otpauth_url, normalize_serial, prettify_serial
__all__ = [
"get_restore_code", "get_token",
"get_time_offset", "HTTPError", "request_new_serial", "restore",
"get_otpauth_url", "normalize_serial", "prettify_serial",
]
__version__ = pkg_resources.require("bna")[0].version
| """
python-bna
Battle.net Authenticator routines in Python.
Specification can be found here:
* <http://bnetauth.freeportal.us/specification.html>
Note: Link likely dead. Check webarchive.
"""
from .crypto import get_restore_code, get_token
from .http import get_time_offset, HTTPError, request_new_serial, restore
from .utils import get_otpauth_url, normalize_serial, prettify_serial
__all__ = [
"get_restore_code", "get_token",
"get_time_offset", "HTTPError", "request_new_serial", "restore",
"get_otpauth_url", "normalize_serial", "prettify_serial",
]
| mit | Python |
4aae64f59707d0fdedb4bf729655c735f2c751a3 | refactor unit test for ensembl service | Proteogenomics/trackhub-creator,Proteogenomics/trackhub-creator | tests/test_ensembl.py | tests/test_ensembl.py | #
# Author : Manuel Bernal Llinares
# Project : trackhub-creator
# Timestamp : 03-07-2017 11:51
# ---
# © 2017 Manuel Bernal Llinares <mbdebian@gmail.com>
# All rights reserved.
#
"""
Unit tests for Ensembl module
"""
import unittest
# App modules
import main_app
import config_manager
import ensembl.service
class TestEnsemblService(unittest.TestCase):
__CONFIG_FILE_NAME = "config_ensembl_module.json"
def test_test(self):
"""
This test has been used just for setting up the unit testing subsystem.
It always passes.
:return: no return value
"""
pass
def test_get_ensembl_current_release(self):
service = ensembl.service.get_service()
current_release_number = service.get_release_number()
print("Current release number ---> {}".format(current_release_number))
if __name__ == '__main__':
main_app.app_bootstrap()
unittest.main()
| #
# Author : Manuel Bernal Llinares
# Project : trackhub-creator
# Timestamp : 03-07-2017 11:51
# ---
# © 2017 Manuel Bernal Llinares <mbdebian@gmail.com>
# All rights reserved.
#
"""
Unit tests for Ensembl module
"""
import unittest
# App modules
import main_app
import config_manager
from ensembl.service import Service as EnsemblService
class TestEnsemblService(unittest.TestCase):
__CONFIG_FILE_NAME = "config_ensembl_module.json"
def test_test(self):
"""
This test has been used just for setting up the unit testing subsystem.
It always passes.
:return: no return value
"""
pass
def test_get_ensembl_current_release(self):
service = EnsemblService(config_manager.read_config_from_file(self.__CONFIG_FILE_NAME), self.__CONFIG_FILE_NAME)
current_release_number = service.get_release_number()
print("Current release number ---> {}".format(current_release_number))
if __name__ == '__main__':
main_app.app_bootstrap()
unittest.main()
| apache-2.0 | Python |
bc2395eb473a203d11df52d48968b6ab61e2c95e | Fix keyring issue where there were name space problems | varunarya10/python-openstackclient,openstack/python-openstackclient,openstack/python-openstackclient,BjoernT/python-openstackclient,dtroyer/python-openstackclient,metacloud/python-openstackclient,varunarya10/python-openstackclient,metacloud/python-openstackclient,dtroyer/python-openstackclient,redhat-openstack/python-openstackclient,BjoernT/python-openstackclient,derekchiang/python-openstackclient,derekchiang/python-openstackclient,redhat-openstack/python-openstackclient | openstackclient/common/openstackkeyring.py | openstackclient/common/openstackkeyring.py | # Copyright 2011-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Keyring backend for Openstack, to store encrypted password in a file."""
from Crypto.Cipher import AES
import keyring
import os
KEYRING_FILE = os.path.join(os.path.expanduser('~'), '.openstack-keyring.cfg')
class OpenstackKeyring(keyring.backends.file.BaseKeyring):
"""Openstack Keyring to store encrypted password."""
filename = KEYRING_FILE
def supported(self):
"""Applicable for all platforms, but not recommend."""
pass
def _init_crypter(self):
"""Initialize the crypter using the class name."""
block_size = 32
padding = '0'
# init the cipher with the class name, upto block_size
password = __name__[block_size:]
password = password + (block_size - len(password) %
block_size) * padding
return AES.new(password, AES.MODE_CFB)
def encrypt(self, password):
"""Encrypt the given password."""
crypter = self._init_crypter()
return crypter.encrypt(password)
def decrypt(self, password_encrypted):
"""Decrypt the given password."""
crypter = self._init_crypter()
return crypter.decrypt(password_encrypted)
def os_keyring():
"""Initialize the openstack keyring."""
ring = 'openstackclient.common.openstackkeyring.OpenstackKeyring'
return keyring.core.load_keyring(None, ring)
| # Copyright 2011-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Keyring backend for Openstack, to store encrypted password in a file."""
from Crypto.Cipher import AES
import keyring
import os
KEYRING_FILE = os.path.join(os.path.expanduser('~'), '.openstack-keyring.cfg')
class OpenstackKeyring(keyring.backends.file.BaseKeyring):
"""Openstack Keyring to store encrypted password."""
filename = KEYRING_FILE
def supported(self):
"""Applicable for all platforms, but not recommend."""
pass
def _init_crypter(self):
"""Initialize the crypter using the class name."""
block_size = 32
padding = '0'
# init the cipher with the class name, upto block_size
password = __name__[block_size:]
password = password + (block_size - len(password) %
block_size) * padding
return AES.new(password, AES.MODE_CFB)
def encrypt(self, password):
"""Encrypt the given password."""
crypter = self._init_crypter()
return crypter.encrypt(password)
def decrypt(self, password_encrypted):
"""Decrypt the given password."""
crypter = self._init_crypter()
return crypter.decrypt(password_encrypted)
def os_keyring():
"""Initialize the openstack keyring."""
keyring = 'openstackclient.common.openstackkeyring.OpenstackKeyring'
return keyring.core.load_keyring(None, keyring)
| apache-2.0 | Python |
ab07caf8c00e8e2047e7c45cde89e8980fde325c | Stop using intersphinx | klmitch/python-glanceclient,alexpilotti/python-glanceclient,varunarya10/python-glanceclient,JioCloud/python-glanceclient,mmasaki/python-glanceclient,klmitch/python-glanceclient,alexpilotti/python-glanceclient,openstack/python-glanceclient,JioCloud/python-glanceclient,mmasaki/python-glanceclient,varunarya10/python-glanceclient,openstack/python-glanceclient | doc/source/conf.py | doc/source/conf.py | # -*- coding: utf-8 -*-
#
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..')))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'oslosphinx']
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'python-glanceclient'
copyright = u'OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
('man/glance', 'glance', u'Client for OpenStack Images API',
[u'OpenStack Foundation'], 1),
]
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
(
'index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation',
'manual'
),
]
| # -*- coding: utf-8 -*-
#
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..')))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'oslosphinx']
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'python-glanceclient'
copyright = u'OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
('man/glance', 'glance', u'Client for OpenStack Images API',
[u'OpenStack Foundation'], 1),
]
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
(
'index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation',
'manual'
),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 | Python |
d6b6b789a90a49ca74fb2cf3b2349e5722ba9c5e | Fix conf.py for the ReadTheDocs site | stackforge/fuel-plugin-influxdb-grafana,stackforge/fuel-plugin-influxdb-grafana,stackforge/fuel-plugin-influxdb-grafana,stackforge/fuel-plugin-influxdb-grafana | doc/source/conf.py | doc/source/conf.py | extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'The InfluxDB-Grafana plugin for Fuel'
copyright = u'2015, Mirantis Inc.'
version = '0.9'
release = '0.9.0'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
latex_documents = [
('index', 'InfluxDBGrafana.tex', u'The InfluxDB-Grafana plugin for Fuel Documentation',
u'Mirantis Inc.', 'manual'),
]
# make latex stop printing blank pages between sections
# http://stackoverflow.com/questions/5422997/sphinx-docs-remove-blank-pages-from-generated-pdfs
latex_elements = {'classoptions': ',openany,oneside', 'babel':
'\\usepackage[english]{babel}'}
| # Always use the default theme for Readthedocs
RTD_NEW_THEME = True
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'The InfluxDB-Grafana plugin for Fuel'
copyright = u'2015, Mirantis Inc.'
version = '0.9'
release = '0.9.0'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'classic'
html_static_path = ['_static']
latex_documents = [
('index', 'InfluxDBGrafana.tex', u'The InfluxDB-Grafana plugin for Fuel Documentation',
u'Mirantis Inc.', 'manual'),
]
# make latex stop printing blank pages between sections
# http://stackoverflow.com/questions/5422997/sphinx-docs-remove-blank-pages-from-generated-pdfs
latex_elements = {'classoptions': ',openany,oneside', 'babel':
'\\usepackage[english]{babel}'}
| apache-2.0 | Python |
45b4cf15128e0b422a55b905ef936ca24f7cafe5 | Update longest-common-prefix.py | yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,githubutilities/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,githubutilities/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,yiwen-luo/LeetCode | Python/longest-common-prefix.py | Python/longest-common-prefix.py | # Time: O(n1 + n2 + ...)
# Space: O(1)
#
# Write a function to find the longest common prefix string amongst an array of strings.
#
class Solution:
# @return a string
def longestCommonPrefix(self, strs):
if not strs:
return ""
longest = strs[0]
for string in strs[1:]:
i = 0
while i < len(string) and i < len(longest) and string[i] == longest[i]:
i += 1
longest = longest[:i]
return longest
if __name__ == "__main__":
print Solution().longestCommonPrefix(["hello", "heaven", "heavy"])
| # Time: O(n1 + n2 + ...)
# Space: O(1)
#
# Write a function to find the longest common prefix string amongst an array of strings.
#
class Solution:
# @return a string
def longestCommonPrefix(self, strs):
if len(strs) == 0:
return ""
longest = strs[0]
for string in strs[1:]:
i = 0
while i < len(string) and i < len(longest) and string[i] == longest[i]:
i += 1
longest = longest[:i]
return longest
if __name__ == "__main__":
print Solution().longestCommonPrefix(["hello", "heaven", "heavy"])
| mit | Python |
eecd2913380717be5b59d290bc933992ed549447 | remove useless test data in test_init_db.py | JING-TIME/ustc-course,JING-TIME/ustc-course,JING-TIME/ustc-course,JING-TIME/ustc-course,JING-TIME/ustc-course | tests/test_init_db.py | tests/test_init_db.py | #!/usr/bin/env python3
# A SQLite database will be created at /tmp/test.db
# If you want to clear the database, just delete /tmp/test.db
import sys
sys.path.append('..') # fix import directory
from app import app,db
from app.models import *
from random import randint
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
db.create_all()
| #!/usr/bin/env python3
# Test database creation and basic data insertion
# A SQLite database will be created at /tmp/test.db
# If you want to clear the database, just delete /tmp/test.db
import sys
sys.path.append('..') # fix import directory
from app import app,db
from app.models import *
from random import randint
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
db.create_all()
for i in range(1, 10):
Student.create(sno='PB10' + str(randint(100000, 999999)), name='李博杰', dept= '11')
course = Course.create(cno='test'+str(randint(100000,999999)),term='20142',name='线性代数',dept='test')
print(course)
CourseReview.create(author_id=1, course_id=1, rate=4, upvote=2, content='Hello World')
CourseReview.create(author_id=2, course_id=1, rate=4, upvote=2, content='Hello World1')
CourseReview.create(author_id=3, course_id=1, rate=4, upvote=2, content='Hello World2')
CourseReview.create(author_id=4, course_id=1, rate=4, upvote=2, content='Hello World3')
CourseReview.create(author_id=5, course_id=1, rate=4, upvote=2, content='Hello World4')
CourseReview.create(author_id=6, course_id=1, rate=4, upvote=2, content='Hello World5')
try:
uuser_datastore.create_user(email='test@163.com',password='password')
except:
pass
print(Student.query.all())
print()
print(Student.query.filter_by(dept='11').first())
print(Course.query.all())
| agpl-3.0 | Python |
a0a1606d115efd3521ac957aa9a39efec60eda8c | Test the first item of the list, not the last | mollie/mollie-api-python | tests/test_issuers.py | tests/test_issuers.py | from mollie.api.objects.issuer import Issuer
from .utils import assert_list_object
def test_get_issuers(client, response):
"""Get all the iDeal issuers via the include querystring parameter."""
response.get('https://api.mollie.com/v2/methods/ideal?include=issuers', 'method_get_ideal_with_includes')
issuers = client.methods.get('ideal', include='issuers').issuers
assert_list_object(issuers, Issuer)
# check a single retrieved issuer
issuer = next(issuers)
assert issuer.image_svg == 'https://www.mollie.com/external/icons/ideal-issuers/ABNANL2A.svg'
assert issuer.image_size1x == 'https://www.mollie.com/images/checkout/v2/ideal-issuer-icons/ABNANL2A.png'
assert issuer.image_size2x == 'https://www.mollie.com/images/checkout/v2/ideal-issuer-icons/ABNANL2A%402x.png'
assert issuer.name == 'ABN AMRO'
assert issuer.resource == 'issuer'
assert issuer.id == 'ideal_ABNANL2A'
| from mollie.api.objects.issuer import Issuer
from .utils import assert_list_object
def test_get_issuers(client, response):
"""Get all the iDeal issuers via the include querystring parameter."""
response.get('https://api.mollie.com/v2/methods/ideal?include=issuers', 'method_get_ideal_with_includes')
issuers = client.methods.get('ideal', include='issuers').issuers
assert_list_object(issuers, Issuer)
# check the last issuer
assert issuer.image_svg == 'https://www.mollie.com/external/icons/ideal-issuers/FVLBNL22.svg'
assert issuer.image_size1x == 'https://www.mollie.com/images/checkout/v2/ideal-issuer-icons/FVLBNL22.png'
assert issuer.image_size2x == 'https://www.mollie.com/images/checkout/v2/ideal-issuer-icons/FVLBNL22.png'
assert issuer.name == 'van Lanschot'
assert issuer.resource == 'issuer'
assert issuer.id == 'ideal_FVLBNL22'
| bsd-2-clause | Python |
df836309dfe3ae1a6f34e23fd823e0888d883824 | use auth prod | mozilla-iam/cis,mozilla-iam/cis | e2e/test_person_api.py | e2e/test_person_api.py | import boto3
import json
import jsonschema
import http.client
from cis_profile import fake_profile
from cis_profile import WellKnown
client_id_name = '/iam/cis/development/change_client_id'
client_secret_name = '/iam/cis/development/change_service_client_secret'
base_url = 'api.dev.sso.allizom.org'
client = boto3.client('ssm')
def get_secure_parameter(parameter_name):
response = client.get_parameter(
Name=parameter_name,
WithDecryption=True
)
return response['Parameter']['Value']
def get_client_secret():
return get_secure_parameter(client_secret_name)
def get_client_id():
return get_secure_parameter(client_id_name)
def exchange_for_access_token():
conn = http.client.HTTPSConnection("auth.mozilla.auth0.com")
payload_dict = dict(
client_id=get_client_id(),
client_secret=get_client_secret(),
audience="api.dev.sso.allizom.org",
grant_type="client_credentials",
scopes="read:fullprofile"
)
payload = json.dumps(payload_dict)
headers = {'content-type': "application/json"}
conn.request("POST", "/oauth/token", payload, headers)
res = conn.getresponse()
data = json.loads(res.read())
return data['access_token']
def test_paginated_users():
access_token = exchange_for_access_token()
conn = http.client.HTTPSConnection(base_url)
headers = {'authorization': "Bearer {}".format(access_token)}
conn.request("GET", "/v2/users", headers=headers)
res = conn.getresponse()
data = json.loads(res.read())
return data
def test_get_single_user():
access_token = exchange_for_access_token()
conn = http.client.HTTPSConnection(base_url)
headers = {'authorization': "Bearer {}".format(access_token)}
conn.request("GET", "/v2/user/jeffreygreen%40gmail.com", headers=headers)
res = conn.getresponse()
data = json.loads(res.read())
return data
if __name__ == "__main__":
import pprint
print(pprint.pprint(test_paginated_users()))
print(pprint.pprint(test_get_single_user()))
| import boto3
import json
import jsonschema
import http.client
from cis_profile import fake_profile
from cis_profile import WellKnown
client_id_name = '/iam/cis/development/change_client_id'
client_secret_name = '/iam/cis/development/change_service_client_secret'
base_url = 'api.dev.sso.allizom.org'
client = boto3.client('ssm')
def get_secure_parameter(parameter_name):
response = client.get_parameter(
Name=parameter_name,
WithDecryption=True
)
return response['Parameter']['Value']
def get_client_secret():
return get_secure_parameter(client_secret_name)
def get_client_id():
return get_secure_parameter(client_id_name)
def exchange_for_access_token():
conn = http.client.HTTPSConnection("auth-dev.mozilla.auth0.com")
payload_dict = dict(
client_id=get_client_id(),
client_secret=get_client_secret(),
audience="api.dev.sso.allizom.org",
grant_type="client_credentials",
scopes="read:fullprofile"
)
payload = json.dumps(payload_dict)
headers = {'content-type': "application/json"}
conn.request("POST", "/oauth/token", payload, headers)
res = conn.getresponse()
data = json.loads(res.read())
return data['access_token']
def test_paginated_users():
access_token = exchange_for_access_token()
conn = http.client.HTTPSConnection(base_url)
headers = {'authorization': "Bearer {}".format(access_token)}
conn.request("GET", "/v2/users", headers=headers)
res = conn.getresponse()
data = json.loads(res.read())
return data
def test_get_single_user():
access_token = exchange_for_access_token()
conn = http.client.HTTPSConnection(base_url)
headers = {'authorization': "Bearer {}".format(access_token)}
conn.request("GET", "/v2/user/jeffreygreen%40gmail.com", headers=headers)
res = conn.getresponse()
data = json.loads(res.read())
return data
if __name__ == "__main__":
import pprint
print(pprint.pprint(test_paginated_users()))
print(pprint.pprint(test_get_single_user()))
| mpl-2.0 | Python |
08bfc1171c8f66fbf2ca161a9ee1fcf002a50e0f | implement tests for old routes | em92/pickup-rating,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/pickup-rating | tests/test_matches.py | tests/test_matches.py | from collections import OrderedDict
from .fixture import AppTestCase
class TestMatches(AppTestCase):
ORDER = 4
def test_matches_all(self):
cases = [
("/matches/", 0, None, 2),
("/matches/1/", 1, None, 2),
("/matches/ad/", 0, "ad", 1),
("/matches/ad/1/", 1, "ad", 1),
("/matches/tdm/", 0, "tdm", 1),
("/matches/player/76561198260599288/", 0, None, 1)
]
for case in cases:
uri = case[0]
page = case[1]
gametype = case[2]
page_count = case[3]
resp = self.get(uri)
self.assertEqual(resp.template.name, "match_list.html")
context = resp.context
self.assertIn('request', context)
self.assertIn('current_page', context)
self.assertIn('gametype', context)
self.assertIn('page_count', context)
self.assertEqual(context['current_page'], page)
self.assertEqual(context['gametype'], gametype)
self.assertEqual(context['page_count'], page_count)
sample_filename = "match_list_{}".format(cases.index(case)+1)
self.assertEqual(
context['matches'],
self.read_json_sample(sample_filename)
)
def test_old_routes(self):
pairs = [
("/player/123/matches", "/matches/player/123/"),
("/player/123/matches/", "/matches/player/123/"),
("/player/123/matches/456/", "/matches/player/123/456/"),
("/player/123/matches/blablabla/456/", "/matches/player/123/blablabla/456/")
]
for pair in pairs:
old_uri = pair[0]
new_uri = pair[1]
resp = self.get(old_uri, 302)
self.assertTrue(resp.headers['Location'].endswith(new_uri))
| from collections import OrderedDict
from .fixture import AppTestCase
class TestMatches(AppTestCase):
ORDER = 4
def test_matches_all(self):
cases = [
("/matches/", 0, None, 2),
("/matches/1/", 1, None, 2),
("/matches/ad/", 0, "ad", 1),
("/matches/ad/1/", 1, "ad", 1),
("/matches/tdm/", 0, "tdm", 1),
("/matches/player/76561198260599288/", 0, None, 1)
]
for case in cases:
uri = case[0]
page = case[1]
gametype = case[2]
page_count = case[3]
resp = self.get(uri)
self.assertEqual(resp.template.name, "match_list.html")
context = resp.context
self.assertIn('request', context)
self.assertIn('current_page', context)
self.assertIn('gametype', context)
self.assertIn('page_count', context)
self.assertEqual(context['current_page'], page)
self.assertEqual(context['gametype'], gametype)
self.assertEqual(context['page_count'], page_count)
sample_filename = "match_list_{}".format(cases.index(case)+1)
self.assertEqual(
context['matches'],
self.read_json_sample(sample_filename)
)
def test_old_routes(self):
assert 0
resp = self.test_cli.get("/player/123/matches/blablabla/456/", allow_redirects=False)
self.assertTrue(resp.headers['Location'].endswith("/matches/player/123/blablabla/456"))
resp = self.test_cli.get("/player/123/matches/blablabla/456/", allow_redirects=False)
self.assertTrue(resp.headers['Location'].endswith("/matches/player/123/blablabla/456")) | agpl-3.0 | Python |
1ccd9e7f15cfaccfadf7e4e977dbde724885cab9 | Add a `PlayRec` app unit test | sangoma/switchy,wwezhuimeng/switch | tests/test_sync_call.py | tests/test_sync_call.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Tests for synchronous call helper
"""
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay, PlayRec
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
def test_playrec(fsip):
'''Test the synchronous caller with a simulated conversation using the the
`PlayRec` app. Currently this test does no audio checking but merely verifies
the callback chain is invoked as expected.
'''
with sync_caller(fsip, apps={"PlayRec": PlayRec}) as caller:
# have the external prof call itself by default
caller.apps.PlayRec.sim_convo = True
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'PlayRec',
timeout=10,
)
waitfor(sess, 'recorded', timeout=15)
waitfor(sess.call.get_peer(sess), 'recorded', timeout=15)
assert sess.call.vars['record']
time.sleep(1)
assert sess.hungup
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Tests for synchronous call helper
"""
import time
from switchy import sync_caller
from switchy.apps.players import TonePlay
def test_toneplay(fsip):
'''Test the synchronous caller with a simple toneplay
'''
with sync_caller(fsip, apps={"TonePlay": TonePlay}) as caller:
# have the external prof call itself by default
assert 'TonePlay' in caller.app_names
sess, waitfor = caller(
"doggy@{}:{}".format(caller.client.server, 5080),
'TonePlay',
timeout=3,
)
assert sess.is_outbound()
time.sleep(1)
sess.hangup()
time.sleep(0.1)
assert caller.client.listener.count_calls() == 0
| mpl-2.0 | Python |
e0c05f524b475b7c72e02ff602f24a2fd3dd74b3 | Integrate Highlight Build Errors | deadfoxygrandpa/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,rtfeldman/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage,rtfeldman/Elm.tmLanguage | elm_make.py | elm_make.py | import json
import re
from importlib import import_module
try:
default_exec = import_module('Highlight Build Errors').HighlightBuildErrors
except:
import Default.exec as default_exec
class ElmMakeCommand(default_exec.ExecCommand):
'''Inspired by:
http://www.sublimetext.com/forum/viewtopic.php?t=12028
https://github.com/search?q=sublime+filename%3Aexec.py
https://github.com/search?q=finish+ExecCommand+NOT+ProcessListener+extension%3Apy
https://github.com/bblanchon/SublimeText-HighlightBuildErrors/blob/master/HighlightBuildErrors.py
'''
def run(self, error_format, **kwargs):
self.error_format = error_format
super(ElmMakeCommand, self).run(**kwargs)
self.debug_text = 'To highlight build errors : '
try:
if default_exec.g_show_errors:
self.debug_text = ''
else:
self.debug_text += 'Open Command Pallete : Show Build Errors'
except:
self.debug_text += 'Install with Package Control : Highlight Build Errors'
def on_data(self, proc, json_data):
result_str = json_data.decode(self.encoding)
json_str, success_str = result_str.split('\n', 1)
decode_error = lambda dict: self.format_error(**dict) if 'type' in dict else dict
error_list = json.loads(json_str, object_hook=decode_error)
error_list.append(success_str)
error_str = '\n'.join(error_list)
error_data = error_str.encode(self.encoding)
super(ElmMakeCommand, self).on_data(proc, error_data)
def format_error(self, type, file, region, overview, details, **kwargs):
line = region['start']['line']
column = region['start']['column']
message = overview
if details:
message += '\n' + re.sub(r'(\n)+', r'\1', details)
return self.error_format.format(**locals())
| import json
import re
import Default.exec as default_exec
class ElmMakeCommand(default_exec.ExecCommand):
'''Inspired by:
http://www.sublimetext.com/forum/viewtopic.php?t=12028
https://github.com/search?q=sublime+filename%3Aexec.py
https://github.com/search?q=finish+ExecCommand+NOT+ProcessListener+extension%3Apy
'''
def run(self, error_format, **kwargs):
self.error_format = error_format
super(ElmMakeCommand, self).run(**kwargs)
self.debug_text = ''
def on_data(self, proc, json_data):
result_str = json_data.decode(self.encoding)
json_str, success_str = result_str.split('\n', 1)
decode_error = lambda dict: self.format_error(**dict) if 'type' in dict else dict
error_list = json.loads(json_str, object_hook=decode_error)
error_list.append(success_str)
error_str = '\n'.join(error_list)
error_data = error_str.encode(self.encoding)
super(ElmMakeCommand, self).on_data(proc, error_data)
def format_error(self, type, file, region, overview, details, **kwargs):
line = region['start']['line']
column = region['start']['column']
message = overview
if details:
message += '\n' + re.sub(r'(\n)+', r'\1', details)
return self.error_format.format(**locals())
| mit | Python |
5073bb5ac1527108e1d3894902071d677a4477c6 | fix exception in exception handling | edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform | openedx/core/djangoapps/course_groups/migrations/0003_populate_membership_data.py | openedx/core/djangoapps/course_groups/migrations/0003_populate_membership_data.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.db import migrations
log = logging.getLogger(__name__)
def forwards(apps, schema_editor):
"""
Populates data in CohortMembership table
"""
CohortMembership = apps.get_model("course_groups", "CohortMembership")
CourseUserGroup = apps.get_model("course_groups", "CourseUserGroup")
for course_group in CourseUserGroup.objects.filter(group_type='cohort'):
for user in course_group.users.all():
membership = CohortMembership(
course_user_group=course_group,
user=user,
course_id=course_group.course_id
)
try:
membership.save()
except Exception:
log.info(
(
"Faild to add membership with course_user_group %s, "
"user %s, "
"course_id %s, "
),
unicode(course_group),
unicode(user),
unicode(course_group.course_id)
)
def backwards(apps, schema_editor):
"""
Removes existing data in CohortMembership
"""
CohortMembership = apps.get_model("course_groups", "CohortMembership")
CohortMembership.objects.all.delete()
class Migration(migrations.Migration):
dependencies = [
('course_groups', '0002_cohort_membership_model'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.db import migrations
log = logging.getLogger(__name__)
def forwards(apps, schema_editor):
"""
Populates data in CohortMembership table
"""
CohortMembership = apps.get_model("course_groups", "CohortMembership")
CourseUserGroup = apps.get_model("course_groups", "CourseUserGroup")
for course_group in CourseUserGroup.objects.filter(group_type='cohort'):
for user in course_group.users.all():
membership = CohortMembership(
course_user_group=course_group,
user=user,
course_id=course_group.course_id
)
try:
membership.save()
except Exception:
log.info(
(
"Faild to add membership with course_user_group %, "
"user %s, "
"course_id %s, "
),
unicode(course_group),
unicode(user),
unicode(course_group.course_id)
)
def backwards(apps, schema_editor):
"""
Removes existing data in CohortMembership
"""
CohortMembership = apps.get_model("course_groups", "CohortMembership")
CohortMembership.objects.all.delete()
class Migration(migrations.Migration):
dependencies = [
('course_groups', '0002_cohort_membership_model'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
| agpl-3.0 | Python |
14217dd3d225924d197527c8e97ba78724ff2c14 | update docs | deepchem/deepchem,deepchem/deepchem,peastman/deepchem,peastman/deepchem | deepchem/feat/reaction_featurizer.py | deepchem/feat/reaction_featurizer.py | from deepchem.feat import Featurizer
from typing import List
import numpy as np
try:
from transformers import RobertaTokenizerFast
except ModuleNotFoundError:
raise ImportError(
'Transformers must be installed for RxnFeaturizer to be used!')
pass
class RxnFeaturizer(Featurizer):
"""Reaction Featurizer.
RxnFeaturizer is a wrapper class for HuggingFace's RobertaTokenizerFast,
that is intended for featurizing chemical reaction datasets. The featurizer
computes the source and target required for a seq2seq task and applies the
RobertaTokenizer on them separately. Additionally, it can also separate or
mix the reactants and reagents before tokenizing.
"""
def __init__(self, tokenizer: RobertaTokenizerFast, sep_reagent: bool):
"""Initialises the featurizer with an appropriate HuggingFace Tokenizer."""
if not isinstance(tokenizer, RobertaTokenizerFast):
raise TypeError(
f"""`tokenizer` must be a constructed `RobertaTokenizerFast`
object, not {type(tokenizer)}""")
else:
self.tokenizer = tokenizer
self.sep_reagent = sep_reagent
def _featurize(self, datapoint: str, **kwargs) -> List[List[List[int]]]:
"""Featurizes a datapoint.
Processes each entry in the dataset by first applying the reactant-reagent
mixing, the source/target separation and then the pretrained tokenizer on the
separated strings.
"""
datapoint_list = [datapoint]
reactant = list(map(lambda x: x.split('>')[0], datapoint_list))
reagent = list(map(lambda x: x.split('>')[1], datapoint_list))
product = list(map(lambda x: x.split('>')[2], datapoint_list))
if self.sep_reagent:
source = [x + '>' + y for x, y in zip(reactant, reagent)]
else:
source = [
x + '.' + y + '>' if y else x + '>' + y
for x, y in zip(reactant, reagent)
]
target = product
source_encoding = list(
self.tokenizer(source, padding=True, **kwargs).values())
target_encoding = list(
self.tokenizer(target, padding=True, **kwargs).values())
return [source_encoding, target_encoding]
def __call__(self, *args, **kwargs) -> np.ndarray:
return self.featurize(*args, **kwargs)
def __str__(self) -> str:
"""Handles file name error.
Overrides the __str__ method of the Featurizer base class to avoid errors
while saving the dataset, due to the large default name of the HuggingFace
tokenizer.
"""
return 'RxnFeaturizer_' + str(self.sep_reagent)
| from deepchem.feat import Featurizer
from typing import List
import numpy as np
try:
from transformers import RobertaTokenizerFast
except ModuleNotFoundError:
raise ImportError(
'Transformers must be installed for RxnFeaturizer to be used!')
pass
class RxnFeaturizer(Featurizer):
"""Reaction Featurizer.
RxnFeaturizer is a wrapper class for HuggingFace's RobertaTokenizerFast,
that is intended for featurizing chemical reaction datasets. The featurizer
computes the source and target required for a seq2seq task and applies the
RobertaTokenizer on them separately.
"""
def __init__(self, tokenizer: RobertaTokenizerFast, sep_reagent: bool):
if not isinstance(tokenizer, RobertaTokenizerFast):
raise TypeError(
f"""`tokenizer` must be a constructed `RobertaTokenizerFast`
object, not {type(tokenizer)}""")
else:
self.tokenizer = tokenizer
self.sep_reagent = sep_reagent
def _featurize(self, datapoint: str, **kwargs) -> List[List[List[int]]]:
# if dont want to tokenize, return raw reaction SMILES.
# sep_reagent then tokenize, source and target separately.
datapoint_list = [datapoint]
reactant = list(map(lambda x: x.split('>')[0], datapoint_list))
reagent = list(map(lambda x: x.split('>')[1], datapoint_list))
product = list(map(lambda x: x.split('>')[2], datapoint_list))
if self.sep_reagent:
source = [x + '>' + y for x, y in zip(reactant, reagent)]
else:
source = [
x + '.' + y + '>' if y else x + '>' + y
for x, y in zip(reactant, reagent)
]
target = product
source_encoding = list(
self.tokenizer(source, padding=True, **kwargs).values())
target_encoding = list(
self.tokenizer(target, padding=True, **kwargs).values())
return [source_encoding, target_encoding]
def __call__(self, *args, **kwargs) -> np.ndarray:
return self.featurize(*args, **kwargs)
def __str__(self) -> str:
# perhaps a bit hacky?!
return 'RxnFeaturizer_' + str(self.sep_reagent)
| mit | Python |
dd7a2b8abc37262d801c36baa427b3a05f6f0938 | Add instance() method | Mause/statistical_atlas_of_au | saau/sections/__init__.py | saau/sections/__init__.py | SERVICES = [
'aus_map.AusMap',
'towns.TownsData'
]
def get_key(cls):
return cls.__module__ + '.' + cls.__name__
def instance(cls):
try:
return Singleton.table[get_key(cls)]
except KeyError:
raise KeyError('Singleton uninitialized')
class Singleton(type):
table = {}
def __init__(cls, object_or_name, bases, attrs):
cls.instance = classmethod(instance)
def __call__(cls, *args, **kw):
key = get_key(cls)
try:
return Singleton.table[key]
except KeyError:
pass
Singleton.table[key] = cls.__new__(cls)
Singleton.table[key].__init__(*args, **kw)
return Singleton.table[key]
| SERVICES = [
'aus_map.AusMap',
'towns.TownsData'
]
class Singleton(type):
table = {}
def __call__(cls, *args, **kw):
key = cls.__module__ + '.' + cls.__name__
try:
return Singleton.table[key]
except KeyError:
pass
Singleton.table[key] = cls.__new__(cls)
Singleton.table[key].__init__(*args, **kw)
return Singleton.table[key]
| mit | Python |
6adf760df12882cdcc9c4da5e2e1b4d79188d0bc | Send a receipt_email attribute when charging a card so the user gets sent a receipt by Stripe. | mwillmott/techbikers,Techbikers/techbikers,mwillmott/techbikers,Techbikers/techbikers,mwillmott/techbikers,Techbikers/techbikers,mwillmott/techbikers,Techbikers/techbikers | sales/models.py | sales/models.py | import stripe
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
class Sale(models.Model):
sale_date = models.DateTimeField(default=datetime.now())
charge_id = models.CharField(max_length=32) # store the stripe charge id for this sale
amount = models.IntegerField(max_length=6, null=True, blank=True)
currency = models.CharField(max_length = 3, choices=(('gbp', 'GBP'), ('usd', 'USD'), ('eur', 'EUR')), default = 'gbp')
livemode = models.BooleanField()
card = models.CharField(blank=True, null=True, max_length=255)
# also store the rider id
rider = models.ForeignKey(User)
def __init__(self, *args, **kwargs):
super(Sale, self).__init__(*args, **kwargs)
def __unicode__(self):
return self.charge_id
@classmethod
def charge(cls, user, ride, token):
"""
Takes a the price and a Stripe token.
Raises a stripe.CardError on errors.
"""
instance = cls()
stripe.api_key = ride.chapter.get_priv_key()
try:
response = stripe.Charge.create(
amount = int(ride.price * 100), # in cents
currency = ride.currency,
card = token,
receipt_email = user.email,
description = "{}: {}".format(ride.name, user.email))
except stripe.CardError:
raise # Just be explicit about what we are raising
instance.charge_id = response.id
instance.amount = response.amount
instance.currency = response.currency
instance.livemode = response.livemode
instance.card = response.card.id
instance.rider_id = user.id
return instance
class Meta:
db_table = 'sales'
app_label = 'sales' | import stripe
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
class Sale(models.Model):
sale_date = models.DateTimeField(default=datetime.now())
charge_id = models.CharField(max_length=32) # store the stripe charge id for this sale
amount = models.IntegerField(max_length=6, null=True, blank=True)
currency = models.CharField(max_length = 3, choices=(('gbp', 'GBP'), ('usd', 'USD'), ('eur', 'EUR')), default = 'gbp')
livemode = models.BooleanField()
card = models.CharField(blank=True, null=True, max_length=255)
# also store the rider id
rider = models.ForeignKey(User)
def __init__(self, *args, **kwargs):
super(Sale, self).__init__(*args, **kwargs)
def __unicode__(self):
return self.charge_id
@classmethod
def charge(cls, user, ride, token):
"""
Takes a the price and a Stripe token.
Raises a stripe.CardError on errors.
"""
instance = cls()
stripe.api_key = ride.chapter.get_priv_key()
try:
response = stripe.Charge.create(
amount = int(ride.price * 100), # in cents
currency = ride.currency,
card = token,
description = "{}: {}".format(ride.name, user.email))
except stripe.CardError:
raise # Just be explicit about what we are raising
instance.charge_id = response.id
instance.amount = response.amount
instance.currency = response.currency
instance.livemode = response.livemode
instance.card = response.card.id
instance.rider_id = user.id
return instance
class Meta:
db_table = 'sales'
app_label = 'sales' | mit | Python |
ba4485ec05206d126cb4833ab743a9707b20567d | Bump version 0.1.2. | junaruga/rpm-py-installer,junaruga/rpm-py-installer | rpm_py_installer/version.py | rpm_py_installer/version.py | VERSION = '0.1.2'
| VERSION = '0.1.1'
| mit | Python |
fff4a95b0d2ecea47442ab5e319f330d1ac6c145 | bump version | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/version.py | salt/version.py | import sys
__version_info__ = (0, 10, 3)
__version__ = '.'.join(map(str, __version_info__))
def versions_report():
libs = (
("Jinja2", "jinja2", "__version__"),
("M2Crypto", "M2Crypto", "version"),
("msgpack-python", "msgpack", "version"),
("msgpack-pure", "msgpack_pure", "version"),
("pycrypto", "Crypto", "__version__"),
("PyYAML", "yaml", "__version__"),
("PyZMQ", "zmq", "__version__"),
)
padding = len(max([lib[0] for lib in libs], key=len)) + 1
fmt = '{0:>{pad}}: {1}'
yield fmt.format("Salt", __version__, pad=padding)
yield fmt.format(
"Python", sys.version.rsplit('\n')[0].strip(), pad=padding
)
for name, imp, attr in libs:
try:
imp = __import__(imp)
version = getattr(imp, attr)
if not isinstance(version, basestring):
version = '.'.join(map(str, version))
yield fmt.format(name, version, pad=padding)
except ImportError:
yield fmt.format(name, "not installed", pad=padding)
if __name__ == '__main__':
print(__version__)
| import sys
__version_info__ = (0, 10, 3, 'd')
__version__ = '.'.join(map(str, __version_info__))
def versions_report():
libs = (
("Jinja2", "jinja2", "__version__"),
("M2Crypto", "M2Crypto", "version"),
("msgpack-python", "msgpack", "version"),
("msgpack-pure", "msgpack_pure", "version"),
("pycrypto", "Crypto", "__version__"),
("PyYAML", "yaml", "__version__"),
("PyZMQ", "zmq", "__version__"),
)
padding = len(max([lib[0] for lib in libs], key=len)) + 1
fmt = '{0:>{pad}}: {1}'
yield fmt.format("Salt", __version__, pad=padding)
yield fmt.format(
"Python", sys.version.rsplit('\n')[0].strip(), pad=padding
)
for name, imp, attr in libs:
try:
imp = __import__(imp)
version = getattr(imp, attr)
if not isinstance(version, basestring):
version = '.'.join(map(str, version))
yield fmt.format(name, version, pad=padding)
except ImportError:
yield fmt.format(name, "not installed", pad=padding)
if __name__ == '__main__':
print(__version__)
| apache-2.0 | Python |
f91efb2c5d161bb5d13ffa1f2dd9037bcdf270da | allow users to specify commit message | reincubate/django-templatesadmin,buriy/django-templatesadmin,pombredanne/django-templatesadmin,reincubate/django-templatesadmin,dkopitsa/django-templatesadmin,bartTC/django-templatesadmin,buriy/django-templatesadmin,barrabinfc/django-templatesadmin,rlaager/django-templatesadmin,dkopitsa/django-templatesadmin,Alarik/django-templatesadmin,Alarik/django-templatesadmin,Alarik/django-templatesadmin,NervosaX/django-templatesadmin,Alarik/django-templatesadmin,dkopitsa/django-templatesadmin,GrandComicsDatabase/django-templatesadmin,fdintino/django-templatesadmin,dkopitsa/django-templatesadmin,barrabinfc/django-templatesadmin,barttc-archive/django-templatesadmin,GrandComicsDatabase/django-templatesadmin,artpar/django-templatesadmin,GrandComicsDatabase/django-templatesadmin | edithooks/gitcommit.py | edithooks/gitcommit.py | from django import forms
from django.utils.translation import ugettext_lazy
from templatesadmin import TemplatesAdminException
from templatesadmin.forms import TemplateForm
import subprocess
import os
class ChangeCommentTemplateForm(TemplateForm):
backup = forms.CharField(
widget=forms.TextInput(attrs={'size':'100'}),
label = ugettext_lazy(u'Change message'),
required = False,
)
class GitCommitHook():
'''
Backup File before saving
'''
@classmethod
def pre_save(cls, request, form, template_path):
pass
@classmethod
def post_save(cls, request, form, template_path):
dir, file = os.path.dirname(template_path) + "/", os.path.basename(template_path)
if request.user.first_name and request.user.last_name:
author = "%s %s" % (request.user.first_name, request.user.last_name)
else:
author = request.user.username
message = '--'
backup = form.cleaned_data['backup']
if backup:
message = form.cleaned_data['backup']
command = '''git commit %s --author "%s <%s>" -F -''' % (file, author, request.user.email)
# Stolen from gitpython's git/cmd.py
proc = subprocess.Popen(
args=command,
shell=True,
cwd=dir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
try:
proc.stdin.write(message)
proc.stdin.close()
stderr_value = proc.stderr.read()
stdout_value = proc.stdout.read()
status = proc.wait()
finally:
proc.stderr.close()
if status != 0:
print status
raise TemplatesAdminException("Error while executing %s: %s" % (command, stderr_value.rstrip(), ))
@classmethod
def generate_form(cls, *args, **kwargs):
return ChangeCommentTemplateForm(*args, **kwargs)
| from django import forms
from templatesadmin.forms import TemplateForm
from templatesadmin import TemplatesAdminException
import subprocess
import os
class GitCommitHook():
'''
Backup File before saving
'''
@classmethod
def pre_save(cls, request, form, template_path):
pass
@classmethod
def post_save(cls, request, form, template_path):
dir, file = os.path.dirname(template_path) + "/", os.path.basename(template_path)
if request.user.first_name and request.user.last_name:
author = "%s %s" % (request.user.first_name, request.user.last_name)
else:
author = request.user.username
command = '''git commit %s --author "%s <%s>" -m "Template change, using templatesadmin"''' % (file, author, request.user.email)
# Stolen from gitpython's git/cmd.py
proc = subprocess.Popen(args=command, shell=True, cwd=dir, stderr=subprocess.PIPE)
try:
stderr_value = proc.stderr.read()
status = proc.wait()
finally:
proc.stderr.close()
if status != 0:
print status
raise TemplatesAdminException("Error while executing %s: %s" % (command, stderr_value.rstrip(), ))
@classmethod
def generate_form(cls, *args, **kwargs):
return TemplateForm(*args, **kwargs)
| bsd-3-clause | Python |
9d7d643af005a204dc1851d370747775f729d7c8 | make turbomole parsing less strict | hein09/pwtoolbox,hein09/pwtoolbox | vipster/ftypeplugins/turbomole.py | vipster/ftypeplugins/turbomole.py | # -*- coding: utf-8 -*-
from ..molecule import Molecule
name = 'turbomole'
extension = 'turbo'
argument = 'turbo'
param = None
def parser(name,data):
""" Parse turbomole specific input file """
tmol = Molecule(name)
for i in range(len(data)):
if "$coord" in data[i]:
for l in data[i+1:]:
if '$' in l: break
line = l.split()
tmol.newAtom(line[3].capitalize(),line[0:3],'bohr')
break
return tmol,None
writer = None
| # -*- coding: utf-8 -*-
from ..molecule import Molecule
name = 'turbomole'
extension = ''
argument = 'turbomole'
param = None
def parser(name,data):
""" Parse turbomole specific input file """
tmol = Molecule(name)
tmol.setVec([[1,0,0],[0,1,0],[0,0,1]])
tmol.setCellDim(40.0)
for l in data[1:]:
if '$' in l: break
line = l.split()
tmol.newAtom(line[3].upper(),line[0:3],'bohr')
return tmol
writer = None
| bsd-2-clause | Python |
8793b1a2c4adf480534cf2a669337032edf77020 | Remove header, this will be imported by a runner | hatchery/Genepool2,hatchery/genepool | golang/main.py | golang/main.py | from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
| #!/usr/bin/env python
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
| mit | Python |
72c2d987f541e8136aa8c89c122739c9f3ed82b9 | Add unit tests for the internal _getprofiledir function. | makerbot/s3g,Jnesselr/s3g,Jnesselr/s3g,makerbot/s3g,makerbot/s3g,makerbot/s3g | tests/test_profile.py | tests/test_profile.py | import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import unittest
import json
import s3g
import s3g.profile
class ProfileInitTests(unittest.TestCase):
def test_bad_profile_name(self):
bad_name = 'this_is_going_to_fail :('
self.assertRaises(IOError, s3g.Profile, bad_name)
def test_good_profile_name(self):
name = "ReplicatorSingle"
p = s3g.Profile(name)
with open('s3g' + os.path.sep + 'profiles' + os.path.sep +name+'.json') as f:
expected_vals = json.load(f)
self.assertEqual(expected_vals, p.values)
def test_profile_access(self):
"""
Make sure we have no issues accessing the information in the machine profile
"""
expected_name = "The Replicator Dual"
name = "ReplicatorDual"
p = s3g.Profile(name)
self.assertEqual(p.values['type'], expected_name)
def test_list_profiles(self):
expected_profiles = [
'ReplicatorDual',
'ReplicatorSingle',
]
self.assertEqual(sorted(expected_profiles), sorted(list(s3g.list_profiles())))
def test__getprofiledir(self):
'''Make sure that _getprofiledir returns its argument when that argument is
not None.
'''
profiledir = 'x'
self.assertEqual(profiledir, s3g.profile._getprofiledir(profiledir))
def test__getprofiledir_default(self):
'''Make sure that _getprofiledir returns the default profile directory when
its argument is None.
'''
profiledir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 's3g', 'profiles'))
self.assertEqual(profiledir, s3g.profile._getprofiledir(None))
if __name__ == '__main__':
unittest.main()
| import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import unittest
import json
import s3g
class ProfileInitTests(unittest.TestCase):
def test_bad_profile_name(self):
bad_name = 'this_is_going_to_fail :('
self.assertRaises(IOError, s3g.Profile, bad_name)
def test_good_profile_name(self):
name = "ReplicatorSingle"
p = s3g.Profile(name)
with open('s3g' + os.path.sep + 'profiles' + os.path.sep +name+'.json') as f:
expected_vals = json.load(f)
self.assertEqual(expected_vals, p.values)
def test_profile_access(self):
"""
Make sure we have no issues accessing the information in the machine profile
"""
expected_name = "The Replicator Dual"
name = "ReplicatorDual"
p = s3g.Profile(name)
self.assertEqual(p.values['type'], expected_name)
def test_list_profiles(self):
expected_profiles = [
'ReplicatorDual',
'ReplicatorSingle',
]
self.assertEqual(sorted(expected_profiles), sorted(list(s3g.list_profiles())))
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | Python |
578dffb174ef6601fbd90d811033d9a3442f0616 | set flags dump_sql and dry_run | moskytw/mosql,uranusjr/mosql | tests/test_result2.py | tests/test_result2.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import psycopg2
from mosql.result2 import Model
class PostgreSQL(Model):
getconn = classmethod(lambda cls: psycopg2.connect(database='mosky'))
putconn = classmethod(lambda cls, conn: None)
class Person(PostgreSQL):
clauses = dict(table='person')
arrange_by = ('person_id', )
class Detail(PostgreSQL):
clauses = dict(table='detail')
arrange_by = ('person_id', 'key')
squashed = arrange_by
ident_by = ('detail_id', )
if __name__ == '__main__':
from pprint import pprint
d = next(Detail.arrange(where={'person_id': 'mosky'}))
Detail.dump_sql = True
Detail.dry_run = True
print '# original'
pprint(dict(d))
print
print '# setitem'
d['val', 0] = 'modified@email.com'
pprint(dict(d))
print
print '# append'
d.append({'val': 'new@email.com'})
pprint(dict(d))
print
print '# pop it'
d.pop(0)
pprint(dict(d))
print
print '# save it'
print d.save()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import psycopg2
from mosql.result2 import Model
class PostgreSQL(Model):
getconn = classmethod(lambda cls: psycopg2.connect(database='mosky'))
putconn = classmethod(lambda cls, conn: None)
class Person(PostgreSQL):
clauses = dict(table='person')
arrange_by = ('person_id', )
class Detail(PostgreSQL):
clauses = dict(table='detail')
arrange_by = ('person_id', 'key')
squashed = arrange_by
ident_by = ('detail_id', )
if __name__ == '__main__':
from pprint import pprint
d = next(Detail.arrange(where={'person_id': 'mosky'}))
print '# original'
pprint(dict(d))
print
print '# setitem'
d['val', 0] = 'modified@email.com'
pprint(dict(d))
print
print '# append'
d.append({'val': 'new@email.com'})
pprint(dict(d))
print
print '# pop it'
d.pop(0)
pprint(dict(d))
print
print '# save it'
print d.save()
| mit | Python |
2483e38054d12d315fd6aa3395aa9dd0126bcfea | fix broken test | chfw/pyexcel,chfw/pyexcel | tests/test_sources.py | tests/test_sources.py | from nose.tools import raises, eq_
from pyexcel.sources.factory import Source
from pyexcel.sources.factory import FileSource
from pyexcel.sources.factory import InputSource
from pyexcel.sources.file_source_output import WriteSheetToMemory
from pyexcel.sources.file_source_output import OutputSource
def test_io_source():
status = OutputSource.can_i_handle("read", "xls")
eq_(status, False)
def test_input_source():
status = InputSource.can_i_handle("write", "xls")
eq_(status, False)
def test_source():
source = Source(source="asource", params="params")
info = source.get_source_info()
assert info, (None, None)
def test_source_class_method():
assert Source.is_my_business('read', source="asource") is True
assert Source.is_my_business('read', file_name="asource") is False
@raises(Exception)
def test_read_only_source():
source = Source()
source.write_data("something")
@raises(Exception)
def test_write_only_source():
source = Source()
source.get_data()
@raises(Exception)
def test_write_only_sheet_source():
source = WriteSheetToMemory()
source.get_data()
def test_file_source_class_method():
assert FileSource.can_i_handle('read', "csv") is False
assert FileSource.can_i_handle('write', "csv") is False
assert FileSource.can_i_handle('wrong action', "csv") is False
| from nose.tools import raises, eq_
from pyexcel.sources.factory import Source
from pyexcel.sources.factory import FileSource
from pyexcel.sources.file_source_output import WriteSheetToMemory
from pyexcel.sources.file_source_output import OutputSource
from pyexcel.sources.file_source_input import InputSource
def test_io_source():
status = OutputSource.can_i_handle("read", "xls")
eq_(status, False)
def test_input_source():
status = InputSource.can_i_handle("write", "xls")
eq_(status, False)
def test_source():
source = Source(source="asource", params="params")
info = source.get_source_info()
assert info, (None, None)
def test_source_class_method():
assert Source.is_my_business('read', source="asource") is True
assert Source.is_my_business('read', file_name="asource") is False
@raises(Exception)
def test_read_only_source():
source = Source()
source.write_data("something")
@raises(Exception)
def test_write_only_source():
source = Source()
source.get_data()
@raises(Exception)
def test_write_only_sheet_source():
source = WriteSheetToMemory()
source.get_data()
def test_file_source_class_method():
assert FileSource.can_i_handle('read', "csv") is False
assert FileSource.can_i_handle('write', "csv") is False
assert FileSource.can_i_handle('wrong action', "csv") is False
| bsd-3-clause | Python |
a34c85063567adf09363cc9ad5670ad2e0ddf926 | Bump to 3.3 | ghickman/tvrenamr,wintersandroid/tvrenamr | tvrenamr/__init__.py | tvrenamr/__init__.py | __author__ = 'George Hickman'
__copyright__ = 'Copyright 2012 George Hickman'
__license__ = 'MIT'
__title__ = 'tvrenamr'
__version__ = '3.3'
| __author__ = 'George Hickman'
__copyright__ = 'Copyright 2012 George Hickman'
__license__ = 'MIT'
__title__ = 'tvrenamr'
__version__ = '3.2.1'
| mit | Python |
2ccb49b541caee6abfdc36dd4dae9ce9dab43c16 | add docstring | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/states/nfs_export.py | salt/states/nfs_export.py | # -*- coding: utf-8 -*-
'''
Management of NFS exports
===============================================
To ensure an NFS export exists:
.. code-block:: yaml
add_simple_export:
nfs_export.present:
- name: '/srv/nfs'
- hosts: '10.0.2.0/24'
- options: 'rw'
For more complex exports with multiple groups of hosts:
.. code-block:: yaml
add_complex_export:
nfs_export.present:
- name: '/srv/nfs'
- exports:
# First export, same as simple one above
- hosts:
- '10.0.2.0/24'
options:
- 'rw'
# Second export
- hosts:
- '192.168.0.0/24'
- '172.19.0.0/16'
options:
- 'ro'
- 'subtree_check'
This creates the following in /etc/exports:
.. code-block:: bash
/srv/nfs 10.0.2.0/24(rw)
Any export of the given path will be modified to match the one specified.
To ensure an NFS export is absent:
.. code-block:: yaml
delete_export:
nfs_export.absent:
- name: '/srv/nfs'
'''
#from __future__ import absolute_import
def absent(name, exports='/etc/exports'):
'''
Ensure that the named path is not exported
name
The export path to remove
'''
path = name
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
old = __salt__['nfs3.list_exports'](exports)
if path in old:
if __opts__['test']:
ret['comment'] = 'Export {0} would be removed'.format(path)
ret['result'] = None
return ret
__salt__['nfs3.del_export'](exports, path)
ret['comment'] = 'Export {0} removed'.format(path)
ret['changes'][path] = old[path]
ret['result'] = True
else:
ret['comment'] = 'Export {0} already absent'.format(path)
ret['result'] = True
return ret
| # -*- coding: utf-8 -*-
'''
Management of NFS exports
===============================================
To ensure an NFS export exists:
.. code-block:: yaml
add_simple_export:
nfs_export.present:
- name: '/srv/nfs'
- hosts: '10.0.2.0/24'
- options: 'rw'
For more complex exports with multiple groups of hosts:
.. code-block:: yaml
add_complex_export:
nfs_export.present:
- name: '/srv/nfs'
- exports:
# First export, same as simple one above
- hosts:
- '10.0.2.0/24'
options:
- 'rw'
# Second export
- hosts:
- '192.168.0.0/24'
- '172.19.0.0/16'
options:
- 'ro'
- 'subtree_check'
This creates the following in /etc/exports:
.. code-block:: bash
/srv/nfs 10.0.2.0/24(rw)
Any export of the given path will be modified to match the one specified.
To ensure an NFS export is absent:
.. code-block:: yaml
delete_export:
nfs_export.absent:
- name: '/srv/nfs'
'''
#from __future__ import absolute_import
def absent(name, exports='/etc/exports'):
path = name
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
old = __salt__['nfs3.list_exports'](exports)
if path in old:
if __opts__['test']:
ret['comment'] = 'Export {0} would be removed'.format(path)
ret['result'] = None
return ret
__salt__['nfs3.del_export'](exports, path)
ret['comment'] = 'Export {0} removed'.format(path)
ret['changes'][path] = old[path]
ret['result'] = True
else:
ret['comment'] = 'Export {0} already absent'.format(path)
ret['result'] = True
return ret
| apache-2.0 | Python |
a1151be46a0ea78e5b53bb48bc125da026f7c2f7 | test example update | chenjiandongx/pyecharts,chenjiandongx/pyecharts,chenjiandongx/pyecharts | test/test_graph.py | test/test_graph.py | #!/usr/bin/env python
# coding=utf-8
from __future__ import unicode_literals
import os
import sys
from pyecharts import Graph
PY2 = sys.version_info[0] == 2
def test_graph():
# graph_0
nodes = [{"name": "结点1", "symbolSize": 10},
{"name": "结点2", "symbolSize": 20},
{"name": "结点3", "symbolSize": 30},
{"name": "结点4", "symbolSize": 40},
{"name": "结点5", "symbolSize": 50},
{"name": "结点6", "symbolSize": 40},
{"name": "结点7", "symbolSize": 30},
{"name": "结点8", "symbolSize": 20}]
links = []
for i in nodes:
for j in nodes:
links.append({"source": i.get('name'), "target": j.get('name')})
graph = Graph("关系图-力引导布局示例")
graph.add("", nodes, links, repulsion=8000, line_color='#aaa')
graph.show_config()
graph.render()
# graph_1
graph = Graph("关系图-环形布局示例")
graph.add("", nodes, links, is_label_show=True, graph_repulsion=8000,
graph_layout='circular', label_text_color=None, line_color='#aaa')
graph.render()
# graph_2
import json
if PY2:
import codecs
with codecs.open(os.path.join("..", "json", "weibo.json"), "rb") as f:
j = json.load(f)
else:
with open(os.path.join("..", "json", "weibo.json"), "r", encoding="utf-8") as f:
j = json.load(f)
nodes, links, categories, cont, mid, userl = j
graph = Graph("微博转发关系图", width=1200, height=600)
graph.add("", nodes, links, categories, label_pos="right", graph_repulsion=50,
is_legend_show=False, line_curve=0.2, label_text_color=None,
line_color='#aaa')
graph.render()
| #!/usr/bin/env python
# coding=utf-8
from __future__ import unicode_literals
import os
import sys
from pyecharts import Graph
PY2 = sys.version_info[0] == 2
def test_graph():
# graph_0
nodes = [{"name": "结点1", "symbolSize": 10},
{"name": "结点2", "symbolSize": 20},
{"name": "结点3", "symbolSize": 30},
{"name": "结点4", "symbolSize": 40},
{"name": "结点5", "symbolSize": 50},
{"name": "结点6", "symbolSize": 40},
{"name": "结点7", "symbolSize": 30},
{"name": "结点8", "symbolSize": 20}]
links = []
for i in nodes:
for j in nodes:
links.append({"source": i.get('name'), "target": j.get('name')})
graph = Graph("关系图-力引导布局示例")
graph.add("", nodes, links, repulsion=8000)
graph.show_config()
graph.render()
# graph_1
graph = Graph("关系图-环形布局示例")
graph.add("", nodes, links, is_label_show=True, graph_repulsion=8000,
graph_layout='circular', label_text_color=None)
graph.render()
# graph_2
import json
if PY2:
import codecs
with codecs.open(os.path.join("..", "json", "weibo.json"), "rb") as f:
j = json.load(f)
else:
with open(os.path.join("..", "json", "weibo.json"), "r", encoding="utf-8") as f:
j = json.load(f)
nodes, links, categories, cont, mid, userl = j
graph = Graph("微博转发关系图", width=1200, height=600)
graph.add("", nodes, links, categories, label_pos="right", graph_repulsion=50,
is_legend_show=False, line_curve=0.2, label_text_color=None)
graph.render()
| mit | Python |
bd0bfcd33052d7bedcda95b9cf9da204f93c06f0 | Add DotDict nesting test | thiderman/piper | test/test_utils.py | test/test_utils.py | from piper.utils import DotDict
from piper.utils import dynamic_load
import pytest
class TestDotDict(object):
def test_get_nonexistant_raises_keyerror(self):
with pytest.raises(KeyError):
dd = DotDict({})
dd.does_not_exist
def test_get_item(self):
dd = DotDict({'danger': 'zone'})
assert dd.danger == 'zone'
def test_get_item_dict_access(self):
dd = DotDict({'danger': 'zone'})
assert dd['danger'] == 'zone'
def test_dict_items_become_dotdicts(self):
dd = DotDict({'highway': {'danger': 'zone'}})
assert isinstance(dd.highway, DotDict) is True
def test_dict_items_become_dotdicts_when_using_dict_access(self):
dd = DotDict({'highway': {'danger': 'zone'}})
assert isinstance(dd['highway'], DotDict) is True
def test_nested_access(self):
dd = DotDict({'highway': {'danger': {'zone': True}}})
assert dd.highway.danger.zone is True
def test_make_dotdict_out_of_dotdict_does_not_nest(self):
data = {'another': {'angel': 'down'}}
dd1 = DotDict(data)
dd2 = DotDict(dd1)
assert dd1.data == dd2.data
class TestDynamicLoad(object):
def test_proper_load(self):
cls = dynamic_load('piper.utils.DotDict')
assert cls is DotDict
def test_nonexistant_target(self):
with pytest.raises(ImportError):
dynamic_load('gammaray.empire.Avalon')
| from piper.utils import DotDict
from piper.utils import dynamic_load
import pytest
class TestDotDict(object):
def test_get_nonexistant_raises_keyerror(self):
with pytest.raises(KeyError):
dd = DotDict({})
dd.does_not_exist
def test_get_item(self):
dd = DotDict({'danger': 'zone'})
assert dd.danger == 'zone'
def test_get_item_dict_access(self):
dd = DotDict({'danger': 'zone'})
assert dd['danger'] == 'zone'
def test_dict_items_become_dotdicts(self):
dd = DotDict({'highway': {'danger': 'zone'}})
assert isinstance(dd.highway, DotDict) is True
def test_dict_items_become_dotdicts_when_using_dict_access(self):
dd = DotDict({'highway': {'danger': 'zone'}})
assert isinstance(dd['highway'], DotDict) is True
def test_nested_access(self):
dd = DotDict({'highway': {'danger': {'zone': True}}})
assert dd.highway.danger.zone is True
class TestDynamicLoad(object):
def test_proper_load(self):
cls = dynamic_load('piper.utils.DotDict')
assert cls is DotDict
def test_nonexistant_target(self):
with pytest.raises(ImportError):
dynamic_load('gammaray.empire.Avalon')
| mit | Python |
e81f93ea925861ccd426f7f7682e25fb3d183e13 | Modify laser ldr code | CarlosPena00/Mobbi,CarlosPena00/Mobbi | Rasp/laser/laser.py | Rasp/laser/laser.py | import RPi.GPIO as GPIO
import time
import Adafruit_ADS1x15
import sys
#from mpu6050 import mpu6050
#sensor = mpu6050(0x68)
class Laser:
adc = Adafruit_ADS1x15.ADS1015()
def __init__(self, gain):
self.gain = gain
print('Reading ADS1x15 values, press Ctrl-C to quit...')
print('| {0:>6} | {1:>6} |'.format(*range(2)))
print('-' * 37)
def getLdr(self):
value = [0] * 2
for i in range(2):
value[i] = self.adc.read_adc(i, gain=self.gain)
b = '| {0:>6} | {1:>6} |'.format(*value)
sys.stdout.write('\r' + b)
sys.stdout.flush()
time.sleep(0.5)
#sys.stdout.write('\r' + str(sensor.get_temp()))
#sys.stdout.flush()
time.sleep(1)
return b
a = Laser(1)
while 1:
a.getLdr()
| import RPi.GPIO as GPIO
import time
import Adafruit_ADS1x15
import sys
#from mpu6050 import mpu6050
#sensor = mpu6050(0x68)
class Laser:
adc = Adafruit_ADS1x15.ADS1015()
def __init__(self, gain):
self.gain = gain
print('Reading ADS1x15 values, press Ctrl-C to quit...')
print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*range(4)))
print('-' * 37)
def getAdc(self):
while True:
value = [0] * 4
for i in range(4):
value[i] = self.adc.read_adc(i, gain=self.gain)
b = '| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*value)
sys.stdout.write('\r' + b)
sys.stdout.flush()
time.sleep(0.5)
#sys.stdout.write('\r' + str(sensor.get_temp()))
#sys.stdout.flush()
time.sleep(1)
a = Laser(1)
a.getAdc()
| mit | Python |
c85fb3d130cfae8ba9b1c0b9d009a1d8b235589d | Update version | luckytianyiyan/TyStrings,luckytianyiyan/TyStrings | tystrings/version.py | tystrings/version.py | __version__ = '1.2.0' | __version__ = '1.1.2' | mit | Python |
c403df0facd9e3751dabdf9bcd1f22540acb218a | remove unnecessary dict boxing | dragoon/kilogram,dragoon/kilogram,dragoon/kilogram | mapreduce/dbpedia_dbm.py | mapreduce/dbpedia_dbm.py | """
Creates DBPedia labels-types Shelve file of the following format:
{ LABEL: {'types': [Type1, Type2, ...]}, ...}
For example:
Tramore: Town, Settlement, PopulatedPlace, Place
Tramore,_Ireland: Town, Settlement, PopulatedPlace, Place
"""
import subprocess
import urllib
from collections import defaultdict
import shelve
TYPES_FILE = 'instance_types_en.nt.bz2'
EXCLUDES = {'Agent', 'TimePeriod', 'PersonFunction', 'Year'}
dbpediadb_types = defaultdict(list)
# BZ2File module cannot process multi-stream files, so use subprocess
p = subprocess.Popen('bzcat -q ' + TYPES_FILE, shell=True, stdout=subprocess.PIPE)
for line in p.stdout:
if '<BAD URI: Illegal character' in line:
continue
try:
uri, predicate, type_uri = line.split(' ', 2)
except:
continue
if 'http://dbpedia.org/ontology/' not in type_uri:
continue
uri = urllib.unquote(uri.replace('<http://dbpedia.org/resource/', '')[:-1])
type_uri = type_uri.replace('<http://dbpedia.org/ontology/', '')[:-4]
if type_uri in EXCLUDES:
continue
dbpediadb_types[uri].append(type_uri)
dbpediadb = shelve.open('dbpedia_types.dbm')
# write canonical labels first
for uri, types in dbpediadb_types.items():
dbpediadb[uri] = types
REDIRECTS_FILE = 'redirects_transitive_en.nt.bz2'
dbpediadb_labels = {}
# BZ2File module cannot process multi-stream files, so use subprocess
p = subprocess.Popen('bzcat -q ' + REDIRECTS_FILE, shell=True, stdout=subprocess.PIPE)
for line in p.stdout:
try:
uri_redirect, predicate, uri_canon = line.split(' ', 2)
except:
continue
name_redirect = urllib.unquote(uri_redirect.replace('<http://dbpedia.org/resource/', '')[:-1])
name_canon = urllib.unquote(uri_canon.replace('<http://dbpedia.org/resource/', '')[:-4])
if '(disambiguation)' in name_redirect:
continue
# skip entities that have no types
if name_canon not in dbpediadb_types:
continue
dbpediadb[name_redirect] = dbpediadb_types[name_canon]
dbpediadb.close()
| """
Creates DBPedia labels-types Shelve file of the following format:
{ LABEL: {'types': [Type1, Type2, ...]}, ...}
For example:
Tramore: Town, Settlement, PopulatedPlace, Place
Tramore,_Ireland: Town, Settlement, PopulatedPlace, Place
"""
import subprocess
import urllib
from collections import defaultdict
import shelve
TYPES_FILE = 'instance_types_en.nt.bz2'
EXCLUDES = {'Agent', 'TimePeriod', 'PersonFunction', 'Year'}
dbpediadb_types = defaultdict(list)
# BZ2File module cannot process multi-stream files, so use subprocess
p = subprocess.Popen('bzcat -q ' + TYPES_FILE, shell=True, stdout=subprocess.PIPE)
for line in p.stdout:
if '<BAD URI: Illegal character' in line:
continue
try:
uri, predicate, type_uri = line.split(' ', 2)
except:
continue
if 'http://dbpedia.org/ontology/' not in type_uri:
continue
uri = urllib.unquote(uri.replace('<http://dbpedia.org/resource/', '')[:-1])
type_uri = type_uri.replace('<http://dbpedia.org/ontology/', '')[:-4]
if type_uri in EXCLUDES:
continue
dbpediadb_types[uri].append(type_uri)
dbpediadb = shelve.open('dbpedia_types.dbm')
# write canonical labels first
for uri, types in dbpediadb_types.items():
dbpediadb[uri] = {'types': types}
REDIRECTS_FILE = 'redirects_transitive_en.nt.bz2'
dbpediadb_labels = {}
# BZ2File module cannot process multi-stream files, so use subprocess
p = subprocess.Popen('bzcat -q ' + REDIRECTS_FILE, shell=True, stdout=subprocess.PIPE)
for line in p.stdout:
try:
uri_redirect, predicate, uri_canon = line.split(' ', 2)
except:
continue
name_redirect = urllib.unquote(uri_redirect.replace('<http://dbpedia.org/resource/', '')[:-1])
name_canon = urllib.unquote(uri_canon.replace('<http://dbpedia.org/resource/', '')[:-4])
if '(disambiguation)' in name_redirect:
continue
# skip entities that have no types
if name_canon not in dbpediadb_types:
continue
dbpediadb[name_redirect] = {'types': dbpediadb_types[name_canon]}
dbpediadb.close()
| apache-2.0 | Python |
3270b0013e19ec8d33f33eae051fb08e2c5f1b52 | stop pop code ... placeholder | OpenTransitTools/otp_client_py | ott/otp_client/pyramid/views.py | ott/otp_client/pyramid/views.py | from pyramid.response import Response
from pyramid.view import view_config
from ott.otp_client.transit_index.routes import Routes
from ott.otp_client.transit_index.stops import Stops
import logging
log = logging.getLogger(__file__)
cache_long=5555
def do_view_config(cfg):
cfg.add_route('stops', '/stops')
cfg.add_route('routes', '/routes')
cfg.add_route('stop_routes', '/stops/{stop}/routes')
@view_config(route_name='stops', renderer='json', http_cache=cache_long)
def stops(request):
# if request contains 'radius' and lat and lon:
ret_val = Stops.nearest_stops(2, 3, 5)
# elif request contains ....
ret_val = Stops.bbox_stops(1, 3, 5, 4)
return ret_val
@view_config(route_name='routes', renderer='json', http_cache=cache_long)
def routes(request):
ret_val = Routes.routes_factory()
return ret_val
@view_config(route_name='stop_routes', renderer='json', http_cache=cache_long)
def stop_routes(request):
ret_val = []
try:
stop = request.matchdict['stop']
agency_id, stop_id = stop.split(':')
ret_val = Routes.stop_routes_factory(agency_id, stop_id)
except Exception as e:
log.warn(e)
return ret_val
| from pyramid.response import Response
from pyramid.view import view_config
from ott.otp_client.transit_index.routes import Routes
from ott.otp_client.transit_index.stops import Stops
import logging
log = logging.getLogger(__file__)
cache_long=5555
def do_view_config(cfg):
cfg.add_route('stops', '/stops')
cfg.add_route('routes', '/routes')
cfg.add_route('stop_routes', '/stops/{stop}/routes')
@view_config(route_name='stops', renderer='json', http_cache=cache_long)
def stops(request):
ret_val = Stops.nearest_stops(2, 3, 5.5)
return ret_val
@view_config(route_name='routes', renderer='json', http_cache=cache_long)
def routes(request):
ret_val = Routes.routes_factory()
return ret_val
@view_config(route_name='stop_routes', renderer='json', http_cache=cache_long)
def stop_routes(request):
ret_val = []
try:
stop = request.matchdict['stop']
agency_id, stop_id = stop.split(':')
ret_val = Routes.stop_routes_factory(agency_id, stop_id)
except Exception as e:
log.warn(e)
return ret_val
| mpl-2.0 | Python |
41ea993a2bd795e4a2aa9b054f0820664a34573e | Test fixes | Vanuan/mock,5monkeys/mock | tests/testcallable.py | tests/testcallable.py | # Copyright (C) 2007-2011 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import unittest2
from mock import Mock, MagicMock, NonCallableMagicMock, NonCallableMock
class TestCallable(unittest2.TestCase):
def test_non_callable(self):
for mock in NonCallableMagicMock(), NonCallableMock():
self.assertRaises(TypeError, mock)
self.assertFalse(hasattr(mock, '__call__'))
def test_attributes(self):
one = NonCallableMock()
self.assertTrue(issubclass(type(one.one), Mock))
two = NonCallableMagicMock()
self.assertTrue(issubclass(type(two.two), MagicMock))
def test_subclasses(self):
class MockSub(Mock):
pass
one = MockSub()
self.assertTrue(issubclass(type(one.one), MockSub))
class MagicSub(MagicMock):
pass
two = MagicSub()
self.assertTrue(issubclass(type(two.two), MagicSub))
| # Copyright (C) 2007-2011 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import unittest2
from mock import Mock, MagicMock, NonCallableMagicMock, NonCallableMock
class TestCallable(unittest2.TestCase):
def test_non_callable(self):
for mock in NonCallableMagicMock(), NonCallableMock():
self.assertRaises(TypeError, mock())
self.assertFalse(hasattr(mock, '__call__'))
def test_attributes(self):
one = NonCallableMock()
self.assertTrue(issubclass(type(one.one), Mock))
two = NonCallableMagicMock()
self.assertTrue(issubclass(type(two.two), MagicMock))
def test_side_effect_return_value(self):
for mock in NonCallableMagicMock(), NonCallableMock():
for attr in 'side_effect', 'return_value':
self.assertRaises(TypeError, getattr, mock, attr)
self.assertRaises(TypeError, setattr, mock, attr, 'foo')
def test_subclasses(self):
class MockSub(Mock):
pass
one = MockSub()
self.assertTrue(issubclass(type(one.one), MockSub))
class MagicSub(MagicMock):
pass
two = MagicSub()
self.assertTrue(issubclass(type(two.two), MagicSub))
| bsd-2-clause | Python |
831eeb1ea0503782eb8ed08170776a1c2c0dbdeb | Fix hashing bug in createUser arguments in travis-setup | ollien/Timpani,ollien/Timpani,ollien/Timpani | tests/travis-setup.py | tests/travis-setup.py | import bcrypt
import sys
sys.path.insert(0, "..")
import timpani
connection = timpani.database.DatabaseConnection()
timpani.database.ConnectionManager.addConnection(connection, "main")
timpani.auth.createUser("tests", "Timpani Tests", "password", True, True)
connection.close()
| import bcrypt
import sys
sys.path.insert(0, "..")
import timpani
connection = timpani.database.DatabaseConnection()
timpani.database.ConnectionManager.addConnection(connection, "main")
hashedPassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
timpani.auth.createUser("tests", "Timpani Tests", hashedPassword, True, True)
connection.close()
| mit | Python |
b916b702419a90f26ae18b5ac31bda3972d98832 | Bump version | dianchen96/gym,machinaut/gym,Farama-Foundation/Gymnasium,dianchen96/gym,Farama-Foundation/Gymnasium,d1hotpep/openai_gym,machinaut/gym,d1hotpep/openai_gym | gym/version.py | gym/version.py | VERSION = '0.1.1'
| VERSION = '0.1.0'
| mit | Python |
6f24aa5e1e1ff78e95ed17ff75acc2646280bdd8 | Add None identifier / repr print | puhitaku/typedmarshal | typedmarshal/util.py | typedmarshal/util.py | def pretty_print_recursive(obj, indent=0):
def i_print(s):
print(' ' * indent + s)
if obj is None:
i_print('None')
elif isinstance(obj, (int, float, str)):
i_print(f'{obj}')
elif isinstance(obj, list):
for l in obj:
pretty_print_recursive(l, indent=indent+2)
elif isinstance(obj, dict):
for k, v in obj.items():
i_print(f'{k}: {repr(v)}')
else:
for k, v in obj.__dict__.items():
if not k.startswith('_'):
if v is None:
i_print(f'{k}: None')
elif v.__class__.__name__ not in __builtins__:
i_print(f'{k}:')
pretty_print_recursive(v, indent=indent+2)
elif isinstance(v, (list, dict)):
i_print(f'{k}:')
pretty_print_recursive(v, indent=indent)
else:
i_print(f'{k}: {repr(v)}')
| def pretty_print_recursive(obj, indent=0):
def i_print(s):
print(' ' * indent + s)
if obj is None:
i_print('None')
elif isinstance(obj, (int, float, str)):
i_print(f'{obj}')
elif isinstance(obj, list):
for l in obj:
pretty_print_recursive(l, indent=indent+2)
elif isinstance(obj, dict):
for k, v in obj:
i_print(f'{k}: {v}')
else:
for k, v in obj.__dict__.items():
if not k.startswith('_'):
if v.__class__.__name__ not in __builtins__:
i_print(f'{k}:')
pretty_print_recursive(v, indent=indent+2)
elif isinstance(v, (list, dict)):
i_print(f'{k}:')
pretty_print_recursive(v, indent=indent)
else:
i_print(f'{k}: {v}')
| bsd-3-clause | Python |
34cbdc805209b44da099a4c603431b34dc2c99ab | add loginShell | gruunday/useradm,gruunday/useradm,gruunday/useradm | rbopt.py | rbopt.py | #-----------------------------------------------------------------------------#
# MODULE DESCRIPTION #
#-----------------------------------------------------------------------------#
"""RedBrick Options Module; contains RBOpt class."""
#-----------------------------------------------------------------------------#
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision: 1.4 $'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# CLASSES #
#-----------------------------------------------------------------------------#
class RBOpt:
"""Class for storing options to be shared by modules"""
def __init__(self):
"""Create new RBOpt object."""
# Used by all modules.
self.override = None
# Used by useradm, RBUserDB & RBAccount.
self.test = None
# Used by useradm & rrs.
self.mode = None
self.setpasswd = None
# Used by useradm.
self.args = []
self.help = None
self.uid = None
self.dbonly = None
self.aconly = None
self.updatedby = None
self.newbie = None
self.mailuser = None
self.usertype = None
self.cn = None
self.altmail = None
self.id = None
self.course = None
self.year = None
self.yearsPaid = None
self.birthday = None
self.loginShell = None
self.quiet = None
self.rrslog = None
self.presync = None
# Used by rrs.
self.action = None
| #-----------------------------------------------------------------------------#
# MODULE DESCRIPTION #
#-----------------------------------------------------------------------------#
"""RedBrick Options Module; contains RBOpt class."""
#-----------------------------------------------------------------------------#
# DATA #
#-----------------------------------------------------------------------------#
__version__ = '$Revision: 1.3 $'
__author__ = 'Cillian Sharkey'
#-----------------------------------------------------------------------------#
# CLASSES #
#-----------------------------------------------------------------------------#
class RBOpt:
"""Class for storing options to be shared by modules"""
def __init__(self):
"""Create new RBOpt object."""
# Used by all modules.
self.override = None
# Used by useradm, RBUserDB & RBAccount.
self.test = None
# Used by useradm & rrs.
self.mode = None
self.setpasswd = None
# Used by useradm.
self.args = []
self.help = None
self.uid = None
self.dbonly = None
self.aconly = None
self.updatedby = None
self.newbie = None
self.mailuser = None
self.usertype = None
self.cn = None
self.altmail = None
self.id = None
self.course = None
self.year = None
self.yearsPaid = None
self.birthday = None
self.quiet = None
self.rrslog = None
self.presync = None
# Used by rrs.
self.action = None
| unlicense | Python |
d96770a6702379881d47582d954595cc155a40b3 | add ability to probe VPC | smithfarm/ceph-auto-aws,smithfarm/ceph-auto-aws | handson/aws.py | handson/aws.py | # -*- mode: python; coding: utf-8 -*-
#
# Copyright (c) 2016, SUSE LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of ceph-auto-aws nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from boto import connect_ec2, ec2, vpc
from handson.myyaml import myyaml
_ss = {} # saved state
class AWS(object):
def ping_ec2(self):
connect_ec2()
def ec2(self):
tree = myyaml.tree()
if 'ec2' not in _ss:
_ss['ec2'] = ec2.connect_to_region(tree['region'])
return _ss['ec2']
def vpc(self):
tree = myyaml.tree()
if 'vpc' not in _ss:
_ss['vpc'] = vpc.connect_to_region(tree['region'])
return _ss['vpc']
def vpc_obj(self):
if 'vpc_obj' in _ss:
return _ss['vpc_obj']
tree = myyaml.tree()
vpc = self.vpc()
if 'id' in tree['vpc']:
vpc_id = tree['vpc']['id']
vpc_list = vpc.get_all_vpcs(vpc_ids=vpc_id)
_ss['vpc_obj'] = vpc_list[0]
return _ss['vpc_obj']
# create a new 10.0.0.0/16
_ss['vpc_obj'] = vpc.create_vpc('10.0.0.0/16')
tree['vpc']['id'] = _ss['vpc_obj'].id
tree['vpc']['cidr_block'] = _ss['vpc_obj'].cidr_block
myyaml.write()
return _ss['vpc_obj']
aws = AWS()
| # -*- mode: python; coding: utf-8 -*-
#
# Copyright (c) 2016, SUSE LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of ceph-auto-aws nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import boto
from handson.myyaml import myyaml
_ss = {} # saved state
class AWS(object):
def ping_ec2(self):
boto.connect_ec2()
def ec2(self):
tree = myyaml.tree()
if 'ec2' not in _ss:
_ss['ec2'] = boto.ec2.connect_to_region(tree['region'])
return _ss['ec2']
def vpc(self):
tree = myyaml.tree()
if 'vpc' not in _ss:
_ss['vpc'] = boto.vpn.connect_to_region(tree['region'])
return _ss['vpc']
aws = AWS()
| bsd-3-clause | Python |
488ea145b1f1179bb5bac21959cfaa4b32ec8d0f | add a download_url to setup | argriffing/hmmus,argriffing/hmmus,argriffing/hmmus | hmmus/setup.py | hmmus/setup.py | """
This is the setup script.
This script is automatically run by
easy_install or by pip on the user's machine when
she installs the module from pypi.
Here is some documentation for this process.
http://docs.python.org/extending/building.html
More info:
http://wiki.python.org/moin/Distutils/Tutorial
Register the metadata with pypi as follows:
python setup.py register
Send to pypi as follows:
python setup.py sdist upload --show-response
"""
from distutils.core import setup
from distutils.core import Extension
import os
myversion_tuple = (0, 1, 4)
myversion = '.'.join(str(x) for x in myversion_tuple)
hummusc = Extension(
name='hmmusc',
sources=['hmmuscmodule.c', 'hmmguts/hmmguts.c'])
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Programming Language :: C',
'Programming Language :: Unix Shell',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules']
scripts = [
'bin/create-example-likelihoods-a.py',
'bin/create-example-likelihoods-b.py',
'bin/create-example-likelihoods-c.py',
'bin/view-matrix.py']
download_url_first = 'http://pypi.python.org/packages/source'
download_url_rest = 'h/hmmus/hmmus-' + myversion + '.tar.gz'
download_url = os.path.join(download_url_first, download_url_rest)
setup(
name = 'hmmus',
version = myversion,
author = 'Alex Griffing',
author_email = 'argriffi@ncsu.edu',
maintainer = 'Alex Griffing',
maintainer_email = 'argriffi@ncsu.edu',
url = 'http://github.com/argriffing/hmmus',
download_url = download_url,
description = 'Hidden Markov model stuff',
long_description = open('README').read(),
classifiers = classifiers,
platforms = ['linux'],
license = 'http://www.opensource.org/licenses/mit-license.html',
ext_modules = [hummusc],
packages = ['hmmus'],
scripts = scripts)
| """
This is the setup script.
This script is automatically run by
easy_install or by pip on the user's machine when
she installs the module from pypi.
Here is some documentation for this process.
http://docs.python.org/extending/building.html
More info:
http://wiki.python.org/moin/Distutils/Tutorial
Register the metadata with pypi as follows:
python setup.py register
Send to pypi as follows:
python setup.py sdist upload --show-response
"""
from distutils.core import setup
from distutils.core import Extension
hummusc = Extension(
name='hmmusc',
sources=['hmmuscmodule.c', 'hmmguts/hmmguts.c'])
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Programming Language :: C',
'Programming Language :: Unix Shell',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules']
scripts = [
'bin/create-example-likelihoods-a.py',
'bin/create-example-likelihoods-b.py',
'bin/create-example-likelihoods-c.py',
'bin/view-matrix.py']
setup(
name = 'hmmus',
version = '0.1.3',
author = 'Alex Griffing',
author_email = 'argriffi@ncsu.edu',
maintainer = 'Alex Griffing',
maintainer_email = 'argriffi@ncsu.edu',
url = 'http://github.com/argriffing/hmmus',
description = 'Hidden Markov model stuff',
long_description = open('README').read(),
classifiers = classifiers,
platforms = ['linux'],
license = 'http://www.opensource.org/licenses/mit-license.html',
ext_modules = [hummusc],
packages = ['hmmus'],
scripts = scripts)
| mit | Python |
557b97e8424996450e53804a9543629731d5a400 | update call to constructor of `git.GitCommandError` | tulip-control/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control | tests/version_test.py | tests/version_test.py | """Test the management of `tulip.__version__`."""
import imp
import os
import os.path
import git
import mock
from nose.tools import assert_raises
from setuptools.version import pkg_resources
import tulip
import tulip._version
def test_tulip_has_pep440_version():
"""Check that `tulip.__version__` complies to PEP440."""
version = tulip.__version__
assert version is not None, version
version_ = tulip._version.version
assert version == version_, (version, version_)
assert_pep440(version)
@mock.patch('git.Repo')
def test_git_version(mock_repo):
"""Mock `git` repository for testing `setup.git_version`."""
path = os.path.realpath(__file__)
path = os.path.dirname(path)
path = os.path.dirname(path) # parent dir
path = os.path.join(path, 'setup.py')
setup = imp.load_source('setup', path)
# mocking
version = '0.1.2'
instance = mock_repo.return_value
instance.head.commit.hexsha = '0123'
# dirty repo
v = setup.git_version(version)
assert_pep440(v)
assert 'dev' in v, v
assert 'dirty' in v, v
# not dirty, not tagged
instance.is_dirty.return_value = False
instance.git.describe.side_effect = git.GitCommandError('0', 0)
v = setup.git_version(version)
assert_pep440(v)
assert 'dev' in v, v
assert 'dirty' not in v, v
# tagged as version that matches `setup.py`
instance.git.describe.side_effect = None
instance.git.describe.return_value = 'v0.1.2'
v = setup.git_version(version)
assert_pep440(v)
assert v == '0.1.2', v
# tagged as wrong version
instance.git.describe.return_value = 'v0.1.3'
with assert_raises(AssertionError):
setup.git_version(version)
# release: no repo
mock_repo.side_effect = Exception('no repo found')
with assert_raises(Exception):
setup.git_version(version)
def assert_pep440(version):
"""Raise `AssertionError` if `version` violates PEP440."""
v = pkg_resources.parse_version(version)
assert isinstance(v, pkg_resources.SetuptoolsVersion), v
if __name__ == '__main__':
test_git_version()
| """Test the management of `tulip.__version__`."""
import imp
import os
import os.path
import git
import mock
from nose.tools import assert_raises
from setuptools.version import pkg_resources
import tulip
import tulip._version
def test_tulip_has_pep440_version():
"""Check that `tulip.__version__` complies to PEP440."""
version = tulip.__version__
assert version is not None, version
version_ = tulip._version.version
assert version == version_, (version, version_)
assert_pep440(version)
@mock.patch('git.Repo')
def test_git_version(mock_repo):
"""Mock `git` repository for testing `setup.git_version`."""
path = os.path.realpath(__file__)
path = os.path.dirname(path)
path = os.path.dirname(path) # parent dir
path = os.path.join(path, 'setup.py')
setup = imp.load_source('setup', path)
# mocking
version = '0.1.2'
instance = mock_repo.return_value
instance.head.commit.hexsha = '0123'
# dirty repo
v = setup.git_version(version)
assert_pep440(v)
assert 'dev' in v, v
assert 'dirty' in v, v
# not dirty, not tagged
instance.is_dirty.return_value = False
instance.git.describe.side_effect = git.GitCommandError(0, 0)
v = setup.git_version(version)
assert_pep440(v)
assert 'dev' in v, v
assert 'dirty' not in v, v
# tagged as version that matches `setup.py`
instance.git.describe.side_effect = None
instance.git.describe.return_value = 'v0.1.2'
v = setup.git_version(version)
assert_pep440(v)
assert v == '0.1.2', v
# tagged as wrong version
instance.git.describe.return_value = 'v0.1.3'
with assert_raises(AssertionError):
setup.git_version(version)
# release: no repo
mock_repo.side_effect = Exception('no repo found')
with assert_raises(Exception):
setup.git_version(version)
def assert_pep440(version):
"""Raise `AssertionError` if `version` violates PEP440."""
v = pkg_resources.parse_version(version)
assert isinstance(v, pkg_resources.SetuptoolsVersion), v
if __name__ == '__main__':
test_git_version()
| bsd-3-clause | Python |
689781f7adba3b7ec6df8fa0e84547610f5eb3e4 | test that viewset calls utm_zones_for_representing with correctly constructed geometry | geometalab/drf-utm-zone-info,geometalab/drf-utm-zone-info | tests/viewset_test.py | tests/viewset_test.py | import pytest
from django.contrib.gis.geos import Polygon
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APIClient
import utm_zone_info
def test_posting_valid_data_returns_utm_zones(mocker, api_client, utm_zone_post_url, payload):
utm_zone_mock = mocker.Mock()
utm_zone_mock.srid = 123456
mocker.patch('utm_zone_info.viewsets.utm_zones_for_representing', return_value=[utm_zone_mock])
post_viewset_result = api_client.post(utm_zone_post_url, payload, format='json')
utm_zone_info.viewsets.utm_zones_for_representing.assert_called_once_with(Polygon(*_valid_geoJSON['coordinates']))
args, kwargs = utm_zone_info.viewsets.utm_zones_for_representing.call_args
assert args[0].srid == 4326
expected_result = {'utm_zone_srids': [utm_zone_mock.srid]}
assert post_viewset_result.status_code == status.HTTP_200_OK
assert post_viewset_result.data == expected_result
def test_posting_invalid_data_returns_error(api_client, utm_zone_post_url, invalid_payload):
post_viewset_result = api_client.post(utm_zone_post_url, invalid_payload, format='json')
assert post_viewset_result.status_code == status.HTTP_400_BAD_REQUEST
_valid_geoJSON = {
"type": "Polygon",
"coordinates": [
[
[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]
]
]
}
@pytest.fixture(params=[
dict(),
dict(geom=''),
dict(srid=None),
dict(geom=None, srid=None),
dict(geom='', srid=None),
dict(geom=None, srid=''),
dict(geom='', srid=4326),
dict(geom=_valid_geoJSON, srid=''),
dict(geom={key: value for key, value in _valid_geoJSON.items() if key != 'type'}, srid=4326),
dict(geom={key: value for key, value in _valid_geoJSON.items() if key != 'coordinates'}, srid=4326),
])
def invalid_payload(request):
return request.param
@pytest.fixture(params=[
dict(geom=_valid_geoJSON, srid=4326)
])
def payload(request):
return request.param
@pytest.fixture
def api_client():
return APIClient(enforce_csrf_checks=True)
@pytest.fixture
def utm_zone_post_url():
return reverse('utm_zone_info:utm_zone_info-list')
| import pytest
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APIClient
def test_posting_valid_data_returns_utm_zones(mocker, api_client, utm_zone_post_url, payload):
utm_zone_mock = mocker.Mock()
utm_zone_mock.srid = 123456
mocker.patch('utm_zone_info.viewsets.utm_zones_for_representing', return_value=[utm_zone_mock])
post_viewset_result = api_client.post(utm_zone_post_url, payload, format='json')
expected_result = {'utm_zone_srids': [utm_zone_mock.srid]}
assert post_viewset_result.status_code == status.HTTP_200_OK
assert post_viewset_result.data == expected_result
def test_posting_invalid_data_returns_error(api_client, utm_zone_post_url, invalid_payload):
post_viewset_result = api_client.post(utm_zone_post_url, invalid_payload, format='json')
assert post_viewset_result.status_code == status.HTTP_400_BAD_REQUEST
_valid_geoJSON = {
"type": "Polygon",
"coordinates": [
[
[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]
]
]
}
@pytest.fixture(params=[
dict(),
dict(geom=''),
dict(srid=None),
dict(geom=None, srid=None),
dict(geom='', srid=None),
dict(geom=None, srid=''),
dict(geom='', srid=4326),
dict(geom=_valid_geoJSON, srid=''),
dict(geom={key: value for key, value in _valid_geoJSON.items() if key != 'type'}, srid=4326),
dict(geom={key: value for key, value in _valid_geoJSON.items() if key != 'coordinates'}, srid=4326),
])
def invalid_payload(request):
return request.param
@pytest.fixture(params=[
dict(geom=_valid_geoJSON, srid=4326)
])
def payload(request):
return request.param
@pytest.fixture
def api_client():
return APIClient(enforce_csrf_checks=True)
@pytest.fixture
def utm_zone_post_url():
return reverse('utm_zone_info:utm_zone_info-list')
| isc | Python |
4c8dd2b074d2c2227729ff0bd87cf60d06e97485 | Extend custom exception and add additional logging | duboviy/misc | retry.py | retry.py | # Helper script with retry utility function
# set logging for `retry` channel
import logging
logger = logging.getLogger('retry')
# Define Exception class for retry
class RetryException(Exception):
DESCRIPTION = "Exception ({}) raised after {} tries."
def __init__(self, exception, max_retry):
self.exception = exception
self.max_retry = max_retry
def __unicode__(self):
return self.DESCRIPTION.format(self.exception, self.max_retry)
def __str__(self):
return self.__unicode__()
# Define retry utility function
def retry(func, max_retry=10):
"""
@param func: The function that needs to be retry
(to pass function with arguments use partial object)
@param max_retry: Maximum retry of `func` function, default is `10`
@return: result of func
@raise: RetryException if retries exceeded than max_retry
"""
for retry in range(1, max_retry + 1):
try:
return func()
except Exception, e:
logger.info('Failed to call {}, in retry({}/{})'.format(func.func,
retry, max_retry))
else:
raise RetryException(e, max_retry)
| # Define retry util function
class RetryException(Exception):
pass
def retry(func, max_retry=10):
"""
@param func: The function that needs to be retry
(to pass function with arguments use partial object)
@param max_retry: Maximum retry of `func` function, default is `10`
@return: result of func
@raise: RetryException if retries exceeded than max_retry
"""
for retry in range(1, max_retry + 1):
try:
return func()
except Exception:
print ('Failed to call {}, in retry({}/{})'.format(func, retry, max_retry))
else:
raise RetryException(max_retry)
| mit | Python |
6e3dac3e54c53fc9d97956f6e051096267facd5a | install new board: atmega8 8Mhz | ponty/confduino | confduino/examples/custom_boards.py | confduino/examples/custom_boards.py | from __future__ import division
from confduino.boardinstall import install_board
from confduino.util import AutoBunch
from entrypoint2 import entrypoint
TEMPL_NAME = '{mcu}@{f_cpu}'
TEMPL_ID = '{mcu}_{f_cpu}'
def format_freq(f):
if f >= 1000000:
f = f / 1000000.0
suffix = 'MHz'
elif f >= 1000:
f = f / 1000.0
suffix = 'kHz'
else:
suffix = 'Hz'
f = ('%f' % f).rstrip('0').rstrip('.')
return f + '' + suffix
@entrypoint
def main(
upload='usbasp',
core='arduino',
replace_existing=True,
):
'install custom boards'
def install(mcu, f_cpu, kbyte):
board = AutoBunch()
board.name = TEMPL_NAME.format(mcu=mcu,
f_cpu=format_freq(f_cpu),
upload=upload)
board_id = TEMPL_ID.format(mcu=mcu,
f_cpu=(f_cpu),
upload=upload)
board.upload.using = upload
board.upload.maximum_size = kbyte * 1024
board.build.mcu = mcu
board.build.f_cpu = str(f_cpu) + 'L'
board.build.core = core
# for 1.0
board.build.variant = 'standard'
install_board(board_id, board, replace_existing=replace_existing)
install('atmega8', 1000000, 8)
install('atmega8', 8000000, 8)
install('atmega8', 12000000, 8)
install('atmega88', 1000000, 8)
install('atmega88', 8000000, 8)
install('atmega88', 12000000, 8)
install('atmega88', 20000000, 8)
install('atmega328p', 20000000, 32)
install('atmega328p', 8000000, 32)
install('atmega328p', 1000000, 32)
| from __future__ import division
from confduino.boardinstall import install_board
from confduino.util import AutoBunch
from entrypoint2 import entrypoint
TEMPL_NAME = '{mcu}@{f_cpu}'
TEMPL_ID = '{mcu}_{f_cpu}'
def format_freq(f):
if f >= 1000000:
f = f / 1000000.0
suffix = 'MHz'
elif f >= 1000:
f = f / 1000.0
suffix = 'kHz'
else:
suffix = 'Hz'
f = ('%f' % f).rstrip('0').rstrip('.')
return f + '' + suffix
@entrypoint
def main(
upload='usbasp',
core='arduino',
replace_existing=True,
):
'install custom boards'
def install(mcu, f_cpu, kbyte):
board = AutoBunch()
board.name = TEMPL_NAME.format(mcu=mcu,
f_cpu=format_freq(f_cpu),
upload=upload)
board_id = TEMPL_ID.format(mcu=mcu,
f_cpu=(f_cpu),
upload=upload)
board.upload.using = upload
board.upload.maximum_size = kbyte * 1024
board.build.mcu = mcu
board.build.f_cpu = str(f_cpu) + 'L'
board.build.core = core
# for 1.0
board.build.variant = 'standard'
install_board(board_id, board, replace_existing=replace_existing)
install('atmega8', 1000000, 8)
install('atmega8', 12000000, 8)
install('atmega88', 1000000, 8)
install('atmega88', 8000000, 8)
install('atmega88', 12000000, 8)
install('atmega88', 20000000, 8)
install('atmega328p', 20000000, 32)
install('atmega328p', 8000000, 32)
install('atmega328p', 1000000, 32)
| bsd-2-clause | Python |
15112f3c3b1eafc178eeecf830fd2c7a47e18f50 | fix close | legnaleurc/wcpan.worker | wcpan/worker/queue.py | wcpan/worker/queue.py | import functools as ft
from typing import Callable
from tornado import queues as tq, locks as tl, ioloop as ti
from wcpan.logger import DEBUG, EXCEPTION
from .task import regular_call, ensure_task, MaybeTask, TerminalTask
class AsyncQueue(object):
def __init__(self, maximum=None):
self._max = 1 if maximum is None else maximum
self._lock = tl.Semaphore(self._max)
self._loop = ti.IOLoop.current()
self._running = False
self._reset()
def start(self):
if self._running:
return
self._loop.add_callback(self._process)
self._running = True
async def stop(self):
if not self._running:
return
self._running = False
task = TerminalTask()
for i in range(self._max):
self._queue.put_nowait(task)
self._end = tl.Event()
await self._end.wait()
self._reset()
def flush(self, filter_: Callable[['Task'], bool]):
q = self._get_internal_queue()
nq = filter(lambda _: not filter_(_), q)
nq = list(nq)
DEBUG('wcpan.worker') << 'flush:' << 'before' << len(q) << 'after' << len(nq)
self._set_internal_queue(nq)
def post(self, task: MaybeTask):
task = ensure_task(task)
self._queue.put_nowait(task)
async def _process(self):
while self._running:
await self._lock.acquire()
task = await self._queue.get()
fn = ft.partial(self._run, task)
self._loop.add_callback(fn)
async def _run(self, task):
try:
if isinstance(task, TerminalTask):
return
else:
await regular_call(task)
except Exception as e:
EXCEPTION('wcpan.worker', e) << 'uncaught exception'
finally:
self._queue.task_done()
self._lock.release()
if self._end and self._lock._value == self._max:
self._end.set()
def _reset(self):
self._queue = tq.PriorityQueue()
self._end = None
def _get_internal_queue(self):
return self._queue._queue
def _set_internal_queue(self, nq):
self._queue._queue = nq
| import functools as ft
from typing import Callable
from tornado import queues as tq, locks as tl, ioloop as ti
from wcpan.logger import DEBUG, EXCEPTION
from .task import regular_call, ensure_task, MaybeTask, TerminalTask
class AsyncQueue(object):
def __init__(self, maximum=None):
self._max = 1 if maximum is None else maximum
self._lock = tl.Semaphore(self._max)
self._loop = ti.IOLoop.current()
self._running = False
self._reset()
def start(self):
if self._running:
return
self._loop.add_callback(self._process)
self._running = True
async def stop(self):
task = TerminalTask()
self._running = False
for i in range(self._max):
self._queue.put_nowait(task)
self._end = tl.Event()
await self._end.wait()
self._reset()
def flush(self, filter_: Callable[['Task'], bool]):
q = self._get_internal_queue()
nq = filter(lambda _: not filter_(_), q)
nq = list(nq)
DEBUG('wcpan.worker') << 'flush:' << 'before' << len(q) << 'after' << len(nq)
self._set_internal_queue(nq)
def post(self, task: MaybeTask):
task = ensure_task(task)
self._queue.put_nowait(task)
async def _process(self):
while self._running:
await self._lock.acquire()
task = await self._queue.get()
fn = ft.partial(self._run, task)
self._loop.add_callback(fn)
async def _run(self, task):
try:
if isinstance(task, TerminalTask):
return
else:
await regular_call(task)
except Exception as e:
EXCEPTION('wcpan.worker', e) << 'uncaught exception'
finally:
self._queue.task_done()
self._lock.release()
if self._end and self._lock._value == self._max:
self._end.set()
def _reset(self):
self._queue = tq.PriorityQueue()
self._end = None
def _get_internal_queue(self):
return self._queue._queue
def _set_internal_queue(self, nq):
self._queue._queue = nq
| mit | Python |
86d8a4ff50881840d0b9d8581b3049844273cdb0 | Fix encoding error with path to ssh | thaim/ansible,thaim/ansible | lib/ansible/utils/ssh_functions.py | lib/ansible/utils/ssh_functions.py | # (c) 2016, James Tanner
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import subprocess
from ansible.module_utils._text import to_bytes
_HAS_CONTROLPERSIST = {}
def check_for_controlpersist(ssh_executable):
try:
# If we've already checked this executable
return _HAS_CONTROLPERSIST[ssh_executable]
except KeyError:
pass
b_ssh_exec = to_bytes(ssh_executable, errors='surrogate_or_strict')
has_cp = True
try:
cmd = subprocess.Popen([b_ssh_exec, '-o', 'ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if b"Bad configuration option" in err or b"Usage:" in err:
has_cp = False
except OSError:
has_cp = False
_HAS_CONTROLPERSIST[ssh_executable] = has_cp
return has_cp
| # (c) 2016, James Tanner
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import subprocess
_HAS_CONTROLPERSIST = {}
def check_for_controlpersist(ssh_executable):
try:
# If we've already checked this executable
return _HAS_CONTROLPERSIST[ssh_executable]
except KeyError:
pass
has_cp = True
try:
cmd = subprocess.Popen([ssh_executable, '-o', 'ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if b"Bad configuration option" in err or b"Usage:" in err:
has_cp = False
except OSError:
has_cp = False
_HAS_CONTROLPERSIST[ssh_executable] = has_cp
return has_cp
| mit | Python |
5bcce0a022956a25ca41be8a6325d4bccf68f221 | implement HTTP Date class, which parses and converts HTTP dates | spaceone/httoop,spaceone/httoop,spaceone/httoop | httoop/date.py | httoop/date.py | # -*- coding: utf-8 -*-
"""HTTP date
.. seealso:: :rfc:`2616#section-3.3`
"""
__all__ = ['Date']
from httoop.util import HTTPString
from functools import partial
try:
from email.utils import formatdate, parsedate
formatdate = partial(formatdate, usegmt=True)
except ImportError:
from rfc822 import formatdate, parsedate
from datetime import datetime
import time
# TODO: this is bloatet, do we need it?
class Date(HTTPString):
u"""HTTP Date
.. seealso:: :rfc:`2616#section-3.3`
.. seealso:: :rfc:`2616#section-19.3`
"""
def __init__(self, timeval=None):
u"""
:param timeval:
:type timeval:
either seconds since epoch in float
or a datetime object
or a timetuple
"""
self.http_string, self.datetime, self.timestamp = None, None, None
if timeval is None:
self.datetime = datetime.now()
self.timestamp = time.mktime(self.datetime.timetuple())
elif isinstance(timeval, float):
self.timestamp = timeval
elif isinstance(timeval, tuple):
self.timestamp = time.mktime(**timeval)
elif isinstance(timeval, datetime):
self.datetime = timeval
self.timestamp = time.mktime(self.datetime.timetuple())
def to_timetuple(self):
return parsedate(formatdate(self.timestamp))[:7]
def to_datetime(self):
if self.datetime is None:
self.datetime = datetime.fromtimestamp(self.timestamp)
return self.datetime
def to_unix_timestamp(self):
return self.timestamp
def to_http_string(self):
if self.http_string is None:
self.http_string = formatdate(self.to_unix_timestamp())
return self.http_string
def __bytes__(self):
return self.to_http_string()
# TODO: implement __cmp__, __int__, (__float__), etc.
@classmethod
def parse(cls, timestr=None):
u"""parses a HTTP date string and returns a :class:`Date` object
:param timestr: the time string in one of the http formats
:type timestr: str
:returns: the HTTP Date object
:rtype : :class:`Date`
:example:
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
"""
# parse the most common HTTP date format (RFC 2822)
date = parsedate(timestr)
if date is not None:
return cls(date[:7])
# propably invalid here (if email.utils is installed)
# TODO: export locale=C required?
# parse RFC 1036 date format
try:
date = time.strptime(timestr, '%A, %d-%b-%y %H:%M:%S GMT')
except ValueError:
pass
else:
return cls(date)
# parse C's asctime format
# TODO: export locale=C required?
try:
date = time.strptime(timestr, '%a %b %d %H:%M:%S %Y')
except ValueError:
pass
else:
return cls(date)
| # -*- coding: utf-8 -*-
"""HTTP date
.. seealso:: :rfc:`2616#section-3.3`
"""
__all__ = ['Date']
# ripped from cherrypy (http://www.cherrypy.org/) (MIT license)
# TODO: implement the function wrapper thing from circuits.web
try:
from email.utils import formatdate
def Date(timeval=None):
return formatdate(timeval, usegmt=True)
except ImportError:
from rfc822 import formatdate as Date
# TODO: create a class here which can handle every 3 HTTP Date formats (parse and convert from timestamp/datetime)
| mit | Python |
f91ea2b07274391024fb10ec3f699747deacc29e | Update dependency bazelbuild/bazel to latest version | google/copybara,google/copybara,google/copybara | third_party/bazel.bzl | third_party/bazel.bzl | # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "f3604a6c9c433c42f3adfa2bc1551150482aa73e"
bazel_sha256 = "2fb85f0f5620bac7d86f094ce3466869081cc64f2b9aff836320cfc965e73fe1"
| # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "4009b176cb480eb2f16b86f8d05a2e2beb62f61d"
bazel_sha256 = "43d5f2553e51c6e617e060e2f3be73e9fb321fdb0161acf317fbf58ae6952548"
| apache-2.0 | Python |
c65c9ad42584823694f54b0b5d00d2952ae166f3 | Add complete robot figures | arunlakshmanan/robot-trajectory-tracking,arunlakshmanan/robot-trajectory-tracking,arunlakshmanan/robot-trajectory-tracking,arunlakshmanan/robot-trajectory-tracking | pathgen.py | pathgen.py | import socket
import struct
#Room size
room_length = 5.5
room_width = 3.35
scale_img = 120.0
#Robot dimensions
rob_scale = 1.2
rob_rect_x = int(0.15 * scale_img * rob_scale)
rob_rect_y = int(0.2 * scale_img * rob_scale)
rob_trapz_wid = int(0.05 * scale_img * rob_scale)
rob_trapz_side = int(0.07 * scale_img * rob_scale)
rob_wheel_x = int(0.04 * scale_img * rob_scale)
rob_wheel_y = int(0.02 * scale_img * rob_scale)
rob_wheel_pos = 2.0/3.0 #Value between 1 and 0
UDP = "localhost"
PORT = 20000
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
sock.bind((UDP,PORT))
print("\n\nPress ctrl + C to exit\n\n")
pix_x = int(room_width*scale_img)
pix_y = int(room_length*scale_img)
color_bot = {1: color(71,82,224),
2: color(0,255,30),
3: color(255,50,50),
4: color(240,255,0)}
data = []
def setup():
size(pix_x,pix_y)
colorMode(RGB)
frameRate(600)
noStroke()
def draw():
fill(255,255,255)
rectMode(CORNER)
rect(0.0,0.0,width,height)
udprecv, addr = sock.recvfrom(1024)
x,y,yaw,rbid,status,dataRate = struct.unpack('<dddddd',udprecv)
rbid = int(rbid)
if status == 1:
col = [row[3] for row in data]
if (rbid in col) != 1:
data.append([x,y,yaw,rbid])
else:
data[col.index(rbid)] = [x,y,yaw,rbid]
for row in data:
pushMatrix()
translate(row[0],row[1])
rotate(row[2])
draw_robot(row[3])
popMatrix()
def draw_robot(r_id):
stroke(150)
rectMode(CENTER)
fill(color_bot[r_id])
rect(0.0,0.0,rob_rect_x,rob_rect_y)
stroke(50)
fill(255)
beginShape()
vertex(rob_rect_x/2.0,rob_rect_y/2.0)
vertex(rob_rect_x/2.0+rob_trapz_wid,rob_trapz_side/2.0)
vertex(rob_rect_x/2.0+rob_trapz_wid,-rob_trapz_side/2.0)
vertex(rob_rect_x/2.0,-rob_rect_y/2.0)
endShape()
noStroke()
draw_wheel(rob_wheel_pos*rob_rect_x/2.0,rob_rect_y/2+rob_wheel_y/2)
draw_wheel(rob_wheel_pos*rob_rect_x/2.0,-rob_rect_y/2-rob_wheel_y/2)
draw_wheel(-rob_wheel_pos*rob_rect_x/2.0,rob_rect_y/2+rob_wheel_y/2)
draw_wheel(-rob_wheel_pos*rob_rect_x/2.0,-rob_rect_y/2-rob_wheel_y/2)
def draw_wheel(wx,wy):
fill(0)
rectMode(CENTER)
rect(wx,wy,rob_wheel_x,rob_wheel_y)
| import socket
import struct
#Room size
room_length = 5.5
room_width = 3.35
scale_img = 120.0
#Robot dimensions
rob_x = int(0.15 * scale_img)
rob_y = int(0.2 * scale_img)
UDP = "localhost"
PORT = 20000
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
sock.bind((UDP,PORT))
print("\n\nPress ctrl + C to exit\n\n")
pix_x = int(room_width*scale_img)
pix_y = int(room_length*scale_img)
color_bot = {1: color(58,116,197),
2: color(0,255,30),
3: color(220,20,60),
4: color(240,255,0)}
data = []
def setup():
size(pix_x,pix_y)
colorMode(RGB)
noStroke()
frameRate(600)
def draw():
fill(255,255,255)
rectMode(CORNER)
rect(0.0,0.0,width,height)
udprecv, addr = sock.recvfrom(1024)
x,y,yaw,rbid,status,dataRate = struct.unpack('<dddddd',udprecv)
rbid = int(rbid)
if status == 1:
col = [row[3] for row in data]
if (rbid in col) != 1:
data.append([x,y,yaw,rbid])
else:
data[col.index(rbid)] = [x,y,yaw,rbid]
for row in data:
fill(color_bot[row[3]])
translate(row[0],row[1])
rotate(row[2])
rectMode(CENTER)
rect(0,0,rob_x,rob_y)
rotate(-row[2])
translate(-row[0],-row[1])
| apache-2.0 | Python |
b3f00b40ad4e95f500e42cf05fd00a901da14f72 | make sure that timestamps do not have trailing L | hypebeast/etapi,hypebeast/etapi,hypebeast/etapi,hypebeast/etapi | etapi/weather/views.py | etapi/weather/views.py | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from flask import (Blueprint, request, render_template)
from etapi.lib.helpers import get_timestamps
from etapi.lib.helpers import get_todays_date
from etapi.weather.helpers import get_daily_temperature_series
weather = Blueprint('weather', __name__, url_prefix='/weather',
static_folder="../static")
@weather.route("/")
@weather.route("/<date>")
def index(date=get_todays_date().strftime('%Y-%m-%d')):
try:
current_date = datetime.strptime(date, '%Y-%m-%d')
except ValueError, TypeError:
current_date = get_todays_date()
yesterday = current_date - timedelta(days=1)
tomorrow = current_date + timedelta(days=1)
temp_data = get_daily_temperature_series(current_date)
timestamps_temp = get_timestamps(temp_data)
timestamps_temp = [str(x).rstrip('L') for x in timestamps_temp]
daily_chart_data = [list(x) for x in zip(timestamps_temp, [(float(d.temp or 0 )) for d in temp_data])]
return render_template("weather/weather.html",
today=current_date, yesterday=yesterday, tomorrow=tomorrow,
data=daily_chart_data)
| # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from flask import (Blueprint, request, render_template)
from etapi.lib.helpers import get_timestamps
from etapi.lib.helpers import get_todays_date
from etapi.weather.helpers import get_daily_temperature_series
weather = Blueprint('weather', __name__, url_prefix='/weather',
static_folder="../static")
@weather.route("/")
@weather.route("/<date>")
def index(date=get_todays_date().strftime('%Y-%m-%d')):
try:
current_date = datetime.strptime(date, '%Y-%m-%d')
except ValueError, TypeError:
current_date = get_todays_date()
yesterday = current_date - timedelta(days=1)
tomorrow = current_date + timedelta(days=1)
temp_data = get_daily_temperature_series(current_date)
timestamps_temp = get_timestamps(temp_data)
daily_chart_data = [list(x) for x in zip(timestamps_temp, [(float(d.temp or 0 )) for d in temp_data])]
return render_template("weather/weather.html",
today=current_date, yesterday=yesterday, tomorrow=tomorrow,
data=daily_chart_data)
| bsd-3-clause | Python |
863068145060aa32d464aafab8cf2ebd43d129ca | update dev version to 0.16.0-dev after tagging 0.15.2 | emory-libraries/eulexistdb,emory-libraries/eulexistdb,emory-libraries/eulexistdb | eulexistdb/__init__.py | eulexistdb/__init__.py | # file eulexistdb/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interact with `eXist-db`_ XML databases.
This package provides classes to ease interaction with eXist XML databases.
It contains the following modules:
* :mod:`eulexistdb.db` -- Connect to the database and query
* :mod:`eulexistdb.query` -- Query :class:`~eulxml.xmlmap.XmlObject`
models from eXist with semantics like a Django_ QuerySet
.. _eXist-db: http://exist.sourceforge.net/
.. _Django: http://www.djangoproject.com/
"""
__version_info__ = (0, 16, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([ str(i) for i in __version_info__[:-1] ])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| # file eulexistdb/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interact with `eXist-db`_ XML databases.
This package provides classes to ease interaction with eXist XML databases.
It contains the following modules:
* :mod:`eulexistdb.db` -- Connect to the database and query
* :mod:`eulexistdb.query` -- Query :class:`~eulxml.xmlmap.XmlObject`
models from eXist with semantics like a Django_ QuerySet
.. _eXist-db: http://exist.sourceforge.net/
.. _Django: http://www.djangoproject.com/
"""
__version_info__ = (0, 15, 2, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([ str(i) for i in __version_info__[:-1] ])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| apache-2.0 | Python |
71ccbb5d89406f178e157bcd7aed59dab4c7957a | update version in number in prep for new release | emory-libraries/eulexistdb,emory-libraries/eulexistdb,emory-libraries/eulexistdb | eulexistdb/__init__.py | eulexistdb/__init__.py | # file eulexistdb/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interact with `eXist-db`_ XML databases.
This package provides classes to ease interaction with eXist XML databases.
It contains the following modules:
* :mod:`eulexistdb.db` -- Connect to the database and query
* :mod:`eulexistdb.query` -- Query :class:`~eulxml.xmlmap.XmlObject`
models from eXist with semantics like a Django_ QuerySet
.. _eXist-db: http://exist.sourceforge.net/
.. _Django: http://www.djangoproject.com/
"""
__version_info__ = (0, 16, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| # file eulexistdb/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interact with `eXist-db`_ XML databases.
This package provides classes to ease interaction with eXist XML databases.
It contains the following modules:
* :mod:`eulexistdb.db` -- Connect to the database and query
* :mod:`eulexistdb.query` -- Query :class:`~eulxml.xmlmap.XmlObject`
models from eXist with semantics like a Django_ QuerySet
.. _eXist-db: http://exist.sourceforge.net/
.. _Django: http://www.djangoproject.com/
"""
__version_info__ = (0, 16, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([ str(i) for i in __version_info__[:-1] ])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| apache-2.0 | Python |
dcb369282a40bcc88eefa4771ecc90d79e7a956e | Make sure that the logistic regression does inherit from ClassifierMixin. | equialgo/scikit-learn,vigilv/scikit-learn,jkarnows/scikit-learn,iismd17/scikit-learn,jmetzen/scikit-learn,procoder317/scikit-learn,jlegendary/scikit-learn,moutai/scikit-learn,betatim/scikit-learn,russel1237/scikit-learn,gclenaghan/scikit-learn,zhenv5/scikit-learn,xwolf12/scikit-learn,jjx02230808/project0223,jereze/scikit-learn,frank-tancf/scikit-learn,untom/scikit-learn,adamgreenhall/scikit-learn,lesteve/scikit-learn,thientu/scikit-learn,loli/semisupervisedforests,devanshdalal/scikit-learn,sanketloke/scikit-learn,Garrett-R/scikit-learn,arabenjamin/scikit-learn,nomadcube/scikit-learn,joshloyal/scikit-learn,f3r/scikit-learn,stylianos-kampakis/scikit-learn,billy-inn/scikit-learn,marcocaccin/scikit-learn,mhdella/scikit-learn,potash/scikit-learn,jm-begon/scikit-learn,jorik041/scikit-learn,vivekmishra1991/scikit-learn,huzq/scikit-learn,zaxtax/scikit-learn,heli522/scikit-learn,xuewei4d/scikit-learn,pompiduskus/scikit-learn,ilo10/scikit-learn,spallavolu/scikit-learn,zorojean/scikit-learn,IssamLaradji/scikit-learn,fyffyt/scikit-learn,vivekmishra1991/scikit-learn,loli/sklearn-ensembletrees,shangwuhencc/scikit-learn,nmayorov/scikit-learn,lbishal/scikit-learn,Titan-C/scikit-learn,Myasuka/scikit-learn,fredhusser/scikit-learn,madjelan/scikit-learn,JPFrancoia/scikit-learn,lenovor/scikit-learn,bthirion/scikit-learn,hrjn/scikit-learn,sinhrks/scikit-learn,wlamond/scikit-learn,zaxtax/scikit-learn,jayflo/scikit-learn,UNR-AERIAL/scikit-learn,jzt5132/scikit-learn,alvarofierroclavero/scikit-learn,AlexRobson/scikit-learn,potash/scikit-learn,MartinDelzant/scikit-learn,pypot/scikit-learn,toastedcornflakes/scikit-learn,RayMick/scikit-learn,iismd17/scikit-learn,evgchz/scikit-learn,DSLituiev/scikit-learn,mattgiguere/scikit-learn,sergeyf/scikit-learn,Garrett-R/scikit-learn,cainiaocome/scikit-learn,fengzhyuan/scikit-learn,ssaeger/scikit-learn,justincassidy/scikit-learn,AlexandreAbraham/scikit-learn,sanketloke/scikit-learn,davidgbe/scikit-learn,abimannans/scikit-learn,bthirion/scikit-learn,wazeerzulfikar/scikit-learn,yask123/scikit-learn,elkingtonmcb/scikit-learn,djgagne/scikit-learn,bthirion/scikit-learn,waterponey/scikit-learn,yonglehou/scikit-learn,zihua/scikit-learn,murali-munna/scikit-learn,mblondel/scikit-learn,massmutual/scikit-learn,abhishekgahlot/scikit-learn,btabibian/scikit-learn,manhhomienbienthuy/scikit-learn,tmhm/scikit-learn,Vimos/scikit-learn,ngoix/OCRF,YinongLong/scikit-learn,henridwyer/scikit-learn,BiaDarkia/scikit-learn,ashhher3/scikit-learn,CforED/Machine-Learning,CforED/Machine-Learning,loli/semisupervisedforests,hsiaoyi0504/scikit-learn,lucidfrontier45/scikit-learn,jakirkham/scikit-learn,elkingtonmcb/scikit-learn,huobaowangxi/scikit-learn,kaichogami/scikit-learn,mlyundin/scikit-learn,Jimmy-Morzaria/scikit-learn,RomainBrault/scikit-learn,siutanwong/scikit-learn,quheng/scikit-learn,TomDLT/scikit-learn,sumspr/scikit-learn,larsmans/scikit-learn,andaag/scikit-learn,andaag/scikit-learn,jblackburne/scikit-learn,shikhardb/scikit-learn,JeanKossaifi/scikit-learn,ElDeveloper/scikit-learn,Vimos/scikit-learn,mlyundin/scikit-learn,AlexRobson/scikit-learn,waterponey/scikit-learn,trungnt13/scikit-learn,zhenv5/scikit-learn,qifeigit/scikit-learn,MechCoder/scikit-learn,MartinSavc/scikit-learn,huzq/scikit-learn,rrohan/scikit-learn,florian-f/sklearn,IssamLaradji/scikit-learn,dsullivan7/scikit-learn,idlead/scikit-learn,joshloyal/scikit-learn,ephes/scikit-learn,carrillo/scikit-learn,shenzebang/scikit-learn,etkirsch/scikit-learn,xavierwu/scikit-learn,ChanChiChoi/scikit-learn,tosolveit/scikit-learn,scikit-learn/scikit-learn,yanlend/scikit-learn,vybstat/scikit-learn,rahul-c1/scikit-learn,zorroblue/scikit-learn,ankurankan/scikit-learn,robin-lai/scikit-learn,zorroblue/scikit-learn,wzbozon/scikit-learn,lazywei/scikit-learn,anntzer/scikit-learn,PatrickOReilly/scikit-learn,dhruv13J/scikit-learn,luo66/scikit-learn,pypot/scikit-learn,IshankGulati/scikit-learn,thientu/scikit-learn,anirudhjayaraman/scikit-learn,andrewnc/scikit-learn,walterreade/scikit-learn,glemaitre/scikit-learn,xuewei4d/scikit-learn,zihua/scikit-learn,fabioticconi/scikit-learn,zhenv5/scikit-learn,ngoix/OCRF,aminert/scikit-learn,shenzebang/scikit-learn,evgchz/scikit-learn,ishanic/scikit-learn,murali-munna/scikit-learn,scikit-learn/scikit-learn,jorik041/scikit-learn,sarahgrogan/scikit-learn,kaichogami/scikit-learn,akionakamura/scikit-learn,poryfly/scikit-learn,frank-tancf/scikit-learn,mxjl620/scikit-learn,ldirer/scikit-learn,stylianos-kampakis/scikit-learn,pv/scikit-learn,manhhomienbienthuy/scikit-learn,pythonvietnam/scikit-learn,shangwuhencc/scikit-learn,JsNoNo/scikit-learn,MohammedWasim/scikit-learn,NelisVerhoef/scikit-learn,jpautom/scikit-learn,voxlol/scikit-learn,hitszxp/scikit-learn,zaxtax/scikit-learn,khkaminska/scikit-learn,kagayakidan/scikit-learn,AlexanderFabisch/scikit-learn,Achuth17/scikit-learn,dsullivan7/scikit-learn,giorgiop/scikit-learn,kjung/scikit-learn,aewhatley/scikit-learn,shyamalschandra/scikit-learn,Obus/scikit-learn,rahul-c1/scikit-learn,mrshu/scikit-learn,mfjb/scikit-learn,nikitasingh981/scikit-learn,rvraghav93/scikit-learn,maheshakya/scikit-learn,glouppe/scikit-learn,roxyboy/scikit-learn,liangz0707/scikit-learn,jmschrei/scikit-learn,moutai/scikit-learn,imaculate/scikit-learn,CVML/scikit-learn,krez13/scikit-learn,mugizico/scikit-learn,loli/sklearn-ensembletrees,lenovor/scikit-learn,sgenoud/scikit-learn,Nyker510/scikit-learn,vermouthmjl/scikit-learn,AlexandreAbraham/scikit-learn,eickenberg/scikit-learn,rishikksh20/scikit-learn,raghavrv/scikit-learn,lucidfrontier45/scikit-learn,nmayorov/scikit-learn,cwu2011/scikit-learn,Nyker510/scikit-learn,sinhrks/scikit-learn,jlegendary/scikit-learn,jaidevd/scikit-learn,bikong2/scikit-learn,plissonf/scikit-learn,robin-lai/scikit-learn,q1ang/scikit-learn,fzalkow/scikit-learn,IshankGulati/scikit-learn,ycaihua/scikit-learn,poryfly/scikit-learn,fbagirov/scikit-learn,ky822/scikit-learn,siutanwong/scikit-learn,mjudsp/Tsallis,MatthieuBizien/scikit-learn,belltailjp/scikit-learn,aewhatley/scikit-learn,wzbozon/scikit-learn,robin-lai/scikit-learn,heli522/scikit-learn,kylerbrown/scikit-learn,waterponey/scikit-learn,0asa/scikit-learn,bigdataelephants/scikit-learn,ephes/scikit-learn,themrmax/scikit-learn,olologin/scikit-learn,zaxtax/scikit-learn,arahuja/scikit-learn,RayMick/scikit-learn,terkkila/scikit-learn,eg-zhang/scikit-learn,madjelan/scikit-learn,idlead/scikit-learn,ngoix/OCRF,Barmaley-exe/scikit-learn,f3r/scikit-learn,mrshu/scikit-learn,mfjb/scikit-learn,betatim/scikit-learn,bhargav/scikit-learn,andrewnc/scikit-learn,russel1237/scikit-learn,fbagirov/scikit-learn,pianomania/scikit-learn,h2educ/scikit-learn,xyguo/scikit-learn,voxlol/scikit-learn,macks22/scikit-learn,ssaeger/scikit-learn,Vimos/scikit-learn,ltiao/scikit-learn,akionakamura/scikit-learn,gclenaghan/scikit-learn,robbymeals/scikit-learn,bhargav/scikit-learn,phdowling/scikit-learn,macks22/scikit-learn,abhishekgahlot/scikit-learn,heli522/scikit-learn,ChanderG/scikit-learn,rvraghav93/scikit-learn,mwv/scikit-learn,qifeigit/scikit-learn,q1ang/scikit-learn,mojoboss/scikit-learn,jblackburne/scikit-learn,vortex-ape/scikit-learn,RachitKansal/scikit-learn,pompiduskus/scikit-learn,jorge2703/scikit-learn,sonnyhu/scikit-learn,q1ang/scikit-learn,andaag/scikit-learn,rexshihaoren/scikit-learn,kashif/scikit-learn,abimannans/scikit-learn,Barmaley-exe/scikit-learn,massmutual/scikit-learn,joernhees/scikit-learn,nrhine1/scikit-learn,larsmans/scikit-learn,RomainBrault/scikit-learn,zorojean/scikit-learn,abimannans/scikit-learn,huzq/scikit-learn,costypetrisor/scikit-learn,cl4rke/scikit-learn,r-mart/scikit-learn,MartinDelzant/scikit-learn,rrohan/scikit-learn,victorbergelin/scikit-learn,alexsavio/scikit-learn,quheng/scikit-learn,NelisVerhoef/scikit-learn,ilyes14/scikit-learn,pkruskal/scikit-learn,mojoboss/scikit-learn,altairpearl/scikit-learn,treycausey/scikit-learn,OshynSong/scikit-learn,rahul-c1/scikit-learn,ZENGXH/scikit-learn,eickenberg/scikit-learn,nhejazi/scikit-learn,RachitKansal/scikit-learn,wazeerzulfikar/scikit-learn,jakobworldpeace/scikit-learn,theoryno3/scikit-learn,tomlof/scikit-learn,sgenoud/scikit-learn,etkirsch/scikit-learn,mjudsp/Tsallis,kagayakidan/scikit-learn,henrykironde/scikit-learn,yunfeilu/scikit-learn,jjx02230808/project0223,beepee14/scikit-learn,ChanChiChoi/scikit-learn,fengzhyuan/scikit-learn,samzhang111/scikit-learn,Lawrence-Liu/scikit-learn,sgenoud/scikit-learn,Aasmi/scikit-learn,Achuth17/scikit-learn,jmetzen/scikit-learn,wanggang3333/scikit-learn,ominux/scikit-learn,glemaitre/scikit-learn,costypetrisor/scikit-learn,DSLituiev/scikit-learn,alexeyum/scikit-learn,aminert/scikit-learn,ldirer/scikit-learn,themrmax/scikit-learn,procoder317/scikit-learn,sinhrks/scikit-learn,devanshdalal/scikit-learn,chrsrds/scikit-learn,hdmetor/scikit-learn,0x0all/scikit-learn,bigdataelephants/scikit-learn,PatrickOReilly/scikit-learn,aabadie/scikit-learn,jorik041/scikit-learn,billy-inn/scikit-learn,ashhher3/scikit-learn,ChanChiChoi/scikit-learn,luo66/scikit-learn,mfjb/scikit-learn,mxjl620/scikit-learn,deepesch/scikit-learn,nvoron23/scikit-learn,jblackburne/scikit-learn,anirudhjayaraman/scikit-learn,h2educ/scikit-learn,btabibian/scikit-learn,mojoboss/scikit-learn,toastedcornflakes/scikit-learn,ElDeveloper/scikit-learn,iismd17/scikit-learn,pythonvietnam/scikit-learn,justincassidy/scikit-learn,eg-zhang/scikit-learn,rvraghav93/scikit-learn,Fireblend/scikit-learn,Myasuka/scikit-learn,arjoly/scikit-learn,ldirer/scikit-learn,abhishekgahlot/scikit-learn,untom/scikit-learn,mattilyra/scikit-learn,dhruv13J/scikit-learn,Srisai85/scikit-learn,samzhang111/scikit-learn,cauchycui/scikit-learn,Titan-C/scikit-learn,djgagne/scikit-learn,yonglehou/scikit-learn,davidgbe/scikit-learn,vshtanko/scikit-learn,rexshihaoren/scikit-learn,beepee14/scikit-learn,clemkoa/scikit-learn,saiwing-yeung/scikit-learn,yunfeilu/scikit-learn,glemaitre/scikit-learn,anntzer/scikit-learn,vortex-ape/scikit-learn,smartscheduling/scikit-learn-categorical-tree,herilalaina/scikit-learn,JPFrancoia/scikit-learn,aetilley/scikit-learn,hsiaoyi0504/scikit-learn,lin-credible/scikit-learn,lazywei/scikit-learn,shusenl/scikit-learn,themrmax/scikit-learn,LohithBlaze/scikit-learn,q1ang/scikit-learn,altairpearl/scikit-learn,IndraVikas/scikit-learn,moutai/scikit-learn,MechCoder/scikit-learn,herilalaina/scikit-learn,UNR-AERIAL/scikit-learn,alexsavio/scikit-learn,Clyde-fare/scikit-learn,jzt5132/scikit-learn,wanggang3333/scikit-learn,depet/scikit-learn,lazywei/scikit-learn,nesterione/scikit-learn,fyffyt/scikit-learn,madjelan/scikit-learn,andrewnc/scikit-learn,vortex-ape/scikit-learn,rahul-c1/scikit-learn,billy-inn/scikit-learn,devanshdalal/scikit-learn,dingocuster/scikit-learn,cybernet14/scikit-learn,terkkila/scikit-learn,PatrickChrist/scikit-learn,B3AU/waveTree,Lawrence-Liu/scikit-learn,qifeigit/scikit-learn,0asa/scikit-learn,untom/scikit-learn,Sentient07/scikit-learn,fyffyt/scikit-learn,lucidfrontier45/scikit-learn,bnaul/scikit-learn,pypot/scikit-learn,petosegan/scikit-learn,tawsifkhan/scikit-learn,ClimbsRocks/scikit-learn,clemkoa/scikit-learn,adamgreenhall/scikit-learn,tomlof/scikit-learn,tmhm/scikit-learn,aetilley/scikit-learn,ZenDevelopmentSystems/scikit-learn,LiaoPan/scikit-learn,vigilv/scikit-learn,ElDeveloper/scikit-learn,aabadie/scikit-learn,glennq/scikit-learn,rahuldhote/scikit-learn,hitszxp/scikit-learn,bikong2/scikit-learn,Srisai85/scikit-learn,rsivapr/scikit-learn,abhishekgahlot/scikit-learn,fengzhyuan/scikit-learn,xavierwu/scikit-learn,costypetrisor/scikit-learn,kagayakidan/scikit-learn,aewhatley/scikit-learn,jayflo/scikit-learn,billy-inn/scikit-learn,lenovor/scikit-learn,dsquareindia/scikit-learn,sumspr/scikit-learn,xzh86/scikit-learn,robbymeals/scikit-learn,mrshu/scikit-learn,theoryno3/scikit-learn,sinhrks/scikit-learn,ZenDevelopmentSystems/scikit-learn,krez13/scikit-learn,hugobowne/scikit-learn,jseabold/scikit-learn,meduz/scikit-learn,eg-zhang/scikit-learn,mjgrav2001/scikit-learn,poryfly/scikit-learn,dingocuster/scikit-learn,mjgrav2001/scikit-learn,lbishal/scikit-learn,AIML/scikit-learn,wazeerzulfikar/scikit-learn,Nyker510/scikit-learn,mxjl620/scikit-learn,nelson-liu/scikit-learn,manashmndl/scikit-learn,potash/scikit-learn,siutanwong/scikit-learn,MohammedWasim/scikit-learn,sergeyf/scikit-learn,pnedunuri/scikit-learn,vibhorag/scikit-learn,JeanKossaifi/scikit-learn,kjung/scikit-learn,cauchycui/scikit-learn,theoryno3/scikit-learn,appapantula/scikit-learn,nhejazi/scikit-learn,elkingtonmcb/scikit-learn,mattilyra/scikit-learn,manhhomienbienthuy/scikit-learn,Clyde-fare/scikit-learn,Aasmi/scikit-learn,Sentient07/scikit-learn,carrillo/scikit-learn,hainm/scikit-learn,ngoix/OCRF,Sentient07/scikit-learn,ishanic/scikit-learn,liberatorqjw/scikit-learn,potash/scikit-learn,yask123/scikit-learn,liberatorqjw/scikit-learn,victorbergelin/scikit-learn,yanlend/scikit-learn,alexeyum/scikit-learn,mugizico/scikit-learn,jzt5132/scikit-learn,0asa/scikit-learn,untom/scikit-learn,adamgreenhall/scikit-learn,jorge2703/scikit-learn,mblondel/scikit-learn,espg/scikit-learn,LohithBlaze/scikit-learn,smartscheduling/scikit-learn-categorical-tree,Fireblend/scikit-learn,adamgreenhall/scikit-learn,PatrickChrist/scikit-learn,dsquareindia/scikit-learn,Windy-Ground/scikit-learn,equialgo/scikit-learn,NunoEdgarGub1/scikit-learn,ephes/scikit-learn,shangwuhencc/scikit-learn,cl4rke/scikit-learn,rrohan/scikit-learn,sarahgrogan/scikit-learn,xiaoxiamii/scikit-learn,petosegan/scikit-learn,xyguo/scikit-learn,Windy-Ground/scikit-learn,kevin-intel/scikit-learn,NelisVerhoef/scikit-learn,jmschrei/scikit-learn,xubenben/scikit-learn,rishikksh20/scikit-learn,yyjiang/scikit-learn,deepesch/scikit-learn,mfjb/scikit-learn,djgagne/scikit-learn,thilbern/scikit-learn,petosegan/scikit-learn,fredhusser/scikit-learn,yyjiang/scikit-learn,tdhopper/scikit-learn,tawsifkhan/scikit-learn,ningchi/scikit-learn,etkirsch/scikit-learn,mehdidc/scikit-learn,hrjn/scikit-learn,Lawrence-Liu/scikit-learn,hainm/scikit-learn,kylerbrown/scikit-learn,Srisai85/scikit-learn,belltailjp/scikit-learn,kmike/scikit-learn,B3AU/waveTree,dsquareindia/scikit-learn,jzt5132/scikit-learn,scikit-learn/scikit-learn,nikitasingh981/scikit-learn,icdishb/scikit-learn,glennq/scikit-learn,MohammedWasim/scikit-learn,liyu1990/sklearn,AlexanderFabisch/scikit-learn,anntzer/scikit-learn,saiwing-yeung/scikit-learn,thientu/scikit-learn,ssaeger/scikit-learn,pianomania/scikit-learn,nmayorov/scikit-learn,wzbozon/scikit-learn,xyguo/scikit-learn,vigilv/scikit-learn,loli/sklearn-ensembletrees,anurag313/scikit-learn,ndingwall/scikit-learn,ivannz/scikit-learn,macks22/scikit-learn,nvoron23/scikit-learn,vortex-ape/scikit-learn,CforED/Machine-Learning,kevin-intel/scikit-learn,qifeigit/scikit-learn,sgenoud/scikit-learn,raghavrv/scikit-learn,shahankhatch/scikit-learn,abhishekkrthakur/scikit-learn,walterreade/scikit-learn,kmike/scikit-learn,evgchz/scikit-learn,cainiaocome/scikit-learn,mikebenfield/scikit-learn,mhdella/scikit-learn,kevin-intel/scikit-learn,RomainBrault/scikit-learn,andaag/scikit-learn,wanggang3333/scikit-learn,loli/sklearn-ensembletrees,arjoly/scikit-learn,simon-pepin/scikit-learn,samuel1208/scikit-learn,r-mart/scikit-learn,henrykironde/scikit-learn,r-mart/scikit-learn,CVML/scikit-learn,imaculate/scikit-learn,BiaDarkia/scikit-learn,mehdidc/scikit-learn,chrisburr/scikit-learn,ankurankan/scikit-learn,zuku1985/scikit-learn,shyamalschandra/scikit-learn,spallavolu/scikit-learn,tawsifkhan/scikit-learn,trankmichael/scikit-learn,robin-lai/scikit-learn,zhenv5/scikit-learn,pkruskal/scikit-learn,costypetrisor/scikit-learn,schets/scikit-learn,cl4rke/scikit-learn,sonnyhu/scikit-learn,Jimmy-Morzaria/scikit-learn,LiaoPan/scikit-learn,DonBeo/scikit-learn,AnasGhrab/scikit-learn,fabianp/scikit-learn,AlexandreAbraham/scikit-learn,michigraber/scikit-learn,mayblue9/scikit-learn,davidgbe/scikit-learn,ogrisel/scikit-learn,anurag313/scikit-learn,lbishal/scikit-learn,hdmetor/scikit-learn,depet/scikit-learn,HolgerPeters/scikit-learn,DSLituiev/scikit-learn,maheshakya/scikit-learn,abhishekkrthakur/scikit-learn,IshankGulati/scikit-learn,jmschrei/scikit-learn,trankmichael/scikit-learn,fabioticconi/scikit-learn,hsuantien/scikit-learn,liyu1990/sklearn,samuel1208/scikit-learn,harshaneelhg/scikit-learn,amueller/scikit-learn,fabianp/scikit-learn,justincassidy/scikit-learn,thilbern/scikit-learn,cybernet14/scikit-learn,shahankhatch/scikit-learn,3manuek/scikit-learn,tmhm/scikit-learn,marcocaccin/scikit-learn,mhue/scikit-learn,eg-zhang/scikit-learn,kmike/scikit-learn,cl4rke/scikit-learn,cainiaocome/scikit-learn,PatrickOReilly/scikit-learn,vybstat/scikit-learn,Adai0808/scikit-learn,pratapvardhan/scikit-learn,dsullivan7/scikit-learn,Akshay0724/scikit-learn,raghavrv/scikit-learn,arabenjamin/scikit-learn,JsNoNo/scikit-learn,yonglehou/scikit-learn,rsivapr/scikit-learn,Jimmy-Morzaria/scikit-learn,vibhorag/scikit-learn,hlin117/scikit-learn,meduz/scikit-learn,ngoix/OCRF,mikebenfield/scikit-learn,nhejazi/scikit-learn,petosegan/scikit-learn,khkaminska/scikit-learn,clemkoa/scikit-learn,Djabbz/scikit-learn,nhejazi/scikit-learn,luo66/scikit-learn,anirudhjayaraman/scikit-learn,henridwyer/scikit-learn,chrsrds/scikit-learn,aflaxman/scikit-learn,theoryno3/scikit-learn,PatrickOReilly/scikit-learn,mrshu/scikit-learn,jereze/scikit-learn,jakobworldpeace/scikit-learn,samzhang111/scikit-learn,sarahgrogan/scikit-learn,xzh86/scikit-learn,hlin117/scikit-learn,gotomypc/scikit-learn,vshtanko/scikit-learn,ahoyosid/scikit-learn,sanketloke/scikit-learn,belltailjp/scikit-learn,AnasGhrab/scikit-learn,0x0all/scikit-learn,MatthieuBizien/scikit-learn,Obus/scikit-learn,3manuek/scikit-learn,russel1237/scikit-learn,PrashntS/scikit-learn,mikebenfield/scikit-learn,bthirion/scikit-learn,giorgiop/scikit-learn,PrashntS/scikit-learn,zorroblue/scikit-learn,jm-begon/scikit-learn,etkirsch/scikit-learn,treycausey/scikit-learn,toastedcornflakes/scikit-learn,hsuantien/scikit-learn,aabadie/scikit-learn,ilo10/scikit-learn,bhargav/scikit-learn,sonnyhu/scikit-learn,trungnt13/scikit-learn,akionakamura/scikit-learn,AIML/scikit-learn,espg/scikit-learn,imaculate/scikit-learn,arjoly/scikit-learn,joshloyal/scikit-learn,quheng/scikit-learn,alvarofierroclavero/scikit-learn,AlexandreAbraham/scikit-learn,pkruskal/scikit-learn,carrillo/scikit-learn,lucidfrontier45/scikit-learn,akionakamura/scikit-learn,maheshakya/scikit-learn,kjung/scikit-learn,hsuantien/scikit-learn,amueller/scikit-learn,massmutual/scikit-learn,ycaihua/scikit-learn,fzalkow/scikit-learn,mehdidc/scikit-learn,ky822/scikit-learn,mhdella/scikit-learn,sgenoud/scikit-learn,abimannans/scikit-learn,aetilley/scikit-learn,procoder317/scikit-learn,arabenjamin/scikit-learn,0x0all/scikit-learn,amueller/scikit-learn,toastedcornflakes/scikit-learn,mugizico/scikit-learn,ElDeveloper/scikit-learn,MartinSavc/scikit-learn,Achuth17/scikit-learn,NunoEdgarGub1/scikit-learn,devanshdalal/scikit-learn,OshynSong/scikit-learn,sumspr/scikit-learn,aflaxman/scikit-learn,jkarnows/scikit-learn,mlyundin/scikit-learn,liangz0707/scikit-learn,B3AU/waveTree,rvraghav93/scikit-learn,hsiaoyi0504/scikit-learn,HolgerPeters/scikit-learn,schets/scikit-learn,jayflo/scikit-learn,MechCoder/scikit-learn,jaidevd/scikit-learn,ssaeger/scikit-learn,yanlend/scikit-learn,LohithBlaze/scikit-learn,lesteve/scikit-learn,Adai0808/scikit-learn,BiaDarkia/scikit-learn,pythonvietnam/scikit-learn,fbagirov/scikit-learn,joernhees/scikit-learn,lenovor/scikit-learn,h2educ/scikit-learn,Barmaley-exe/scikit-learn,nmayorov/scikit-learn,anurag313/scikit-learn,nrhine1/scikit-learn,pianomania/scikit-learn,JosmanPS/scikit-learn,Myasuka/scikit-learn,Jimmy-Morzaria/scikit-learn,jereze/scikit-learn,olologin/scikit-learn,Titan-C/scikit-learn,Akshay0724/scikit-learn,nikitasingh981/scikit-learn,trankmichael/scikit-learn,massmutual/scikit-learn,liangz0707/scikit-learn,luo66/scikit-learn,ndingwall/scikit-learn,mblondel/scikit-learn,jjx02230808/project0223,depet/scikit-learn,DonBeo/scikit-learn,trungnt13/scikit-learn,bigdataelephants/scikit-learn,dhruv13J/scikit-learn,JosmanPS/scikit-learn,kagayakidan/scikit-learn,mhue/scikit-learn,liyu1990/sklearn,lesteve/scikit-learn,jlegendary/scikit-learn,PatrickChrist/scikit-learn,ishanic/scikit-learn,DonBeo/scikit-learn,mattilyra/scikit-learn,MechCoder/scikit-learn,loli/sklearn-ensembletrees,saiwing-yeung/scikit-learn,roxyboy/scikit-learn,jpautom/scikit-learn,lesteve/scikit-learn,elkingtonmcb/scikit-learn,fbagirov/scikit-learn,terkkila/scikit-learn,ankurankan/scikit-learn,dsquareindia/scikit-learn,ningchi/scikit-learn,MatthieuBizien/scikit-learn,stylianos-kampakis/scikit-learn,ltiao/scikit-learn,idlead/scikit-learn,aflaxman/scikit-learn,Garrett-R/scikit-learn,mhdella/scikit-learn,rohanp/scikit-learn,bnaul/scikit-learn,tosolveit/scikit-learn,ishanic/scikit-learn,rsivapr/scikit-learn,alexsavio/scikit-learn,shenzebang/scikit-learn,tosolveit/scikit-learn,nelson-liu/scikit-learn,ky822/scikit-learn,JsNoNo/scikit-learn,rrohan/scikit-learn,krez13/scikit-learn,harshaneelhg/scikit-learn,arahuja/scikit-learn,rohanp/scikit-learn,Garrett-R/scikit-learn,pnedunuri/scikit-learn,sanketloke/scikit-learn,466152112/scikit-learn,wlamond/scikit-learn,rajat1994/scikit-learn,themrmax/scikit-learn,mehdidc/scikit-learn,UNR-AERIAL/scikit-learn,tawsifkhan/scikit-learn,tmhm/scikit-learn,cwu2011/scikit-learn,0x0all/scikit-learn,xiaoxiamii/scikit-learn,alexsavio/scikit-learn,pv/scikit-learn,sarahgrogan/scikit-learn,CVML/scikit-learn,ClimbsRocks/scikit-learn,jseabold/scikit-learn,B3AU/waveTree,appapantula/scikit-learn,spallavolu/scikit-learn,zorojean/scikit-learn,yunfeilu/scikit-learn,jblackburne/scikit-learn,xiaoxiamii/scikit-learn,shikhardb/scikit-learn,raghavrv/scikit-learn,AlexanderFabisch/scikit-learn,Adai0808/scikit-learn,ningchi/scikit-learn,macks22/scikit-learn,xavierwu/scikit-learn,samuel1208/scikit-learn,glouppe/scikit-learn,pompiduskus/scikit-learn,plissonf/scikit-learn,tomlof/scikit-learn,shusenl/scikit-learn,mattilyra/scikit-learn,Barmaley-exe/scikit-learn,jmetzen/scikit-learn,NunoEdgarGub1/scikit-learn,RPGOne/scikit-learn,RPGOne/scikit-learn,pianomania/scikit-learn,mjgrav2001/scikit-learn,poryfly/scikit-learn,anirudhjayaraman/scikit-learn,OshynSong/scikit-learn,AnasGhrab/scikit-learn,ankurankan/scikit-learn,treycausey/scikit-learn,dhruv13J/scikit-learn,IndraVikas/scikit-learn,jakobworldpeace/scikit-learn,cauchycui/scikit-learn,ogrisel/scikit-learn,djgagne/scikit-learn,beepee14/scikit-learn,ltiao/scikit-learn,wlamond/scikit-learn,eickenberg/scikit-learn,espg/scikit-learn,hitszxp/scikit-learn,Windy-Ground/scikit-learn,rajat1994/scikit-learn,zorojean/scikit-learn,hrjn/scikit-learn,xzh86/scikit-learn,beepee14/scikit-learn,fabioticconi/scikit-learn,vybstat/scikit-learn,thilbern/scikit-learn,shangwuhencc/scikit-learn,mwv/scikit-learn,Fireblend/scikit-learn,ZENGXH/scikit-learn,kmike/scikit-learn,betatim/scikit-learn,evgchz/scikit-learn,russel1237/scikit-learn,nomadcube/scikit-learn,frank-tancf/scikit-learn,cdegroc/scikit-learn,gclenaghan/scikit-learn,rahuldhote/scikit-learn,tomlof/scikit-learn,Clyde-fare/scikit-learn,rsivapr/scikit-learn,fzalkow/scikit-learn,hdmetor/scikit-learn,mjgrav2001/scikit-learn,IssamLaradji/scikit-learn,vermouthmjl/scikit-learn,pv/scikit-learn,HolgerPeters/scikit-learn,meduz/scikit-learn,alexeyum/scikit-learn,jakirkham/scikit-learn,rahuldhote/scikit-learn,TomDLT/scikit-learn,harshaneelhg/scikit-learn,HolgerPeters/scikit-learn,depet/scikit-learn,glouppe/scikit-learn,madjelan/scikit-learn,evgchz/scikit-learn,ChanderG/scikit-learn,giorgiop/scikit-learn,joernhees/scikit-learn,mwv/scikit-learn,simon-pepin/scikit-learn,voxlol/scikit-learn,Nyker510/scikit-learn,justincassidy/scikit-learn,jakobworldpeace/scikit-learn,huobaowangxi/scikit-learn,Garrett-R/scikit-learn,Aasmi/scikit-learn,ashhher3/scikit-learn,joernhees/scikit-learn,ilyes14/scikit-learn,pnedunuri/scikit-learn,ningchi/scikit-learn,joshloyal/scikit-learn,IndraVikas/scikit-learn,procoder317/scikit-learn,altairpearl/scikit-learn,nelson-liu/scikit-learn,rajat1994/scikit-learn,plissonf/scikit-learn,Obus/scikit-learn,vinayak-mehta/scikit-learn,mayblue9/scikit-learn,kylerbrown/scikit-learn,ndingwall/scikit-learn,Akshay0724/scikit-learn,r-mart/scikit-learn,jorge2703/scikit-learn,eickenberg/scikit-learn,Windy-Ground/scikit-learn,rishikksh20/scikit-learn,MartinDelzant/scikit-learn,Djabbz/scikit-learn,loli/semisupervisedforests,YinongLong/scikit-learn,xyguo/scikit-learn,sonnyhu/scikit-learn,BiaDarkia/scikit-learn,kashif/scikit-learn,cwu2011/scikit-learn,vigilv/scikit-learn,hlin117/scikit-learn,MohammedWasim/scikit-learn,MatthieuBizien/scikit-learn,mjudsp/Tsallis,ogrisel/scikit-learn,tdhopper/scikit-learn,hitszxp/scikit-learn,ltiao/scikit-learn,mjudsp/Tsallis,glouppe/scikit-learn,larsmans/scikit-learn,wlamond/scikit-learn,xuewei4d/scikit-learn,JosmanPS/scikit-learn,lin-credible/scikit-learn,LohithBlaze/scikit-learn,bikong2/scikit-learn,CforED/Machine-Learning,appapantula/scikit-learn,larsmans/scikit-learn,yanlend/scikit-learn,liberatorqjw/scikit-learn,ZENGXH/scikit-learn,tosolveit/scikit-learn,trankmichael/scikit-learn,gclenaghan/scikit-learn,Sentient07/scikit-learn,ldirer/scikit-learn,JeanKossaifi/scikit-learn,michigraber/scikit-learn,henridwyer/scikit-learn,thientu/scikit-learn,rexshihaoren/scikit-learn,fyffyt/scikit-learn,f3r/scikit-learn,0x0all/scikit-learn,liberatorqjw/scikit-learn,arabenjamin/scikit-learn,dsullivan7/scikit-learn,depet/scikit-learn,florian-f/sklearn,nikitasingh981/scikit-learn,jseabold/scikit-learn,vermouthmjl/scikit-learn,JeanKossaifi/scikit-learn,RPGOne/scikit-learn,ominux/scikit-learn,pkruskal/scikit-learn,TomDLT/scikit-learn,shahankhatch/scikit-learn,herilalaina/scikit-learn,rsivapr/scikit-learn,cainiaocome/scikit-learn,CVML/scikit-learn,ilyes14/scikit-learn,rohanp/scikit-learn,anurag313/scikit-learn,MartinSavc/scikit-learn,kaichogami/scikit-learn,h2educ/scikit-learn,voxlol/scikit-learn,vibhorag/scikit-learn,roxyboy/scikit-learn,jpautom/scikit-learn,nesterione/scikit-learn,466152112/scikit-learn,AlexRobson/scikit-learn,maheshakya/scikit-learn,fabianp/scikit-learn,fredhusser/scikit-learn,hsiaoyi0504/scikit-learn,mlyundin/scikit-learn,mhue/scikit-learn,wazeerzulfikar/scikit-learn,bnaul/scikit-learn,aewhatley/scikit-learn,jm-begon/scikit-learn,mattgiguere/scikit-learn,sumspr/scikit-learn,huobaowangxi/scikit-learn,RPGOne/scikit-learn,jmetzen/scikit-learn,IndraVikas/scikit-learn,aetilley/scikit-learn,ChanderG/scikit-learn,shusenl/scikit-learn,robbymeals/scikit-learn,pratapvardhan/scikit-learn,henridwyer/scikit-learn,idlead/scikit-learn,cdegroc/scikit-learn,shyamalschandra/scikit-learn,jjx02230808/project0223,MartinDelzant/scikit-learn,Djabbz/scikit-learn,andrewnc/scikit-learn,manashmndl/scikit-learn,ankurankan/scikit-learn,466152112/scikit-learn,hlin117/scikit-learn,tdhopper/scikit-learn,nelson-liu/scikit-learn,jkarnows/scikit-learn,Djabbz/scikit-learn,giorgiop/scikit-learn,manhhomienbienthuy/scikit-learn,zuku1985/scikit-learn,lbishal/scikit-learn,treycausey/scikit-learn,murali-munna/scikit-learn,walterreade/scikit-learn,mattilyra/scikit-learn,hitszxp/scikit-learn,arahuja/scikit-learn,YinongLong/scikit-learn,xavierwu/scikit-learn,NunoEdgarGub1/scikit-learn,ky822/scikit-learn,liangz0707/scikit-learn,3manuek/scikit-learn,mikebenfield/scikit-learn,michigraber/scikit-learn,cdegroc/scikit-learn,rishikksh20/scikit-learn,nvoron23/scikit-learn,vivekmishra1991/scikit-learn,larsmans/scikit-learn,nvoron23/scikit-learn,kashif/scikit-learn,amueller/scikit-learn,yunfeilu/scikit-learn,marcocaccin/scikit-learn,Achuth17/scikit-learn,huzq/scikit-learn,RayMick/scikit-learn,ahoyosid/scikit-learn,ZenDevelopmentSystems/scikit-learn,wanggang3333/scikit-learn,glennq/scikit-learn,smartscheduling/scikit-learn-categorical-tree,lazywei/scikit-learn,arjoly/scikit-learn,abhishekgahlot/scikit-learn,simon-pepin/scikit-learn,walterreade/scikit-learn,JosmanPS/scikit-learn,cybernet14/scikit-learn,aminert/scikit-learn,ilo10/scikit-learn,ChanChiChoi/scikit-learn,gotomypc/scikit-learn,pompiduskus/scikit-learn,hugobowne/scikit-learn,huobaowangxi/scikit-learn,shikhardb/scikit-learn,manashmndl/scikit-learn,MartinSavc/scikit-learn,tdhopper/scikit-learn,xiaoxiamii/scikit-learn,jorik041/scikit-learn,yyjiang/scikit-learn,florian-f/sklearn,ycaihua/scikit-learn,marcocaccin/scikit-learn,bhargav/scikit-learn,anntzer/scikit-learn,ahoyosid/scikit-learn,jm-begon/scikit-learn,RachitKansal/scikit-learn,TomDLT/scikit-learn,jpautom/scikit-learn,PrashntS/scikit-learn,pv/scikit-learn,phdowling/scikit-learn,gotomypc/scikit-learn,appapantula/scikit-learn,ycaihua/scikit-learn,vermouthmjl/scikit-learn,glemaitre/scikit-learn,LiaoPan/scikit-learn,Clyde-fare/scikit-learn,hainm/scikit-learn,mattgiguere/scikit-learn,vybstat/scikit-learn,ndingwall/scikit-learn,olologin/scikit-learn,aflaxman/scikit-learn,AlexanderFabisch/scikit-learn,hugobowne/scikit-learn,mayblue9/scikit-learn,vshtanko/scikit-learn,yask123/scikit-learn,kjung/scikit-learn,zuku1985/scikit-learn,xwolf12/scikit-learn,quheng/scikit-learn,abhishekkrthakur/scikit-learn,btabibian/scikit-learn,rohanp/scikit-learn,mblondel/scikit-learn,florian-f/sklearn,rexshihaoren/scikit-learn,chrsrds/scikit-learn,cdegroc/scikit-learn,mwv/scikit-learn,lin-credible/scikit-learn,spallavolu/scikit-learn,nesterione/scikit-learn,ilo10/scikit-learn,meduz/scikit-learn,murali-munna/scikit-learn,YinongLong/scikit-learn,plissonf/scikit-learn,xwolf12/scikit-learn,mojoboss/scikit-learn,krez13/scikit-learn,Vimos/scikit-learn,AlexRobson/scikit-learn,xubenben/scikit-learn,ZenDevelopmentSystems/scikit-learn,mattgiguere/scikit-learn,trungnt13/scikit-learn,deepesch/scikit-learn,saiwing-yeung/scikit-learn,zuku1985/scikit-learn,khkaminska/scikit-learn,davidgbe/scikit-learn,icdishb/scikit-learn,Fireblend/scikit-learn,mrshu/scikit-learn,betatim/scikit-learn,chrisburr/scikit-learn,robbymeals/scikit-learn,nrhine1/scikit-learn,JPFrancoia/scikit-learn,chrisburr/scikit-learn,icdishb/scikit-learn,nomadcube/scikit-learn,equialgo/scikit-learn,iismd17/scikit-learn,samuel1208/scikit-learn,alexeyum/scikit-learn,hdmetor/scikit-learn,Obus/scikit-learn,cybernet14/scikit-learn,scikit-learn/scikit-learn,Lawrence-Liu/scikit-learn,bnaul/scikit-learn,Aasmi/scikit-learn,mjudsp/Tsallis,cwu2011/scikit-learn,vinayak-mehta/scikit-learn,harshaneelhg/scikit-learn,chrsrds/scikit-learn,IshankGulati/scikit-learn,ClimbsRocks/scikit-learn,RayMick/scikit-learn,nesterione/scikit-learn,shikhardb/scikit-learn,henrykironde/scikit-learn,PatrickChrist/scikit-learn,eickenberg/scikit-learn,pypot/scikit-learn,jereze/scikit-learn,zihua/scikit-learn,yask123/scikit-learn,deepesch/scikit-learn,NelisVerhoef/scikit-learn,vshtanko/scikit-learn,ivannz/scikit-learn,0asa/scikit-learn,fabianp/scikit-learn,shusenl/scikit-learn,jlegendary/scikit-learn,gotomypc/scikit-learn,ashhher3/scikit-learn,ngoix/OCRF,florian-f/sklearn,michigraber/scikit-learn,ogrisel/scikit-learn,rahuldhote/scikit-learn,466152112/scikit-learn,wzbozon/scikit-learn,alvarofierroclavero/scikit-learn,xwolf12/scikit-learn,JsNoNo/scikit-learn,bikong2/scikit-learn,kmike/scikit-learn,jkarnows/scikit-learn,glennq/scikit-learn,herilalaina/scikit-learn,schets/scikit-learn,waterponey/scikit-learn,fabioticconi/scikit-learn,jaidevd/scikit-learn,UNR-AERIAL/scikit-learn,pythonvietnam/scikit-learn,zorroblue/scikit-learn,AnasGhrab/scikit-learn,B3AU/waveTree,jayflo/scikit-learn,JPFrancoia/scikit-learn,hsuantien/scikit-learn,fredhusser/scikit-learn,phdowling/scikit-learn,loli/semisupervisedforests,kylerbrown/scikit-learn,hugobowne/scikit-learn,siutanwong/scikit-learn,pnedunuri/scikit-learn,bigdataelephants/scikit-learn,clemkoa/scikit-learn,ycaihua/scikit-learn,stylianos-kampakis/scikit-learn,dingocuster/scikit-learn,chrisburr/scikit-learn,mayblue9/scikit-learn,vinayak-mehta/scikit-learn,belltailjp/scikit-learn,f3r/scikit-learn,shenzebang/scikit-learn,ChanderG/scikit-learn,abhishekkrthakur/scikit-learn,simon-pepin/scikit-learn,Titan-C/scikit-learn,phdowling/scikit-learn,jaidevd/scikit-learn,ivannz/scikit-learn,thilbern/scikit-learn,ephes/scikit-learn,Adai0808/scikit-learn,shahankhatch/scikit-learn,hainm/scikit-learn,pratapvardhan/scikit-learn,vibhorag/scikit-learn,lucidfrontier45/scikit-learn,olologin/scikit-learn,schets/scikit-learn,ClimbsRocks/scikit-learn,ominux/scikit-learn,alvarofierroclavero/scikit-learn,kashif/scikit-learn,mxjl620/scikit-learn,liyu1990/sklearn,roxyboy/scikit-learn,zihua/scikit-learn,rajat1994/scikit-learn,yyjiang/scikit-learn,yonglehou/scikit-learn,OshynSong/scikit-learn,ZENGXH/scikit-learn,sergeyf/scikit-learn,nrhine1/scikit-learn,imaculate/scikit-learn,Srisai85/scikit-learn,henrykironde/scikit-learn,cauchycui/scikit-learn,mugizico/scikit-learn,smartscheduling/scikit-learn-categorical-tree,aminert/scikit-learn,hrjn/scikit-learn,vinayak-mehta/scikit-learn,samzhang111/scikit-learn,jorge2703/scikit-learn,nomadcube/scikit-learn,mhue/scikit-learn,RomainBrault/scikit-learn,khkaminska/scikit-learn,ilyes14/scikit-learn,equialgo/scikit-learn,ominux/scikit-learn,IssamLaradji/scikit-learn,xubenben/scikit-learn,moutai/scikit-learn,AIML/scikit-learn,DonBeo/scikit-learn,altairpearl/scikit-learn,3manuek/scikit-learn,jakirkham/scikit-learn,heli522/scikit-learn,RachitKansal/scikit-learn,jakirkham/scikit-learn,espg/scikit-learn,Akshay0724/scikit-learn,ivannz/scikit-learn,lin-credible/scikit-learn,shyamalschandra/scikit-learn,xubenben/scikit-learn,fzalkow/scikit-learn,dingocuster/scikit-learn,aabadie/scikit-learn,maheshakya/scikit-learn,jseabold/scikit-learn,DSLituiev/scikit-learn,terkkila/scikit-learn,frank-tancf/scikit-learn,0asa/scikit-learn,arahuja/scikit-learn,victorbergelin/scikit-learn,vivekmishra1991/scikit-learn,xzh86/scikit-learn,AIML/scikit-learn,btabibian/scikit-learn,LiaoPan/scikit-learn,fengzhyuan/scikit-learn,pratapvardhan/scikit-learn,treycausey/scikit-learn,ahoyosid/scikit-learn,icdishb/scikit-learn,kaichogami/scikit-learn,carrillo/scikit-learn,kevin-intel/scikit-learn,manashmndl/scikit-learn,xuewei4d/scikit-learn,sergeyf/scikit-learn,Myasuka/scikit-learn,PrashntS/scikit-learn,jmschrei/scikit-learn,victorbergelin/scikit-learn | scikits/learn/logistic.py | scikits/learn/logistic.py | import numpy as np
from . import _liblinear
from .base import ClassifierMixin
from .svm import BaseLibLinear
class LogisticRegression(BaseLibLinear, ClassifierMixin):
"""
Logistic Regression.
Implements L1 and L2 regularized logistic regression.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Y : array, shape = [n_samples]
Target vector relative to X
penalty : string, 'l1' or 'l2'
Used to specify the norm used in the penalization
C : float
Specifies the strength of the regularization. The smaller it is
the bigger in the regularization.
intercept : bool, default: True
Specifies if a constant (a.k.a. bias or intercept) should be
added the decision function
Attributes
----------
`coef_` : array, shape = [n_classes-1, n_features]
Coefficient of the features in the decision function.
`intercept_` : array, shape = [n_classes-1]
intercept (a.k.a. bias) added to the decision function.
It is available only when parameter intercept is set to True
Methods
-------
fit(X, Y) : self
Fit the model
predict(X) : array
Predict using the model.
See also
--------
LinearSVC
References
----------
LIBLINEAR -- A Library for Large Linear Classification
http://www.csie.ntu.edu.tw/~cjlin/liblinear/
"""
def __init__(self, penalty='l2', eps=1e-4, C=1.0, has_intercept=True):
super(LogisticRegression, self).__init__ (penalty=penalty, loss='lr',
dual=False, eps=eps, C=C, has_intercept=has_intercept)
def predict_proba(self, T):
T = np.asanyarray(T, dtype=np.float64, order='C')
return _liblinear.predict_prob_wrap(T, self.raw_coef_, self._get_solver_type(),
self.eps, self.C,
self._weight_label,
self._weight, self.label_,
self._get_bias())
| import numpy as np
from . import _liblinear
from .svm import BaseLibLinear
class LogisticRegression(BaseLibLinear):
"""
Logistic Regression.
Implements L1 and L2 regularized logistic regression.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Y : array, shape = [n_samples]
Target vector relative to X
penalty : string, 'l1' or 'l2'
Used to specify the norm used in the penalization
C : float
Specifies the strength of the regularization. The smaller it is
the bigger in the regularization.
intercept : bool, default: True
Specifies if a constant (a.k.a. bias or intercept) should be
added the decision function
Attributes
----------
`coef_` : array, shape = [n_classes-1, n_features]
Coefficient of the features in the decision function.
`intercept_` : array, shape = [n_classes-1]
intercept (a.k.a. bias) added to the decision function.
It is available only when parameter intercept is set to True
Methods
-------
fit(X, Y) : self
Fit the model
predict(X) : array
Predict using the model.
See also
--------
LinearSVC
References
----------
LIBLINEAR -- A Library for Large Linear Classification
http://www.csie.ntu.edu.tw/~cjlin/liblinear/
"""
def __init__(self, penalty='l2', eps=1e-4, C=1.0, has_intercept=True):
super(LogisticRegression, self).__init__ (penalty=penalty, loss='lr',
dual=False, eps=eps, C=C, has_intercept=has_intercept)
def predict_proba(self, T):
T = np.asanyarray(T, dtype=np.float64, order='C')
return _liblinear.predict_prob_wrap(T, self.raw_coef_, self._get_solver_type(),
self.eps, self.C,
self._weight_label,
self._weight, self.label_,
self._get_bias()) | bsd-3-clause | Python |
4e2ed175dd2fe3d5f82f9652564bc2053f80a472 | fix allure attachments | skostya64/Selenium_tasks | pages/admin_panel_login_page.py | pages/admin_panel_login_page.py | from allure.constants import AttachmentType
from selenium.webdriver.support.wait import WebDriverWait
import allure
class AdminPanelLoginPage:
def __init__(self, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def open(self):
self.driver.get("http://localhost/litecart/admin")
return self
def is_on_this_page(self):
return len(self.driver.find_elements_by_id("box-login")) > 0
def enter_username(self, username):
with allure.MASTER_HELPER.step('Login window'):
allure.MASTER_HELPER.attach('screen', self.driver.get_screenshot_as_png(), type=AttachmentType.PNG)
self.driver.find_element_by_name("username").send_keys(username)
return self
def enter_password(self, password):
self.driver.find_element_by_name("password").send_keys(password)
return self
def submit_login(self):
self.driver.find_element_by_name("login").click()
self.wait.until(lambda d: d.find_element_by_id("box-apps-menu"))
with allure.MASTER_HELPER.step('Main window'):
allure.MASTER_HELPER.attach('screen', self.driver.get_screenshot_as_png(), type=AttachmentType.PNG) | from allure.constants import AttachmentType
from selenium.webdriver.support.wait import WebDriverWait
import allure
class AdminPanelLoginPage:
def __init__(self, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def open(self):
self.driver.get("http://localhost/litecart/admin")
return self
def is_on_this_page(self):
return len(self.driver.find_elements_by_id("box-login")) > 0
def enter_username(self, username):
with allure.MASTER_HELPER.step('Login window'):
allure.MASTER_HELPER.attach('screen', self.driver.get_screenshot_as_png(), type=AttachmentType.TEXT)
self.driver.find_element_by_name("username").send_keys(username)
return self
def enter_password(self, password):
self.driver.find_element_by_name("password").send_keys(password)
return self
def submit_login(self):
self.driver.find_element_by_name("login").click()
self.wait.until(lambda d: d.find_element_by_id("box-apps-menu"))
with allure.MASTER_HELPER.step('Main window'):
allure.MASTER_HELPER.attach('screen', self.driver.get_screenshot_as_png(), type=AttachmentType.TEXT) | apache-2.0 | Python |
b6c30758642bce1e07129e9b62970b3129de63a2 | use tltd.git.Repo | rciorba/tldt,rciorba/tldt | src/tldt/tldt.py | src/tldt/tldt.py | from __future__ import absolute_import
import contextlib
import ConfigParser
import importlib
import os
import subprocess
from tldt import git
@contextlib.contextmanager
def chdir(dirname):
cwd = os.getcwd()
os.chdir(dirname)
yield
os.chdir(cwd)
class Project(object):
def __init__(self, head_repo, head_sha, base_repo, base_sha,
configuration_path):
self.head_repo = head_repo
self.head_sha = head_sha
self.base_repo = base_repo
self.base_sha = base_sha
self.config = ConfigParser.ConfigParser()
self.config.read(configuration_path)
self.parsers = self.config.items("ActiveParsers")
self.repo = None
# FOOBAR
self.tldt()
def checkout_code(self):
self.repo = git.Repo(self.config.get("repo", "local"))
self.repo.clone_or_update(self.base_repo)
self.repo.fetch(self.head_repo)
self.repo.checkout(self.head_sha)
def setup_environment(self):
with chdir(self.repo.local):
subprocess.check_call(["build/setup_environment"])
def run_tests(self):
with chdir(self.repo.local):
subprocess.check_call(["build/run_tests"])
def run_parsers(self):
for parser_name, parser_module in self.parsers:
try:
module = importlib.import_module(parser_module)
kargs = dict(self.config.items("parser-%s" % parser_name))
module.Parser(**kargs)
except ImportError as e:
print "Could not load '%s' parsing module.\n %r " % (parser_name, e)
def post_results(self):
pass
def tldt(self):
self.checkout_code()
#self.setup_environment()
#self.run_tests()
self.run_parsers()
self.post_results()
main = Project
| import contextlib
import ConfigParser
import importlib
import os
import subprocess
@contextlib.contextmanager
def chdir(dirname):
cwd = os.getcwd()
os.chdir(dirname)
yield
os.chdir(cwd)
class Project(object):
def __init__(self, head_repo, head_sha, base_repo, base_sha,
configuration_path):
self.head_repo = head_repo
self.head_sha = head_sha
self.base_repo = base_repo
self.base_sha = base_sha
self.config = ConfigParser.ConfigParser()
self.config.read(configuration_path)
self.parsers = self.config.items("ActiveParsers")
self.root_dir = None
# FOOBAR
self.tldt()
def checkout_code(self):
self.root_dir = "/path/to/code_chekout"
def setup_environment(self):
with chdir(self.root_dir):
subprocess.check_call(["build/setup_environment"])
def run_tests(self):
with chdir(self.root_dir):
subprocess.check_call(["build/run_tests"])
def run_parsers(self):
for parser_name, parser_module in self.parsers:
try:
module = importlib.import_module(parser_module)
kargs = dict(self.config.items("parser-%s" % parser_name))
module.Parser(**kargs)
except ImportError as e:
print "Could not load '%s' parsing module.\n %r " % (parser_name, e)
def post_results(self):
pass
def tldt(self):
self.checkout_code()
#self.setup_environment()
#self.run_tests()
self.run_parsers()
self.post_results()
main = Project
| unlicense | Python |
b1ea108f8bb78ce862896e26e15d86ab6bff5c9a | remove broken debug print in silent_socket.py | DrDaveD/cvmfs,alhowaidi/cvmfsNDN,alhowaidi/cvmfsNDN,alhowaidi/cvmfsNDN,DrDaveD/cvmfs,cvmfs/cvmfs,Moliholy/cvmfs,alhowaidi/cvmfsNDN,trshaffer/cvmfs,Gangbiao/cvmfs,cvmfs/cvmfs,cvmfs/cvmfs,Gangbiao/cvmfs,reneme/cvmfs,reneme/cvmfs,djw8605/cvmfs,Moliholy/cvmfs,cvmfs/cvmfs,djw8605/cvmfs,trshaffer/cvmfs,Gangbiao/cvmfs,Moliholy/cvmfs,cvmfs/cvmfs,Moliholy/cvmfs,trshaffer/cvmfs,DrDaveD/cvmfs,DrDaveD/cvmfs,djw8605/cvmfs,reneme/cvmfs,Gangbiao/cvmfs,djw8605/cvmfs,reneme/cvmfs,DrDaveD/cvmfs,cvmfs/cvmfs,alhowaidi/cvmfsNDN,trshaffer/cvmfs,Moliholy/cvmfs,DrDaveD/cvmfs,Gangbiao/cvmfs,reneme/cvmfs,djw8605/cvmfs,cvmfs/cvmfs,DrDaveD/cvmfs,trshaffer/cvmfs | test/common/mock_services/silent_socket.py | test/common/mock_services/silent_socket.py | #!/usr/bin/python
import socket
import SocketServer
import sys
import time
import threading
import os
import datetime
def usage():
print >> sys.stderr, "This opens a socket on a given port number and waits for connection."
print >> sys.stderr, "Connecting programs can send but will not receive anything."
print >> sys.stderr, "Usage:" , sys.argv[0] , "<protocol: TCP|UDP> <port number>"
sys.stderr.flush()
sys.exit(1)
print_lock = threading.Lock()
def print_msg(msg):
global print_lock
print_lock.acquire()
print "[Silent Socket]" , msg
print_lock.release()
sys.stdout.flush()
class SilentHandler(SocketServer.BaseRequestHandler):
def handle(self):
print_msg("(" + str(datetime.datetime.now()) + ") incoming connection: " + str(self.client_address))
time.sleep(100000000)
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
pass
class ThreadedUDPServer(SocketServer.ThreadingMixIn, SocketServer.UDPServer):
pass
if len(sys.argv) != 3:
usage()
server_host = 'localhost' # all available interfaces
server_port = 0
server_protocol = sys.argv[1]
try:
server_port = int(sys.argv[2])
except:
usage()
try:
server = ''
if server_protocol == "TCP":
server = ThreadedTCPServer((server_host, server_port), SilentHandler)
elif server_protocol == "UDP":
server = ThreadedUDPServer((server_host, server_port), SilentHandler)
else:
usage()
print_msg("starting a " + server_protocol + " server on port " + str(server_port))
server.serve_forever()
except socket.error, msg:
print_msg("Failed to open port")
print msg
| #!/usr/bin/python
import socket
import SocketServer
import sys
import time
import threading
import os
import datetime
def usage():
print >> sys.stderr, "This opens a socket on a given port number and waits for connection."
print >> sys.stderr, "Connecting programs can send but will not receive anything."
print >> sys.stderr, "Usage:" , sys.argv[0] , "<protocol: TCP|UDP> <port number>"
sys.stderr.flush()
sys.exit(1)
print_lock = threading.Lock()
def print_msg(msg):
global print_lock
print_lock.acquire()
print "[Silent Socket]" , msg
print_lock.release()
sys.stdout.flush()
class SilentHandler(SocketServer.BaseRequestHandler):
def handle(self):
print_msg("(" + str(datetime.datetime.now()) + ") incoming connection: " + str(self.client_address))
data = self.request[0].strip()
print " * " + data;
time.sleep(100000000)
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
pass
class ThreadedUDPServer(SocketServer.ThreadingMixIn, SocketServer.UDPServer):
pass
if len(sys.argv) != 3:
usage()
server_host = 'localhost' # all available interfaces
server_port = 0
server_protocol = sys.argv[1]
try:
server_port = int(sys.argv[2])
except:
usage()
try:
server = ''
if server_protocol == "TCP":
server = ThreadedTCPServer((server_host, server_port), SilentHandler)
elif server_protocol == "UDP":
server = ThreadedUDPServer((server_host, server_port), SilentHandler)
else:
usage()
print_msg("starting a " + server_protocol + " server on port " + str(server_port))
server.serve_forever()
except socket.error, msg:
print_msg("Failed to open port")
print msg
| bsd-3-clause | Python |
8337977e155d22d139059a919c5e3b99637d3218 | Clarify docstring for update_in | Julian-O/toolz,quantopian/toolz,llllllllll/toolz,quantopian/toolz,pombredanne/toolz,whilo/toolz,obmarg/toolz,bartvm/toolz,cpcloud/toolz,obmarg/toolz,machinelearningdeveloper/toolz,karansag/toolz,jdmcbr/toolz,berrytj/toolz,simudream/toolz,jdmcbr/toolz,bartvm/toolz,whilo/toolz,berrytj/toolz,jcrist/toolz,pombredanne/toolz,JNRowe/toolz,karansag/toolz,JNRowe/toolz,simudream/toolz,llllllllll/toolz,cpcloud/toolz,Julian-O/toolz,jcrist/toolz,machinelearningdeveloper/toolz | toolz/dicttoolz/core.py | toolz/dicttoolz/core.py | def merge(*dicts):
""" Merge a collection of dictionaries
>>> merge({1: 'one'}, {2: 'two'})
{1: 'one', 2: 'two'}
Later dictionaries have precedence
>>> merge({1: 2, 3: 4}, {3: 3, 4: 4})
{1: 2, 3: 3, 4: 4}
"""
rv = dict()
for d in dicts:
rv.update(d)
return rv
def valmap(fn, d):
""" Apply function to values of dictionary
>>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
>>> valmap(sum, bills) # doctest: +SKIP
{'Alice': 65, 'Bob': 45}
See Also:
keymap
"""
return dict(zip(d.keys(), map(fn, d.values())))
def keymap(fn, d):
""" Apply function to keys of dictionary
>>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
>>> keymap(str.lower, bills) # doctest: +SKIP
{'alice': [20, 15, 30], 'bob': [10, 35]}
See Also:
valmap
"""
return dict(zip(map(fn, d.keys()), d.values()))
def assoc(d, q, r):
"""
Return a new dict with q: r added in
New dict has d["q"] set to r. Do not modify the initial dict.
>>> assoc({"z": 1}, "z", 2)
{'z': 2}
>>> assoc({"z": 1}, "b", 3) # doctest: +SKIP
{'z': 1, 'b': 3}
"""
return dict(list(d.items()) + [(q, r)])
def update_in(dikt, keys, f):
""" Update value in a (potentially) nested dictionary
keys is a list or tuple or anything which supports indexing, which
gives the "location" of the value in dikt; f is the function which
operates on the value to produce an updated value.
Returns a copy of the original rather than mutating it.
>>> update_in({"x": {"a": 33}}, ["x", "a"], str)
{'x': {'a': '33'}}
"""
assert len(keys) > 0
if len(keys) == 1:
return assoc(dikt, keys[0], f(dikt.get(keys[0], None)))
else:
return assoc(dikt, keys[0], update_in(dikt.get(keys[0], None),
keys[1:], f))
| def merge(*dicts):
""" Merge a collection of dictionaries
>>> merge({1: 'one'}, {2: 'two'})
{1: 'one', 2: 'two'}
Later dictionaries have precedence
>>> merge({1: 2, 3: 4}, {3: 3, 4: 4})
{1: 2, 3: 3, 4: 4}
"""
rv = dict()
for d in dicts:
rv.update(d)
return rv
def valmap(fn, d):
""" Apply function to values of dictionary
>>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
>>> valmap(sum, bills) # doctest: +SKIP
{'Alice': 65, 'Bob': 45}
See Also:
keymap
"""
return dict(zip(d.keys(), map(fn, d.values())))
def keymap(fn, d):
""" Apply function to keys of dictionary
>>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
>>> keymap(str.lower, bills) # doctest: +SKIP
{'alice': [20, 15, 30], 'bob': [10, 35]}
See Also:
valmap
"""
return dict(zip(map(fn, d.keys()), d.values()))
def assoc(d, q, r):
"""
Return a new dict with q: r added in
New dict has d["q"] set to r. Do not modify the initial dict.
>>> assoc({"z": 1}, "z", 2)
{'z': 2}
>>> assoc({"z": 1}, "b", 3) # doctest: +SKIP
{'z': 1, 'b': 3}
"""
return dict(list(d.items()) + [(q, r)])
def update_in(dikt, keys, f):
""" Immutably update value in a (potentially) nested dictionary.
keys is a list or tuple or anything which supports indexing, which
gives the "location" of the value in dikt; f is the function which
operates on the value to produce an updated value.
>>> update_in({"x": {"a": 33}}, ["x", "a"], str)
{'x': {'a': '33'}}
"""
assert len(keys) > 0
if len(keys) == 1:
return assoc(dikt, keys[0], f(dikt.get(keys[0], None)))
else:
return assoc(dikt, keys[0], update_in(dikt.get(keys[0], None),
keys[1:], f))
| bsd-3-clause | Python |
bd8c02201e5daa31bdd5a1ad21cae42d3a260a50 | Add test for File#__iter__ | aldanor/blox | tests/test_file.py | tests/test_file.py | # -*- coding: utf-8 -*-
import io
import py.path
from pytest import raises_regexp
from blox.file import File, is_blox, FORMAT_STRING, FORMAT_VERSION
from blox.utils import write_i64
def test_is_blox(tmpfile):
assert is_blox(tmpfile)
assert not is_blox('/foo/bar/baz')
with io.open(tmpfile, 'wb') as f:
f.write(b'foo')
assert not is_blox(tmpfile)
with io.open(tmpfile, 'wb') as f:
f.write(FORMAT_STRING)
assert not is_blox(tmpfile)
with io.open(tmpfile, 'wb') as f:
f.write(FORMAT_STRING)
write_i64(f, FORMAT_VERSION)
assert is_blox(tmpfile)
class TestFile(object):
def test_mode_writable(self, tmpfile):
raises_regexp(ValueError, 'invalid mode', File, tmpfile, 'foo')
f1 = File(tmpfile)
assert f1.mode == 'r' and not f1.writable
f2 = File(tmpfile, 'w')
assert f2.mode == 'w' and f2.writable
def test_py_path_local(self, tmpfile):
assert File(py.path.local(tmpfile)).filename == tmpfile
def test_filename(self, tmpfile):
raises_regexp(IOError, 'No such file', File, '/foo/bar/baz')
assert File(tmpfile).filename == tmpfile
def test_format_version(self, tmpfile):
assert File(tmpfile).format_version == FORMAT_VERSION
assert File(tmpfile + '.2', mode='w').format_version == FORMAT_VERSION
def test_write_array(self, tmpfile):
raises_regexp(IOError, 'file is not writable',
File(tmpfile).write_array, 'a', [])
def test_iter(self, tmpfile):
f = File(tmpfile, 'w')
f.write_array('c', 10)
f.write_array('a', [1, 2, 3])
f.write_json('b', {'foo': 'bar'})
f.write_json('d', 42)
assert list(f) == ['a', 'b', 'c', 'd']
f.close()
f = File(tmpfile)
assert list(f) == ['a', 'b', 'c', 'd']
| # -*- coding: utf-8 -*-
import io
import py.path
from pytest import raises_regexp
from blox.file import File, is_blox, FORMAT_STRING, FORMAT_VERSION
from blox.utils import write_i64
def test_is_blox(tmpfile):
assert is_blox(tmpfile)
assert not is_blox('/foo/bar/baz')
with io.open(tmpfile, 'wb') as f:
f.write(b'foo')
assert not is_blox(tmpfile)
with io.open(tmpfile, 'wb') as f:
f.write(FORMAT_STRING)
assert not is_blox(tmpfile)
with io.open(tmpfile, 'wb') as f:
f.write(FORMAT_STRING)
write_i64(f, FORMAT_VERSION)
assert is_blox(tmpfile)
class TestFile(object):
def test_mode_writable(self, tmpfile):
raises_regexp(ValueError, 'invalid mode', File, tmpfile, 'foo')
f1 = File(tmpfile)
assert f1.mode == 'r' and not f1.writable
f2 = File(tmpfile, 'w')
assert f2.mode == 'w' and f2.writable
def test_py_path_local(self, tmpfile):
assert File(py.path.local(tmpfile)).filename == tmpfile
def test_filename(self, tmpfile):
raises_regexp(IOError, 'No such file', File, '/foo/bar/baz')
assert File(tmpfile).filename == tmpfile
def test_format_version(self, tmpfile):
assert File(tmpfile).format_version == FORMAT_VERSION
assert File(tmpfile + '.2', mode='w').format_version == FORMAT_VERSION
def test_write_array(self, tmpfile):
raises_regexp(IOError, 'file is not writable',
File(tmpfile).write_array, 'a', [])
| mit | Python |
bd8582b3d335bcbd40c60b799938fa91319e82f9 | Update queue_info.py | PlatformLSF/platform-python-lsf-api,linearregression/platform-python-lsf-api,xlyang0211/platform-python-lsf-api | examples/queue_info.py | examples/queue_info.py | #!/usr/bin/python
from pythonlsf import lsf
def query_queue(queue_name):
"""
"query queue info"
"""
if lsf.lsb_init("test") > 0:
return -1;
intp_num_queues = lsf.new_intp();
lsf.intp_assign(intp_num_queues, 1);
strArr = lsf.new_stringArray(1);
#print lsf.intp_value(intp_num_queues);
lsf.stringArray_setitem(strArr, 0, queue_name);
#print lsf.stringArray_getitem(strArr, 0);
queueInfo = lsf.lsb_queueinfo(strArr,intp_num_queues,None,None,0);
if queueInfo != None:
print 'queueInfo is not null';
else:
print 'queueInfo is null'
return -1;
print 'queue name = %s' % queueInfo.queue;
print 'queue description = %s' % queueInfo.description;
return 0;
if __name__ == '__main__':
print("LSF Clustername is :", lsf.ls_getclustername())
print(query_queue("normal"))
| #!/usr/bin/python
from pythonlsf import lsf
def query_queue(queue_name):
"""
"query queue info"
"""
if lsf.lsb_init("test") > 0:
return -1;
intp_num_queues = lsf.new_intp();
lsf.intp_assign(intp_num_queues, 1);
strArr = lsf.new_stringArray(1);
#print lsf.intp_value(intp_num_queues);
lsf.stringArray_setitem(strArr, 0, queue_name);
#print lsf.stringArray_getitem(strArr, 0);
queueInfo = lsf.lsb_queueinfo(strArr,intp_num_queues,None,None,0);
if queueInfo != None:
print 'queueInfo is not null';
else:
print 'queueInfo is null'
return -1;
print 'queue name = %s' % queueInfo.queue;
print 'queue description = %s' % queueInfo.description;
#job_id = lsf.lsb_submit(submitreq, submitreply)
#return job_id
return 0;
if __name__ == '__main__':
print("LSF Clustername is :", lsf.ls_getclustername())
print(query_queue("normal"))
| epl-1.0 | Python |
3dd70323fc6a9f71e466fd3e9c1e2e929fe27bc4 | create required img field | cpodlesny/lisbon,pfskiev/lisbon,cpodlesny/lisbon,cpodlesny/lisbon,cpodlesny/lisbon,pfskiev/lisbon | src/related_links/models.py | src/related_links/models.py | from django.db import models
class RelatedLink(models.Model):
title_PT = models.CharField(max_length=100, blank=True, null=False)
title_EN = models.CharField(max_length=100, blank=True, null=False)
title_DE = models.CharField(max_length=100, blank=True, null=False)
description_PT = models.TextField(max_length=1000, blank=True, null=False)
description_EN = models.TextField(max_length=1000, blank=True, null=False)
description_DE = models.TextField(max_length=1000, blank=True, null=False)
link = models.URLField(max_length=100, blank=True, null=False)
img = models.ImageField(null=True, blank=False)
keywords_SEO = models.TextField(max_length=1000, blank=True, null=False)
description_SEO = models.TextField(max_length=1000, blank=True, null=False)
def get_absolute_url(self):
return "/related-links/%i/" % self.id
def __str__(self):
return self.title_EN
def __unicode__(self):
return self.title_EN
| from django.db import models
class RelatedLink(models.Model):
title_PT = models.CharField(max_length=100, blank=True, null=False)
title_EN = models.CharField(max_length=100, blank=True, null=False)
title_DE = models.CharField(max_length=100, blank=True, null=False)
description_PT = models.TextField(max_length=1000, blank=True, null=False)
description_EN = models.TextField(max_length=1000, blank=True, null=False)
description_DE = models.TextField(max_length=1000, blank=True, null=False)
link = models.URLField(max_length=100, blank=True, null=False)
img = models.ImageField(null=True, blank=True)
keywords_SEO = models.TextField(max_length=1000, blank=True, null=False)
description_SEO = models.TextField(max_length=1000, blank=True, null=False)
def get_absolute_url(self):
return "/related-links/%i/" % self.id
def __str__(self):
return self.title_EN
def __unicode__(self):
return self.title_EN
| mit | Python |
511bef25eb3e8aae420db21875b7f0e64db1f391 | Format using black and remove comments | tensorflow/cloud,tensorflow/cloud | src/python/tensorflow_cloud/core/tests/examples/multi_file_example/train_model.py | src/python/tensorflow_cloud/core/tests/examples/multi_file_example/train_model.py | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tensorflow as tf
import tensorflow_cloud as tfc
import tensorflow_datasets as tfds
from create_model import create_keras_model
# Download the dataset
datasets, info = tfds.load(name="mnist", with_info=True, as_supervised=True)
mnist_train, mnist_test = datasets["train"], datasets["test"]
# Setup input pipeline
num_train_examples = info.splits["train"].num_examples
num_test_examples = info.splits["test"].num_examples
BUFFER_SIZE = 10000
BATCH_SIZE = 64
def scale(image, label):
image = tf.cast(image, tf.float32)
image /= 255
return image, label
train_dataset = mnist_train.map(scale).cache()
train_dataset = train_dataset.shuffle(BUFFER_SIZE).batch(BATCH_SIZE)
eval_dataset = mnist_test.map(scale).batch(BATCH_SIZE)
model = create_keras_model()
if tfc.remote():
epochs = 10
else:
epochs = 1
model.fit(train_dataset, epochs=epochs)
| # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tensorflow as tf
import tensorflow_cloud as tfc
import tensorflow_datasets as tfds
from create_model import create_keras_model
# Download the dataset
datasets, info = tfds.load(name="mnist", with_info=True, as_supervised=True)
mnist_train, mnist_test = datasets["train"], datasets["test"]
# Setup input pipeline
num_train_examples = info.splits["train"].num_examples
num_test_examples = info.splits["test"].num_examples
BUFFER_SIZE = 10000
BATCH_SIZE = 64
def scale(image, label):
image = tf.cast(image, tf.float32)
image /= 255
return image, label
train_dataset = mnist_train.map(scale).cache()
train_dataset = train_dataset.shuffle(BUFFER_SIZE).batch(BATCH_SIZE)
eval_dataset = mnist_test.map(scale).batch(BATCH_SIZE)
# Load model from separate file
model = create_keras_model()
if tfc.remote():
epochs = 10
else:
epochs = 1
model.fit(train_dataset, epochs=epochs)
| apache-2.0 | Python |
675b1c0933d3078ec0b0c91ed0bddfae7721c0b5 | Remove legend | jvivian/rnaseq-lib,jvivian/rnaseq-lib | src/rnaseq_lib/plot/opts.py | src/rnaseq_lib/plot/opts.py | gene_curves_opts = {
'Curve': {'plot': dict(height=120, width=600, tools=['hover'], invert_xaxis=True, yrotation=45, yaxis='left'),
'style': dict(line_width=1.5)},
'Curve.Percentage_of_Normal_Samples': {'plot': dict(xaxis=None, invert_yaxis=True),
'style': dict(color='Blue')},
'Curve.Gene_Expression': {'plot': dict(xaxis=None),
'style': dict(color='Green')},
'Curve.Log2_Fold_Change': {'plot': dict(height=150),
'style': dict(color='Purple')},
'Scatter': {'style': dict(color='red', size=3)}}
gene_kde_opts = {'Overlay': {'plot': dict(width=500, legend_position='left')}}
gene_distribution_opts = {'BoxWhisker': {'plot': dict(width=875, xrotation=70)}}
gene_de_opts = {
'Scatter': {'plot': dict(color_index='Tissue', legend_position='left', width=700, height=500, tools=['hover']),
'style': dict(cmap='tab20', size=10, alpha=0.5)}}
sample_count_opts = {
'Bars': {'plot': dict(width=875, xrotation=70, tools=['hover'], show_legend=False)}
}
| gene_curves_opts = {
'Curve': {'plot': dict(height=120, width=600, tools=['hover'], invert_xaxis=True, yrotation=45, yaxis='left'),
'style': dict(line_width=1.5)},
'Curve.Percentage_of_Normal_Samples': {'plot': dict(xaxis=None, invert_yaxis=True),
'style': dict(color='Blue')},
'Curve.Gene_Expression': {'plot': dict(xaxis=None),
'style': dict(color='Green')},
'Curve.Log2_Fold_Change': {'plot': dict(height=150),
'style': dict(color='Purple')},
'Scatter': {'style': dict(color='red', size=3)}}
gene_kde_opts = {'Overlay': {'plot': dict(width=500, legend_position='left')}}
gene_distribution_opts = {'BoxWhisker': {'plot': dict(width=875, xrotation=70)}}
gene_de_opts = {
'Scatter': {'plot': dict(color_index='Tissue', legend_position='left', width=700, height=500, tools=['hover']),
'style': dict(cmap='tab20', size=10, alpha=0.5)}}
sample_count_opts = {
'Bars': {'plot': dict(width=875, xrotation=70, tools=['hover'])}
}
| mit | Python |
cd123204c3384bd3847b34452fc125a7e3a6bf47 | Remove excess whitespace. | higgsd/euler,higgsd/euler | py/24.py | py/24.py | # 2783915460
import euler
N = 10
T = 1000000
f = euler.product(xrange(1, N))
v = T - 1
a = []
d = [n for n in xrange(N)]
for n in xrange(N):
x = v / f
v %= f
a.append(d[x])
del d[x]
if n + 1 != N:
f /= N - n - 1
print ''.join([str(x) for x in a])
| # 2783915460
import euler
N = 10
T = 1000000
f = euler.product(xrange(1, N))
v = T - 1
a = []
d = [n for n in xrange(N)]
for n in xrange(N):
x = v / f
v %= f
a.append(d[x])
del d[x]
if n + 1 != N:
f /= N - n - 1
print ''.join([str(x) for x in a])
| bsd-2-clause | Python |
1b8f483e8d32df0de2dd8f798432172d6abd5a84 | Fix non thunk dispatch calls for python 3.4 | ariddell/aioredux | examples/todo_thunk.py | examples/todo_thunk.py | import asyncio
import enum
import logging
import types
import toolz
try:
from types import coroutine
except ImportError:
from asyncio import coroutine
import aioredux
import aioredux.middleware
logger = logging.getLogger(__name__)
# action types
@enum.unique
class ActionTypes(enum.Enum):
ADD_TODO = 1
REMOVE_TODO = 2
COMPLETE_TODO = 3
REQUEST_TODO = 4
# action creators
def add_todo(text):
return {'type': ActionTypes.ADD_TODO, 'text': text}
def complete_todo(index):
return {'type': ActionTypes.COMPLETE_TODO, 'index': index}
def fetch_todo():
'''Exercise thunk middleware.
See https://rackt.github.io/redux/docs/advanced/AsyncActions.html
'''
def thunk(dispatch, state_func=None):
dispatch(add_todo('do task x (from thunk)'))
return thunk
# initial state
initial_state = {
'todos': ()
}
# reducers
def todo_app(state, action):
if action['type'] == ActionTypes.ADD_TODO:
todos = state['todos'] + (action['text'],)
return toolz.assoc(state, 'todos', todos)
elif action['type'] == ActionTypes.COMPLETE_TODO:
todos = state['todos'][:action['index']] + state['todos'][action['index'] + 1:]
return toolz.assoc(state, 'todos', todos)
else:
return state
@coroutine
def run():
thunk_middleware = aioredux.middleware.thunk_middleware
create_store_with_middleware = aioredux.apply_middleware(thunk_middleware)(aioredux.create_store)
store = yield from create_store_with_middleware(todo_app, initial_state)
store.subscribe(lambda: logging.info("new state: {}".format(store.state)))
store.dispatch(fetch_todo())
for i in range(5):
yield from store.dispatch(add_todo('do task {}'.format(i)))
yield from store.dispatch(complete_todo(1))
yield from store.dispatch(complete_todo(2))
logging.info('Finished')
if __name__ == '__main__':
# configure logging
logging.basicConfig(level=logging.INFO)
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
| import asyncio
import enum
import logging
import types
import toolz
try:
from types import coroutine
except ImportError:
from asyncio import coroutine
import aioredux
import aioredux.middleware
logger = logging.getLogger(__name__)
# action types
@enum.unique
class ActionTypes(enum.Enum):
ADD_TODO = 1
REMOVE_TODO = 2
COMPLETE_TODO = 3
REQUEST_TODO = 4
# action creators
def add_todo(text):
return {'type': ActionTypes.ADD_TODO, 'text': text}
def complete_todo(index):
return {'type': ActionTypes.COMPLETE_TODO, 'index': index}
def fetch_todo():
'''Exercise thunk middleware.
See https://rackt.github.io/redux/docs/advanced/AsyncActions.html
'''
def thunk(dispatch, state_func=None):
dispatch(add_todo('do task x (from thunk)'))
return thunk
# initial state
initial_state = {
'todos': ()
}
# reducers
def todo_app(state, action):
if action['type'] == ActionTypes.ADD_TODO:
todos = state['todos'] + (action['text'],)
return toolz.assoc(state, 'todos', todos)
elif action['type'] == ActionTypes.COMPLETE_TODO:
todos = state['todos'][:action['index']] + state['todos'][action['index'] + 1:]
return toolz.assoc(state, 'todos', todos)
else:
return state
@coroutine
def run():
thunk_middleware = aioredux.middleware.thunk_middleware
create_store_with_middleware = aioredux.apply_middleware(thunk_middleware)(aioredux.create_store)
store = yield from create_store_with_middleware(todo_app, initial_state)
store.subscribe(lambda: logging.info("new state: {}".format(store.state)))
store.dispatch(fetch_todo())
for i in range(5):
store.dispatch(add_todo('do task {}'.format(i)))
store.dispatch(complete_todo(1))
store.dispatch(complete_todo(2))
logging.info('Finished')
if __name__ == '__main__':
# configure logging
logging.basicConfig(level=logging.INFO)
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
| mpl-2.0 | Python |
7cc998358ceca6cadd1569064ec8473ea9f46364 | Update views.py | karec/cookiecutter-flask-restful | {{cookiecutter.project_name}}/{{cookiecutter.app_name}}/api/views.py | {{cookiecutter.project_name}}/{{cookiecutter.app_name}}/api/views.py | from flask import Blueprint, current_app, jsonify
from flask_restful import Api
from marshmallow import ValidationError
from {{cookiecutter.app_name}}.extensions import apispec
from {{cookiecutter.app_name}}.api.resources import UserResource, UserList
from {{cookiecutter.app_name}}.api.resources.user import UserSchema
blueprint = Blueprint("api", __name__, url_prefix="/api/v1")
api = Api(blueprint)
api.add_resource(UserResource, "/users/<int:user_id>")
api.add_resource(UserList, "/users")
@blueprint.before_app_first_request
def register_views():
apispec.spec.components.schema("UserSchema", schema=UserSchema)
apispec.spec.path(view=UserResource, app=current_app)
apispec.spec.path(view=UserList, app=current_app)
@blueprint.errorhandler(ValidationError)
def handle_marshmallow_error(e):
"""Return json error for marshmallow validation errors.
This will avoid having to try/catch ValidationErrors in all endpoints, returning
correct JSON response with associated HTTP 400 Status (https://tools.ietf.org/html/rfc7231#section-6.5.1)
"""
return jsonify(e.messages), 400
| from flask import Blueprint, current_app, jsonify
from flask_restful import Api
from marshmallow import ValidationError
from {{cookiecutter.app_name}}.extensions import apispec
from {{cookiecutter.app_name}}.api.resources import UserResource, UserList
from {{cookiecutter.app_name}}.api.resources.user import UserSchema
blueprint = Blueprint("api", __name__, url_prefix="/api/v1")
api = Api(blueprint)
api.add_resource(UserResource, "/users/<int:user_id>")
api.add_resource(UserList, "/users")
@blueprint.before_app_first_request
def register_views():
apispec.spec.components.schema("UserSchema", schema=UserSchema)
apispec.spec.path(view=UserResource, app=current_app)
apispec.spec.path(view=UserList, app=current_app)
@blueprint.errorhandler(ValidationError)
def handle_marshmallow_error(e):
"""Return json error for marhsmallow validation errors.
This will avoid having to try/catch ValidationErrors in all endpoints, returning
correct JSON response with associated HTTP 400 Status (https://tools.ietf.org/html/rfc7231#section-6.5.1)
"""
return jsonify(e.messages), 400
| mit | Python |
7ee7835907da63de698c86931efb040aaf83973a | Install biopython in setup.py | MicrosoftResearch/Azimuth | setup.py | setup.py | # from Cython.Build import cythonize
from setuptools import setup
setup(name='Azimuth',
version='0.4',
author='Nicolo Fusi and Jennifer Listgarten',
author_email="fusi@microsoft.com, jennl@microsoft.com",
description=("Machine Learning-Based Predictive Modelling of CRISPR/Cas9 guide efficiency"),
packages=["azimuth", "azimuth.features", "azimuth.models", "azimuth.tests"],
package_data={'azimuth': ['saved_models/*.*']},
install_requires=['scipy', 'numpy', 'matplotlib', 'nose', 'scikit-learn>=0.17', 'pandas', 'biopython'],
license="BSD",
# ext_modules=cythonize("ssk_cython.pyx"),
)
| # from Cython.Build import cythonize
from setuptools import setup
setup(name='Azimuth',
version='0.4',
author='Nicolo Fusi and Jennifer Listgarten',
author_email="fusi@microsoft.com, jennl@microsoft.com",
description=("Machine Learning-Based Predictive Modelling of CRISPR/Cas9 guide efficiency"),
packages=["azimuth", "azimuth.features", "azimuth.models", "azimuth.tests"],
package_data={'azimuth': ['saved_models/*.*']},
install_requires=['scipy', 'numpy', 'matplotlib', 'nose', 'scikit-learn>=0.17', 'pandas'],
license="BSD",
# ext_modules=cythonize("ssk_cython.pyx"),
)
| bsd-3-clause | Python |
c5a97d164bf74d1f4394cd9eb5970e1805643898 | Remove old code | IATI/IATI-Website-Tests | tests/test_test.py | tests/test_test.py | import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
@pytest.mark.parametrize("target_url", ["http://iatistandard.org/"])
def test_locate_text(self, target_url):
"""
Tests that each page contains lthe specified text at the required location.
"""
req = self._loaded_request_from_url(target_url)
text_to_find = "technical publishing framework"
xpath_to_locate = '//*[@id="home-strapline"]/h1'
result = self._get_text_from_xpath(req, xpath_to_locate)
assert self._substring_in_list(text_to_find, result)
| import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
@pytest.mark.parametrize("target_url", ["http://iatistandard.org/"])
def test_locate_text(self, target_url):
"""
Tests that each page contains lthe specified text at the required location.
"""
req = self._loaded_request_from_url(target_url)
text_to_find = "technical publishing framework"
xpath_to_locate = '//*[@id="home-strapline"]/h1'
result = self._get_text_from_xpath(req, xpath_to_locate)
assert self._substring_in_list(text_to_find, result)
| mit | Python |
c26cb4701ea568b369c878194e0f8bde16d040b1 | check systemd PID | wanghongjuan/meta-iotqa-1,ostroproject/meta-iotqa,daweiwu/meta-iotqa-1,daweiwu/meta-iotqa-1,wanghongjuan/meta-iotqa-1,ostroproject/meta-iotqa,daweiwu/meta-iotqa-1,daweiwu/meta-iotqa-1,wanghongjuan/meta-iotqa-1,daweiwu/meta-iotqa-1,wanghongjuan/meta-iotqa-1,wanghongjuan/meta-iotqa-1,ostroproject/meta-iotqa,ostroproject/meta-iotqa,ostroproject/meta-iotqa | lib/oeqa/runtime/sanity/baseos.py | lib/oeqa/runtime/sanity/baseos.py | #[PROTEXCAT]
#\License: ALL RIGHTS RESERVED
#\Author: Wang, Jing <jing.j.wang@intel.com>
'''base os test module'''
from oeqa.oetest import oeRuntimeTest
class BaseOsTest(oeRuntimeTest):
'''Base os health check'''
def test_baseos_dmesg(self):
'''check dmesg command'''
(status, output) = self.target.run('dmesg')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_lsmod(self):
'''check lsmod command'''
(status, output) = self.target.run('lsmod')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_ps(self):
'''check ps command'''
(status, output) = self.target.run('ps')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_df(self):
'''check df command'''
(status, output) = self.target.run('df')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_systemd_process(self):
'''check systemd process'''
(status, output) = self.target.run("ls -l /proc/1/exe | grep 'systemd'")
self.assertEqual(status, 0, msg="Error messages: %s" % output)
| #[PROTEXCAT]
#\License: ALL RIGHTS RESERVED
#\Author: Wang, Jing <jing.j.wang@intel.com>
'''base os test module'''
from oeqa.oetest import oeRuntimeTest
class BaseOsTest(oeRuntimeTest):
'''Base os health check'''
def test_baseos_dmesg(self):
'''check dmesg command'''
(status, output) = self.target.run('dmesg')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_lsmod(self):
'''check lsmod command'''
(status, output) = self.target.run('lsmod')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_ps(self):
'''check ps command'''
(status, output) = self.target.run('ps')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
def test_baseos_df(self):
'''check df command'''
(status, output) = self.target.run('df')
self.assertEqual(status, 0, msg="Error messages: %s" % output)
| mit | Python |
a6bf64319ba77b8b178646d84f7e16dcc63b4ccb | Bump verison to 2.0 | someonehan/raven-python,Photonomie/raven-python,ewdurbin/raven-python,smarkets/raven-python,openlabs/raven,recht/raven-python,beniwohli/apm-agent-python,jbarbuto/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,icereval/raven-python,recht/raven-python,jmp0xf/raven-python,smarkets/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,lopter/raven-python-old,johansteffner/raven-python,ronaldevers/raven-python,jbarbuto/raven-python,dbravender/raven-python,getsentry/raven-python,icereval/raven-python,patrys/opbeat_python,danriti/raven-python,hzy/raven-python,getsentry/raven-python,someonehan/raven-python,daikeren/opbeat_python,ronaldevers/raven-python,alex/raven,Photonomie/raven-python,danriti/raven-python,dirtycoder/opbeat_python,akheron/raven-python,inspirehep/raven-python,icereval/raven-python,beniwohli/apm-agent-python,getsentry/raven-python,percipient/raven-python,ewdurbin/raven-python,icereval/raven-python,akalipetis/raven-python,nikolas/raven-python,arthurlogilab/raven-python,johansteffner/raven-python,smarkets/raven-python,akheron/raven-python,daikeren/opbeat_python,percipient/raven-python,ticosax/opbeat_python,jbarbuto/raven-python,akalipetis/raven-python,smarkets/raven-python,arthurlogilab/raven-python,Photonomie/raven-python,akheron/raven-python,patrys/opbeat_python,beniwohli/apm-agent-python,inspirehep/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,hzy/raven-python,inspirehep/raven-python,patrys/opbeat_python,hzy/raven-python,daikeren/opbeat_python,dbravender/raven-python,nikolas/raven-python,nikolas/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,ticosax/opbeat_python,inspirehep/raven-python,jbarbuto/raven-python,beniwohli/apm-agent-python,tarkatronic/opbeat_python,dirtycoder/opbeat_python,nikolas/raven-python,johansteffner/raven-python,tarkatronic/opbeat_python,someonehan/raven-python,jmagnusson/raven-python,tarkatronic/opbeat_python,arthurlogilab/raven-python,dirtycoder/opbeat_python,jmagnusson/raven-python,akalipetis/raven-python,ronaldevers/raven-python,danriti/raven-python,arthurlogilab/raven-python,ewdurbin/raven-python,lepture/raven-python,collective/mr.poe,jmp0xf/raven-python,ticosax/opbeat_python,patrys/opbeat_python,recht/raven-python,lepture/raven-python,percipient/raven-python,jmagnusson/raven-python,dbravender/raven-python,jmp0xf/raven-python,lepture/raven-python | setup.py | setup.py | #!/usr/bin/env python
import sys
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
tests_require = [
'Django>=1.2,<1.4',
'django-celery',
'celery',
'blinker>=1.1',
'Flask>=0.8',
'django-sentry>=2.0.0',
'django-nose',
'nose',
'mock',
'unittest2',
]
install_requires = [
'simplejson',
]
if sys.version_info[:2] < (2, 5):
install_requires.append('uuid')
setup(
name='raven',
version='2.0.0',
author='David Cramer',
author_email='dcramer@gmail.com',
url='http://github.com/dcramer/raven',
description = 'Exception Logging to a Database in Django',
packages=find_packages(exclude=("tests",)),
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
| #!/usr/bin/env python
import sys
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
tests_require = [
'Django>=1.2,<1.4',
'django-celery',
'celery',
'blinker>=1.1',
'Flask>=0.8',
'django-sentry',
'django-nose',
'nose',
'mock',
'unittest2',
]
install_requires = [
'simplejson',
]
if sys.version_info[:2] < (2, 5):
install_requires.append('uuid')
setup(
name='raven',
version='0.8.0',
author='David Cramer',
author_email='dcramer@gmail.com',
url='http://github.com/dcramer/raven',
description = 'Exception Logging to a Database in Django',
packages=find_packages(exclude=("tests",)),
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
| bsd-3-clause | Python |
71931f36315ab97e875af138b1e80f3ca2ebc583 | Upgrade to 1.2.1 version. | mocketize/python-mocket,mindflayer/python-mocket | setup.py | setup.py | from setuptools import setup, find_packages, os
# Hack to prevent stupid "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when running `python
# setup.py test` (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
for m in ('multiprocessing', 'billiard'):
try:
__import__(m)
except ImportError:
pass
dev_requires = []
install_requires = open(os.path.join(os.path.dirname(__file__), 'requirements.txt')).read()
tests_require = open(os.path.join(os.path.dirname(__file__), 'test_requirements.txt')).read()
setup(
name='mocket',
version='1.2.1',
author='Andrea de Marco, Giorgio Salluzzo',
author_email='24erre@gmail.com, giorgio.salluzzo@gmail.com',
url='https://github.com/mocketize/python-mocket',
description='Socket Mock Framework',
long_description=open('README.rst').read(),
packages=find_packages(exclude=('tests', )),
install_requires=install_requires,
extras_require={
'tests': tests_require,
'dev': dev_requires,
},
test_suite='runtests.runtests',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Software Development :: Testing',
],
)
| from setuptools import setup, find_packages, os
# Hack to prevent stupid "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when running `python
# setup.py test` (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
for m in ('multiprocessing', 'billiard'):
try:
__import__(m)
except ImportError:
pass
dev_requires = []
install_requires = open(os.path.join(os.path.dirname(__file__), 'requirements.txt')).read()
tests_require = open(os.path.join(os.path.dirname(__file__), 'test_requirements.txt')).read()
setup(
name='mocket',
version='1.2.0',
author='Andrea de Marco, Giorgio Salluzzo',
author_email='24erre@gmail.com, giorgio.salluzzo@gmail.com',
url='https://github.com/mocketize/python-mocket',
description='Socket Mock Framework',
long_description=open('README.rst').read(),
packages=find_packages(exclude=('tests', )),
install_requires=install_requires,
extras_require={
'tests': tests_require,
'dev': dev_requires,
},
test_suite='runtests.runtests',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Software Development :: Testing',
],
)
| bsd-3-clause | Python |
a540e2234b27130ceba9e908f2833417814c6d14 | Prepare openprocurement.planning.api 2.3.5. | openprocurement/openprocurement.planning.api | setup.py | setup.py | from setuptools import setup, find_packages
import os
version = '2.3.5'
requires = [
'setuptools',
'openprocurement.api>=2.3',
]
test_requires = requires + [
'webtest',
'python-coveralls',
]
docs_requires = requires + [
'sphinxcontrib-httpdomain',
]
entry_points = {
'openprocurement.api.plugins': [
'planning = openprocurement.planning.api:includeme'
],
'openprocurement.api.migrations': [
'plans = openprocurement.planning.api.migration:migrate_data'
]
}
setup(name='openprocurement.planning.api',
version=version,
description="",
long_description=open("README.rst").read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='Quintagroup, Ltd.',
author_email='info@quintagroup.com',
license='Apache License 2.0',
url='https://github.com/gorserg/openprocurement.planning.api',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['openprocurement', 'openprocurement.planning'],
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=test_requires,
extras_require={'test': test_requires, 'docs': docs_requires},
test_suite="openprocurement.planning.api.tests.main.suite",
entry_points=entry_points,
)
| from setuptools import setup, find_packages
import os
version = '2.3.4'
requires = [
'setuptools',
'openprocurement.api>=2.3',
]
test_requires = requires + [
'webtest',
'python-coveralls',
]
docs_requires = requires + [
'sphinxcontrib-httpdomain',
]
entry_points = {
'openprocurement.api.plugins': [
'planning = openprocurement.planning.api:includeme'
],
'openprocurement.api.migrations': [
'plans = openprocurement.planning.api.migration:migrate_data'
]
}
setup(name='openprocurement.planning.api',
version=version,
description="",
long_description=open("README.rst").read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='Quintagroup, Ltd.',
author_email='info@quintagroup.com',
license='Apache License 2.0',
url='https://github.com/gorserg/openprocurement.planning.api',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['openprocurement', 'openprocurement.planning'],
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=test_requires,
extras_require={'test': test_requires, 'docs': docs_requires},
test_suite="openprocurement.planning.api.tests.main.suite",
entry_points=entry_points,
)
| apache-2.0 | Python |
e419e72dce748654fe9f4277a6326108b521bd1d | add meme as default cog | Naught0/qtbot | qtbot.py | qtbot.py | #!/bin/env python
import discord
import json
import aiohttp
from datetime import datetime
from discord.ext import commands
# Init bot
des = 'qtbot is a big qt written in python3 and love.'
bot = commands.Bot(command_prefix='.', description=des, pm_help=True)
# Get bot's token
with open('data/apikeys.json') as f:
discord_bot_token = json.load(f)['discord']
# Create bot aiohttp session
bot.aio_session = aiohttp.ClientSession
# Choose default cogs
bot.startup_extensions = (
'cogs.admin',
'cogs.generic',
'cogs.weather',
'cogs.comics',
'cogs.dictionary',
'cogs.osrs',
'cogs.tmdb',
'cogs.gif',
'cogs.calc',
'cogs.league',
'cogs.ask',
'cogs.meme',
'cogs.yt',
'cogs.news',
'cogs.wiki',
'cogs.isup')
# Get current time for uptime
startTime = datetime.now()
startTimeStr = startTime.strftime('%B %d %H:%M:%S')
@bot.event
async def on_ready():
""" Basic information printed via stdout """
print('Client logged in at {}'.format(startTimeStr))
print(bot.user.name)
print(bot.user.id)
print('------')
@bot.command(aliases=['up'])
async def uptime(ctx):
""" Get current bot uptime """
currentTime = datetime.now()
currentTimeStr = currentTime.strftime('%B %d %H:%M:%S')
await ctx.send('Initialized: `{}`\nCurrent Time: `{}`\nUptime: `{}`'.format(
startTimeStr, currentTimeStr, str(currentTime - startTime).split('.')[0]))
if __name__ == '__main__':
for ext in bot.startup_extensions:
try:
bot.load_extension(ext)
except Exception as e:
exc = '{}: {}'.format(type(e).__name__, e)
print('failed to load extension {}\n{}'.format(ext, exc))
bot.run(discord_bot_token)
| #!/bin/env python
import discord
import json
import aiohttp
from datetime import datetime
from discord.ext import commands
# Init bot
des = 'qtbot is a big qt written in python3 and love.'
bot = commands.Bot(command_prefix='.', description=des, pm_help=True)
# Get bot's token
with open('data/apikeys.json') as f:
discord_bot_token = json.load(f)['discord']
# Create bot aiohttp session
bot.aio_session = aiohttp.ClientSession
# Choose default cogs
bot.startup_extensions = (
'cogs.admin',
'cogs.generic',
'cogs.weather',
'cogs.comics',
'cogs.dictionary',
'cogs.osrs',
'cogs.tmdb',
'cogs.gif',
'cogs.calc',
'cogs.league',
'cogs.ask',
'cogs.yt',
'cogs.news',
'cogs.wiki',
'cogs.isup')
# Get current time for uptime
startTime = datetime.now()
startTimeStr = startTime.strftime('%B %d %H:%M:%S')
@bot.event
async def on_ready():
""" Basic information printed via stdout """
print('Client logged in at {}'.format(startTimeStr))
print(bot.user.name)
print(bot.user.id)
print('------')
@bot.command(aliases=['up'])
async def uptime(ctx):
""" Get current bot uptime """
currentTime = datetime.now()
currentTimeStr = currentTime.strftime('%B %d %H:%M:%S')
await ctx.send('Initialized: `{}`\nCurrent Time: `{}`\nUptime: `{}`'.format(
startTimeStr, currentTimeStr, str(currentTime - startTime).split('.')[0]))
if __name__ == '__main__':
for ext in bot.startup_extensions:
try:
bot.load_extension(ext)
except Exception as e:
exc = '{}: {}'.format(type(e).__name__, e)
print('failed to load extension {}\n{}'.format(ext, exc))
bot.run(discord_bot_token)
| mit | Python |
421564a3be13fdfa04f775900a05f4d518767c0c | Add pycoin.ui to setup.py. | mperklin/pycoin,mperklin/pycoin | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
from pycoin.version import version
setup(
name="pycoin",
version=version,
packages=[
"pycoin",
"pycoin.blockchain",
"pycoin.cmds",
"pycoin.contrib",
"pycoin.convention",
"pycoin.ecdsa",
"pycoin.ecdsa.native",
"pycoin.key",
"pycoin.message",
"pycoin.networks",
"pycoin.serialize",
"pycoin.services",
"pycoin.tx",
"pycoin.tx.pay_to",
"pycoin.tx.script",
"pycoin.ui",
"pycoin.wallet"
],
author="Richard Kiss",
entry_points={
'console_scripts':
[
'block = pycoin.cmds.block:main',
'ku = pycoin.cmds.ku:main',
'tx = pycoin.cmds.tx:main',
'cache_tx = pycoin.cmds.cache_tx:main',
'fetch_unspent = pycoin.cmds.fetch_unspent:main',
# these scripts are obsolete
'genwallet = pycoin.cmds.genwallet:main',
'spend = pycoin.cmds.spend:main',
'bu = pycoin.cmds.bitcoin_utils:main',
]
},
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoin",
license="http://opensource.org/licenses/MIT",
description="Utilities for Bitcoin and altcoin addresses and transaction manipulation.",
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],)
| #!/usr/bin/env python
from setuptools import setup
from pycoin.version import version
setup(
name="pycoin",
version=version,
packages=[
"pycoin",
"pycoin.blockchain",
"pycoin.cmds",
"pycoin.contrib",
"pycoin.convention",
"pycoin.ecdsa",
"pycoin.ecdsa.native",
"pycoin.key",
"pycoin.message",
"pycoin.networks",
"pycoin.serialize",
"pycoin.services",
"pycoin.tx",
"pycoin.tx.pay_to",
"pycoin.tx.script",
"pycoin.wallet"
],
author="Richard Kiss",
entry_points={
'console_scripts':
[
'block = pycoin.cmds.block:main',
'ku = pycoin.cmds.ku:main',
'tx = pycoin.cmds.tx:main',
'cache_tx = pycoin.cmds.cache_tx:main',
'fetch_unspent = pycoin.cmds.fetch_unspent:main',
# these scripts are obsolete
'genwallet = pycoin.cmds.genwallet:main',
'spend = pycoin.cmds.spend:main',
'bu = pycoin.cmds.bitcoin_utils:main',
]
},
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoin",
license="http://opensource.org/licenses/MIT",
description="Utilities for Bitcoin and altcoin addresses and transaction manipulation.",
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],)
| mit | Python |
0304e4f8f99dd8cd5e51c8332ab3c2b5bcf30d62 | Use latest yelp-cheetah | asottile/cheetah_lint | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name='cheetah_lint',
description='Linting tools for the Cheetah templating language.',
url='https://github.com/asottile/cheetah_lint',
version='0.3.1',
author='Anthony Sottile',
author_email='asottile@umich.edu',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
packages=find_packages('.', exclude=('tests*', 'testing*')),
install_requires=[
'aspy.refactor_imports>=0.2.1',
'cached-property',
'flake8>=2.6.0',
'refactorlib[cheetah]>=0.13.0,<=0.13.999',
'yelp-cheetah>=0.17.0,<=0.17.999',
],
entry_points={
'console_scripts': [
'cheetah-reorder-imports = cheetah_lint.reorder_imports:main',
'cheetah-flake = cheetah_lint.flake:main',
],
},
)
| # -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name='cheetah_lint',
description='Linting tools for the Cheetah templating language.',
url='https://github.com/asottile/cheetah_lint',
version='0.3.1',
author='Anthony Sottile',
author_email='asottile@umich.edu',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
packages=find_packages('.', exclude=('tests*', 'testing*')),
install_requires=[
'aspy.refactor_imports>=0.2.1',
'cached-property',
'flake8>=2.6.0',
'refactorlib[cheetah]>=0.12.0,<=0.12.999',
'yelp-cheetah>=0.16.1,<=0.16.999',
],
entry_points={
'console_scripts': [
'cheetah-reorder-imports = cheetah_lint.reorder_imports:main',
'cheetah-flake = cheetah_lint.flake:main',
],
},
)
| mit | Python |
4d89756d432bcf8cede033cef6058681fda3fd6c | Bump to v0.13.1 | gisce/enerdata | setup.py | setup.py | import sys
from setuptools import setup, find_packages
INSTALL_REQUIRES = ['pytz', 'workalendar']
if sys.version_info < (2, 7):
INSTALL_REQUIRES += ['backport_collections']
setup(
name='enerdata',
version='0.13.1',
packages=find_packages(),
url='http://code.gisce.net',
license='MIT',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
install_requires=INSTALL_REQUIRES,
description=''
)
| import sys
from setuptools import setup, find_packages
INSTALL_REQUIRES = ['pytz', 'workalendar']
if sys.version_info < (2, 7):
INSTALL_REQUIRES += ['backport_collections']
setup(
name='enerdata',
version='0.13.0',
packages=find_packages(),
url='http://code.gisce.net',
license='MIT',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
install_requires=INSTALL_REQUIRES,
description=''
)
| mit | Python |
9fe7ab80870b2c7de8380ab8013fbb33df7ee847 | Update to 0.3 | jdfreder/jupyter-pip | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='jupyter-pip',
version='0.3',
description='Allows IPython notebook extension authors to make their extension pip installable!',
author='Jonathan Frederic',
author_email='jon.freder@gmail.com',
license='New BSD License',
url='https://github.com/jdfreder/jupyter-pip',
keywords='python jupyter ipython javascript nbextension deployment pip install package extension',
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'License :: OSI Approved'],
packages=['jupyterpip'],
include_package_data=True,
)
| # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='jupyter-pip',
version='0.2',
description='Allows IPython notebook extension authors to make their extension pip installable!',
author='Jonathan Frederic',
author_email='jon.freder@gmail.com',
license='New BSD License',
url='https://github.com/jdfreder/jupyter-pip',
keywords='python jupyter ipython javascript nbextension deployment pip install package extension',
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'License :: OSI Approved'],
packages=['jupyterpip'],
include_package_data=True,
)
| bsd-3-clause | Python |
21ae601f31a6ef53ce87a14531c458ef8ab91c08 | Add try/except block for dequeue to deal with AttributeErrors from empty queue | jwarren116/data-structures-deux | queue.py | queue.py | #!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
try:
self.old_front = self.front
self.front = self.front.behind
return self.old_front.value
except AttributeError:
raise ValueError('No items in queue')
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
| #!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
self.old_front = self.front
self.front = self.front.behind
return self.old_front.value
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
| mit | Python |
12b5631fc02ad2073672a3f8d5712ef3be1bc01a | Bump api dependency. | Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server | setup.py | setup.py | #!/usr/bin/env python
"""Kegbot Beer Kegerator Server package.
Kegbot is a hardware and software system to record and monitor access to a beer
kegerator. For more information and documentation, see http://kegbot.org/
"""
from setuptools import setup, find_packages
VERSION = '0.9.19-pre'
DOCLINES = __doc__.split('\n')
SHORT_DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = '\n'.join(DOCLINES[2:])
DEPENDENCIES = [
'kegbot-pyutils == 0.1.7',
'kegbot-api == 0.1.13',
'Django == 1.6.2',
'django-imagekit == 3.1',
'django-registration == 1.0',
'django-socialregistration == 0.5.10',
'django-bootstrap-pagination == 0.1.10',
'django-celery == 3.1.1',
'Celery == 3.1.9',
'South == 0.8.4',
'django-crispy-forms == 1.2.8',
'foursquare == 2014.01.18',
'gunicorn == 18.0',
'MySQL-python == 1.2.5',
'pillow == 2.1.0',
'protobuf == 2.5.0',
'python-gflags == 2.0',
'python-memcached == 1.51',
'pytz == 2013b',
'requests == 2.2.1',
'tweepy == 2.2',
'jsonfield == 0.9.20',
]
def setup_package():
setup(
name = 'kegbot',
version = VERSION,
description = SHORT_DESCRIPTION,
long_description = LONG_DESCRIPTION,
author = 'Bevbot LLC',
author_email = 'info@bevbot.com',
url = 'http://kegbot.org/',
packages = find_packages(),
scripts = [
'bin/kegbot',
'bin/setup-kegbot.py',
],
install_requires = DEPENDENCIES,
dependency_links = [
'https://github.com/rem/python-protobuf/tarball/master#egg=protobuf-2.4.1',
],
include_package_data = True,
entry_points = {
'console_scripts': ['instance=django.core.management:execute_manager'],
},
)
if __name__ == '__main__':
setup_package()
| #!/usr/bin/env python
"""Kegbot Beer Kegerator Server package.
Kegbot is a hardware and software system to record and monitor access to a beer
kegerator. For more information and documentation, see http://kegbot.org/
"""
from setuptools import setup, find_packages
VERSION = '0.9.19-pre'
DOCLINES = __doc__.split('\n')
SHORT_DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = '\n'.join(DOCLINES[2:])
DEPENDENCIES = [
'kegbot-pyutils == 0.1.7',
'kegbot-api == 0.1.12',
'Django == 1.6.2',
'django-imagekit == 3.1',
'django-registration == 1.0',
'django-socialregistration == 0.5.10',
'django-bootstrap-pagination == 0.1.10',
'django-celery == 3.1.1',
'Celery == 3.1.9',
'South == 0.8.4',
'django-crispy-forms == 1.2.8',
'foursquare == 2014.01.18',
'gunicorn == 18.0',
'MySQL-python == 1.2.5',
'pillow == 2.1.0',
'protobuf == 2.5.0',
'python-gflags == 2.0',
'python-memcached == 1.51',
'pytz == 2013b',
'requests == 2.2.1',
'tweepy == 2.2',
'jsonfield == 0.9.20',
]
def setup_package():
setup(
name = 'kegbot',
version = VERSION,
description = SHORT_DESCRIPTION,
long_description = LONG_DESCRIPTION,
author = 'Bevbot LLC',
author_email = 'info@bevbot.com',
url = 'http://kegbot.org/',
packages = find_packages(),
scripts = [
'bin/kegbot',
'bin/setup-kegbot.py',
],
install_requires = DEPENDENCIES,
dependency_links = [
'https://github.com/rem/python-protobuf/tarball/master#egg=protobuf-2.4.1',
],
include_package_data = True,
entry_points = {
'console_scripts': ['instance=django.core.management:execute_manager'],
},
)
if __name__ == '__main__':
setup_package()
| mit | Python |
b14e9a48e25b83cf43c3932bb466ce283dc02b0d | fix project url | pytest-dev/pytest-xdist,nicoddemus/pytest-xdist,RonnyPfannschmidt/pytest-xdist | setup.py | setup.py | from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
| from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='http://bitbucket.org/hpk42/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
| mit | Python |
a571caf3b44eb2a5e302f91fd06b01b32479a685 | Add history | sfanous/Pyecobee | setup.py | setup.py | from setuptools import setup
with open('README.rst', 'r') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='pyecobee',
version='1.2.1',
description='A Python implementation of the ecobee API',
long_description=readme + '\n\n' + history,
url='https://github.com/sfanous/Pyecobee',
author='Sherif Fanous',
author_email='pyecobee_support@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython'
],
packages=['pyecobee', 'pyecobee.objects'],
install_requires=[
'enum34>=1.1.6; python_version < "3.4"',
'pytz>=2017.2',
'requests>=2.13.0',
'six>=1.10.0'],
package_data={
'license': ['LICENSE'],
},
)
| from setuptools import setup
with open('README.rst', 'r') as f:
readme = f.read()
setup(
name='pyecobee',
version='1.2.1',
description='A Python implementation of the ecobee API',
long_description=readme,
url='https://github.com/sfanous/Pyecobee',
author='Sherif Fanous',
author_email='pyecobee_support@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython'
],
packages=['pyecobee', 'pyecobee.objects'],
install_requires=[
'enum34>=1.1.6; python_version < "3.4"',
'pytz>=2017.2',
'requests>=2.13.0',
'six>=1.10.0'],
package_data={
'license': ['LICENSE'],
},
)
| mit | Python |
af74c291edcb63d8248ac3e36fb1c741144edf28 | install without egg (#11961) | iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-basemap/package.py | var/spack/repos/builtin/packages/py-basemap/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class PyBasemap(PythonPackage):
"""The matplotlib basemap toolkit is a library for plotting
2D data on maps in Python."""
homepage = "http://matplotlib.org/basemap/"
version('1.2.0', 'f8e64bd150590223701a48d60408e939')
version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8')
# Per Github issue #3813, setuptools is required at runtime in order
# to make mpl_toolkits a namespace package that can span multiple
# directories (i.e., matplotlib and basemap)
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-matplotlib', type=('build', 'run'))
depends_on('py-pyproj@:1.99', type=('build', 'run'), when='@:1.2.0')
depends_on('py-pyproj', type=('build', 'run'))
depends_on('py-pyshp', type=('build', 'run'))
depends_on('pil', type=('build', 'run'))
depends_on('geos')
def url_for_version(self, version):
if version >= Version('1.2.0'):
return 'https://github.com/matplotlib/basemap/archive/v{0}rel.tar.gz'.format(version)
else:
return 'https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-{0}/basemap-{0}.tar.gz'.format(version)
def setup_environment(self, spack_env, run_env):
spack_env.set('GEOS_DIR', self.spec['geos'].prefix)
def install(self, spec, prefix):
"""Install everything from build directory."""
args = self.install_args(spec, prefix)
self.setup_py('install', *args)
# namespace packages should not create an __init__.py file. This has
# been reported to the basemap project in
# https://github.com/matplotlib/basemap/issues/456
for root, dirs, files in os.walk(spec.prefix.lib):
for filename in files:
if (filename == '__init__.py' and
os.path.basename(root) == 'mpl_toolkits'):
os.remove(os.path.join(root, filename))
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class PyBasemap(PythonPackage):
"""The matplotlib basemap toolkit is a library for plotting
2D data on maps in Python."""
homepage = "http://matplotlib.org/basemap/"
version('1.2.0', 'f8e64bd150590223701a48d60408e939')
version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8')
# Per Github issue #3813, setuptools is required at runtime in order
# to make mpl_toolkits a namespace package that can span multiple
# directories (i.e., matplotlib and basemap)
depends_on('py-setuptools', type=('run'))
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-matplotlib', type=('build', 'run'))
depends_on('py-pyproj', type=('build', 'run'))
depends_on('py-pyshp', type=('build', 'run'))
depends_on('pil', type=('build', 'run'))
depends_on('geos')
def url_for_version(self, version):
if version >= Version('1.2.0'):
return 'https://github.com/matplotlib/basemap/archive/v{0}rel.tar.gz'.format(version)
else:
return 'https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-{0}/basemap-{0}.tar.gz'.format(version)
def setup_environment(self, spack_env, run_env):
spack_env.set('GEOS_DIR', self.spec['geos'].prefix)
def install(self, spec, prefix):
"""Install everything from build directory."""
args = self.install_args(spec, prefix)
self.setup_py('install', *args)
# namespace packages should not create an __init__.py file. This has
# been reported to the basemap project in
# https://github.com/matplotlib/basemap/issues/456
for root, dirs, files in os.walk(spec.prefix.lib):
for filename in files:
if (filename == '__init__.py' and
os.path.basename(root) == 'mpl_toolkits'):
os.remove(os.path.join(root, filename))
| lgpl-2.1 | Python |
8c5e95a9477654a81d3efcb6f7fc438324c86fdc | Fix unhashable types-bug | mopsalarm/pr0gramm-meta,mopsalarm/pr0gramm-meta | webapp/service.py | webapp/service.py | from functools import lru_cache
import time
import bottle
import datadog
from bottle.ext import sqlite
from attrdict import AttrDict as attrdict
print("initialize datadog metrics")
datadog.initialize()
stats = datadog.ThreadStats()
stats.start()
print("open database at pr0gramm-meta.sqlite3")
bottle.install(sqlite.Plugin(dbfile="pr0gramm-meta.sqlite3", dictrows=False))
def metric_name(suffix):
return "pr0gramm.meta.webapp.%s" % suffix
def query_sizes(database, item_ids):
where_clause = "items.id IN (%s)" % ",".join(str(val) for val in item_ids)
query = "SELECT items.id, width, height FROM items" \
" JOIN sizes ON items.id=sizes.id " \
" WHERE %s" \
" LIMIT 150" % where_clause
return [
dict(id=item_id, width=width, height=height)
for item_id, width, height in database.execute(query).fetchall()
]
@lru_cache(128)
def query_reposts(database, item_ids):
where_clause = "item_id IN (%s)" % ",".join(str(val) for val in item_ids)
query = "SELECT item_id FROM tags " \
" WHERE %s AND confidence>0.3 AND +tag='repost' COLLATE nocase" \
" LIMIT 150" % where_clause
return [item_id for item_id, in database.execute(query).fetchall()]
@bottle.get("/items")
@bottle.post("/items")
def items(db):
with stats.timer(metric_name("request.items")):
start_time = time.time()
item_ids = tuple(int(val) for val in bottle.request.params.get("ids", []).split(",") if val)[:150]
result = attrdict()
result.sizes = query_sizes(db, item_ids)
result.reposts = query_reposts(db, item_ids)
result.duration = time.time() - start_time
return result
@bottle.get("/user/<user>")
def user_benis(db, user):
with stats.timer(metric_name("request.user")):
query = "SELECT user_score.timestamp, user_score.score" \
" FROM user_score, users" \
" WHERE users.name=? COLLATE nocase AND users.id=user_score.user_id AND user_score.timestamp>?"
start_time = int(time.time() - 3600 * 24 * 7)
return {"benisHistory": db.execute(query, [user, start_time]).fetchall()}
| from functools import lru_cache
import time
import bottle
import datadog
from bottle.ext import sqlite
from attrdict import AttrDict as attrdict
print("initialize datadog metrics")
datadog.initialize()
stats = datadog.ThreadStats()
stats.start()
print("open database at pr0gramm-meta.sqlite3")
bottle.install(sqlite.Plugin(dbfile="pr0gramm-meta.sqlite3", dictrows=False))
def metric_name(suffix):
return "pr0gramm.meta.webapp.%s" % suffix
def query_sizes(database, item_ids):
where_clause = "items.id IN (%s)" % ",".join(str(val) for val in item_ids)
query = "SELECT items.id, width, height FROM items" \
" JOIN sizes ON items.id=sizes.id " \
" WHERE %s" \
" LIMIT 150" % where_clause
return [
dict(id=item_id, width=width, height=height)
for item_id, width, height in database.execute(query).fetchall()
]
@lru_cache(128)
def query_reposts(database, item_ids):
where_clause = "item_id IN (%s)" % ",".join(str(val) for val in item_ids)
query = "SELECT item_id FROM tags " \
" WHERE %s AND confidence>0.3 AND +tag='repost' COLLATE nocase" \
" LIMIT 150" % where_clause
return [item_id for item_id, in database.execute(query).fetchall()]
@bottle.get("/items")
@bottle.post("/items")
def items(db):
with stats.timer(metric_name("request.items")):
start_time = time.time()
item_ids = [int(val) for val in bottle.request.params.get("ids", []).split(",") if val][:150]
result = attrdict()
result.sizes = query_sizes(db, item_ids)
result.reposts = query_reposts(db, item_ids)
result.duration = time.time() - start_time
return result
@bottle.get("/user/<user>")
def user_benis(db, user):
with stats.timer(metric_name("request.user")):
query = "SELECT user_score.timestamp, user_score.score" \
" FROM user_score, users" \
" WHERE users.name=? COLLATE nocase AND users.id=user_score.user_id AND user_score.timestamp>?"
start_time = int(time.time() - 3600 * 24 * 7)
return {"benisHistory": db.execute(query, [user, start_time]).fetchall()}
| apache-2.0 | Python |
14d1f65699a79f879dde8fb6da2e3b1be72a7266 | Fix NoSuchOptError in lbaas agent test | mandeepdhami/neutron,vijayendrabvs/hap,infobloxopen/neutron,barnsnake351/neutron,dhanunjaya/neutron,noironetworks/neutron,eonpatapon/neutron,gkotton/neutron,watonyweng/neutron,Juniper/neutron,apporc/neutron,silenci/neutron,vveerava/Openstack,SamYaple/neutron,SamYaple/neutron,paninetworks/neutron,glove747/liberty-neutron,asgard-lab/neutron,leeseulstack/openstack,vijayendrabvs/ssl-neutron,adelina-t/neutron,chitr/neutron,bgxavier/neutron,leeseulstack/openstack,vbannai/neutron,jerryz1982/neutron,shahbazn/neutron,virtualopensystems/neutron,wolverineav/neutron,JioCloud/neutron,yamahata/tacker,antonioUnina/neutron,igor-toga/local-snat,mmnelemane/neutron,magic0704/neutron,Juniper/neutron,cernops/neutron,aristanetworks/neutron,mmnelemane/neutron,miyakz1192/neutron,chitr/neutron,redhat-openstack/neutron,Juniper/contrail-dev-neutron,javaos74/neutron,pnavarro/neutron,cloudbase/neutron,Metaswitch/calico-neutron,Juniper/contrail-dev-neutron,klmitch/neutron,JianyuWang/neutron,oeeagle/quantum,oeeagle/quantum,shahbazn/neutron,jumpojoy/neutron,openstack/neutron,leeseuljeong/leeseulstack_neutron,gkotton/neutron,openstack/neutron,pnavarro/neutron,zhhf/charging,yamahata/neutron,suneeth51/neutron,mattt416/neutron,glove747/liberty-neutron,paninetworks/neutron,cernops/neutron,apporc/neutron,Stavitsky/neutron,vijayendrabvs/ssl-neutron,beagles/neutron_hacking,vivekanand1101/neutron,noironetworks/neutron,cloudbase/neutron-virtualbox,vijayendrabvs/hap,NeCTAR-RC/neutron,JioCloud/neutron,dims/neutron,rdo-management/neutron,alexandrucoman/vbox-neutron-agent,neoareslinux/neutron,suneeth51/neutron,bigswitch/neutron,aristanetworks/neutron,virtualopensystems/neutron,dhanunjaya/neutron,Comcast/neutron,leeseuljeong/leeseulstack_neutron,watonyweng/neutron,sajuptpm/neutron-ipam,CiscoSystems/neutron,infobloxopen/neutron,waltBB/neutron_read,yuewko/neutron,JianyuWang/neutron,projectcalico/calico-neutron,projectcalico/calico-neutron,vbannai/neutron,mahak/neutron,dims/neutron,takeshineshiro/neutron,eonpatapon/neutron,Comcast/neutron,Comcast/neutron,sasukeh/neutron,mahak/neutron,cloudbase/neutron-virtualbox,bgxavier/neutron,leeseulstack/openstack,cisco-openstack/neutron,SmartInfrastructures/neutron,cloudbase/neutron,adelina-t/neutron,SmartInfrastructures/neutron,openstack/neutron,vijayendrabvs/ssl-neutron,jerryz1982/neutron,mandeepdhami/neutron,asgard-lab/neutron,alexandrucoman/vbox-neutron-agent,jacknjzhou/neutron,gkotton/neutron,vveerava/Openstack,yamahata/tacker,blueboxgroup/neutron,mattt416/neutron,eayunstack/neutron,sajuptpm/neutron-ipam,NeCTAR-RC/neutron,huntxu/neutron,CiscoSystems/neutron,yanheven/neutron,sebrandon1/neutron,wenhuizhang/neutron,zhhf/charging,vveerava/Openstack,takeshineshiro/neutron,MaximNevrov/neutron,yamahata/neutron,CiscoSystems/neutron,Metaswitch/calico-neutron,antonioUnina/neutron,jacknjzhou/neutron,gopal1cloud/neutron,leeseuljeong/leeseulstack_neutron,yamahata/neutron,silenci/neutron,MaximNevrov/neutron,beagles/neutron_hacking,sasukeh/neutron,sajuptpm/neutron-ipam,vbannai/neutron,klmitch/neutron,redhat-openstack/neutron,barnsnake351/neutron,zhhf/charging,Stavitsky/neutron,huntxu/neutron,miyakz1192/neutron,jumpojoy/neutron,eayunstack/neutron,waltBB/neutron_read,neoareslinux/neutron,yanheven/neutron,wolverineav/neutron,magic0704/neutron,gopal1cloud/neutron,mahak/neutron,blueboxgroup/neutron,yuewko/neutron,Juniper/neutron,swdream/neutron,blueboxgroup/neutron,skyddv/neutron,rdo-management/neutron,bigswitch/neutron,skyddv/neutron,vijayendrabvs/hap,swdream/neutron,cisco-openstack/neutron,wenhuizhang/neutron,igor-toga/local-snat,vivekanand1101/neutron,beagles/neutron_hacking,sebrandon1/neutron,javaos74/neutron,Juniper/contrail-dev-neutron,yamahata/tacker,virtualopensystems/neutron | neutron/tests/unit/services/loadbalancer/drivers/haproxy/test_agent.py | neutron/tests/unit/services/loadbalancer/drivers/haproxy/test_agent.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import contextlib
import mock
from oslo.config import cfg
from neutron.services.loadbalancer.drivers.haproxy import agent
from neutron.tests import base
class TestLbaasService(base.BaseTestCase):
def setUp(self):
super(TestLbaasService, self).setUp()
self.addCleanup(cfg.CONF.reset)
def test_start(self):
with mock.patch.object(
agent.rpc_service.Service, 'start'
) as mock_start:
mgr = mock.Mock()
cfg.CONF.periodic_interval = mock.Mock(return_value=10)
agent_service = agent.LbaasAgentService('host', 'topic', mgr)
agent_service.start()
self.assertTrue(mock_start.called)
def test_main(self):
logging_str = 'neutron.agent.common.config.setup_logging'
with contextlib.nested(
mock.patch(logging_str),
mock.patch.object(agent.service, 'launch'),
mock.patch.object(agent, 'eventlet'),
mock.patch('sys.argv'),
mock.patch.object(agent.manager, 'LbaasAgentManager'),
mock.patch.object(cfg.CONF, 'register_opts')
) as (mock_logging, mock_launch, mock_eventlet, sys_argv, mgr_cls, ro):
agent.main()
self.assertTrue(mock_eventlet.monkey_patch.called)
mock_launch.assert_called_once_with(mock.ANY)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import contextlib
import mock
from oslo.config import cfg
from neutron.services.loadbalancer.drivers.haproxy import agent
from neutron.tests import base
class TestLbaasService(base.BaseTestCase):
def setUp(self):
super(TestLbaasService, self).setUp()
self.addCleanup(cfg.CONF.reset)
def test_start(self):
with mock.patch.object(
agent.rpc_service.Service, 'start'
) as mock_start:
mgr = mock.Mock()
agent_service = agent.LbaasAgentService('host', 'topic', mgr)
agent_service.start()
self.assertTrue(mock_start.called)
def test_main(self):
logging_str = 'neutron.agent.common.config.setup_logging'
with contextlib.nested(
mock.patch(logging_str),
mock.patch.object(agent.service, 'launch'),
mock.patch.object(agent, 'eventlet'),
mock.patch('sys.argv'),
mock.patch.object(agent.manager, 'LbaasAgentManager'),
mock.patch.object(cfg.CONF, 'register_opts')
) as (mock_logging, mock_launch, mock_eventlet, sys_argv, mgr_cls, ro):
agent.main()
self.assertTrue(mock_eventlet.monkey_patch.called)
mock_launch.assert_called_once_with(mock.ANY)
| apache-2.0 | Python |
7c7441648a8adee4250002481d7ee60315bc8e74 | use latest setuptest | praekelt/jmbo-calendar,praekelt/jmbo-calendar | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='jmbo-calendar',
version='0.0.2',
description='Jmbo calendar app.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-calendar',
packages = find_packages(),
install_requires = [
'jmbo',
],
tests_require=[
'django-setuptest>=0.0.6',
],
test_suite="setuptest.SetupTestSuite",
include_package_data=True,
classifiers = [
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
| from setuptools import setup, find_packages
from setuptools.command.test import test
def run_tests(self):
from setuptest.runtests import runtests
return runtests(self)
test.run_tests = run_tests
setup(
name='jmbo-calendar',
version='0.0.2',
description='Jmbo calendar app.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-calendar',
packages = find_packages(),
install_requires = [
'jmbo',
],
tests_require=[
'django-setuptest',
],
test_suite="cal.tests",
include_package_data=True,
classifiers = [
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
| bsd-3-clause | Python |
c08640067d5ab81ee35a03f9482009e856ee732d | update create super user script to use new custom user model | poldracklab/cogat,rwblair/cogat,poldracklab/cogat,poldracklab/cogat,poldracklab/cogat,rwblair/cogat,rwblair/cogat,rwblair/cogat | scripts/create_superuser.py | scripts/create_superuser.py | from cognitive.apps.users.models import User
username = 'admin'
password = 'adminpassword'
if not User.objects.get(username):
User.objects.create_superuser(username=username, password=password, email='')
else:
msg = ("User {} already exists, update scripts/create_superuser.py if you "
"would like a different superuser")
print(msg.format(username))
| from django.contrib.auth.models import User
username = 'admin'
password = 'adminpassword'
if not User.objects.get(username):
User.objects.create_superuser(username=username, password=password, email='')
else:
msg = ("User {} already exists, update scripts/create_superuser.py if you "
"would like a different superuser")
print(msg.format(username))
| mit | Python |
cf00e42a82c233da2a215d7e0cd1ef2e62e617ac | update doc | snower/torpeewee,snower/torpeewee | torpeewee/__init__.py | torpeewee/__init__.py | '''
torpeewee: Tornado and asyncio asynchronous ORM by peewee.
The MIT License (MIT)
Copyright (c) 2014, 2015 torpeewee contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from peewee import *
from .model import Model, Using
from .mysql import MySQLDatabase
from .postgresql import PostgresqlDatabase
from .transaction import Transaction
from .query import ModelSelect, NoopModelSelect, ModelUpdate, ModelInsert, ModelDelete, ModelRaw
version = "1.0.1"
version_info = (1, 0, 1) | '''
torpeewee: Tornado and asyncio asynchronous ORM by peewee.
The MIT License (MIT)
Copyright (c) 2014, 2015 TorMySQL contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from peewee import *
from .model import Model, Using
from .mysql import MySQLDatabase
from .postgresql import PostgresqlDatabase
from .transaction import Transaction
from .query import ModelSelect, NoopModelSelect, ModelUpdate, ModelInsert, ModelDelete, ModelRaw
version = "1.0.1"
version_info = (1, 0, 1) | mit | Python |
b5684f602b779842953416732117f184e7431492 | Use ordered dictionaries in generateChipDtsi.py | jasmin-j/distortos,jasmin-j/distortos,CezaryGapinski/distortos,CezaryGapinski/distortos,DISTORTEC/distortos,DISTORTEC/distortos,CezaryGapinski/distortos,CezaryGapinski/distortos,CezaryGapinski/distortos,DISTORTEC/distortos,jasmin-j/distortos,jasmin-j/distortos,DISTORTEC/distortos,jasmin-j/distortos | scripts/generateChipDtsi.py | scripts/generateChipDtsi.py | #!/usr/bin/env python
#
# file: generateChipDtsi.py
#
# author: Copyright (C) 2017 Kamil Szczygiel http://www.distortec.com http://www.freddiechopin.info
#
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not
# distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
import argparse
import ast
import collections
import csv
import datetime
import jinja2
import os
#
# Tries to parse string into proper type
#
# param [in] string is the string which will be parsed
#
# return string parsed into proper type
#
def parseString(string):
try:
return ast.literal_eval(string)
except ValueError:
return string
#
# Handles single row read from CSV file
#
# param [in] jinjaEnvironment is the jinja environment
# param [in] outputPath is the output path for generated file
# param [in] header is the header of CSV row
# param [in] row is the row read from CSV file
#
def handleRow(jinjaEnvironment, outputPath, header, row):
singles = collections.OrderedDict()
nodes = collections.OrderedDict()
for index, element in enumerate(header):
if element[0] == '' or row[index] == '':
continue
if element[1] == '': # single element
singles[element[0]] = parseString(row[index])
else: # node
if element[0] not in nodes:
nodes[element[0]] = collections.OrderedDict()
if element[1] != 'dtsiTemplate':
nodes[element[0]][element[1]] = parseString(row[index])
else:
nodes[element[0]][element[1]] = jinjaEnvironment.get_template(row[index])
with open(os.path.join(outputPath, singles['name'] + '.dtsi'), 'w') as dtsiFile:
jinjaTemplate = jinjaEnvironment.get_template(singles['dtsiTemplate'])
dtsiFile.write(jinjaTemplate.render(nodes = nodes, **singles))
########################################################################################################################
# main
########################################################################################################################
parser = argparse.ArgumentParser()
parser.add_argument('csvFile', help = 'input CSV file')
parser.add_argument('outputPath', help = 'output path')
arguments = parser.parse_args()
with open(arguments.csvFile, newline = '') as csvFile:
jinjaEnvironment = jinja2.Environment(trim_blocks = True, lstrip_blocks = True, keep_trailing_newline = True,
loader = jinja2.FileSystemLoader('.'))
jinjaEnvironment.globals['year'] = datetime.date.today().year
csvReader = csv.reader(csvFile)
header = list(zip(next(csvReader), next(csvReader)))
for row in csvReader:
handleRow(jinjaEnvironment, arguments.outputPath, header, row)
| #!/usr/bin/env python
#
# file: generateChipDtsi.py
#
# author: Copyright (C) 2017 Kamil Szczygiel http://www.distortec.com http://www.freddiechopin.info
#
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not
# distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
import argparse
import ast
import collections
import csv
import datetime
import jinja2
import os
#
# Tries to parse string into proper type
#
# param [in] string is the string which will be parsed
#
# return string parsed into proper type
#
def parseString(string):
try:
return ast.literal_eval(string)
except ValueError:
return string
#
# Handles single row read from CSV file
#
# param [in] jinjaEnvironment is the jinja environment
# param [in] outputPath is the output path for generated file
# param [in] header is the header of CSV row
# param [in] row is the row read from CSV file
#
def handleRow(jinjaEnvironment, outputPath, header, row):
singles = {}
nodes = collections.defaultdict(dict)
for index, element in enumerate(header):
if element[0] == '' or row[index] == '':
continue
if element[1] == '': # single element
singles[element[0]] = parseString(row[index])
else: # node
if element[1] != 'dtsiTemplate':
nodes[element[0]][element[1]] = parseString(row[index])
else:
nodes[element[0]][element[1]] = jinjaEnvironment.get_template(row[index])
with open(os.path.join(outputPath, singles['name'] + '.dtsi'), 'w') as dtsiFile:
jinjaTemplate = jinjaEnvironment.get_template(singles['dtsiTemplate'])
dtsiFile.write(jinjaTemplate.render(nodes = nodes, **singles))
########################################################################################################################
# main
########################################################################################################################
parser = argparse.ArgumentParser()
parser.add_argument('csvFile', help = 'input CSV file')
parser.add_argument('outputPath', help = 'output path')
arguments = parser.parse_args()
with open(arguments.csvFile, newline = '') as csvFile:
jinjaEnvironment = jinja2.Environment(trim_blocks = True, lstrip_blocks = True, keep_trailing_newline = True,
loader = jinja2.FileSystemLoader('.'))
jinjaEnvironment.globals['year'] = datetime.date.today().year
csvReader = csv.reader(csvFile)
header = list(zip(next(csvReader), next(csvReader)))
for row in csvReader:
handleRow(jinjaEnvironment, arguments.outputPath, header, row)
| mpl-2.0 | Python |
5fbb833cf5fa33f2d364d6b56fcc90240297a57b | Handle sample rate in counts | JackDanger/sentry,zenefits/sentry,argonemyth/sentry,jean/sentry,zenefits/sentry,nicholasserra/sentry,1tush/sentry,songyi199111/sentry,JamesMura/sentry,korealerts1/sentry,gg7/sentry,jean/sentry,zenefits/sentry,argonemyth/sentry,boneyao/sentry,Kryz/sentry,jean/sentry,vperron/sentry,kevinastone/sentry,looker/sentry,ifduyue/sentry,gg7/sentry,boneyao/sentry,BayanGroup/sentry,ewdurbin/sentry,beeftornado/sentry,felixbuenemann/sentry,pauloschilling/sentry,Natim/sentry,looker/sentry,ewdurbin/sentry,vperron/sentry,BuildingLink/sentry,drcapulet/sentry,BuildingLink/sentry,kevinlondon/sentry,ewdurbin/sentry,BuildingLink/sentry,JackDanger/sentry,ifduyue/sentry,wong2/sentry,kevinlondon/sentry,fuziontech/sentry,kevinlondon/sentry,beeftornado/sentry,BayanGroup/sentry,korealerts1/sentry,fotinakis/sentry,gencer/sentry,Natim/sentry,mvaled/sentry,JTCunning/sentry,imankulov/sentry,ifduyue/sentry,looker/sentry,mvaled/sentry,JackDanger/sentry,ngonzalvez/sentry,fuziontech/sentry,songyi199111/sentry,jean/sentry,looker/sentry,alexm92/sentry,nicholasserra/sentry,JTCunning/sentry,daevaorn/sentry,zenefits/sentry,beeftornado/sentry,nicholasserra/sentry,korealerts1/sentry,TedaLIEz/sentry,alexm92/sentry,BuildingLink/sentry,pauloschilling/sentry,1tush/sentry,wujuguang/sentry,TedaLIEz/sentry,mvaled/sentry,1tush/sentry,mitsuhiko/sentry,hongliang5623/sentry,ifduyue/sentry,daevaorn/sentry,JamesMura/sentry,alexm92/sentry,wong2/sentry,ifduyue/sentry,Kryz/sentry,fuziontech/sentry,wujuguang/sentry,fotinakis/sentry,felixbuenemann/sentry,jean/sentry,gencer/sentry,argonemyth/sentry,wujuguang/sentry,songyi199111/sentry,boneyao/sentry,hongliang5623/sentry,mvaled/sentry,drcapulet/sentry,drcapulet/sentry,looker/sentry,kevinastone/sentry,kevinastone/sentry,vperron/sentry,gencer/sentry,daevaorn/sentry,imankulov/sentry,mvaled/sentry,BuildingLink/sentry,JTCunning/sentry,gencer/sentry,imankulov/sentry,llonchj/sentry,fotinakis/sentry,ngonzalvez/sentry,zenefits/sentry,llonchj/sentry,wong2/sentry,gencer/sentry,mitsuhiko/sentry,JamesMura/sentry,mvaled/sentry,ngonzalvez/sentry,BayanGroup/sentry,daevaorn/sentry,Natim/sentry,JamesMura/sentry,fotinakis/sentry,hongliang5623/sentry,Kryz/sentry,felixbuenemann/sentry,llonchj/sentry,gg7/sentry,TedaLIEz/sentry,pauloschilling/sentry,JamesMura/sentry | src/sentry/utils/metrics.py | src/sentry/utils/metrics.py | from __future__ import absolute_import
__all__ = ['timing', 'incr']
from django_statsd.clients import statsd
from django.conf import settings
from random import random
def _get_key(key):
prefix = settings.SENTRY_METRICS_PREFIX
if prefix:
return '{}{}'.format(prefix, key)
return key
def incr(key, amount=1):
from sentry.app import tsdb
sample_rate = settings.SENTRY_METRICS_SAMPLE_RATE
statsd.incr(_get_key(key), amount,
rate=sample_rate)
if sample_rate >= 1 or random() >= sample_rate:
if sample_rate > 0 and sample_rate < 1:
amount = amount * (1.0 / sample_rate)
tsdb.incr(tsdb.models.internal, key, amount)
def timing(key, value):
# TODO(dcramer): implement timing for tsdb
return statsd.timing(_get_key(key), value,
rate=settings.SENTRY_METRICS_SAMPLE_RATE)
def timer(key):
# TODO(dcramer): implement timing for tsdb
return statsd.timer(_get_key(key),
rate=settings.SENTRY_METRICS_SAMPLE_RATE)
| from __future__ import absolute_import
__all__ = ['timing', 'incr']
from django_statsd.clients import statsd
from django.conf import settings
from random import random
def _get_key(key):
prefix = settings.SENTRY_METRICS_PREFIX
if prefix:
return '{}{}'.format(prefix, key)
return key
def incr(key, amount=1):
from sentry.app import tsdb
sample_rate = settings.SENTRY_METRICS_SAMPLE_RATE
statsd.incr(_get_key(key), amount,
rate=sample_rate)
if sample_rate >= 1 or random() >= sample_rate:
tsdb.incr(tsdb.models.internal, key)
def timing(key, value):
# TODO(dcramer): implement timing for tsdb
return statsd.timing(_get_key(key), value,
rate=settings.SENTRY_METRICS_SAMPLE_RATE)
def timer(key):
# TODO(dcramer): implement timing for tsdb
return statsd.timer(_get_key(key),
rate=settings.SENTRY_METRICS_SAMPLE_RATE)
| bsd-3-clause | Python |
745cc40a3eb35f55c9fedbff708e5bebb17d7195 | Update __init__.py | Alan-Jairo/topgeo | topgeo/__init__.py | topgeo/__init__.py | """ Esta libreria funciona para realizar calculos topograficos
<topgeo.calcoor("csv")> #Calcula las coordenadas (x,y,z) de un 'csv'.
<topgeo.caldist("csv")> #Calcula las distancias horizontales e inclinadas."""
from coordenada import calcoor
from distancia import caldist
pd.read_csv('Lev_canal.csv')
pd.read_csv('Puntos.csv')
| """ Esta libreria funciona para realizar calculos topograficos
<topgeo.calcoor("csv")> #Calcula las coordenadas (x,y,z) de un 'csv'.
<topgeo.caldist("csv")> #Calcula las distancias horizontales e inclinadas."""
from coordenada import calcoor
from distancia import caldist
| mit | Python |
d2e2d8961bb17948579c21cf793ad3f0e2babea7 | fix install_requires | ponty/discogui,ponty/discogui | setup.py | setup.py | import os.path
from setuptools import setup
NAME = "discogui"
# get __version__
__version__ = None
exec(open(os.path.join(NAME, "about.py")).read())
VERSION = __version__
URL = "https://github.com/ponty/discogui"
DESCRIPTION = "GUI discovery"
LONG_DESCRIPTION = """discogui discovers GUI elements
Documentation: https://github.com/ponty/discogui/tree/"""
LONG_DESCRIPTION += VERSION
PACKAGES = [
"discogui",
"discogui.examples",
]
# extra = {}
# if sys.version_info >= (3,):
# extra["use_2to3"] = True
# extra["use_2to3_exclude_fixers"] = ["lib2to3.fixes.fix_import"]
classifiers = [
# Get more strings from
# http://www.python.org/pypi?%3Aaction=list_classifiers
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
]
install_requires = [
"pillow",
"path.py",
"PyUserInput",
"PyScreenshot",
"easyprocess",
"pyvirtualdisplay",
]
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
classifiers=classifiers,
keywords="GUI",
author="ponty",
# author_email='',
url=URL,
license="BSD",
packages=PACKAGES,
install_requires=install_requires,
)
| import os.path
from setuptools import setup
NAME = "discogui"
# get __version__
__version__ = None
exec(open(os.path.join(NAME, "about.py")).read())
VERSION = __version__
URL = "https://github.com/ponty/discogui"
DESCRIPTION = "GUI discovery"
LONG_DESCRIPTION = """discogui discovers GUI elements
Documentation: https://github.com/ponty/discogui/tree/"""
LONG_DESCRIPTION += VERSION
PACKAGES = [
"discogui",
"discogui.examples",
]
# extra = {}
# if sys.version_info >= (3,):
# extra["use_2to3"] = True
# extra["use_2to3_exclude_fixers"] = ["lib2to3.fixes.fix_import"]
classifiers = [
# Get more strings from
# http://www.python.org/pypi?%3Aaction=list_classifiers
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
]
install_requires = open("requirements.txt").read().split("\n")
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
classifiers=classifiers,
keywords="GUI",
author="ponty",
# author_email='',
url=URL,
license="BSD",
packages=PACKAGES,
install_requires=install_requires,
# **extra
)
| bsd-2-clause | Python |
e7ada76aaecca014f40cf8172a7eec5d75cace7f | Allow updating the next year's courses in March | StoDevX/course-data-tools,StoDevX/course-data-tools | scripts/lib/find_terms.py | scripts/lib/find_terms.py | from argparse import ArgumentParser
from datetime import datetime
from .year_plus_term import year_plus_term
def find_terms_for_year(year):
now = datetime.now()
current_month = now.month
current_year = now.year
all_terms = [1, 2, 3, 4, 5]
limited_terms = [1, 2, 3]
# St. Olaf publishes initial Fall, Interim, and Spring data in April of each year.
# Full data is published by August.
if year == current_year:
if current_month < 3:
return []
elif current_month <= 7:
return [year_plus_term(year, term) for term in limited_terms]
else:
return [year_plus_term(year, term) for term in all_terms]
elif year > current_year:
return []
else:
return [year_plus_term(year, term) for term in all_terms]
def find_terms(start_year=None, end_year=None, this_year=False):
now = datetime.now()
start_year = start_year or 1994
end_year = end_year or now.year
current_year = end_year if end_year is not start_year else end_year + 1
current_month = now.month
if this_year:
start_year = current_year - 1 if current_month <= 7 else current_year
most_years = range(start_year, current_year)
term_list = list(map(find_terms_for_year, most_years))
# Sort the list of terms to 20081, 20082, 20091 (instead of 20081, 20091, 20082)
# (sorts in-place)
term_list.sort()
return term_list
if __name__ == '__main__':
argparser = ArgumentParser()
argparser.add_argument('--start-year', type=int, nargs=1)
argparser.add_argument('--end-year', type=int, nargs=1)
argparser.add_argument('--this-year', action='store_true')
args = argparser.parse_args()
terms = find_terms(
start_year=args.start_year[0],
end_year=args.end_year[0],
this_year=args.this_year)
terms = [str(term) for term in terms]
print(' '.join(terms))
| from argparse import ArgumentParser
from datetime import datetime
from .year_plus_term import year_plus_term
def find_terms_for_year(year):
now = datetime.now()
current_month = now.month
current_year = now.year
all_terms = [1, 2, 3, 4, 5]
limited_terms = [1, 2, 3]
# St. Olaf publishes initial Fall, Interim, and Spring data in April of each year.
# Full data is published by August.
if year == current_year:
if current_month <= 3:
return []
elif current_month <= 7:
return [year_plus_term(year, term) for term in limited_terms]
else:
return [year_plus_term(year, term) for term in all_terms]
elif year > current_year:
return []
else:
return [year_plus_term(year, term) for term in all_terms]
def find_terms(start_year=None, end_year=None, this_year=False):
now = datetime.now()
start_year = start_year or 1994
end_year = end_year or now.year
current_year = end_year if end_year is not start_year else end_year + 1
current_month = now.month
if this_year:
start_year = current_year - 1 if current_month <= 7 else current_year
most_years = range(start_year, current_year)
term_list = list(map(find_terms_for_year, most_years))
# Sort the list of terms to 20081, 20082, 20091 (instead of 20081, 20091, 20082)
# (sorts in-place)
term_list.sort()
return term_list
if __name__ == '__main__':
argparser = ArgumentParser()
argparser.add_argument('--start-year', type=int, nargs=1)
argparser.add_argument('--end-year', type=int, nargs=1)
argparser.add_argument('--this-year', action='store_true')
args = argparser.parse_args()
terms = find_terms(
start_year=args.start_year[0],
end_year=args.end_year[0],
this_year=args.this_year)
terms = [str(term) for term in terms]
print(' '.join(terms))
| mit | Python |
e0f008a35eb38f6fb86ad0e17b0ba4f0208d0337 | Add more unit tests for linked list | ueg1990/aids | tests/test_linked_list/test_linked_list.py | tests/test_linked_list/test_linked_list.py | import unittest
from aids.linked_list.linked_list import LinkedList
class LinkedListTestCase(unittest.TestCase):
'''
Unit tests for the Linked List data structure
'''
def setUp(self):
self.test_linked_list = LinkedList()
def test_stack_initialization(self):
self.assertTrue(isinstance(self.test_linked_list, LinkedList))
def test_linked_list_is_empty(self):
self.assertTrue(self.test_linked_list.is_empty())
def test_linked_list_size(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.assertEqual(self.test_linked_list.size(), 2)
def test_linked_list_add(self):
self.test_linked_list.add(1)
self.assertEqual(self.test_linked_list.head.get_data(), 1)
def test_linked_list_search(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.assertTrue(self.test_linked_list.search(3))
def test_linked_list_search_false(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.assertFalse(self.test_linked_list.search(11))
def test_linked_list_search_empty(self):
self.assertFalse(self.test_linked_list.search(3))
def test_linked_list_remove_first(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.test_linked_list.remove(1)
self.assertEqual(self.test_linked_list.head.get_data(), 2)
def test_linked_list_remove_last(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.test_linked_list.remove(4)
self.assertEqual(self.test_linked_list.size(), 3)
def test_linked_list_remove(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.test_linked_list.remove(3)
self.assertEqual(self.test_linked_list.size(), 3)
def tearDown(self):
pass | import unittest
from aids.linked_list.linked_list import LinkedList
class LinkedListTestCase(unittest.TestCase):
'''
Unit tests for the Linked List data structure
'''
def setUp(self):
self.test_linked_list = LinkedList()
def test_stack_initialization(self):
self.assertTrue(isinstance(self.test_linked_list, LinkedList))
def test_linked_list_is_empty(self):
self.assertTrue(self.test_linked_list.is_empty())
def test_linked_list_size(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.assertEqual(self.test_linked_list.size(), 2)
def test_linked_list_add(self):
self.test_linked_list.add(1)
self.assertEqual(self.test_linked_list.head.get_data(), 1)
def test_linked_list_search(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.assertTrue(self.test_linked_list.search(3))
def test_linked_list_search_false(self):
self.test_linked_list.add(1)
self.test_linked_list.add(2)
self.test_linked_list.add(3)
self.test_linked_list.add(4)
self.assertFalse(self.test_linked_list.search(11))
def test_linked_list_search_empty(self):
self.assertFalse(self.test_linked_list.search(3))
def tearDown(self):
pass | mit | Python |
c2138a35123969651212b1d9cd6cdefef89663ec | Modify existing Programs migration to account for help_text change | shurihell/testasia,fintech-circle/edx-platform,doganov/edx-platform,synergeticsedx/deployment-wipro,proversity-org/edx-platform,stvstnfrd/edx-platform,shurihell/testasia,amir-qayyum-khan/edx-platform,RPI-OPENEDX/edx-platform,analyseuc3m/ANALYSE-v1,kmoocdev2/edx-platform,devs1991/test_edx_docmode,UOMx/edx-platform,solashirai/edx-platform,lduarte1991/edx-platform,cecep-edu/edx-platform,stvstnfrd/edx-platform,Lektorium-LLC/edx-platform,deepsrijit1105/edx-platform,lduarte1991/edx-platform,tanmaykm/edx-platform,JioEducation/edx-platform,longmen21/edx-platform,raccoongang/edx-platform,CredoReference/edx-platform,pomegranited/edx-platform,mitocw/edx-platform,Endika/edx-platform,stvstnfrd/edx-platform,Endika/edx-platform,romain-li/edx-platform,Stanford-Online/edx-platform,proversity-org/edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,simbs/edx-platform,angelapper/edx-platform,MakeHer/edx-platform,chrisndodge/edx-platform,miptliot/edx-platform,shabab12/edx-platform,Edraak/circleci-edx-platform,IndonesiaX/edx-platform,nttks/edx-platform,doganov/edx-platform,bigdatauniversity/edx-platform,solashirai/edx-platform,shabab12/edx-platform,gsehub/edx-platform,longmen21/edx-platform,Edraak/edx-platform,cognitiveclass/edx-platform,jjmiranda/edx-platform,procangroup/edx-platform,kmoocdev2/edx-platform,jzoldak/edx-platform,prarthitm/edxplatform,ahmedaljazzar/edx-platform,Edraak/edraak-platform,proversity-org/edx-platform,appsembler/edx-platform,simbs/edx-platform,pabloborrego93/edx-platform,BehavioralInsightsTeam/edx-platform,fintech-circle/edx-platform,Edraak/edx-platform,philanthropy-u/edx-platform,CredoReference/edx-platform,RPI-OPENEDX/edx-platform,itsjeyd/edx-platform,naresh21/synergetics-edx-platform,shabab12/edx-platform,Endika/edx-platform,philanthropy-u/edx-platform,simbs/edx-platform,edx/edx-platform,mbareta/edx-platform-ft,jolyonb/edx-platform,gsehub/edx-platform,CredoReference/edx-platform,mbareta/edx-platform-ft,mitocw/edx-platform,caesar2164/edx-platform,louyihua/edx-platform,arbrandes/edx-platform,a-parhom/edx-platform,defance/edx-platform,gsehub/edx-platform,10clouds/edx-platform,itsjeyd/edx-platform,kursitet/edx-platform,jolyonb/edx-platform,nttks/edx-platform,appsembler/edx-platform,nttks/edx-platform,pabloborrego93/edx-platform,arbrandes/edx-platform,Stanford-Online/edx-platform,10clouds/edx-platform,halvertoluke/edx-platform,hastexo/edx-platform,Edraak/circleci-edx-platform,bigdatauniversity/edx-platform,zhenzhai/edx-platform,CourseTalk/edx-platform,Edraak/edraak-platform,Edraak/edx-platform,arbrandes/edx-platform,pomegranited/edx-platform,antoviaque/edx-platform,pepeportela/edx-platform,msegado/edx-platform,miptliot/edx-platform,devs1991/test_edx_docmode,antoviaque/edx-platform,philanthropy-u/edx-platform,tanmaykm/edx-platform,cpennington/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,romain-li/edx-platform,deepsrijit1105/edx-platform,msegado/edx-platform,CredoReference/edx-platform,prarthitm/edxplatform,ampax/edx-platform,pepeportela/edx-platform,teltek/edx-platform,antoviaque/edx-platform,IndonesiaX/edx-platform,CourseTalk/edx-platform,itsjeyd/edx-platform,chrisndodge/edx-platform,cpennington/edx-platform,ovnicraft/edx-platform,UOMx/edx-platform,prarthitm/edxplatform,waheedahmed/edx-platform,zhenzhai/edx-platform,doganov/edx-platform,longmen21/edx-platform,franosincic/edx-platform,eduNEXT/edunext-platform,waheedahmed/edx-platform,longmen21/edx-platform,philanthropy-u/edx-platform,pepeportela/edx-platform,EDUlib/edx-platform,Ayub-Khan/edx-platform,defance/edx-platform,hastexo/edx-platform,nttks/edx-platform,10clouds/edx-platform,kursitet/edx-platform,bigdatauniversity/edx-platform,BehavioralInsightsTeam/edx-platform,shurihell/testasia,simbs/edx-platform,kursitet/edx-platform,MakeHer/edx-platform,solashirai/edx-platform,cpennington/edx-platform,pomegranited/edx-platform,marcore/edx-platform,Endika/edx-platform,alu042/edx-platform,Ayub-Khan/edx-platform,jzoldak/edx-platform,amir-qayyum-khan/edx-platform,kmoocdev2/edx-platform,IndonesiaX/edx-platform,romain-li/edx-platform,hastexo/edx-platform,fintech-circle/edx-platform,ampax/edx-platform,alu042/edx-platform,a-parhom/edx-platform,kmoocdev2/edx-platform,louyihua/edx-platform,marcore/edx-platform,jolyonb/edx-platform,raccoongang/edx-platform,gymnasium/edx-platform,deepsrijit1105/edx-platform,ovnicraft/edx-platform,doganov/edx-platform,angelapper/edx-platform,antoviaque/edx-platform,UOMx/edx-platform,EDUlib/edx-platform,chrisndodge/edx-platform,shabab12/edx-platform,synergeticsedx/deployment-wipro,stvstnfrd/edx-platform,romain-li/edx-platform,MakeHer/edx-platform,halvertoluke/edx-platform,zhenzhai/edx-platform,Livit/Livit.Learn.EdX,amir-qayyum-khan/edx-platform,ahmedaljazzar/edx-platform,itsjeyd/edx-platform,alu042/edx-platform,caesar2164/edx-platform,analyseuc3m/ANALYSE-v1,EDUlib/edx-platform,EDUlib/edx-platform,alu042/edx-platform,teltek/edx-platform,devs1991/test_edx_docmode,wwj718/edx-platform,louyihua/edx-platform,10clouds/edx-platform,teltek/edx-platform,fintech-circle/edx-platform,hastexo/edx-platform,tanmaykm/edx-platform,zhenzhai/edx-platform,ZLLab-Mooc/edx-platform,raccoongang/edx-platform,eduNEXT/edx-platform,prarthitm/edxplatform,eduNEXT/edx-platform,cecep-edu/edx-platform,naresh21/synergetics-edx-platform,longmen21/edx-platform,Ayub-Khan/edx-platform,devs1991/test_edx_docmode,ESOedX/edx-platform,deepsrijit1105/edx-platform,wwj718/edx-platform,lduarte1991/edx-platform,proversity-org/edx-platform,pepeportela/edx-platform,ovnicraft/edx-platform,ZLLab-Mooc/edx-platform,Livit/Livit.Learn.EdX,nttks/edx-platform,waheedahmed/edx-platform,bigdatauniversity/edx-platform,zhenzhai/edx-platform,doganov/edx-platform,jzoldak/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,halvertoluke/edx-platform,TeachAtTUM/edx-platform,franosincic/edx-platform,mbareta/edx-platform-ft,CourseTalk/edx-platform,mitocw/edx-platform,ahmedaljazzar/edx-platform,IndonesiaX/edx-platform,appsembler/edx-platform,appsembler/edx-platform,ahmedaljazzar/edx-platform,gsehub/edx-platform,Lektorium-LLC/edx-platform,devs1991/test_edx_docmode,naresh21/synergetics-edx-platform,TeachAtTUM/edx-platform,caesar2164/edx-platform,amir-qayyum-khan/edx-platform,ampax/edx-platform,devs1991/test_edx_docmode,analyseuc3m/ANALYSE-v1,TeachAtTUM/edx-platform,msegado/edx-platform,UOMx/edx-platform,cognitiveclass/edx-platform,angelapper/edx-platform,lduarte1991/edx-platform,Edraak/circleci-edx-platform,raccoongang/edx-platform,pomegranited/edx-platform,wwj718/edx-platform,CourseTalk/edx-platform,tanmaykm/edx-platform,arbrandes/edx-platform,ESOedX/edx-platform,cognitiveclass/edx-platform,cecep-edu/edx-platform,jjmiranda/edx-platform,shurihell/testasia,BehavioralInsightsTeam/edx-platform,JioEducation/edx-platform,Stanford-Online/edx-platform,cognitiveclass/edx-platform,bigdatauniversity/edx-platform,kursitet/edx-platform,procangroup/edx-platform,edx-solutions/edx-platform,halvertoluke/edx-platform,simbs/edx-platform,synergeticsedx/deployment-wipro,solashirai/edx-platform,Edraak/edraak-platform,jjmiranda/edx-platform,defance/edx-platform,waheedahmed/edx-platform,RPI-OPENEDX/edx-platform,halvertoluke/edx-platform,MakeHer/edx-platform,franosincic/edx-platform,devs1991/test_edx_docmode,chrisndodge/edx-platform,miptliot/edx-platform,ESOedX/edx-platform,eduNEXT/edunext-platform,pomegranited/edx-platform,synergeticsedx/deployment-wipro,ovnicraft/edx-platform,Lektorium-LLC/edx-platform,eduNEXT/edx-platform,msegado/edx-platform,teltek/edx-platform,miptliot/edx-platform,jjmiranda/edx-platform,gymnasium/edx-platform,jzoldak/edx-platform,Stanford-Online/edx-platform,franosincic/edx-platform,mbareta/edx-platform-ft,cpennington/edx-platform,JioEducation/edx-platform,JioEducation/edx-platform,eduNEXT/edunext-platform,mitocw/edx-platform,kmoocdev2/edx-platform,marcore/edx-platform,defance/edx-platform,kursitet/edx-platform,pabloborrego93/edx-platform,ESOedX/edx-platform,naresh21/synergetics-edx-platform,ZLLab-Mooc/edx-platform,analyseuc3m/ANALYSE-v1,edx/edx-platform,Livit/Livit.Learn.EdX,Edraak/circleci-edx-platform,procangroup/edx-platform,a-parhom/edx-platform,edx/edx-platform,solashirai/edx-platform,Ayub-Khan/edx-platform,eduNEXT/edunext-platform,RPI-OPENEDX/edx-platform,ZLLab-Mooc/edx-platform,cognitiveclass/edx-platform,TeachAtTUM/edx-platform,waheedahmed/edx-platform,edx-solutions/edx-platform,louyihua/edx-platform,Livit/Livit.Learn.EdX,a-parhom/edx-platform,angelapper/edx-platform,Edraak/edx-platform,Edraak/edx-platform,Lektorium-LLC/edx-platform,msegado/edx-platform,ampax/edx-platform,franosincic/edx-platform,wwj718/edx-platform,shurihell/testasia,gymnasium/edx-platform,jolyonb/edx-platform,RPI-OPENEDX/edx-platform,wwj718/edx-platform,MakeHer/edx-platform,Ayub-Khan/edx-platform,ovnicraft/edx-platform,ZLLab-Mooc/edx-platform,marcore/edx-platform,Edraak/circleci-edx-platform,cecep-edu/edx-platform,eduNEXT/edx-platform,IndonesiaX/edx-platform,Edraak/edraak-platform,pabloborrego93/edx-platform,edx-solutions/edx-platform,procangroup/edx-platform,gymnasium/edx-platform,edx-solutions/edx-platform | openedx/core/djangoapps/programs/migrations/0003_auto_20151120_1613.py | openedx/core/djangoapps/programs/migrations/0003_auto_20151120_1613.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('programs', '0002_programsapiconfig_cache_ttl'),
]
operations = [
migrations.AddField(
model_name='programsapiconfig',
name='authoring_app_css_path',
field=models.CharField(
max_length=255,
help_text='This value is required in order to enable the Studio authoring interface.',
verbose_name="Path to authoring app's CSS",
blank=True
),
),
migrations.AddField(
model_name='programsapiconfig',
name='authoring_app_js_path',
field=models.CharField(
max_length=255,
help_text='This value is required in order to enable the Studio authoring interface.',
verbose_name="Path to authoring app's JS",
blank=True
),
),
migrations.AddField(
model_name='programsapiconfig',
name='enable_studio_tab',
field=models.BooleanField(default=False, verbose_name='Enable Studio Authoring Interface'),
),
migrations.AlterField(
model_name='programsapiconfig',
name='enable_student_dashboard',
field=models.BooleanField(default=False, verbose_name='Enable Student Dashboard Displays'),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('programs', '0002_programsapiconfig_cache_ttl'),
]
operations = [
migrations.AddField(
model_name='programsapiconfig',
name='authoring_app_css_path',
field=models.CharField(max_length=255, verbose_name="Path to authoring app's CSS", blank=True),
),
migrations.AddField(
model_name='programsapiconfig',
name='authoring_app_js_path',
field=models.CharField(max_length=255, verbose_name="Path to authoring app's JS", blank=True),
),
migrations.AddField(
model_name='programsapiconfig',
name='enable_studio_tab',
field=models.BooleanField(default=False, verbose_name='Enable Studio Authoring Interface'),
),
migrations.AlterField(
model_name='programsapiconfig',
name='enable_student_dashboard',
field=models.BooleanField(default=False, verbose_name='Enable Student Dashboard Displays'),
),
]
| agpl-3.0 | Python |
d64235531fae49b3e76e9c904a9a2b0a08db0cf6 | Change version to 0.2. | TyVik/YaDiskClient | YaDiskClient/__init__.py | YaDiskClient/__init__.py | """
Client for Yandex.Disk.
"""
__version__ = '0.2'
from YaDiskClient import YaDiskException, YaDisk | """
Client for Yandex.Disk.
"""
__version__ = '0.1'
from YaDiskClient import YaDiskException, YaDisk | mit | Python |
c4e83ae47cac80cc4287bc7acfac5c8e6b4a7c4c | bump to 1.2.3 (#123) | twitterdev/twitter-python-ads-sdk,twitterdev/twitter-python-ads-sdk | twitter_ads/__init__.py | twitter_ads/__init__.py | # Copyright (C) 2015 Twitter, Inc.
VERSION = (1, 2, 3)
from twitter_ads.utils import get_version
__version__ = get_version()
| # Copyright (C) 2015 Twitter, Inc.
VERSION = (1, 2, 2)
from twitter_ads.utils import get_version
__version__ = get_version()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.