commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
4f858059b324de0ec32f3b3f41f81b63254f4fe9 | bump version | vmalloc/weber-utils | weber_utils/__version__.py | weber_utils/__version__.py | __version__ = "1.1.0"
| __version__ = "1.0.2"
| bsd-3-clause | Python |
c4502645291f2ef1a1eaf08801fd4fa99e3beff2 | add perf test for .exists() without caching | xpybuild/xpybuild,xpybuild/xpybuild,xpybuild/xpybuild,xpybuild/xpybuild | tests/performance/microbenchmarks/MicroPerf_FileUtilsCaches/run.py | tests/performance/microbenchmarks/MicroPerf_FileUtilsCaches/run.py | from pysys.constants import *
from xpybuild.microperf_basetest import MicroPerfPySysTest
class PySysTest(MicroPerfPySysTest):
OPERATIONS = [
# resultKey (must be a valid filename), command, setup
('fileutils.exists() non-existent file without caching',"utils.fileutils.exists(OUTPUT_DIR+'/doesntexist%d'%iteration)", ""),
('fileutils.exists() non-existent file with caching',"utils.fileutils.exists(OUTPUT_DIR+'/doesntexist')", ""),
] | from pysys.constants import *
from xpybuild.microperf_basetest import MicroPerfPySysTest
class PySysTest(MicroPerfPySysTest):
OPERATIONS = [
# resultKey (must be a valid filename), command, setup
('fileutils.exists() non-existent file with caching',"utils.fileutils.exists(OUTPUT_DIR+'/doesntexist')", ""),
] | apache-2.0 | Python |
c478c9978aa3843d1961f9e475e5d6a85c8092d5 | Fix xrange fallback | jvarho/pylibscrypt,jvarho/pylibscrypt | pylibscrypt/common.py | pylibscrypt/common.py | # Copyright (c) 2014-2018, Jan Varho
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Common constants and functions used by scrypt implementations"""
import numbers
SCRYPT_MCF_PREFIX_7 = b'$7$'
SCRYPT_MCF_PREFIX_s1 = b'$s1$'
SCRYPT_MCF_PREFIX_DEFAULT = b'$s1$'
SCRYPT_MCF_PREFIX_ANY = None
SCRYPT_N = 1<<14
SCRYPT_r = 8
SCRYPT_p = 1
# The last one differs from libscrypt defaults, but matches the 'interactive'
# work factor from the original paper. For long term storage where runtime of
# key derivation is not a problem, you could use 16 as in libscrypt or better
# yet increase N if memory is plentiful.
try:
xrange = xrange
except:
xrange = range
def check_args(password, salt, N, r, p, olen=64):
if not isinstance(password, bytes):
raise TypeError('password must be a byte string')
if not isinstance(salt, bytes):
raise TypeError('salt must be a byte string')
if not isinstance(N, numbers.Integral):
raise TypeError('N must be an integer')
if not isinstance(r, numbers.Integral):
raise TypeError('r must be an integer')
if not isinstance(p, numbers.Integral):
raise TypeError('p must be an integer')
if not isinstance(olen, numbers.Integral):
raise TypeError('length must be an integer')
if N > 2**63:
raise ValueError('N cannot be larger than 2**63')
if (N & (N - 1)) or N < 2:
raise ValueError('N must be a power of two larger than 1')
if r <= 0:
raise ValueError('r must be positive')
if p <= 0:
raise ValueError('p must be positive')
if r * p >= 2**30:
raise ValueError('r * p must be less than 2 ** 30')
if olen <= 0:
raise ValueError('length must be positive')
| # Copyright (c) 2014-2015, Jan Varho
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Common constants and functions used by scrypt implementations"""
import numbers
SCRYPT_MCF_PREFIX_7 = b'$7$'
SCRYPT_MCF_PREFIX_s1 = b'$s1$'
SCRYPT_MCF_PREFIX_DEFAULT = b'$s1$'
SCRYPT_MCF_PREFIX_ANY = None
SCRYPT_N = 1<<14
SCRYPT_r = 8
SCRYPT_p = 1
# The last one differs from libscrypt defaults, but matches the 'interactive'
# work factor from the original paper. For long term storage where runtime of
# key derivation is not a problem, you could use 16 as in libscrypt or better
# yet increase N if memory is plentiful.
xrange = xrange if 'xrange' in globals() else range
def check_args(password, salt, N, r, p, olen=64):
if not isinstance(password, bytes):
raise TypeError('password must be a byte string')
if not isinstance(salt, bytes):
raise TypeError('salt must be a byte string')
if not isinstance(N, numbers.Integral):
raise TypeError('N must be an integer')
if not isinstance(r, numbers.Integral):
raise TypeError('r must be an integer')
if not isinstance(p, numbers.Integral):
raise TypeError('p must be an integer')
if not isinstance(olen, numbers.Integral):
raise TypeError('length must be an integer')
if N > 2**63:
raise ValueError('N cannot be larger than 2**63')
if (N & (N - 1)) or N < 2:
raise ValueError('N must be a power of two larger than 1')
if r <= 0:
raise ValueError('r must be positive')
if p <= 0:
raise ValueError('p must be positive')
if r * p >= 2**30:
raise ValueError('r * p must be less than 2 ** 30')
if olen <= 0:
raise ValueError('length must be positive')
| isc | Python |
9aa17b90b8f3413f0621cc25a686774dd809dc84 | Add drf serializer for environment variables | frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq | frigg/projects/serializers.py | frigg/projects/serializers.py | from rest_framework import serializers
from frigg.builds.serializers import BuildInlineSerializer
from .models import Project
class EnvironmentVariableSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
representation = super().to_representation(instance)
if instance.is_secret:
representation.value = '[secret]'
return representation
class Meta:
model = Project
fields = (
'id',
'key',
'is_secret',
'value',
)
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'should_clone_with_ssh',
'builds'
)
| from rest_framework import serializers
from frigg.builds.serializers import BuildInlineSerializer
from .models import Project
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'should_clone_with_ssh',
'builds'
)
| mit | Python |
d0c557547c4aa98cfe47d36a18417557c11b18f9 | Bump version | mostm/pyqiwi | pyqiwi/__version__.py | pyqiwi/__version__.py | # -*- coding: utf-8 -*-
# _ _
# _ __ _ _ __ _(_)_ _(_)
# | '_ \| | | |/ _` | \ \ /\ / / |
# | |_) | |_| | (_| | |\ V V /| |
# | .__/ \__, |\__, |_| \_/\_/ |_|
# |_| |___/ |_|
# Python Qiwi API Wrapper
VERSION = (2, 1, 1)
__version__ = '.'.join(map(str, VERSION))
| # -*- coding: utf-8 -*-
# _ _
# _ __ _ _ __ _(_)_ _(_)
# | '_ \| | | |/ _` | \ \ /\ / / |
# | |_) | |_| | (_| | |\ V V /| |
# | .__/ \__, |\__, |_| \_/\_/ |_|
# |_| |___/ |_|
# Python Qiwi API Wrapper
VERSION = (2, 1, 0)
__version__ = '.'.join(map(str, VERSION))
| mit | Python |
ca6a330d629050937bc4543a66059068ae6ec504 | update tests | jendrikseipp/vulture,jendrikseipp/vulture | tests/test_script.py | tests/test_script.py | import os.path
import subprocess
import sys
from vulture import __version__
DIR = os.path.dirname(os.path.abspath(__file__))
REPO = os.path.dirname(DIR)
WHITELIST = os.path.join(REPO, 'whitelists', 'stdlib.py')
def call_vulture(args, **kwargs):
return subprocess.call(
[sys.executable, 'vulture.py'] + args, cwd=REPO, **kwargs)
def get_output(args):
child = subprocess.Popen([sys.executable, "vulture.py"] + args,
cwd=REPO, stdout=subprocess.PIPE)
return child.communicate()[0].decode("utf-8")
def test_script_with_whitelist():
assert call_vulture(['vulture.py', WHITELIST]) == 0
def test_script_without_whitelist():
assert call_vulture(['vulture.py']) == 1
def test_exclude():
assert call_vulture(['vulture.py', '--exclude', 'vulture.py']) == 0
def test_missing_file():
assert call_vulture(['missing.py']) == 1
def test_dir():
assert call_vulture(['tests']) == 0
def test_whitelist_with_python():
assert subprocess.call([sys.executable, WHITELIST], cwd=REPO) == 0
def test_whitelist_with_vulture():
assert call_vulture([WHITELIST]) == 0
def test_pyc():
assert call_vulture(['missing.pyc']) == 1
def test_version():
assert call_vulture(['--version']) == 0
assert get_output(['--version']) == 'vulture {0}\n'.format(__version__)
| import os.path
import subprocess
import sys
from vulture import __version__
DIR = os.path.dirname(os.path.abspath(__file__))
REPO = os.path.dirname(DIR)
WHITELIST = os.path.join(REPO, 'whitelists', 'stdlib.py')
def call_vulture(args, **kwargs):
return subprocess.call(
[sys.executable, 'vulture.py'] + args, cwd=REPO, **kwargs)
def get_output(args):
child = subprocess.Popen([sys.executable, "vulture.py"] + args,
cwd=REPO, stdout=subprocess.PIPE)
return child.communicate()[0].decode("utf-8")
def test_script_with_whitelist():
assert call_vulture(['vulture.py', WHITELIST]) == 0
def test_script_without_whitelist():
assert call_vulture(['vulture.py']) == 1
def test_exclude():
assert call_vulture(['vulture.py', '--exclude', 'vulture.py']) == 0
def test_missing_file():
assert call_vulture(['missing.py']) == 1
def test_dir():
assert call_vulture(['tests']) == 0
def test_whitelist_with_python():
assert subprocess.call([sys.executable, WHITELIST], cwd=REPO) == 0
def test_whitelist_with_vulture():
assert call_vulture([WHITELIST]) == 0
def test_pyc():
assert call_vulture(['missing.pyc']) == 1
def test_version():
assert call_vulture(['--version']) == 0
assert get_output(['--version']) == 'vulture.py {0}\n'.format(__version__)
| mit | Python |
f97fbb50bd5f311230c5b005e238a665e096e867 | Fix failing unit test | capybaralet/fuel,chrishokamp/fuel,udibr/fuel,codeaudit/fuel,glewis17/fuel,janchorowski/fuel,bouthilx/fuel,aalmah/fuel,hantek/fuel,rodrigob/fuel,dmitriy-serdyuk/fuel,vdumoulin/fuel,orhanf/fuel,dwf/fuel,udibr/fuel,orhanf/fuel,dhruvparamhans/fuel,dwf/fuel,hantek/fuel,dribnet/fuel,bouthilx/fuel,mila-udem/fuel,janchorowski/fuel,mila-udem/fuel,chrishokamp/fuel,dmitriy-serdyuk/fuel,markusnagel/fuel,rodrigob/fuel,harmdevries89/fuel,aalmah/fuel,mjwillson/fuel,dhruvparamhans/fuel,dribnet/fuel,vdumoulin/fuel,glewis17/fuel,mjwillson/fuel,markusnagel/fuel,codeaudit/fuel,capybaralet/fuel,harmdevries89/fuel | tests/test_server.py | tests/test_server.py | from multiprocessing import Process
from numpy.testing import assert_allclose, assert_raises
from six.moves import cPickle
from fuel.datasets import MNIST
from fuel.schemes import SequentialScheme
from fuel.server import start_server
from fuel.streams import DataStream, ServerDataStream
def get_stream():
return DataStream(
MNIST(('train',)), iteration_scheme=SequentialScheme(1500, 500))
class TestServer(object):
def setUp(self):
self.server_process = Process(
target=start_server, args=(get_stream(),))
self.server_process.start()
self.stream = ServerDataStream(('f', 't'), False)
def tearDown(self):
self.server_process.terminate()
self.stream = None
def test_server(self):
server_data = self.stream.get_epoch_iterator()
expected_data = get_stream().get_epoch_iterator()
for _, s, e in zip(range(3), server_data, expected_data):
for data in zip(s, e):
assert_allclose(*data)
assert_raises(StopIteration, next, server_data)
def test_pickling(self):
self.stream = cPickle.loads(cPickle.dumps(self.stream))
server_data = self.stream.get_epoch_iterator()
expected_data = get_stream().get_epoch_iterator()
for _, s, e in zip(range(3), server_data, expected_data):
for data in zip(s, e):
assert_allclose(*data, rtol=1e-5)
assert_raises(StopIteration, next, server_data)
def test_value_error_on_request(self):
assert_raises(ValueError, self.stream.get_data, [0, 1])
def test_close(self):
self.stream.close()
def test_next_epoch(self):
self.stream.next_epoch()
def test_reset(self):
self.stream.reset()
| from multiprocessing import Process
from numpy.testing import assert_allclose, assert_raises
from six.moves import cPickle
from fuel.datasets import MNIST
from fuel.schemes import SequentialScheme
from fuel.server import start_server
from fuel.streams import DataStream, ServerDataStream
def get_stream():
return DataStream(
MNIST(('train',)), iteration_scheme=SequentialScheme(1500, 500))
class TestServer(object):
def setUp(self):
self.server_process = Process(
target=start_server, args=(get_stream(),))
self.server_process.start()
self.stream = ServerDataStream(('f', 't'))
def tearDown(self):
self.server_process.terminate()
self.stream = None
def test_server(self):
server_data = self.stream.get_epoch_iterator()
expected_data = get_stream().get_epoch_iterator()
for _, s, e in zip(range(3), server_data, expected_data):
for data in zip(s, e):
assert_allclose(*data)
assert_raises(StopIteration, next, server_data)
def test_pickling(self):
self.stream = cPickle.loads(cPickle.dumps(self.stream))
server_data = self.stream.get_epoch_iterator()
expected_data = get_stream().get_epoch_iterator()
for _, s, e in zip(range(3), server_data, expected_data):
for data in zip(s, e):
assert_allclose(*data, rtol=1e-5)
assert_raises(StopIteration, next, server_data)
def test_value_error_on_request(self):
assert_raises(ValueError, self.stream.get_data, [0, 1])
def test_close(self):
self.stream.close()
def test_next_epoch(self):
self.stream.next_epoch()
def test_reset(self):
self.stream.reset()
| mit | Python |
025f6c68844101fb45722e7f27dc7854fc1f6065 | Update tables.py | CADTS-Bachelor/playbook | grade-2015/KuangJia/tables.py | grade-2015/KuangJia/tables.py | # encoding:utf8
# Python使用ORM框架操作数据库
from sqlalchemy import Column, Integer, String, DateTime, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Scene(Base):
__tablename__ = 'scene' # 表名
id = Column(Integer, primary_key=True) # 表结构
name = Column(String(20), unique=True)
create_time = Column(DateTime())
destory_time = Column(DateTime())
# 初始化数据库连接:('数据库类型+数据库驱动名称://用户名:口令@机器地址:端口号/数据库名)
engine = create_engine('mysql+mysqlconnector://root:123@localhost:3306/test')
DB_Session = sessionmaker(bind = engine) # 创建DBSession类型:
# 增
session = DB_Session() # 创建session对象:
scene = Scene(id=1, name = 'xilming', create_time = '2015-12-21 12:20:21',destory_time = '2015-12-21 12:20:54') # 创建新Scene对象:
session.add(scene) # 添加到session:
session.commit() # 提交即保存到数据库:
session.close() # 关闭session:
# 查
session = DB_Session()
scene_info = session.query(Scene).filter(Scene.id == 1).one()
print(scene_info.name)
session.close()
'''
# 改
session = DB_Session()
session.query(Scene).filter(Scene.id == 1).update({'name': 'xinxin'})
session.commit()
session.close()
# # 删
session = DB_Session()
session.query(Scene).filter(Scene.id == 1).delete()
session.commit()
session.close()
'''
| # encoding:utf8
# Python使用ORM框架操作数据库
from sqlalchemy import Column, Integer, String, DateTime, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Scene(Base):
__tablename__ = 'scene' # 表名
id = Column(Integer, primary_key=True) # 表结构
name = Column(String(20), unique=True)
create_time = Column(DateTime())
destory_time = Column(DateTime())
# 初始化数据库连接:('数据库类型+数据库驱动名称://用户名:口令@机器地址:端口号/数据库名)
engine = create_engine('mysql+mysqlconnector://root:123@localhost:3306/test')
DB_Session = sessionmaker(bind = engine) # 创建DBSession类型:
# 增
session = DB_Session() # 创建session对象:
scene = Scene(id=1, name = 'xilming', create_time = '2015-12-21 12:20:21',destory_time = '2015-12-21 12:20:54') # 创建新Scene对象:
session.add(scene) # 添加到session:
session.commit() # 提交即保存到数据库:
session.close() # 关闭session:
# 查, destory_time = '2015-12-21 12:20:54'
session = DB_Session()
scene_info = session.query(Scene).filter(Scene.id == 1).one()
print(scene_info.name)
session.close()
'''# 改
session = DB_Session()
session.query(Scene).filter(Scene.id == 1).update({'name': 'xinxin'})
session.commit()
session.close()
# # 删
session = DB_Session()
session.query(Scene).filter(Scene.id == 1).delete()
session.commit()
session.close()
'''
| mit | Python |
127a707c7e77f0a1fa8066bc24099addb1f429d2 | Update version for release | great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations | great_expectations/version.py | great_expectations/version.py | __version__ = "0.7.6"
| __version__ = "0.7.6__develop"
| apache-2.0 | Python |
c9d4dd153b582470216bc9e01b55753876136fec | add another test since i'm paranoid | spotify/annoy,tjrileywisc/annoy,eddelbuettel/annoy,spotify/annoy,eddelbuettel/annoy,eddelbuettel/annoy,tjrileywisc/annoy,tjrileywisc/annoy,eddelbuettel/annoy,spotify/annoy,spotify/annoy,tjrileywisc/annoy | test/holes_test.py | test/holes_test.py | # Copyright (c) 2013 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import numpy
import random
from common import TestCase
from annoy import AnnoyIndex
class HolesTest(TestCase):
def test_root_one_child(self):
# See https://github.com/spotify/annoy/issues/223
f = 10
index = AnnoyIndex(f)
index.add_item(1000, numpy.random.normal(size=(f,)))
index.build(10)
js = index.get_nns_by_vector(numpy.random.normal(size=(f,)), 100)
self.assertEquals(js, [1000])
def test_many_holes(self):
f = 10
index = AnnoyIndex(f)
valid_indices = random.sample(range(2000), 1000) # leave holes
for i in valid_indices:
v = numpy.random.normal(size=(f,))
index.add_item(i, v)
index.build(10)
for i in valid_indices:
js = index.get_nns_by_item(i, 10000)
for j in js:
self.assertTrue(j in valid_indices)
for i in range(1000):
v = numpy.random.normal(size=(f,))
js = index.get_nns_by_vector(v, 10000)
for j in js:
self.assertTrue(j in valid_indices)
def test_holes_many_children(self):
# See https://github.com/spotify/annoy/issues/295
f, base_i, n = 100, 100000, 10
annoy = AnnoyIndex(f)
for i in range(n):
annoy.add_item(base_i + i, numpy.random.normal(size=(f,)))
annoy.build(100)
res = annoy.get_nns_by_item(base_i, n)
self.assertEquals(set(res), set([base_i + i for i in range(n)]))
def test_root_two_children(self):
# See https://github.com/spotify/annoy/issues/223
f = 10
index = AnnoyIndex(f)
index.add_item(1000, numpy.random.normal(size=(f,)))
index.add_item(1001, numpy.random.normal(size=(f,)))
index.build(10)
js = index.get_nns_by_vector(numpy.random.normal(size=(f,)), 100)
self.assertEquals(set(js), set([1000, 1001]))
| # Copyright (c) 2013 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import numpy
import random
from common import TestCase
from annoy import AnnoyIndex
class HolesTest(TestCase):
def test_holes(self):
# See https://github.com/spotify/annoy/issues/223
f = 10
index = AnnoyIndex(f)
index.add_item(1000, numpy.random.normal(size=(f,)))
index.build(10)
js = index.get_nns_by_vector(numpy.random.normal(size=(f,)), 100)
self.assertEquals(js, [1000])
def test_holes_more(self):
f = 10
index = AnnoyIndex(f)
valid_indices = random.sample(range(2000), 1000) # leave holes
for i in valid_indices:
v = numpy.random.normal(size=(f,))
index.add_item(i, v)
index.build(10)
for i in valid_indices:
js = index.get_nns_by_item(i, 10000)
for j in js:
self.assertTrue(j in valid_indices)
for i in range(1000):
v = numpy.random.normal(size=(f,))
js = index.get_nns_by_vector(v, 10000)
for j in js:
self.assertTrue(j in valid_indices)
def test_holes_even_more(self):
# See https://github.com/spotify/annoy/issues/295
annoy = AnnoyIndex(100)
base_i, n = 100000, 10
for i in range(n):
annoy.add_item(base_i + i, [random.gauss(0, 1) for z in range(100)])
annoy.build(100)
res = annoy.get_nns_by_item(base_i, n)
self.assertEquals(set(res), set([base_i + i for i in range(n)]))
| apache-2.0 | Python |
4ffa677f0c50d535136d983820b48c39e125b447 | Fix anything tests didn't pick up | apache/cloudstack-gcestack | gstack/controllers/regions.py | gstack/controllers/regions.py | #!/usr/bin/env python
# encoding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from gstack import app
from gstack import helpers
from gstack import controllers
from gstack import authentication
from flask import request, url_for
def _cloudstack_account_to_gce(cloudstack_response):
response = {}
response['kind'] = 'compute#region'
response['description'] = cloudstack_response['name']
response['name'] = cloudstack_response['name']
response['id'] = cloudstack_response['id']
response['status'] = 'UP'
return response
@app.route('/compute/v1/projects/<projectid>/regions', methods=['GET'])
@authentication.required
def listregions(projectid, authorization):
args = {'command': 'listRegions'}
kwargs = {}
items = controllers.describe_items(
authorization, args, 'region',
_cloudstack_account_to_gce, **kwargs)
populated_response = {
'kind': 'compute#regionList',
'id': 'projects/' + projectid + '/regions',
'selfLink': request.base_url,
'items': items
}
return helpers.create_response(data=populated_response)
@app.route('/compute/v1/projects/<projectid>/regions/<region>', methods=['GET'])
@authentication.required
def getregion(projectid, authorization, region):
func_route = url_for('getregion', projectid=projectid, region=region)
args = {'command': 'listRegions'}
return controllers.get_item_with_name_or_error(
authorization, region, args, 'region', func_route,
_cloudstack_account_to_gce, **{})
| #!/usr/bin/env python
# encoding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from gstack import app
from gstack import helpers
from gstack import controllers
from gstack import authentication
from flask import request, url_for
def _cloudstack_account_to_gce(cloudstack_response):
response = {}
response['kind'] = 'compute#region'
response['description'] = cloudstack_response['name']
response['name'] = cloudstack_response['name']
response['id'] = cloudstack_response['id']
response['status'] = 'UP'
return response
@app.route('/compute/v1/projects/<projectid>/regions', methods=['GET'])
@authentication.required
def listregions(projectid, authorization):
args = {'command': 'listAccounts'}
kwargs = {}
items = controllers.describe_items(
authorization, args, 'account',
_cloudstack_account_to_gce, **kwargs)
populated_response = {
'kind': 'compute#regionList',
'id': 'projects/' + projectid + '/regions',
'selfLink': request.base_url,
'items': items
}
return helpers.create_response(data=populated_response)
@app.route('/compute/v1/projects/<projectid>/regions/<region>', methods=['GET'])
@authentication.required
def getregion(projectid, authorization, region):
func_route = url_for('getregion', projectid=projectid, region=region)
args = {'command': 'listAccounts'}
return controllers.get_item_with_name_or_error(
authorization, region, args, 'account', func_route,
_cloudstack_account_to_gce, **{})
| apache-2.0 | Python |
3106a712d0a61801857813c98d31591b0937777d | Update __init__.py | zsdonghao/tensorlayer,zsdonghao/tensorlayer | tensorlayer/layers/__init__.py | tensorlayer/layers/__init__.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
from .activation import *
from .convolution import *
from .core import *
from .dense import *
from .dropout import *
from .deprecated import *
from .embedding import *
from .extend import *
# from .flow_control import * # remove for TF 2.0
from .image_resampling import *
from .inputs import *
from .lambda_layers import *
from .merge import *
from .noise import *
from .normalization import *
from .padding import *
from .pooling import *
from .quantize import *
# from .reconstruction import * # remove for TF 2.0
from .recurrent import *
from .scale import *
from .shape import *
from .spatial_transformer import *
from .stack import *
# from .time_distribution import * # remove for TF 2.0
from .utils import *
| #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
TensorLayer provides rich layer implementations trailed for
various benchmarks and domain-specific problems. In addition, we also
support transparent access to native TensorFlow parameters.
For example, we provide not only layers for local response normalization, but also
layers that allow user to apply ``tf.nn.lrn`` on ``network.outputs``.
More functions can be found in `TensorFlow API <https://www.tensorflow.org/versions/master/api_docs/index.html>`__.
"""
from .activation import *
from .convolution import *
from .core import *
from .dense import *
from .dropout import *
from .deprecated import *
from .embedding import *
from .extend import *
# from .flow_control import * # remove for TF 2.0
from .image_resampling import *
from .inputs import *
from .lambda_layers import *
from .merge import *
from .noise import *
from .normalization import *
from .padding import *
from .pooling import *
from .quantize import *
# from .reconstruction import * # remove for TF 2.0
from .recurrent import *
from .scale import *
from .shape import *
from .spatial_transformer import *
from .stack import *
# from .time_distribution import * # remove for TF 2.0
from .utils import *
| apache-2.0 | Python |
200db2db83d2765413ad62ec43087deee0c1cf6e | Add trailing newline | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | app/soc/models/__init__.py | app/soc/models/__init__.py | #
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains Melange models."""
| #
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains Melange models.""" | apache-2.0 | Python |
b26af5f69b310e35dd871d4d9a7a3b0ed67cf870 | add another stats test | pombredanne/lsh-hdc,escherba/lsh-hdc,escherba/lsh-hdc,escherba/lsh-hdc,pombredanne/lsh-hdc,escherba/lsh-hdc,pombredanne/lsh-hdc,pombredanne/lsh-hdc,pombredanne/lsh-hdc,escherba/lsh-hdc | test/test_stats.py | test/test_stats.py | __author__ = 'escherba'
import unittest
from lsh.stats import UncertaintySummarizer, VarianceSummarizer
from collections import Counter
class MyTestCase(unittest.TestCase):
def test_uncertainty_index(self):
"""
Example from Manning et al. Introduction to Information Retrieval.
CUP. 2009, p. 357.
"""
summ = UncertaintySummarizer()
summ.add_object(Counter({'x': 5, 'o': 1}), 6)
summ.add_object(Counter({'x': 1, 'o': 4, 'v': 1}), 6)
summ.add_object(Counter({'x': 2, 'v': 3}), 5)
self.assertAlmostEqual(summ.get_summary(), 0.370949657022)
def test_explained_variance(self):
"""
Example from Manning et al. Introduction to Information Retrieval.
CUP. 2009, p. 357.
"""
summ1 = VarianceSummarizer()
summ1.add_object([1, 2, 3])
summ1.add_object([12, 13, 14])
summ1.add_object([23, 24, 25])
var1 = summ1.get_summary()
summ2 = VarianceSummarizer()
summ2.add_object([1, 2, 3])
summ2.add_object([2, 3, 4])
summ2.add_object([3, 4, 5])
var2 = summ2.get_summary()
self.assertLess(var2, var1)
if __name__ == '__main__':
unittest.main()
| __author__ = 'escherba'
import unittest
from lsh.stats import UncertaintySummarizer
from collections import Counter
class MyTestCase(unittest.TestCase):
def test_uindex(self):
"""
Example from Manning et al. Introduction to Information Retrieval.
CUP. 2009, p. 357.
"""
uindex = UncertaintySummarizer()
uindex.add_object(Counter({'x': 5, 'o': 1}), 6)
uindex.add_object(Counter({'x': 1, 'o': 4, 'v': 1}), 6)
uindex.add_object(Counter({'x': 2, 'v': 3}), 5)
self.assertAlmostEqual(uindex.get_summary(), 0.370949657022)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
2bde9f7ef0280b0320d8bf4431c636d7ac4f67e3 | Update version number | SimonSapin/tinycss2 | tinycss2/__init__.py | tinycss2/__init__.py | """
tinycss2
========
tinycss2 is a low-level CSS parser and generator: it can parse strings, return
Python objects representing tokens and blocks, and generate CSS strings
corresponding to these objects.
"""
from .bytes import parse_stylesheet_bytes # noqa
from .parser import ( # noqa
parse_declaration_list, parse_one_component_value, parse_one_declaration,
parse_one_rule, parse_rule_list, parse_stylesheet)
from .serializer import serialize, serialize_identifier # noqa
from .tokenizer import parse_component_value_list # noqa
VERSION = __version__ = '1.1.0'
| """
tinycss2
========
tinycss2 is a low-level CSS parser and generator: it can parse strings, return
Python objects representing tokens and blocks, and generate CSS strings
corresponding to these objects.
"""
from .bytes import parse_stylesheet_bytes # noqa
from .parser import ( # noqa
parse_declaration_list, parse_one_component_value, parse_one_declaration,
parse_one_rule, parse_rule_list, parse_stylesheet)
from .serializer import serialize, serialize_identifier # noqa
from .tokenizer import parse_component_value_list # noqa
VERSION = __version__ = '1.0.2'
| bsd-3-clause | Python |
12ce300c3b0101ec75ba805bd5a577e191a5b8fc | fix relation for simple deployment test | juju-solutions/layer-apache-kafka | tests/01-deploy.py | tests/01-deploy.py | #!/usr/bin/env python3
import unittest
import amulet
class TestDeploy(unittest.TestCase):
"""
Trivial deployment test for Apache Kafka.
This charm cannot do anything useful by itself, so integration testing
is done in the bundle.
"""
@classmethod
def setUpClass(cls):
cls.d = amulet.Deployment(series='trusty')
cls.d.add('kafka', 'apache-kafka')
cls.d.add('zookeeper', 'apache-zookeeper')
cls.d.relate('kafka:zookeeper', 'zookeeper:zkclient')
cls.d.setup(timeout=900)
cls.d.sentry.wait(timeout=1800)
cls.unit = cls.d.sentry['kafka'][0]
def test_deploy(self):
output, retcode = self.unit.run("pgrep -a java")
assert 'Kafka' in output, "Kafka daemon is not started"
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python3
import unittest
import amulet
class TestDeploy(unittest.TestCase):
"""
Trivial deployment test for Apache Kafka.
This charm cannot do anything useful by itself, so integration testing
is done in the bundle.
"""
@classmethod
def setUpClass(cls):
cls.d = amulet.Deployment(series='trusty')
cls.d.add('kafka', 'apache-kafka')
cls.d.add('zookeeper', 'apache-zookeeper')
cls.d.relate('kafka:zookeeper', 'zookeeper:zookeeper')
cls.d.setup(timeout=900)
cls.d.sentry.wait(timeout=1800)
cls.unit = cls.d.sentry['kafka'][0]
def test_deploy(self):
output, retcode = self.unit.run("pgrep -a java")
assert 'Kafka' in output, "Kafka daemon is not started"
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
71c837c2792e6b30c5b93a4848b8b6a3c0ca0308 | use parallel in progressbar.py | alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl | tests/misc/progressbar.py | tests/misc/progressbar.py | #!/usr/bin/env python
# Tai Sakuma <tai.sakuma@gmail.com>
from __future__ import print_function
import sys
import time, random
import uuid
import argparse
import alphatwirl
##__________________________________________________________________||
parser = argparse.ArgumentParser()
parser.add_argument('--parallel-mode', default='multiprocessing', choices=['multiprocessing', 'subprocess', 'htcondor'], help='mode for concurrency')
parser.add_argument('-p', '--process', default=16, type=int, help='number of processes to run in parallel')
parser.add_argument('-q', '--quiet', default=False, action='store_true', help='quiet mode')
args = parser.parse_args()
##__________________________________________________________________||
class Task(object):
def __init__(self, name):
self.name = name
def __call__(self):
progressReporter = alphatwirl.progressbar.progressReporter
n = random.randint(5, 1000000)
taskid = uuid.uuid4()
time.sleep(random.randint(0, 3))
for i in range(n):
time.sleep(0.0001)
report = alphatwirl.progressbar.ProgressReport(name=self.name, done=i + 1, total=n, taskid=taskid)
progressReporter.report(report)
return None
##__________________________________________________________________||
parallel = alphatwirl.parallel.build_parallel(
parallel_mode=args.parallel_mode,
quiet=args.quiet,
processes=args.process
)
##__________________________________________________________________||
parallel.begin()
parallel.communicationChannel.put(Task("loop"))
parallel.communicationChannel.put(Task("another loop"))
parallel.communicationChannel.put(Task("more loop"))
parallel.communicationChannel.put(Task("loop loop loop"))
parallel.communicationChannel.put(Task("l"))
parallel.communicationChannel.put(Task("loop6"))
parallel.communicationChannel.put(Task("loop7"))
parallel.communicationChannel.put(Task("loop8"))
parallel.communicationChannel.put(Task("loop6"))
parallel.communicationChannel.receive()
parallel.end()
##__________________________________________________________________||
| #!/usr/bin/env python
# Tai Sakuma <tai.sakuma@gmail.com>
from __future__ import print_function
import sys
import time, random
import uuid
import alphatwirl
##__________________________________________________________________||
class Task(object):
def __init__(self, name):
self.name = name
def __call__(self):
progressReporter = alphatwirl.progressbar.progressReporter
n = random.randint(5, 1000000)
taskid = uuid.uuid4()
time.sleep(random.randint(0, 3))
for i in range(n):
time.sleep(0.0001)
report = alphatwirl.progressbar.ProgressReport(name=self.name, done=i + 1, total=n, taskid=taskid)
progressReporter.report(report)
return None
##__________________________________________________________________||
progressBar = alphatwirl.progressbar.ProgressBar() if sys.stdout.isatty() else alphatwirl.progressbar.ProgressPrint()
##__________________________________________________________________||
progressMonitor = alphatwirl.progressbar.BProgressMonitor(presentation=progressBar)
dropbox = alphatwirl.concurrently.MultiprocessingDropbox(nprocesses=10, progressMonitor=progressMonitor)
channel = alphatwirl.concurrently.CommunicationChannel(dropbox)
progressMonitor.begin()
channel.begin()
channel.put(Task("loop"))
channel.put(Task("another loop"))
channel.put(Task("more loop"))
channel.put(Task("loop loop loop"))
channel.put(Task("l"))
channel.put(Task("loop6"))
channel.put(Task("loop7"))
channel.put(Task("loop8"))
channel.put(Task("loop6"))
channel.receive()
channel.end()
progressMonitor.end()
##__________________________________________________________________||
| bsd-3-clause | Python |
0d0a081a8b37066b2000cac3395d59e2cc7b29e1 | Add another smoke test. | chebee7i/buhmm | buhmm/tests/test_smoke.py | buhmm/tests/test_smoke.py | """
Smoke tests.
"""
from nose.tools import *
from nose import SkipTest
import dit
import buhmm
import numpy as np
class TestSmoke:
def setUp(cls):
global machines
try:
from cmpy import machines
except ImportError:
raise SkipTest('cmpy not available')
def test_pm_uhmm(self):
m = machines.Even()
d = '011110'
x = buhmm.Infer(m, d)
xnew = x.get_updated_prior()
uhmm1 = x.posterior.pm_uhmm('A')
uhmm2 = xnew.posterior.pm_uhmm('A')
np.testing.assert_almost_equal(uhmm1, uhmm2)
def test_predictive_probability(self):
m = machines.Even()
d = '11111'
x = buhmm.Infer(m, d)
m2A = x.pm_machine('A')
fnode = m2A.graph['final_node']
assert_equal(fnode, 'B')
w = '0'
p2A = m2A.probability(w, start=fnode)
assert_almost_equal(p2A, x.predictive_probability(w, 'A'))
w = '1'
p2A = m2A.probability(w, start=fnode)
assert_almost_equal(p2A, x.predictive_probability(w, 'A'))
m2B = x.pm_machine('B')
fnode = m2B.graph['final_node']
assert_equal(fnode, 'A')
w = '0'
p2B = m2B.probability(w, start=fnode)
assert_almost_equal(p2B, x.predictive_probability(w, 'B'))
w = '1'
p2B = m2B.probability(w, start=fnode)
assert_almost_equal(p2B, x.predictive_probability(w, 'B'))
def test_infer(self):
m = machines.Even()
m.prng.seed(0)
d = m.symbols(100)
x = buhmm.Infer(m, d)
evid = x.log_evidence()
assert_almost_equal(evid, -68.165400496389665)
pred = x.predictive_probability('1011')
assert_almost_equal(pred, -2.0372080704707334)
| """
Smoke tests.
"""
from nose.tools import *
from nose import SkipTest
import dit
import buhmm
import numpy as np
class TestSmoke:
def setUp(cls):
global machines
try:
from cmpy import machines
except ImportError:
raise SkipTest('cmpy not available')
def test_pm_uhmm(self):
m = machines.Even()
d = '011110'
x = buhmm.Infer(m, d)
xnew = x.get_updated_prior()
uhmm1 = x.posterior.pm_uhmm('A')
uhmm2 = xnew.posterior.pm_uhmm('A')
np.testing.assert_almost_equal(uhmm1, uhmm2)
def test_predictive_probability(self):
m = machines.Even()
d = '11111'
x = buhmm.Infer(m, d)
m2A = x.pm_machine('A')
fnode = m2A.graph['final_node']
assert_equal(fnode, 'B')
w = '0'
p2A = m2A.probability(w, start=fnode)
assert_almost_equal(p2A, x.predictive_probability(w, 'A'))
w = '1'
p2A = m2A.probability(w, start=fnode)
assert_almost_equal(p2A, x.predictive_probability(w, 'A'))
m2B = x.pm_machine('B')
fnode = m2B.graph['final_node']
assert_equal(fnode, 'A')
w = '0'
p2B = m2B.probability(w, start=fnode)
assert_almost_equal(p2B, x.predictive_probability(w, 'B'))
w = '1'
p2B = m2B.probability(w, start=fnode)
assert_almost_equal(p2B, x.predictive_probability(w, 'B'))
| mit | Python |
4a861a9a3a6070c3a9016f788cbda521e11da3d5 | remove test_resolve() test since resolving schema inheritance is now built in to the loadDocument() function when called on schemas; update other calls to work with the latest API | xgds/xgds_planner2,xgds/xgds_planner2,xgds/xgds_planner2,xgds/xgds_planner2 | xgds_planner2/xpjsonTest.py | xgds_planner2/xpjsonTest.py | #!/usr/bin/env python
#__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
"""
Test xpjson.py.
"""
import unittest
import os
from xgds_planner2 import xpjson
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
SCHEMA_PATH = os.path.join(THIS_DIR, 'xpjsonSpec', 'examplePlanSchema.json')
PLAN_PATH = os.path.join(THIS_DIR, 'xpjsonSpec', 'examplePlan.json')
LIBRARY_PATH = os.path.join(THIS_DIR, 'xpjsonSpec', 'examplePlanLibrary.json')
class XpjsonTest(unittest.TestCase):
def test_schema(self):
_schema = xpjson.loadDocument(SCHEMA_PATH)
def test_plan(self):
schema = xpjson.loadDocument(SCHEMA_PATH)
_plan = xpjson.loadDocument(PLAN_PATH, schema=schema)
def test_library(self):
schema = xpjson.loadDocument(SCHEMA_PATH)
_library = xpjson.loadDocument(LIBRARY_PATH, schema=schema)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
#__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
"""
Test xpjson.py.
"""
import unittest
import os
from xgds_planner2 import xpjson
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
SCHEMA_PATH = os.path.join(THIS_DIR, 'xpjsonSpec', 'examplePlanSchema.json')
PLAN_PATH = os.path.join(THIS_DIR, 'xpjsonSpec', 'examplePlan.json')
LIBRARY_PATH = os.path.join(THIS_DIR, 'xpjsonSpec', 'examplePlanLibrary.json')
class XpjsonTest(unittest.TestCase):
def test_resolve(self):
schemaDict = xpjson.loadDocument(SCHEMA_PATH)
xpjson.resolveSchemaInheritance(schemaDict)
def test_schema(self):
_schema = xpjson.PlanSchema(xpjson.loadDocument(SCHEMA_PATH))
def test_plan(self):
schema = xpjson.PlanSchema(xpjson.loadDocument(SCHEMA_PATH))
_plan = xpjson.Plan(xpjson.loadDocument(PLAN_PATH), schema=schema)
def test_library(self):
schema = xpjson.PlanSchema(xpjson.loadDocument(SCHEMA_PATH))
_plan = xpjson.PlanLibrary(xpjson.loadDocument(LIBRARY_PATH), schema=schema)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
705bcf72367120ac9ffe063c3a0d244f432c6a92 | fix warning message | thinkopensolutions/tkobr-addons,thinkopensolutions/tkobr-addons,thinkopensolutions/tkobr-addons,thinkopensolutions/tkobr-addons | tko_contract_invoicing_product_effective_date/models/account_analytic_account.py | tko_contract_invoicing_product_effective_date/models/account_analytic_account.py | # -*- encoding: utf-8 -*-
from odoo import models, fields, api, _
from odoo.exceptions import Warning
import logging
_logger = logging.getLogger(__name__)
class AccountAnalyticAccont(models.Model):
_inherit = 'account.analytic.account'
end_date = fields.Date(u'End Date')
def _validate_invoice_creating(self):
for line in self.recurring_invoice_line_ids:
if line.start_date <= self.recurring_next_date and line.end_date >= self.recurring_next_date:
return True
return False
# Add invoce lines only Contract's Date of Next Invoice
# falls in the range of start and end date of line
@api.multi
def _create_invoice(self):
if self._validate_invoice_creating():
self.ensure_one()
invoice_vals = self._prepare_invoice()
invoice = self.env['account.invoice'].create(invoice_vals)
for line in self.recurring_invoice_line_ids:
if line.start_date <= self.recurring_next_date and line.end_date >= self.recurring_next_date:
invoice_line_vals = self._prepare_invoice_line(line, invoice.id)
self.env['account.invoice.line'].create(invoice_line_vals)
invoice.compute_taxes()
return invoice
else:
code = self.code or ''
name = self.name or ''
contract_name = "[{}] %s".format(code) %name
raise Warning(u"No Invoice to be created on date %s for contract %s" % (
self.recurring_next_date, contract_name ))
class AccountAnalyticInvoiceLine(models.Model):
_inherit = 'account.analytic.invoice.line'
start_date = fields.Date(u'Start Date')
end_date = fields.Date(u'End Date')
@api.multi
@api.constrains('start_date', 'end_date')
def validate_start_end_date(self):
for record in self:
if record.start_date < record.analytic_account_id.date_start:
raise Warning("Start date must be greater than %s" % record.analytic_account_id.date_start)
if record.end_date > record.analytic_account_id.end_date:
raise Warning("End date must be less than %s" % record.analytic_account_id.end_date)
| # -*- encoding: utf-8 -*-
from odoo import models, fields, api, _
from odoo.exceptions import Warning
import logging
_logger = logging.getLogger(__name__)
class AccountAnalyticAccont(models.Model):
_inherit = 'account.analytic.account'
end_date = fields.Date(u'End Date')
def _validate_invoice_creating(self):
for line in self.recurring_invoice_line_ids:
if line.start_date <= self.recurring_next_date and line.end_date >= self.recurring_next_date:
return True
return False
# Add invoce lines only Contract's Date of Next Invoice
# falls in the range of start and end date of line
@api.multi
def _create_invoice(self):
if self._validate_invoice_creating():
self.ensure_one()
invoice_vals = self._prepare_invoice()
invoice = self.env['account.invoice'].create(invoice_vals)
for line in self.recurring_invoice_line_ids:
if line.start_date <= self.recurring_next_date and line.end_date >= self.recurring_next_date:
invoice_line_vals = self._prepare_invoice_line(line, invoice.id)
self.env['account.invoice.line'].create(invoice_line_vals)
invoice.compute_taxes()
return invoice
else:
contract_name = '[' + self.code or ' ' + '] ' + self.name
raise Warning(u"No Invoice to be created on date %s for contract %s" % (
self.recurring_next_date, contract_name ))
class AccountAnalyticInvoiceLine(models.Model):
_inherit = 'account.analytic.invoice.line'
start_date = fields.Date(u'Start Date')
end_date = fields.Date(u'End Date')
@api.multi
@api.constrains('start_date', 'end_date')
def validate_start_end_date(self):
for record in self:
if record.start_date < record.analytic_account_id.date_start:
raise Warning("Start date must be greater than %s" % record.analytic_account_id.date_start)
if record.end_date > record.analytic_account_id.end_date:
raise Warning("End date must be less than %s" % record.analytic_account_id.end_date)
| agpl-3.0 | Python |
4a6ab36636039b59fef5f73250c6e54b049f2b9f | Update myglobals.py | NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint | tests/myglobals.py | tests/myglobals.py | DUMP_DIR = 'tests/rdf-metadata'
BASE_URL = 'http://0.0.0.0:8080'
# example catalog, dataset and distributions
URL_PATHS = [
'fdp',
'catalog/pbg-ld',
'dataset/sly-genes',
'dataset/spe-genes',
'dataset/stu-genes',
'distribution/sly-genes-gff',
'distribution/spe-genes-gff',
'distribution/stu-genes-gff'
]
# lookup table: MIME type - file extension pairs
MIME_TYPES = {
'text/turtle' : 'ttl',
'application/rdf+xml' : 'rdf',
'application/ld+json' : 'jsonld',
'application/n-triples' : 'nt'
}
| DUMP_DIR = 'tests/rdf-metadata'
BASE_URL = 'http://127.0.0.1:8080'
# example catalog, dataset and distributions
URL_PATHS = [
'fdp',
'catalog/pbg-ld',
'dataset/sly-genes',
'dataset/spe-genes',
'dataset/stu-genes',
'distribution/sly-genes-gff',
'distribution/spe-genes-gff',
'distribution/stu-genes-gff'
]
# lookup table: MIME type - file extension pairs
MIME_TYPES = {
'text/turtle' : 'ttl',
'application/rdf+xml' : 'rdf',
'application/ld+json' : 'jsonld',
'application/n-triples' : 'nt'
}
| apache-2.0 | Python |
31d66b0884c7e397f8558ba08796e5cccda89c19 | fix merge | opencivicdata/scrapers-ca,opencivicdata/scrapers-ca | ca_nb_moncton/__init__.py | ca_nb_moncton/__init__.py | from __future__ import unicode_literals
from utils import CanadianJurisdiction
from pupa.scrape import Organization
class Moncton(CanadianJurisdiction):
classification = 'legislature'
division_id = 'ocd-division/country:ca/csd:1307022'
division_name = 'Moncton'
name = 'Moncton City Council'
url = 'http://www.moncton.ca'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
organization.add_post(role='Mayor', label='Moncton', division_id=self.division_id)
for i in range(1, 3):
organization.add_post(role='Councillor at Large', label='Moncton (seat {})'.format(i), division_id=self.division_id)
for i in range(1, 5):
for j in range(1, 3):
organization.add_post(role='Councillor', label='Ward {} (seat {})'.format(i, j), division_id='{}/ward:{}'.format(self.division_id, i))
yield organization
| from __future__ import unicode_literals
from utils import CanadianJurisdiction
from pupa.scrape import Organization
class Moncton(CanadianJurisdiction):
classification = 'legislature'
division_id = 'ocd-division/country:ca/csd:1307022'
division_name = 'Moncton'
name = 'Moncton City Council'
url = 'http://www.moncton.ca'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
organization.add_post(role='Mayor', label='Moncton', division_id=self.division_id)
for i in range(1, 3):
organization.add_post(role='Councillor at Large', label='Moncton (seat {})'.format(i), division_id=self.division_id)
for i in range(1, 5):
for j in range(1, 3):
organization.add_post(role='Councillor', label='Ward {} (seat {})'.format(i, j))
yield organization
| mit | Python |
0709b37f36fac67fc80d98f67c03b8fe3ab21893 | include last_run_at and total_run_count when loading stored task data | adblair/celerycontrib.sqlalchemyscheduler | celerycontrib/sqlalchemyscheduler/scheduler.py | celerycontrib/sqlalchemyscheduler/scheduler.py | import json
import celery.beat
import sqlalchemy as sqla
from sqlalchemy import orm
from . import model
class SQLAlchemyScheduler(celery.beat.Scheduler):
database_url = 'sqlite:///data.sqlite'
_session = None
@property
def session(self):
if self._session is None:
engine = sqla.create_engine(self.database_url)
Session = orm.sessionmaker(bind=engine)
self._session = Session()
return self._session
def get_periodic_tasks(self):
for periodic_task in self.session.query(model.PeriodicTask):
yield periodic_task.name, dict(
task=periodic_task.task,
schedule=periodic_task.schedule,
args=json.loads(periodic_task.args or 'null'),
kwargs=json.loads(periodic_task.kwargs or 'null'),
options=dict(
queue=periodic_task.queue,
exchange=periodic_task.exchange,
routing_key=periodic_task.routing_key,
expires=periodic_task.expires,
),
last_run_at=periodic_task.last_run_at,
total_run_count=periodic_task.total_run_count,
)
def setup_schedule(self):
super(SQLAlchemyScheduler, self).setup_schedule()
self.merge_inplace(dict(self.get_periodic_tasks()))
def sync(self):
super(SQLAlchemyScheduler, self).sync()
for name, entry in self.schedule.items():
task = self.session.query(model.PeriodicTask).filter(
model.PeriodicTask.name == name
).first()
if task is not None:
task.last_run_at = entry.last_run_at
task.total_run_count = entry.total_run_count
self.session.commit()
def close(self):
super(SQLAlchemyScheduler, self).close()
if self._session is not None:
self.logger.debug('Closing database connection')
self.session.close()
| import json
import celery.beat
import sqlalchemy as sqla
from sqlalchemy import orm
from . import model
class SQLAlchemyScheduler(celery.beat.Scheduler):
database_url = 'sqlite:///data.sqlite'
_session = None
@property
def session(self):
if self._session is None:
engine = sqla.create_engine(self.database_url)
Session = orm.sessionmaker(bind=engine)
self._session = Session()
return self._session
def get_periodic_tasks(self):
for periodic_task in self.session.query(model.PeriodicTask):
yield periodic_task.name, dict(
task=periodic_task.task,
schedule=periodic_task.schedule,
args=json.loads(periodic_task.args or 'null'),
kwargs=json.loads(periodic_task.kwargs or 'null'),
options=dict(
queue=periodic_task.queue,
exchange=periodic_task.exchange,
routing_key=periodic_task.routing_key,
expires=periodic_task.expires,
),
)
def setup_schedule(self):
super(SQLAlchemyScheduler, self).setup_schedule()
self.merge_inplace(dict(self.get_periodic_tasks()))
def sync(self):
super(SQLAlchemyScheduler, self).sync()
for name, entry in self.schedule.items():
task = self.session.query(model.PeriodicTask).filter(
model.PeriodicTask.name == name
).first()
if task is not None:
task.last_run_at = entry.last_run_at
task.total_run_count = entry.total_run_count
self.session.commit()
def close(self):
super(SQLAlchemyScheduler, self).close()
if self._session is not None:
self.logger.debug('Closing database connection')
self.session.close()
| mit | Python |
d88cb7e63bb6b0ef03b3a2b0388a4ec05d39a057 | add pep exception | DrDos0016/z2,DrDos0016/z2,DrDos0016/z2 | tools/_blank-tool.py | tools/_blank-tool.py | import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from django.contrib.auth.models import User # noqa: E402
from museum_site.models import * # noqa: E402
def main():
return True
if __name__ == '__main__':
main()
| import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from django.contrib.auth.models import User
from museum_site.models import * # noqa: E402
def main():
return True
if __name__ == '__main__':
main()
| mit | Python |
5022f61e5eed4d1bb5f8ec491784eb06f168e1ed | Use temporary eclipse workspace directories for DartEditor tests | dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk | tools/bots/editor.py | tools/bots/editor.py | #!/usr/bin/python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import shutil
import sys
import tempfile
import bot
class TempDir(object):
def __enter__(self):
self._temp_dir = tempfile.mkdtemp('eclipse-workspace')
return self._temp_dir
def __exit__(self, *_):
shutil.rmtree(self._temp_dir, ignore_errors = True)
def GetEditorExecutable(mode, arch):
configuration_dir = mode + arch.upper()
linux_path = os.path.join('out', configuration_dir, 'editor')
win_path = os.path.join('build', configuration_dir, 'editor')
mac_path = os.path.join('xcodebuild', configuration_dir, 'editor')
if sys.platform == 'darwin':
executable = os.path.join('DartEditor.app', 'Contents', 'MacOS',
'DartEditor')
# TODO(kustermann,ricow): Maybe we're able to get rid of this in the future.
# We use ninja on bots which use out/ instead of xcodebuild/
if os.path.exists(linux_path) and os.path.isdir(linux_path):
return os.path.join(linux_path, executable)
else:
return os.path.join(mac_path, executable)
elif sys.platform == 'win32':
return os.path.join(win_path, 'DartEditor.exe')
elif sys.platform == 'linux2':
return os.path.join(linux_path, 'DartEditor')
else:
raise Exception('Unknown platform %s' % sys.platform)
def main():
build_py = os.path.join('tools', 'build.py')
architectures = ['ia32', 'x64']
test_architectures = ['x64']
if sys.platform == 'win32':
# Our windows bots pull in only a 32 bit JVM.
test_architectures = ['ia32']
for arch in architectures:
with bot.BuildStep('Build Editor %s' % arch):
args = [sys.executable, build_py,
'-mrelease', '--arch=%s' % arch, 'editor']
print 'Running: %s' % (' '.join(args))
sys.stdout.flush()
bot.RunProcess(args)
for arch in test_architectures:
editor_executable = GetEditorExecutable('Release', arch)
with bot.BuildStep('Test Editor %s' % arch):
with TempDir() as temp_dir:
args = [editor_executable, '--test', '--auto-exit', '-data', temp_dir]
print 'Running: %s' % (' '.join(args))
sys.stdout.flush()
bot.RunProcess(args)
return 0
if __name__ == '__main__':
try:
sys.exit(main())
except OSError as e:
sys.exit(e.errno)
| #!/usr/bin/python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import sys
import bot
def GetEditorExecutable(mode, arch):
configuration_dir = mode + arch.upper()
linux_path = os.path.join('out', configuration_dir, 'editor')
win_path = os.path.join('build', configuration_dir, 'editor')
mac_path = os.path.join('xcodebuild', configuration_dir, 'editor')
if sys.platform == 'darwin':
executable = os.path.join('DartEditor.app', 'Contents', 'MacOS',
'DartEditor')
# TODO(kustermann,ricow): Maybe we're able to get rid of this in the future.
# We use ninja on bots which use out/ instead of xcodebuild/
if os.path.exists(linux_path) and os.path.isdir(linux_path):
return os.path.join(linux_path, executable)
else:
return os.path.join(mac_path, executable)
elif sys.platform == 'win32':
return os.path.join(win_path, 'DartEditor.exe')
elif sys.platform == 'linux2':
return os.path.join(linux_path, 'DartEditor')
else:
raise Exception('Unknown platform %s' % sys.platform)
def main():
build_py = os.path.join('tools', 'build.py')
architectures = ['ia32', 'x64']
test_architectures = ['x64']
if sys.platform == 'win32':
# Our windows bots pull in only a 32 bit JVM.
test_architectures = ['ia32']
for arch in architectures:
with bot.BuildStep('Build Editor %s' % arch):
args = [sys.executable, build_py,
'-mrelease', '--arch=%s' % arch, 'editor']
print 'Running: %s' % (' '.join(args))
sys.stdout.flush()
bot.RunProcess(args)
for arch in test_architectures:
editor_executable = GetEditorExecutable('Release', arch)
with bot.BuildStep('Test Editor %s' % arch):
args = [editor_executable, '--test', '--auto-exit']
print 'Running: %s' % (' '.join(args))
sys.stdout.flush()
bot.RunProcess(args)
return 0
if __name__ == '__main__':
try:
sys.exit(main())
except OSError as e:
sys.exit(e.errno)
| bsd-3-clause | Python |
2b2ace4723c7fec68866ce2f74a85a94e5dd6ad3 | Change expiration test times a little bit to pass under valgrind. | mquandalle/rethinkdb,scripni/rethinkdb,sontek/rethinkdb,gavioto/rethinkdb,victorbriz/rethinkdb,yakovenkodenis/rethinkdb,pap/rethinkdb,scripni/rethinkdb,Wilbeibi/rethinkdb,nviennot/rethinkdb,elkingtonmcb/rethinkdb,RubenKelevra/rethinkdb,bchavez/rethinkdb,matthaywardwebdesign/rethinkdb,losywee/rethinkdb,marshall007/rethinkdb,nviennot/rethinkdb,yakovenkodenis/rethinkdb,marshall007/rethinkdb,sbusso/rethinkdb,AtnNn/rethinkdb,marshall007/rethinkdb,bpradipt/rethinkdb,bpradipt/rethinkdb,losywee/rethinkdb,alash3al/rethinkdb,dparnell/rethinkdb,robertjpayne/rethinkdb,matthaywardwebdesign/rethinkdb,wojons/rethinkdb,losywee/rethinkdb,Wilbeibi/rethinkdb,RubenKelevra/rethinkdb,mbroadst/rethinkdb,KSanthanam/rethinkdb,Wilbeibi/rethinkdb,grandquista/rethinkdb,grandquista/rethinkdb,losywee/rethinkdb,mbroadst/rethinkdb,nviennot/rethinkdb,lenstr/rethinkdb,Qinusty/rethinkdb,rrampage/rethinkdb,pap/rethinkdb,sebadiaz/rethinkdb,lenstr/rethinkdb,yaolinz/rethinkdb,captainpete/rethinkdb,gdi2290/rethinkdb,greyhwndz/rethinkdb,yaolinz/rethinkdb,victorbriz/rethinkdb,greyhwndz/rethinkdb,sbusso/rethinkdb,AtnNn/rethinkdb,ajose01/rethinkdb,eliangidoni/rethinkdb,mcanthony/rethinkdb,jfriedly/rethinkdb,grandquista/rethinkdb,wojons/rethinkdb,catroot/rethinkdb,wujf/rethinkdb,JackieXie168/rethinkdb,mquandalle/rethinkdb,wojons/rethinkdb,jesseditson/rethinkdb,Wilbeibi/rethinkdb,eliangidoni/rethinkdb,lenstr/rethinkdb,wkennington/rethinkdb,ajose01/rethinkdb,ajose01/rethinkdb,jesseditson/rethinkdb,rrampage/rethinkdb,jesseditson/rethinkdb,captainpete/rethinkdb,captainpete/rethinkdb,pap/rethinkdb,rrampage/rethinkdb,KSanthanam/rethinkdb,4talesa/rethinkdb,yaolinz/rethinkdb,bpradipt/rethinkdb,marshall007/rethinkdb,wujf/rethinkdb,ayumilong/rethinkdb,ajose01/rethinkdb,AtnNn/rethinkdb,captainpete/rethinkdb,gavioto/rethinkdb,mcanthony/rethinkdb,dparnell/rethinkdb,rrampage/rethinkdb,bchavez/rethinkdb,AtnNn/rethin
kdb,mbroadst/rethinkdb,mbroadst/rethinkdb,eliangidoni/rethinkdb,robertjpayne/rethinkdb,AntouanK/rethinkdb,wojons/rethinkdb,elkingtonmcb/rethinkdb,spblightadv/rethinkdb,AntouanK/rethinkdb,sontek/rethinkdb,gavioto/rethinkdb,scripni/rethinkdb,wujf/rethinkdb,bpradipt/rethinkdb,Qinusty/rethinkdb,wojons/rethinkdb,urandu/rethinkdb,captainpete/rethinkdb,wkennington/rethinkdb,niieani/rethinkdb,JackieXie168/rethinkdb,AntouanK/rethinkdb,tempbottle/rethinkdb,catroot/rethinkdb,tempbottle/rethinkdb,urandu/rethinkdb,yaolinz/rethinkdb,sbusso/rethinkdb,gavioto/rethinkdb,marshall007/rethinkdb,bchavez/rethinkdb,matthaywardwebdesign/rethinkdb,wojons/rethinkdb,captainpete/rethinkdb,4talesa/rethinkdb,jfriedly/rethinkdb,captainpete/rethinkdb,victorbriz/rethinkdb,wujf/rethinkdb,jfriedly/rethinkdb,dparnell/rethinkdb,losywee/rethinkdb,bpradipt/rethinkdb,spblightadv/rethinkdb,pap/rethinkdb,urandu/rethinkdb,sbusso/rethinkdb,AntouanK/rethinkdb,dparnell/rethinkdb,RubenKelevra/rethinkdb,bchavez/rethinkdb,mbroadst/rethinkdb,Wilbeibi/rethinkdb,sontek/rethinkdb,alash3al/rethinkdb,matthaywardwebdesign/rethinkdb,dparnell/rethinkdb,dparnell/rethinkdb,4talesa/rethinkdb,bchavez/rethinkdb,marshall007/rethinkdb,alash3al/rethinkdb,dparnell/rethinkdb,KSanthanam/rethinkdb,losywee/rethinkdb,gdi2290/rethinkdb,ayumilong/rethinkdb,jfriedly/rethinkdb,losywee/rethinkdb,rrampage/rethinkdb,nviennot/rethinkdb,jfriedly/rethinkdb,sebadiaz/rethinkdb,jmptrader/rethinkdb,AtnNn/rethinkdb,yakovenkodenis/rethinkdb,matthaywardwebdesign/rethinkdb,pap/rethinkdb,victorbriz/rethinkdb,mbroadst/rethinkdb,mcanthony/rethinkdb,rrampage/rethinkdb,sebadiaz/rethinkdb,Qinusty/rethinkdb,grandquista/rethinkdb,jesseditson/rethinkdb,robertjpayne/rethinkdb,victorbriz/rethinkdb,jfriedly/rethinkdb,JackieXie168/rethinkdb,scripni/rethinkdb,jesseditson/rethinkdb,eliangidoni/rethinkdb,JackieXie168/rethinkdb,jmptrader/rethinkdb,wkennington/rethinkdb,niieani/rethinkdb,elkingtonmcb/rethinkdb,elkingtonmcb/rethinkdb,mbroadst/rethinkdb,bchavez/rethinkdb,gd
i2290/rethinkdb,RubenKelevra/rethinkdb,jesseditson/rethinkdb,KSanthanam/rethinkdb,mquandalle/rethinkdb,jfriedly/rethinkdb,4talesa/rethinkdb,4talesa/rethinkdb,spblightadv/rethinkdb,bchavez/rethinkdb,bpradipt/rethinkdb,lenstr/rethinkdb,yaolinz/rethinkdb,rrampage/rethinkdb,elkingtonmcb/rethinkdb,spblightadv/rethinkdb,jmptrader/rethinkdb,sebadiaz/rethinkdb,sebadiaz/rethinkdb,niieani/rethinkdb,Qinusty/rethinkdb,dparnell/rethinkdb,mquandalle/rethinkdb,JackieXie168/rethinkdb,wujf/rethinkdb,niieani/rethinkdb,lenstr/rethinkdb,sontek/rethinkdb,Qinusty/rethinkdb,niieani/rethinkdb,captainpete/rethinkdb,sebadiaz/rethinkdb,bchavez/rethinkdb,RubenKelevra/rethinkdb,grandquista/rethinkdb,RubenKelevra/rethinkdb,sebadiaz/rethinkdb,greyhwndz/rethinkdb,pap/rethinkdb,yakovenkodenis/rethinkdb,AntouanK/rethinkdb,robertjpayne/rethinkdb,RubenKelevra/rethinkdb,robertjpayne/rethinkdb,ayumilong/rethinkdb,tempbottle/rethinkdb,lenstr/rethinkdb,tempbottle/rethinkdb,urandu/rethinkdb,jmptrader/rethinkdb,yakovenkodenis/rethinkdb,jmptrader/rethinkdb,catroot/rethinkdb,mcanthony/rethinkdb,4talesa/rethinkdb,sontek/rethinkdb,tempbottle/rethinkdb,greyhwndz/rethinkdb,catroot/rethinkdb,lenstr/rethinkdb,ayumilong/rethinkdb,alash3al/rethinkdb,ajose01/rethinkdb,wkennington/rethinkdb,niieani/rethinkdb,urandu/rethinkdb,mquandalle/rethinkdb,Qinusty/rethinkdb,AntouanK/rethinkdb,victorbriz/rethinkdb,grandquista/rethinkdb,tempbottle/rethinkdb,ajose01/rethinkdb,nviennot/rethinkdb,AtnNn/rethinkdb,alash3al/rethinkdb,yaolinz/rethinkdb,nviennot/rethinkdb,greyhwndz/rethinkdb,scripni/rethinkdb,jesseditson/rethinkdb,matthaywardwebdesign/rethinkdb,KSanthanam/rethinkdb,matthaywardwebdesign/rethinkdb,losywee/rethinkdb,wkennington/rethinkdb,bchavez/rethinkdb,lenstr/rethinkdb,Qinusty/rethinkdb,scripni/rethinkdb,wkennington/rethinkdb,yakovenkodenis/rethinkdb,gdi2290/rethinkdb,mbroadst/rethinkdb,jmptrader/rethinkdb,gdi2290/rethinkdb,Qinusty/rethinkdb,gavioto/rethinkdb,victorbriz/rethinkdb,Wilbeibi/rethinkdb,mcanthony/rethinkdb,KSan
thanam/rethinkdb,4talesa/rethinkdb,alash3al/rethinkdb,nviennot/rethinkdb,wujf/rethinkdb,wkennington/rethinkdb,pap/rethinkdb,eliangidoni/rethinkdb,sbusso/rethinkdb,Wilbeibi/rethinkdb,4talesa/rethinkdb,sbusso/rethinkdb,niieani/rethinkdb,wojons/rethinkdb,marshall007/rethinkdb,catroot/rethinkdb,jmptrader/rethinkdb,pap/rethinkdb,gdi2290/rethinkdb,nviennot/rethinkdb,ayumilong/rethinkdb,ajose01/rethinkdb,urandu/rethinkdb,elkingtonmcb/rethinkdb,jesseditson/rethinkdb,greyhwndz/rethinkdb,mquandalle/rethinkdb,bpradipt/rethinkdb,tempbottle/rethinkdb,eliangidoni/rethinkdb,grandquista/rethinkdb,JackieXie168/rethinkdb,elkingtonmcb/rethinkdb,yaolinz/rethinkdb,mquandalle/rethinkdb,AntouanK/rethinkdb,bpradipt/rethinkdb,robertjpayne/rethinkdb,jmptrader/rethinkdb,Qinusty/rethinkdb,bpradipt/rethinkdb,JackieXie168/rethinkdb,yakovenkodenis/rethinkdb,rrampage/rethinkdb,mbroadst/rethinkdb,gavioto/rethinkdb,robertjpayne/rethinkdb,yaolinz/rethinkdb,ajose01/rethinkdb,tempbottle/rethinkdb,sontek/rethinkdb,robertjpayne/rethinkdb,sbusso/rethinkdb,gdi2290/rethinkdb,KSanthanam/rethinkdb,spblightadv/rethinkdb,urandu/rethinkdb,catroot/rethinkdb,JackieXie168/rethinkdb,eliangidoni/rethinkdb,robertjpayne/rethinkdb,wojons/rethinkdb,mcanthony/rethinkdb,AtnNn/rethinkdb,urandu/rethinkdb,marshall007/rethinkdb,jfriedly/rethinkdb,elkingtonmcb/rethinkdb,gavioto/rethinkdb,sebadiaz/rethinkdb,scripni/rethinkdb,ayumilong/rethinkdb,KSanthanam/rethinkdb,matthaywardwebdesign/rethinkdb,scripni/rethinkdb,eliangidoni/rethinkdb,niieani/rethinkdb,catroot/rethinkdb,alash3al/rethinkdb,dparnell/rethinkdb,catroot/rethinkdb,eliangidoni/rethinkdb,greyhwndz/rethinkdb,wkennington/rethinkdb,victorbriz/rethinkdb,sontek/rethinkdb,grandquista/rethinkdb,Wilbeibi/rethinkdb,gavioto/rethinkdb,wujf/rethinkdb,spblightadv/rethinkdb,grandquista/rethinkdb,mquandalle/rethinkdb,sontek/rethinkdb,spblightadv/rethinkdb,AtnNn/rethinkdb,mcanthony/rethinkdb,sbusso/rethinkdb,yakovenkodenis/rethinkdb,mcanthony/rethinkdb,spblightadv/rethinkdb,alash3al/re
thinkdb,ayumilong/rethinkdb,ayumilong/rethinkdb,JackieXie168/rethinkdb,greyhwndz/rethinkdb,AntouanK/rethinkdb,RubenKelevra/rethinkdb | test/integration/expiration.py | test/integration/expiration.py | #!/usr/bin/python
from test_common import *
import time
def test_function(opts, mc):
    # Integration test for key expiration.  `mc` is a memcache-style client
    # (set/get/disconnect_all); `opts` are the parsed command-line options.
    print "Testing set with expiration"
    # Store "a" with a 5-second TTL; the client returns 0 when the set fails.
    if mc.set("a", "aaa", time=5) == 0:
        raise ValueError, "Set failed"
    print "Make sure we can get the element back after short sleep"
    # 1s elapsed: well inside the 5s TTL, the value must still be readable.
    time.sleep(1)
    if mc.get("a") != "aaa":
        raise ValueError("Failure: value can't be found but it's supposed to be")
    print "Make sure the element eventually expires"
    # 1s + 4s = 5s elapsed: the TTL has been reached, so the key must be gone.
    time.sleep(4)
    if mc.get("a") != None:
        raise ValueError("Failure: value should have expired but didn't")
    print "Done"
    mc.disconnect_all()

if __name__ == "__main__":
    # The 7s timeout leaves ~2s of slack over the 5s of sleeps above.
    simple_test_main(test_function, make_option_parser().parse(sys.argv), timeout = 7)
| #!/usr/bin/python
from test_common import *
import time
def test_function(opts, mc):
    # Integration test for key expiration (older timing: 3s + 2s sleeps).
    print "Testing set with expiration"
    # Store "a" with a 5-second TTL; the client returns 0 when the set fails.
    if mc.set("a", "aaa", time=5) == 0:
        raise ValueError, "Set failed"
    print "Make sure we can get the element back after short sleep"
    # NOTE(review): 3s leaves only a 2s margin below the 5s TTL, and the
    # expiry check below lands exactly at the 5s boundary -- this is
    # timing-sensitive on a slow or loaded machine.
    time.sleep(3)
    if mc.get("a") != "aaa":
        raise ValueError("Failure: value can't be found but it's supposed to be")
    print "Make sure the element eventually expires"
    time.sleep(2)
    if mc.get("a") != None:
        raise ValueError("Failure: value should have expired but didn't")
    print "Done"
    mc.disconnect_all()

if __name__ == "__main__":
    # Timeout barely covers the 5s of sleeps plus setup.
    simple_test_main(test_function, make_option_parser().parse(sys.argv), timeout = 7)
| agpl-3.0 | Python |
4feb11086c85ce9b70f59240da638351321ce013 | Fix PEP8 | CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,Secheron/compassion-switzerland,MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland | child_sync_typo3/wizard/child_depart_wizard.py | child_sync_typo3/wizard/child_depart_wizard.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
    """Extend the end-of-sponsorship wizard to keep Typo3 in sync."""
    _inherit = 'end.sponsorship.wizard'

    def child_depart(self, cr, uid, ids, context=None):
        """Remove the departing child from Typo3, then run the standard
        departure workflow.

        Returns the parent's result, or a Typo3 index error marker when
        the parent returned a falsy value.
        """
        wizard = self.browse(cr, uid, ids[0], context)
        child = wizard.child_id
        if child.state == 'I':
            # State 'I' -- presumably "published on Typo3"; confirm
            # against the child model.  The call's return value was
            # previously stored in a variable that was immediately
            # overwritten (dead store), so it is deliberately discarded.
            child.child_remove_from_typo3()
        res = super(end_sponsorship_wizard, self).child_depart(
            cr, uid, ids, context)
        return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
    """Extend the end-of-sponsorship wizard to keep Typo3 in sync."""
    _inherit = 'end.sponsorship.wizard'

    def child_depart(self, cr, uid, ids, context=None):
        # Process the departure for the first selected wizard record.
        wizard = self.browse(cr, uid, ids[0], context)
        child = wizard.child_id
        if child.state == 'I':
            # State 'I' -- presumably "published on Typo3"; confirm
            # against the child model.  NOTE(review): this assignment to
            # `res` is a dead store -- it is overwritten unconditionally
            # just below.
            res = child.child_remove_from_typo3()
        res = super(end_sponsorship_wizard, self).child_depart(
            cr, uid, ids, context)
        # Fall back to a Typo3 index error marker when the parent
        # returned a falsy result.
        return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| agpl-3.0 | Python |
3cedde019466570221555fe292fca3faa11b17a4 | Bump version | sigopt/sigopt-python,sigopt/sigopt-python | sigopt/version.py | sigopt/version.py | VERSION = '2.11.2'
| VERSION = '2.11.1'
| mit | Python |
2761c6f70df6db259fa6be2e70f6ff6b537546f0 | Fix typo | bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old | tools/glidein_gdb.py | tools/glidein_gdb.py | #!/bin/env python
#
# glidein_gdb.py
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_gdb.py <cluster>.<process> <pid> [<command>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Supported gdb commands:
# where (default)
#
# Author:
# Igor Sfiligoi (June 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
def argv_gdb(argv):
    # Build the shell-script lines that run gdb against a process on the
    # execute node.
    #
    # argv[0] is the PID of the process to inspect; argv[1], if present,
    # selects the gdb command (only "where" is supported).  Returns the
    # script as a list of shell lines for the glidein command framework.
    if len(argv)==0:
        raise RuntimeError, "Missing PID"
    pid=argv[0]
    # parse args to get the command
    gdb_cmd="where"
    if len(argv)>1:
        if argv[1]=="where":
            gdb_cmd="where"
        else:
            raise RuntimeError, "Unexpected command %s found!\nOnly where supported."%argv[1]
    # select the lines
    gdbcommand="gdb.command"
    script_lines=[]
    # Write a batch-mode gdb command file via a shell here-document...
    script_lines.append('cat > %s <<EOF'%gdbcommand)
    script_lines.append('set height 0')  # disable gdb output pagination
    script_lines.append(gdb_cmd)
    script_lines.append('quit')
    script_lines.append('EOF')
    # ...then attach gdb to the live process via its /proc/<pid>/exe binary.
    script_lines.append('gdb -command %s /proc/%s/exe %s'%(gdbcommand,pid,pid))
    script_lines.append('rm -f %s'%gdbcommand)  # clean up the command file
    return script_lines

# Hand the script builder to the glidein command framework for execution.
glideinCmd.exe_cmd_script(argv_gdb)
| #!/bin/env python
#
# glidein_gdb.py
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_gdb.py <cluster>.<process> <pid> [<command>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Supported gdb commands:
# watch (default)
#
# Author:
# Igor Sfiligoi (June 2007)
#
# License:
# Fermitools
#
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
def argv_gdb(argv):
    # Build the shell-script lines that run gdb against a process on the
    # execute node.  argv[0] is the PID; argv[1], if present, selects the
    # gdb command (only "watch" accepted here).
    #
    # NOTE(review): gdb's `watch` sets a watchpoint; the backtrace command
    # is `where`/`backtrace`.  Defaulting to a bare `watch` (no expression)
    # looks like a typo for `where` -- confirm intent.
    if len(argv)==0:
        raise RuntimeError, "Missing PID"
    pid=argv[0]
    # parse args to get the command
    gdb_cmd="watch"
    if len(argv)>1:
        if argv[1]=="watch":
            gdb_cmd="watch"
        else:
            raise RuntimeError, "Unexpected command %s found!\nOnly watch supported."%argv[1]
    # select the lines
    gdbcommand="gdb.command"
    script_lines=[]
    # Write a batch-mode gdb command file via a shell here-document...
    script_lines.append('cat > %s <<EOF'%gdbcommand)
    script_lines.append('set height 0')  # disable gdb output pagination
    script_lines.append(gdb_cmd)
    script_lines.append('quit')
    script_lines.append('EOF')
    # ...then attach gdb to the live process via its /proc/<pid>/exe binary.
    script_lines.append('gdb -command %s /proc/%s/exe %s'%(gdbcommand,pid,pid))
    script_lines.append('rm -f %s'%gdbcommand)  # clean up the command file
    return script_lines

# Hand the script builder to the glidein command framework for execution.
glideinCmd.exe_cmd_script(argv_gdb)
| bsd-3-clause | Python |
59533cc8fb516429ef1cf4f9b930a75e5ece336e | Add Lady Liadrin's Reinforce to Justicar Trueheart's map | NightKev/fireplace,Ragowit/fireplace,beheh/fireplace,jleclanche/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,Ragowit/fireplace | fireplace/cards/tgt/neutral_legendary.py | fireplace/cards/tgt/neutral_legendary.py | from ..utils import *
##
# Minions
# Confessor Paletress
class AT_018:
	# Inspire: summon a random legendary minion for the controller.
	inspire = Summon(CONTROLLER, RandomMinion(rarity=Rarity.LEGENDARY))


# Skycap'n Kragg
class AT_070:
	# Costs (1) less for each friendly Pirate in play.
	cost_mod = -Count(FRIENDLY_MINIONS + PIRATE)


# Gormok the Impaler
class AT_122:
	# On play: if at least 4 friendly minions are in play, deal 4 damage
	# to the target.
	play = (Count(FRIENDLY_MINIONS) >= 4) & Hit(TARGET, 4)


# Chillmaw
class AT_123:
	# Deathrattle: if holding a Dragon, deal 3 damage to all minions.
	deathrattle = HOLDING_DRAGON & Hit(ALL_MINIONS, 3)


# Bolf Ramshield
class AT_124:
	# Redirect damage aimed at the friendly hero: zero the hero's
	# predamage and hit this minion for the same amount instead.
	events = Predamage(FRIENDLY_HERO).on(
		Predamage(FRIENDLY_HERO, 0), Hit(SELF, Predamage.AMOUNT)
	)


# Icehowl
class AT_125:
	# Can't attack heroes.
	tags = {GameTag.CANNOT_ATTACK_HEROES: True}


# Nexus-Champion Saraad
class AT_127:
	# Inspire: add a random spell to the controller's hand.
	inspire = Give(CONTROLLER, RandomSpell())


# The Skeleton Knight
class AT_128:
	# Deathrattle: joust; on a win, return this minion to its owner's hand.
	deathrattle = JOUST & Bounce(SELF)


# Fjola Lightbane
class AT_129:
	# When the controller plays a spell on this minion, gain Divine Shield.
	events = Play(CONTROLLER, SPELL, SELF).on(GiveDivineShield(SELF))


# Eydis Darkbane
class AT_131:
	# When the controller plays a spell on this minion, deal 3 damage to
	# a random enemy character.
	events = Play(CONTROLLER, SPELL, SELF).on(Hit(RANDOM_ENEMY_CHARACTER, 3))
# Justicar Trueheart
class AT_132:
	# Maps a basic hero power card id to its upgraded replacement.
	HERO_POWER_MAP = {
		"CS2_017": "AT_132_DRUID",
		"DS1h_292": "AT_132_HUNTER",
		"DS1h_292_H1": "DS1h_292_H1_AT_132",
		"CS2_034": "AT_132_MAGE",
		"CS2_101": "AT_132_PALADIN",
		"CS2_101_H": "AT_132_PALADIN",
		"CS1h_001": "AT_132_PRIEST",
		"CS2_083b": "AT_132_ROGUE",
		"CS2_049": "AT_132_SHAMAN",
		"CS2_056": "AT_132_WARLOCK",
		"CS2_102": "AT_132_WARRIOR",
		"CS2_102_H1": "CS2_102_H1_AT_132",
	}

	def play(self):
		"""Summon the upgraded hero power matching the controller's
		current one; powers without a mapping are left unchanged."""
		current_power = self.controller.hero.power.id
		upgrade = self.HERO_POWER_MAP.get(current_power)
		if upgrade:
			yield Summon(CONTROLLER, upgrade)
| from ..utils import *
##
# Minions
# Confessor Paletress
class AT_018:
inspire = Summon(CONTROLLER, RandomMinion(rarity=Rarity.LEGENDARY))
# Skycap'n Kragg
class AT_070:
cost_mod = -Count(FRIENDLY_MINIONS + PIRATE)
# Gormok the Impaler
class AT_122:
play = (Count(FRIENDLY_MINIONS) >= 4) & Hit(TARGET, 4)
# Chillmaw
class AT_123:
deathrattle = HOLDING_DRAGON & Hit(ALL_MINIONS, 3)
# Bolf Ramshield
class AT_124:
events = Predamage(FRIENDLY_HERO).on(
Predamage(FRIENDLY_HERO, 0), Hit(SELF, Predamage.AMOUNT)
)
# Icehowl
class AT_125:
tags = {GameTag.CANNOT_ATTACK_HEROES: True}
# Nexus-Champion Saraad
class AT_127:
inspire = Give(CONTROLLER, RandomSpell())
# The Skeleton Knight
class AT_128:
deathrattle = JOUST & Bounce(SELF)
# Fjola Lightbane
class AT_129:
events = Play(CONTROLLER, SPELL, SELF).on(GiveDivineShield(SELF))
# Eydis Darkbane
class AT_131:
events = Play(CONTROLLER, SPELL, SELF).on(Hit(RANDOM_ENEMY_CHARACTER, 3))
# Justicar Trueheart
class AT_132:
HERO_POWER_MAP = {
"CS2_017": "AT_132_DRUID",
"DS1h_292": "AT_132_HUNTER",
"DS1h_292_H1": "DS1h_292_H1_AT_132",
"CS2_034": "AT_132_MAGE",
"CS2_101": "AT_132_PALADIN",
"CS1h_001": "AT_132_PRIEST",
"CS2_083b": "AT_132_ROGUE",
"CS2_049": "AT_132_SHAMAN",
"CS2_056": "AT_132_WARLOCK",
"CS2_102": "AT_132_WARRIOR",
"CS2_102_H1": "CS2_102_H1_AT_132",
}
def play(self):
upgrade = AT_132.HERO_POWER_MAP.get(self.controller.hero.power.id)
if upgrade:
yield Summon(CONTROLLER, upgrade)
| agpl-3.0 | Python |
a030e92b46acf27c05500ed2cb47b2cece4e87ac | Work around selenium Edge driver bug | cockpit-project/cockpit,garrett/cockpit,martinpitt/cockpit,cockpit-project/cockpit,martinpitt/cockpit,garrett/cockpit,mvollmer/cockpit,mvollmer/cockpit,cockpit-project/cockpit,mvollmer/cockpit,mvollmer/cockpit,garrett/cockpit,martinpitt/cockpit,garrett/cockpit,cockpit-project/cockpit,martinpitt/cockpit,cockpit-project/cockpit,garrett/cockpit,mvollmer/cockpit,martinpitt/cockpit | test/selenium/selenium-base.py | test/selenium/selenium-base.py | #!/usr/bin/python3
# we need to be able to find and import seleniumlib, so add this directory
from testlib_avocado.seleniumlib import SeleniumTest, clickable
import os
import sys
machine_test_dir = os.path.dirname(os.path.abspath(__file__))
if machine_test_dir not in sys.path:
sys.path.insert(1, machine_test_dir)
class BasicTestSuite(SeleniumTest):
    """
    :avocado: enable
    """

    def test10Base(self):
        # Minimal smoke test: the login page must show the server-name
        # element.
        self.wait_id('server-name')

    def test15BaseSSHKeyAdded(self):
        # calling self.login() ensures there is added public ssh key to
        # user to be able to call machine.execute(...)
        self.login()
        self.logout()
        out = self.machine.execute("hostname")
        # After logout the login page shows the server name; it must match
        # the hostname the machine reports.
        server_element = self.wait_id('server-name')
        self.assertIn(out.strip(), str(server_element.text))

    def test30ChangeTabServices(self):
        # Walk through the Services page filters and check that a
        # well-known unit is listed for each category.
        self.login()
        self.click(self.wait_link('Services', cond=clickable))
        self.wait_frame("services")
        self.wait_id("services-list")
        self.click(self.wait_text("Socket", cond=clickable))
        self.wait_text("cockpit.socket")
        self.wait_id("services-list")
        self.click(self.wait_text("Target", cond=clickable))
        self.wait_id("services-list")
        self.wait_text("basic.target")
        self.click(self.wait_text("System services", cond=clickable))
        self.wait_id("services-list")
        self.wait_text("auditd")
        # Switch back out of the services iframe.
        self.mainframe()
| #!/usr/bin/python3
# we need to be able to find and import seleniumlib, so add this directory
from testlib_avocado.seleniumlib import SeleniumTest, clickable
import os
import sys
machine_test_dir = os.path.dirname(os.path.abspath(__file__))
if machine_test_dir not in sys.path:
sys.path.insert(1, machine_test_dir)
class BasicTestSuite(SeleniumTest):
    """
    :avocado: enable
    """

    def test10Base(self):
        # Minimal smoke test: the login page must show the server-name
        # element.
        self.wait_id('server-name')

    def test15BaseSSHKeyAdded(self):
        # calling self.login() ensures there is added public ssh key to
        # user to be able to call machine.execute(...)
        self.login()
        self.logout()
        out = self.machine.execute("hostname")
        # After logout the login page shows the server name; it must match
        # the hostname the machine reports.
        server_element = self.wait_id('server-name')
        self.assertIn(out.strip(), str(server_element.text))

    def test30ChangeTabServices(self):
        # Walk through the Services page filters and check that a
        # well-known unit is listed for each category (udev socket,
        # reboot target, sshd service).
        self.login()
        self.click(self.wait_link('Services', cond=clickable))
        self.wait_frame("services")
        self.wait_id("services-list")
        self.click(self.wait_text("Socket", cond=clickable))
        self.wait_text("udev")
        self.wait_id("services-list")
        self.click(self.wait_text("Target", cond=clickable))
        self.wait_id("services-list")
        self.wait_text("reboot.target")
        self.click(self.wait_text("System services", cond=clickable))
        self.wait_id("services-list")
        self.wait_text("sshd")
        # Switch back out of the services iframe.
        self.mainframe()
| lgpl-2.1 | Python |
61e280ea32718443895af7619dd750e959ddd629 | Add a complementary test for givers and zero | eXcomm/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com | tests/test_communities.py | tests/test_communities.py | from __future__ import absolute_import, division, print_function, unicode_literals
from gittip.testing import Harness
class Tests(Harness):
    """Tests for who is listed on a community page (/for/<name>/)."""

    def setUp(self):
        Harness.setUp(self)
        # Lower the membership threshold so a single member is enough for
        # the community page to render its listings.
        self.client.website.NMEMBERS_THRESHOLD = 1

        # Alice joins a community.
        self.alice = self.make_participant("alice", claimed_time='now', last_bill_result='')
        self.client.POST( '/for/communities.json'
                        , {'name': 'something', 'is_member': 'true'}
                        , auth_as='alice'
                         )

    def test_community_member_shows_up_on_community_listing(self):
        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 2  # entry in New Participants

    def test_givers_show_up_on_community_page(self):
        # Alice tips bob.
        self.make_participant("bob", claimed_time='now')
        self.alice.set_tip_to('bob', '1.00')

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 4  # entries in both New Participants and Givers
        # Bob never joined the community, so he must not appear at all.
        assert 'bob' not in html

    def test_givers_dont_show_up_if_they_give_zero(self):
        # Alice tips bob, then zeroes the tip out again.
        self.make_participant("bob", claimed_time='now')
        self.alice.set_tip_to('bob', '1.00')
        self.alice.set_tip_to('bob', '0.00')

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 2  # entry in New Participants only
        assert 'bob' not in html

    def test_receivers_show_up_on_community_page(self):
        # Bob tips alice.
        bob = self.make_participant("bob", claimed_time='now', last_bill_result='')
        bob.set_tip_to('alice', '1.00')

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 4  # entries in both New Participants and Receivers
        # Bob never joined the community, so he must not appear at all.
        assert 'bob' not in html

    def test_receivers_dont_show_up_if_they_receive_zero(self):
        # Bob tips alice.
        bob = self.make_participant("bob", claimed_time='now', last_bill_result='')
        bob.set_tip_to('alice', '1.00')
        bob.set_tip_to('alice', '0.00')  # zero out bob's tip

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 2  # entry in New Participants only
        assert 'bob' not in html
| from __future__ import absolute_import, division, print_function, unicode_literals
from gittip.testing import Harness
class Tests(Harness):
    """Tests for who is listed on a community page (/for/<name>/)."""

    def setUp(self):
        Harness.setUp(self)
        # Lower the membership threshold so a single member is enough for
        # the community page to render its listings.
        self.client.website.NMEMBERS_THRESHOLD = 1

        # Alice joins a community.
        self.alice = self.make_participant("alice", claimed_time='now', last_bill_result='')
        self.client.POST( '/for/communities.json'
                        , {'name': 'something', 'is_member': 'true'}
                        , auth_as='alice'
                         )

    def test_community_member_shows_up_on_community_listing(self):
        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 2  # entry in New Participants

    def test_givers_show_up_on_community_page(self):
        # Alice tips bob.
        self.make_participant("bob", claimed_time='now')
        self.alice.set_tip_to('bob', '1.00')

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 4  # entries in both New Participants and Givers
        # Bob never joined the community, so he must not appear at all.
        assert 'bob' not in html

    def test_receivers_show_up_on_community_page(self):
        # Bob tips alice.
        bob = self.make_participant("bob", claimed_time='now', last_bill_result='')
        bob.set_tip_to('alice', '1.00')

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 4  # entries in both New Participants and Receivers
        # Bob never joined the community, so he must not appear at all.
        assert 'bob' not in html

    def test_receivers_dont_show_up_if_they_receive_zero(self):
        # Bob tips alice.
        bob = self.make_participant("bob", claimed_time='now', last_bill_result='')
        bob.set_tip_to('alice', '1.00')
        bob.set_tip_to('alice', '0.00')  # zero out bob's tip

        html = self.client.GET('/for/something/', want='response.body')
        assert html.count('alice') == 2  # entry in New Participants only
        assert 'bob' not in html
| mit | Python |
219e0fed56f91954e2132b68d0e606f0d2825060 | Remove unnecessary print in test_missingnode | ktkt2009/disco,discoproject/disco,mozilla/disco,pavlobaron/disco_playground,pavlobaron/disco_playground,pavlobaron/disco_playground,pombredanne/disco,pooya/disco,simudream/disco,scrapinghub/disco,pombredanne/disco,seabirdzh/disco,mwilliams3/disco,simudream/disco,pombredanne/disco,ErikDubbelboer/disco,ErikDubbelboer/disco,scrapinghub/disco,oldmantaiter/disco,mwilliams3/disco,ktkt2009/disco,pooya/disco,seabirdzh/disco,beni55/disco,mozilla/disco,seabirdzh/disco,pavlobaron/disco_playground,oldmantaiter/disco,ktkt2009/disco,oldmantaiter/disco,beni55/disco,beni55/disco,mwilliams3/disco,simudream/disco,mozilla/disco,ktkt2009/disco,ErikDubbelboer/disco,mozilla/disco,seabirdzh/disco,discoproject/disco,oldmantaiter/disco,simudream/disco,beni55/disco,pombredanne/disco,scrapinghub/disco,ktkt2009/disco,discoproject/disco,seabirdzh/disco,mwilliams3/disco,oldmantaiter/disco,ErikDubbelboer/disco,discoproject/disco,pooya/disco,mwilliams3/disco,pombredanne/disco,scrapinghub/disco,ErikDubbelboer/disco,beni55/disco,simudream/disco,discoproject/disco,pooya/disco | tests/test_missingnode.py | tests/test_missingnode.py | from disco.test import DiscoJobTestFixture, DiscoTestCase
def unique_nodename(nodenames, count=0):
    """Return a node name of the form ``missingnode_<n>`` not present in
    *nodenames*.

    Counting starts at *count* and increases until a free name is found.
    Implemented iteratively so a large set of taken names cannot hit the
    recursion limit (the original recursed once per collision).
    """
    while True:
        nodename = 'missingnode_%s' % count
        if nodename not in nodenames:
            return nodename
        count += 1
class MissingNodeTestCase(DiscoJobTestFixture, DiscoTestCase):
    # Run a simple map job while the cluster config lists an extra,
    # nonexistent node; the job must still produce complete results.

    @property
    def inputs(self):
        # One input per map task: the integers 0 .. 2*num_workers-1.
        return range(self.num_workers * 2)

    def getdata(self, path):
        # Each "input file" is simply its own path string.
        return path

    @staticmethod
    def map(e, params):
        # Executed by the disco worker (``time`` is resolved in the worker
        # environment, not imported in this module): slow each task down a
        # little, then emit the input back as an integer key.
        time.sleep(0.5)
        return [(int(e), '')]

    def setUp(self):
        # Temporarily add a node name guaranteed not to exist; the
        # original config is restored in tearDown.
        self.config = self.disco.config
        nodenames = set(name for name, workers in self.config)
        self.disco.config = self.config + [[unique_nodename(nodenames), '1']]
        super(MissingNodeTestCase, self).setUp()

    def runTest(self):
        # The sum of returned keys must equal the sum of all inputs,
        # i.e. nothing was lost because of the missing node.
        self.assertEquals(sum(xrange(self.num_workers * 2)),
                          sum(int(k) for k, v in self.results))

    def tearDown(self):
        super(MissingNodeTestCase, self).tearDown()
        self.disco.config = self.config
| from disco.test import DiscoJobTestFixture, DiscoTestCase
def unique_nodename(nodenames, count=0):
    """Pick the first name ``missingnode_<n>`` (n >= *count*) that does
    not already occur in *nodenames*."""
    candidate = 'missingnode_%s' % count
    if candidate in nodenames:
        # Taken -- try the next counter value.
        return unique_nodename(nodenames, count + 1)
    return candidate
class MissingNodeTestCase(DiscoJobTestFixture, DiscoTestCase):
    # Run a simple map job while the cluster config lists an extra,
    # nonexistent node; the job must still produce complete results.

    @property
    def inputs(self):
        # NOTE(review): this print is leftover debug output -- consider
        # removing it.
        print range(self.num_workers * 2)
        # One input per map task: the integers 0 .. 2*num_workers-1.
        return range(self.num_workers * 2)

    def getdata(self, path):
        # Each "input file" is simply its own path string.
        return path

    @staticmethod
    def map(e, params):
        # Executed by the disco worker (``time`` is resolved in the worker
        # environment, not imported in this module): slow each task down a
        # little, then emit the input back as an integer key.
        time.sleep(0.5)
        return [(int(e), '')]

    def setUp(self):
        # Temporarily add a node name guaranteed not to exist; the
        # original config is restored in tearDown.
        self.config = self.disco.config
        nodenames = set(name for name, workers in self.config)
        self.disco.config = self.config + [[unique_nodename(nodenames), '1']]
        super(MissingNodeTestCase, self).setUp()

    def runTest(self):
        # The sum of returned keys must equal the sum of all inputs,
        # i.e. nothing was lost because of the missing node.
        self.assertEquals(sum(xrange(self.num_workers * 2)),
                          sum(int(k) for k, v in self.results))

    def tearDown(self):
        super(MissingNodeTestCase, self).tearDown()
        self.disco.config = self.config
| bsd-3-clause | Python |
dac2a76051271c52bf9c12874f8345eb0e1c3937 | purge test jobs after requesting them | kalessin/python-hubstorage,scrapinghub/python-hubstorage,torymur/python-hubstorage | tests/test_jobq.py | tests/test_jobq.py | """
Test JobQ
"""
from hstestcase import HSTestCase
class JobqTest(HSTestCase):

    def test_basic(self):
        # Exercise the job queue end to end: push four jobs, verify queue
        # order, move jobs through running/finished, then purge them.
        #authpos(JOBQ_PUSH_URL, data="", expect=400)
        spider1 = self.project.jobq.push('spidey')
        spider2 = self.project.jobq.push(spider='spidey')
        spider3 = self.project.jobq.push(spider='spidey', metatest='somekey')
        spider4 = self.project.jobq.push('spidey')

        # Index the queue summaries by name ('pending'/'running'/'finished').
        summary = dict((s['name'], s) for s in self.project.jobq.summary())
        pending = summary['pending']
        pending_summaries = pending['summary']
        assert len(pending_summaries) >= 4
        assert len(pending_summaries) <= 8 # 8 are requested
        assert pending['count'] >= len(pending_summaries)

        # expected keys, in the order they should be in the queue
        # (most recently pushed first)
        expected_keys = [spider4['key'], spider3['key'], spider2['key'], spider1['key']]

        # only count the keys we inserted, as other tests may be running
        def filter_test(summary):
            """filter out all summaries not in our test"""
            return [s['key'] for s in summary if s['key'] in expected_keys]

        received_keys = filter_test(pending_summaries)
        assert expected_keys == received_keys

        # change some job states
        job1 = self.hsclient.get_job(spider1['key'])
        job1.finished()
        job2 = self.hsclient.get_job(spider2['key'])
        job2.started()
        job3 = self.hsclient.get_job(spider3['key'])
        job4 = self.hsclient.get_job(spider4['key'])

        # check job queues again
        summary = dict((s['name'], s) for s in self.project.jobq.summary())
        assert summary['pending']['count'] >= 2
        assert summary['running']['count'] >= 1
        assert summary['finished']['count'] >= 1
        pending_keys = filter_test(summary['pending']['summary'])
        assert pending_keys == [spider4['key'], spider3['key']]
        running_keys = filter_test(summary['running']['summary'])
        assert running_keys == [spider2['key']]
        finished_keys = filter_test(summary['finished']['summary'])
        assert finished_keys == [spider1['key']]

        job2.finished()
        summary = dict((s['name'], s) for s in self.project.jobq.summary())
        finished_keys = filter_test(summary['finished']['summary'])
        # Most recently finished job comes first.
        assert finished_keys == [spider2['key'], spider1['key']]

        # Purge the test jobs so they do not leak into other test runs.
        job1.purged()
        job2.purged()
        job3.purged()
        job4.purged()
| """
Test JobQ
"""
from hstestcase import HSTestCase
class JobqTest(HSTestCase):

    def test_basic(self):
        # Exercise the job queue end to end: push four jobs, verify queue
        # order, then move jobs through the running/finished states.
        # NOTE(review): the pushed jobs are never purged afterwards, so
        # they leak into subsequent test runs.
        #authpos(JOBQ_PUSH_URL, data="", expect=400)
        spider1 = self.project.jobq.push('spidey')
        spider2 = self.project.jobq.push(spider='spidey')
        spider3 = self.project.jobq.push(spider='spidey', metatest='somekey')
        spider4 = self.project.jobq.push('spidey')

        # Index the queue summaries by name ('pending'/'running'/'finished').
        summary = dict((s['name'], s) for s in self.project.jobq.summary())
        pending = summary['pending']
        pending_summaries = pending['summary']
        assert len(pending_summaries) >= 4
        assert len(pending_summaries) <= 8 # 8 are requested
        assert pending['count'] >= len(pending_summaries)

        # expected keys, in the order they should be in the queue
        # (most recently pushed first)
        expected_keys = [spider4['key'], spider3['key'], spider2['key'], spider1['key']]

        # only count the keys we inserted, as other tests may be running
        def filter_test(summary):
            """filter out all summaries not in our test"""
            return [s['key'] for s in summary if s['key'] in expected_keys]

        received_keys = filter_test(pending_summaries)
        assert expected_keys == received_keys

        # change some job states
        job1 = self.hsclient.get_job(spider1['key'])
        job1.finished()
        job2 = self.hsclient.get_job(spider2['key'])
        job2.started()

        # check job queues again
        summary = dict((s['name'], s) for s in self.project.jobq.summary())
        assert summary['pending']['count'] >= 2
        assert summary['running']['count'] >= 1
        assert summary['finished']['count'] >= 1
        pending_keys = filter_test(summary['pending']['summary'])
        assert pending_keys == [spider4['key'], spider3['key']]
        running_keys = filter_test(summary['running']['summary'])
        assert running_keys == [spider2['key']]
        finished_keys = filter_test(summary['finished']['summary'])
        assert finished_keys == [spider1['key']]

        job2.finished()
        summary = dict((s['name'], s) for s in self.project.jobq.summary())
        finished_keys = filter_test(summary['finished']['summary'])
        # Most recently finished job comes first.
        assert finished_keys == [spider2['key'], spider1['key']]
| bsd-3-clause | Python |
70fb77dd35fd647e0c4fc8b8c8ef5b30fc099476 | update tests | semio/ddf_utils | tests/test_misc.py | tests/test_misc.py | # -*- coding: utf-8 -*-
import os
wd = os.path.dirname(__file__)
def test_build_dictionary():
    # build_dictionary must pass a plain mapping through unchanged and
    # resolve a file name via the chef's dictionaries_dir config.
    from ddf_utils.chef.api import Chef
    from ddf_utils.chef.helpers import build_dictionary

    # A literal dict is returned as-is.
    d = {'China': 'chn', 'USA': 'usa'}
    c = Chef()
    assert build_dictionary(c, d) == d

    # A file name is looked up under dictionaries_dir and its JSON
    # content is returned as the dictionary.
    d2 = {
        "imr_median": "infant_mortality_median",
        "imr_upper": "imr_lower"
    }
    dfp = os.path.join(wd, 'chef', 'translation_dictionaries')
    fp = 'indicators_cme_to_sg.json'
    c.add_config(dictionaries_dir=dfp)
    assert build_dictionary(c, fp) == d2
def test_retry():
    # retry(times=4) must make up to 4 attempts, sleeping between them,
    # and let the final failure propagate.
    import time
    from ddf_utils.factory.common import retry
    from numpy.testing import assert_almost_equal

    @retry(times=4)
    def test():
        # Always fails, forcing every retry attempt.
        raise NotImplementedError

    t0 = time.time()
    try:
        test()
    except NotImplementedError:
        pass  # expected: all attempts failed
    t1 = time.time()
    print('Took', t1 - t0, 'seconds')
    # Backoff apparently grows by 0.5s per failed attempt with no sleep
    # after the final one (see the sum in the comment); the total elapsed
    # time is checked to 1 decimal place.
    assert_almost_equal(t1 - t0, 3, 1) # 0.5 + 1 + 1.5 = 3
| # -*- coding: utf-8 -*-
import os
wd = os.path.dirname(__file__)
def test_build_dictionary():
    # build_dictionary must pass a plain mapping through unchanged and
    # resolve a file name via the chef's dictionaries_dir config.
    from ddf_utils.chef.api import Chef
    from ddf_utils.chef.helpers import build_dictionary

    # A literal dict is returned as-is.
    d = {'China': 'chn', 'USA': 'usa'}
    c = Chef()
    assert build_dictionary(c, d) == d

    # A file name is looked up under dictionaries_dir and its JSON
    # content is returned as the dictionary.
    d2 = {
        "imr_median": "infant_mortality_median",
        "imr_upper": "imr_lower"
    }
    dfp = os.path.join(wd, 'chef', 'translation_dictionaries')
    fp = 'indicators_cme_to_sg.json'
    c.add_config(dictionaries_dir=dfp)
    assert build_dictionary(c, fp) == d2

def test_retry():
    # retry(times=4) must make up to 4 attempts, sleeping between them,
    # and let the final failure propagate.
    import time
    from ddf_utils.factory.common import retry
    from numpy.testing import assert_almost_equal

    @retry(times=4)
    def test():
        # Always fails, forcing every retry attempt.
        raise NotImplementedError

    t0 = time.time()
    try:
        test()
    except NotImplementedError:
        pass  # expected: all attempts failed
    t1 = time.time()
    print('Took', t1 - t0, 'seconds')
    # Backoff apparently grows by 0.5s per failed attempt, with a sleep
    # after every failure including the last (see the sum in the comment);
    # the total elapsed time is checked to 1 decimal place.
    assert_almost_equal(t1 - t0, 5, 1) # 0.5 + 1 + 1.5 + 2 = 5
| mit | Python |
57151faa40af3c545e6557bd22de9a80a0d3e49c | Add sqlite url test | funkybob/django-classy-settings | tests/test_urls.py | tests/test_urls.py | from unittest import TestCase
from cbs.urls import parse_dburl
class TestUrlParse(TestCase):
    """Tests for parse_dburl: database URL -> Django DATABASES entry."""

    def test_simple(self):
        # Full postgres URL: the scheme selects the backend ENGINE and
        # the credentials/host/port/path map onto the matching keys.
        result = parse_dburl(
            "postgres://user:password@hostname:1234/dbname?conn_max_age=15&local_option=test"
        )
        self.assertEqual(
            result,
            {
                "ENGINE": "django.db.backends.postgresql",
                "NAME": "dbname",
                "HOST": "hostname",
                "PORT": 1234,
                "PASSWORD": "password",
                "USER": "user",
                # Known query args become top-level settings (uppercased)...
                "CONN_MAX_AGE": 15,
                # ...unknown ones are collected under OPTIONS.
                "OPTIONS": {
                    "local_option": "test",
                },
            },
        )

    def test_sqlite(self):
        # sqlite URLs carry no credentials/host; the path becomes NAME.
        result = parse_dburl("sqlite:///db.sqlite")
        self.assertEqual(
            result, {"ENGINE": "django.db.backends.sqlite3", "NAME": "db.sqlite"}
        )
| from unittest import TestCase
from cbs.urls import parse_dburl
class TestUrlParse(TestCase):
    """Tests for parse_dburl: database URL -> Django DATABASES entry."""

    def test_simple(self):
        # Full postgres URL: the scheme selects the backend ENGINE and
        # the credentials/host/port/path map onto the matching keys.
        result = parse_dburl(
            "postgres://user:password@hostname:1234/dbname?conn_max_age=15&local_option=test"
        )
        self.assertEqual(
            result,
            {
                "ENGINE": "django.db.backends.postgresql",
                "NAME": "dbname",
                "HOST": "hostname",
                "PORT": 1234,
                "PASSWORD": "password",
                "USER": "user",
                # Known query args become top-level settings (uppercased)...
                "CONN_MAX_AGE": 15,
                # ...unknown ones are collected under OPTIONS.
                "OPTIONS": {
                    "local_option": "test",
                },
            },
        )
| bsd-2-clause | Python |
066281d2e0f571484392dde3ad5ce6372cf1dc55 | test illegal id | basbloemsaat/dartsense,basbloemsaat/dartsense,basbloemsaat/dartsense,basbloemsaat/dartsense,basbloemsaat/dartsense | tests/test_user.py | tests/test_user.py | #!/usr/bin/env python3
import pytest
import os
import sys
from pprint import pprint
sys.path.append(os.path.join(os.path.dirname(__file__), "../lib"))
import dartsense.user
def test_user_init():
user = dartsense.user.User()
assert isinstance(user, dartsense.user.User)
assert hasattr(user, 'id')
assert user.id == None
assert hasattr(user, 'name')
assert user.name == None
assert hasattr(user, 'get_permissions')
assert callable(user.get_permissions)
permissions = user.get_permissions()
assert isinstance(permissions, list)
assert len(permissions) == 0
def test_user_db(setup_db):
# pprint(pytest.setup_vars)
user = dartsense.user.User(id=-1)
assert isinstance(user, dartsense.user.User)
assert hasattr(user, 'id')
assert user.id == -1
assert hasattr(user, 'name')
assert user.name == None
user = dartsense.user.User(id=pytest.setup_vars['testuser_id'])
assert isinstance(user, dartsense.user.User)
assert hasattr(user, 'name')
assert user.name == 'test user'
assert hasattr(user, 'email')
assert user.email == 'test@test.com'
def test_user_login(setup_db):
user = dartsense.user.User()
assert hasattr(user, 'login')
assert callable(user.login)
assert user.id == None
assert user.login('google', 'test@test.org')
assert user.id == pytest.setup_vars['testuser_id']
assert hasattr(user, 'name')
assert user.name == 'test user'
| #!/usr/bin/env python3
import pytest
import os
import sys
from pprint import pprint
sys.path.append(os.path.join(os.path.dirname(__file__), "../lib"))
import dartsense.user
def test_user_init():
user = dartsense.user.User()
assert isinstance(user, dartsense.user.User)
assert hasattr(user, 'id')
assert user.id == None
assert hasattr(user, 'get_permissions')
assert callable(user.get_permissions)
permissions = user.get_permissions()
assert isinstance(permissions, list)
assert len(permissions) == 0
def test_user_db(setup_db):
# pprint(pytest.setup_vars)
user = dartsense.user.User(id=pytest.setup_vars['testuser_id'])
assert isinstance(user, dartsense.user.User)
assert hasattr(user, 'name')
assert user.name == 'test user'
assert hasattr(user, 'email')
assert user.email == 'test@test.com'
def test_user_login(setup_db):
user = dartsense.user.User()
assert hasattr(user, 'login')
assert callable(user.login)
assert user.id == None
assert user.login('google', 'test@test.org')
assert user.id == pytest.setup_vars['testuser_id']
assert hasattr(user, 'name')
assert user.name == 'test user'
| mit | Python |
60086bd0cd959809b2e15860c39d681474c9a011 | Cover empty link case | CodersOfTheNight/verata | tests/test_util.py | tests/test_util.py | import pytest
from grazer.util import time_convert, grouper, extract_links, trim_link
from .fixtures import example_html
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
class TestLinkExtract(object):
def test_extract_wo_hashes(self, example_html):
result = extract_links(example_html, ignore_hashes=True)
assert len(result) == 1
assert result[0] == "http://magic-link"
def test_extract_w_hashes(self, example_html):
result = extract_links(example_html, ignore_hashes=False)
assert "http://magic-link/#/with-hash" in result
def test_trim_link_absolute(self):
link = "http://magic-link.dev/something-good"
result = trim_link(link, "magic-link.dev")
assert result == "/something-good"
def test_trim_link_relative(self):
link = "/something-good"
result = trim_link(link, "magic-link.dev")
assert result == "/something-good"
def test_trim_link_external_domain(self):
link = "http://google.com"
result = trim_link(link, "magic-link.dev")
assert result is None
def test_trim_empty_link(self):
assert trim_link(None, "magic-link.dev") is None
| import pytest
from grazer.util import time_convert, grouper, extract_links, trim_link
from .fixtures import example_html
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
class TestLinkExtract(object):
def test_extract_wo_hashes(self, example_html):
result = extract_links(example_html, ignore_hashes=True)
assert len(result) == 1
assert result[0] == "http://magic-link"
def test_extract_w_hashes(self, example_html):
result = extract_links(example_html, ignore_hashes=False)
assert "http://magic-link/#/with-hash" in result
def test_trim_link_absolute(self):
link = "http://magic-link.dev/something-good"
result = trim_link(link, "magic-link.dev")
assert result == "/something-good"
def test_trim_link_relative(self):
link = "/something-good"
result = trim_link(link, "magic-link.dev")
assert result == "/something-good"
def test_trim_link_external_domain(self):
link = "http://google.com"
result = trim_link(link, "magic-link.dev")
assert result is None
| mit | Python |
acd749504087d1c7fe527ab78b044827b7d9e3a7 | Make ast generation tools silent when BUILDFARM is set. | urbiforge/urbi,aldebaran/urbi,urbiforge/urbi,urbiforge/urbi,urbiforge/urbi,urbiforge/urbi,urbiforge/urbi,urbiforge/urbi,aldebaran/urbi,urbiforge/urbi,aldebaran/urbi,aldebaran/urbi,aldebaran/urbi,urbiforge/urbi,aldebaran/urbi,aldebaran/urbi,aldebaran/urbi | dev/tools.py | dev/tools.py | ## ----------------------------------------------------------------------------
## Tools for python generators
## ----------------------------------------------------------------------------
import string, re, sys
import os, stat, filecmp, shutil
## Display a warning.
def warning (msg):
print >>sys.stderr, "Warning: " + msg
## Display an error message and exit.
def error (msg):
print >>sys.stderr, "Error: " + msg
sys.exit (1)
## Overwrite old with new if different, or nonexistant.
## Remove the write permission on the result to avoid accidental edition
## of generated files.
def lazy_overwrite (old, new):
verbose = os.getenv(key='BUILDFARM') is None
if not os.path.isfile (old):
if verbose:
print "> Create: " + old
shutil.move (new, old)
if verbose:
os.system("diff -uw /dev/null " + old)
elif not filecmp.cmp (old, new):
if verbose:
print "> Overwrite: " + old
# Change the file modes to write the file
file_modes = os.stat (old) [stat.ST_MODE]
os.chmod (old, file_modes | 0666);
shutil.move (old, old + "~")
shutil.move (new, old)
if verbose:
os.system("diff -uw " + old + "~ " + old)
else:
os.remove (new)
# Prevent generated file modifications
file_modes = os.stat (old) [stat.ST_MODE]
os.chmod(old, file_modes & 0555);
def lazy_install (srcdir, name):
"""Install name.tmp as srcdir/name."""
lazy_overwrite (os.path.join (srcdir, name), name + ".tmp")
## String helpers -------------------------------------------------------------
## Return a conventional macro identifier from a class name.
## (FooBar -> FOO_BAR).
def define_id (s):
return re.sub ("([^_])([A-Z])", "\\1_\\2", s).upper ()
## Return a conventional file name from a class name.
## (FooBar -> foo-bar).
def file_id (s):
return re.sub ("^-", "", re.sub ("([A-Z])", "-\\1", s)).lower ()
# FIXME: Improve this generator
# (see http://en.wikipedia.org/wiki/A_and_an for instance).
# Reported by Nicolas Pierron.
## Return the indefinite article to be put before NOUN.
def indef_article (noun):
if re.match ("[aeiouAEIOU]", noun):
return "an"
else:
return "a"
## Wrap a function prototype.
## This is simplistic, but enough to process our generated code.
def wrap_proto (fundec, width):
## Look for the first parenthesis to get the level of indentation.
indent = fundec.find ("(")
pieces = fundec.split(",")
output = ""
line = ""
while pieces:
if len (pieces) == 1:
sep = ""
else:
sep = ","
piece = pieces.pop (0)
if len (line) + len (piece) + len (sep) > width:
# "Flush" the current line.
output += line + "\n"
line = " " * indent + piece + sep
else:
line += piece + sep
output += line
return output
def banner(name, description):
'''Given a name and description, return the file's banner.'''
return """\
//<<-
// Generated, do not edit by hand.
//->>
/**
** \\file """ + name + """
** \\brief """ + description + """
*/
"""
| ## ----------------------------------------------------------------------------
## Tools for python generators
## ----------------------------------------------------------------------------
import string, re, sys
import os, stat, filecmp, shutil
## Display a warning.
def warning (msg):
print >>sys.stderr, "Warning: " + msg
## Display an error message and exit.
def error (msg):
print >>sys.stderr, "Error: " + msg
sys.exit (1)
## Overwrite old with new if different, or nonexistant.
## Remove the write permission on the result to avoid accidental edition
## of generated files.
def lazy_overwrite (old, new):
if not os.path.isfile (old):
print "> Create: " + old
shutil.move (new, old)
os.system("diff -uw /dev/null " + old)
elif not filecmp.cmp (old, new):
print "> Overwrite: " + old
# Change the file modes to write the file
file_modes = os.stat (old) [stat.ST_MODE]
os.chmod (old, file_modes | 0666);
shutil.move (old, old + "~")
shutil.move (new, old)
os.system("diff -uw " + old + "~ " + old)
else:
os.remove (new)
# Prevent generated file modifications
file_modes = os.stat (old) [stat.ST_MODE]
os.chmod(old, file_modes & 0555);
def lazy_install (srcdir, name):
"""Install name.tmp as srcdir/name."""
lazy_overwrite (os.path.join (srcdir, name), name + ".tmp")
## String helpers -------------------------------------------------------------
## Return a conventional macro identifier from a class name.
## (FooBar -> FOO_BAR).
def define_id (s):
return re.sub ("([^_])([A-Z])", "\\1_\\2", s).upper ()
## Return a conventional file name from a class name.
## (FooBar -> foo-bar).
def file_id (s):
return re.sub ("^-", "", re.sub ("([A-Z])", "-\\1", s)).lower ()
# FIXME: Improve this generator
# (see http://en.wikipedia.org/wiki/A_and_an for instance).
# Reported by Nicolas Pierron.
## Return the indefinite article to be put before NOUN.
def indef_article (noun):
if re.match ("[aeiouAEIOU]", noun):
return "an"
else:
return "a"
## Wrap a function prototype.
## This is simplistic, but enough to process our generated code.
def wrap_proto (fundec, width):
## Look for the first parenthesis to get the level of indentation.
indent = fundec.find ("(")
pieces = fundec.split(",")
output = ""
line = ""
while pieces:
if len (pieces) == 1:
sep = ""
else:
sep = ","
piece = pieces.pop (0)
if len (line) + len (piece) + len (sep) > width:
# "Flush" the current line.
output += line + "\n"
line = " " * indent + piece + sep
else:
line += piece + sep
output += line
return output
def banner(name, description):
'''Given a name and description, return the file's banner.'''
return """\
//<<-
// Generated, do not edit by hand.
//->>
/**
** \\file """ + name + """
** \\brief """ + description + """
*/
"""
| bsd-3-clause | Python |
56e8f247b149862eb66bcfba196eb20a3ed64e4b | reduce number of threads to 8 so don't need a whole node on TSCC so jobs get scheduled faster | YeoLab/gscripts,YeoLab/gscripts,YeoLab/gscripts,YeoLab/gscripts | gscripts/mapping/map_paired_with_STAR.py | gscripts/mapping/map_paired_with_STAR.py | #!/usr/bin/env python
from glob import glob
from gscripts.qtools._Submitter import Submitter
import sys
species = sys.argv[1]
try:
jobname = sys.argv[2] + "_map_to_" + species
except IndexError:
jobname = "map_to_" + species
cmd_list = []
for file in glob('*R1*fastq'):
pair = file.replace('R1', 'R2')
name = file.replace('_R1', '')
cmd_list.append('/home/yeo-lab/software/STAR_2.3.0e/STAR \
--runMode alignReads \
--runThreadN 16 \
--genomeDir /projects/ps-yeolab/genomes/{}/star/ \
--genomeLoad LoadAndRemove \
--readFilesIn {} {} \
--outFileNamePrefix {}. \
--outSAMunmapped Within \
--outFilterMultimapNmax 1'.format(species, file, pair, name))
for file in glob('*R1*norep'):
pair = file.replace('R1', 'R2')
name = file.replace('_R1', '')
cmd_list.append('/home/yeo-lab/software/STAR_2.3.0e/STAR \
--runMode alignReads \
--runThreadN 16 \
--genomeDir /projects/ps-yeolab/genomes/{}/star/ \
--genomeLoad LoadAndRemove \
--readFilesIn {} {} \
--outFileNamePrefix {}. \
--outSAMunmapped Within \
--outFilterMultimapNmax 1'.format(species, file, pair, name))
for file in glob('*R1*gz'):
pair = file.replace('R1', 'R2')
name = file.replace('_R1', '')
cmd_list.append('STAR \
--runMode alignReads \
--runThreadN 8 \
--genomeDir /projects/ps-yeolab/genomes/{}/star_sjdb/ \
--genomeLoad LoadAndRemove \
--readFilesCommand zcat \
--readFilesIn {} {} \
--outFileNamePrefix {}. \
--outSAMunmapped Within \
--outReadsUnmapped Fastx \
--outFilterMismatchNmax 5 \
--clip5pNbases 10 \
--clip3pNbases 10 \
--outFilterMultimapNmax 5'.format(species, file, pair, name))
sub = Submitter(queue_type='PBS', sh_file=jobname + '.sh',
command_list=cmd_list,
job_name=jobname)
sub.write_sh(submit=True, nodes=1, ppn=8, walltime='0:30:00', use_array=True,
max_running=20)
| #!/usr/bin/env python
from glob import glob
from gscripts.qtools._Submitter import Submitter
import sys
species = sys.argv[1]
try:
jobname = sys.argv[2] + "_map_to_" + species
except IndexError:
jobname = "map_to_" + species
cmd_list = []
for file in glob('*R1*fastq'):
pair = file.replace('R1', 'R2')
name = file.replace('_R1', '')
cmd_list.append('/home/yeo-lab/software/STAR_2.3.0e/STAR \
--runMode alignReads \
--runThreadN 16 \
--genomeDir /projects/ps-yeolab/genomes/{}/star/ \
--genomeLoad LoadAndRemove \
--readFilesIn {} {} \
--outFileNamePrefix {}. \
--outSAMunmapped Within \
--outFilterMultimapNmax 1'.format(species, file, pair, name))
for file in glob('*R1*norep'):
pair = file.replace('R1', 'R2')
name = file.replace('_R1', '')
cmd_list.append('/home/yeo-lab/software/STAR_2.3.0e/STAR \
--runMode alignReads \
--runThreadN 16 \
--genomeDir /projects/ps-yeolab/genomes/{}/star/ \
--genomeLoad LoadAndRemove \
--readFilesIn {} {} \
--outFileNamePrefix {}. \
--outSAMunmapped Within \
--outFilterMultimapNmax 1'.format(species, file, pair, name))
for file in glob('*R1*gz'):
pair = file.replace('R1', 'R2')
name = file.replace('_R1', '')
cmd_list.append('STAR \
--runMode alignReads \
--runThreadN 16 \
--genomeDir /projects/ps-yeolab/genomes/{}/star_sjdb/ \
--genomeLoad LoadAndRemove \
--readFilesCommand zcat \
--readFilesIn {} {} \
--outFileNamePrefix {}. \
--outSAMunmapped Within \
--outReadsUnmapped Fastx \
--outFilterMismatchNmax 5 \
--clip5pNbases 10 \
--clip3pNbases 10 \
--outFilterMultimapNmax 5'.format(species, file, pair, name))
sub = Submitter(queue_type='PBS', sh_file=jobname + '.sh',
command_list=cmd_list,
job_name=jobname)
sub.write_sh(submit=True, nodes=1, ppn=16, walltime='0:30:00', use_array=True,
max_running=20)
| mit | Python |
863515a72aaf4c881240f47c1fda8129b9b6d96d | add songs to the **end** of the queue... | wtodom/spotipi | spotipi/sandbox/requesthandler.py | spotipi/sandbox/requesthandler.py | from collections import deque
from ConfigParser import SafeConfigParser
import threading
from flask import Flask, request, render_template
import spotify
config = SafeConfigParser()
config.read("spotipi.cfg")
app = Flask(__name__)
app.debug = True
song_queue = deque()
session = spotify.Session()
loop = spotify.EventLoop(session)
loop.start()
audio = spotify.AlsaSink(session)
## Events
logged_in = threading.Event()
end_of_track = threading.Event()
playback_in_progress = threading.Event()
def on_connection_state_updated(session):
if session.connection.state is spotify.ConnectionState.LOGGED_IN:
logged_in.set()
def on_end_of_track(self):
if len(song_queue) > 0:
play_track(song_queue.pop())
else:
end_of_track.set()
def on_playback_in_progress():
print("started.")
playback_in_progress.set()
## Handlers
session.on(
spotify.SessionEvent.CONNECTION_STATE_UPDATED,
on_connection_state_updated
)
session.on(
spotify.SessionEvent.END_OF_TRACK,
on_end_of_track
)
session.on(
spotify.SessionEvent.START_PLAYBACK,
on_playback_in_progress
)
username = config.get("credentials", "username")
password = config.get("credentials", "password")
session.login(username, password, remember_me=True)
## API
@app.route("/queue", methods=["GET"])
def get_queue():
return str(song_queue)
@app.route("/queue/add/<link>", methods=["GET", "POST"])
def add_to_queue(link):
song_queue.appendleft(link)
if len(song_queue) == 1 and not playback_in_progress.is_set():
play_track(song_queue.pop())
return "added"
@app.route("/queue/clear", methods=["POST"])
def clear_queue():
song_queue.clear()
return "cleared"
@app.route("/pause", methods=["POST"])
def pause():
pass
@app.route("/skip", methods=["POST"])
def skip():
pass
@app.route("/play/<link>", methods=["GET", "POST"])
def play_track(link):
session.emit(spotify.SessionEvent.START_PLAYBACK)
track = session.get_track(link).load()
session.player.load(track)
session.player.play()
return "playing..."
if __name__ == '__main__':
app.run('0.0.0.0', port=5001)
| from collections import deque
from ConfigParser import SafeConfigParser
import threading
from flask import Flask, request, render_template
import spotify
config = SafeConfigParser()
config.read("spotipi.cfg")
app = Flask(__name__)
app.debug = True
song_queue = deque()
session = spotify.Session()
loop = spotify.EventLoop(session)
loop.start()
audio = spotify.AlsaSink(session)
## Events
logged_in = threading.Event()
end_of_track = threading.Event()
playback_in_progress = threading.Event()
def on_connection_state_updated(session):
if session.connection.state is spotify.ConnectionState.LOGGED_IN:
logged_in.set()
def on_end_of_track(self):
if len(song_queue) > 0:
play_track(song_queue.pop())
else:
end_of_track.set()
def on_playback_in_progress():
print("started.")
playback_in_progress.set()
## Handlers
session.on(
spotify.SessionEvent.CONNECTION_STATE_UPDATED,
on_connection_state_updated
)
session.on(
spotify.SessionEvent.END_OF_TRACK,
on_end_of_track
)
session.on(
spotify.SessionEvent.START_PLAYBACK,
on_playback_in_progress
)
username = config.get("credentials", "username")
password = config.get("credentials", "password")
session.login(username, password, remember_me=True)
## API
@app.route("/queue", methods=["GET"])
def get_queue():
return str(song_queue)
@app.route("/queue/add/<link>", methods=["GET", "POST"])
def add_to_queue(link):
song_queue.append(link)
if len(song_queue) == 1 and not playback_in_progress.is_set():
play_track(song_queue.pop())
return "added"
@app.route("/queue/clear", methods=["POST"])
def clear_queue():
song_queue.clear()
return "cleared"
@app.route("/pause", methods=["POST"])
def pause():
pass
@app.route("/skip", methods=["POST"])
def skip():
pass
@app.route("/play/<link>", methods=["GET", "POST"])
def play_track(link):
session.emit(spotify.SessionEvent.START_PLAYBACK)
track = session.get_track(link).load()
session.player.load(track)
session.player.play()
return "playing..."
if __name__ == '__main__':
app.run('0.0.0.0', port=5001)
| mit | Python |
94695827c8f70e305021a25d2d02a5b1b7241957 | Fix returning `None` if badge ID is unknown | m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps | byceps/services/user_badge/service.py | byceps/services/user_badge/service.py | """
byceps.services.user_badge.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from collections import defaultdict
from ...database import db
from .models import Badge, BadgeAwarding
def create_badge(label, image_filename, *, brand_id=None, description=None):
"""Introduce a new badge."""
badge = Badge(label, image_filename, brand_id=brand_id,
description=description)
db.session.add(badge)
db.session.commit()
return badge.to_tuple()
def find_badge(badge_id):
"""Return the badge with that id, or `None` if not found."""
badge = Badge.query.get(badge_id)
if badge is None:
return None
return badge.to_tuple()
def get_badges(badge_ids):
"""Return the badges with those IDs."""
if not badge_ids:
return []
badges = Badge.query \
.filter(Badge.id.in_(badge_ids)) \
.all()
return [badge.to_tuple() for badge in badges]
def get_badges_for_user(user_id):
"""Return all badges that have been awarded to the user."""
badges = Badge.query \
.join(BadgeAwarding).filter_by(user_id=user_id) \
.all()
return [badge.to_tuple() for badge in badges]
def get_badges_for_users(user_ids):
"""Return all badges that have been awarded to the users, indexed
by user ID.
"""
if not user_ids:
return {}
awardings = BadgeAwarding.query \
.filter(BadgeAwarding.user_id.in_(user_ids)) \
.all()
badge_ids = frozenset(awarding.badge_id for awarding in awardings)
badges = get_badges(badge_ids)
badges_by_id = {badge.id: badge for badge in badges}
badges_by_user_id = defaultdict(set)
for awarding in awardings:
badge = badges_by_id[awarding.badge_id]
badges_by_user_id[awarding.user_id].add(badge)
return dict(badges_by_user_id)
def get_all_badges():
"""Return all badges."""
badges = Badge.query.all()
return [badge.to_tuple() for badge in badges]
def award_badge_to_user(badge_id, user_id):
"""Award the badge to the user."""
awarding = BadgeAwarding(badge_id, user_id)
db.session.add(awarding)
db.session.commit()
return awarding.to_tuple()
def get_awardings_of_badge(badge_id):
"""Return the awardings (user and date) of this badge."""
awardings = BadgeAwarding.query \
.filter_by(badge_id=badge_id) \
.all()
return [awarding.to_tuple() for awarding in awardings]
| """
byceps.services.user_badge.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from collections import defaultdict
from ...database import db
from .models import Badge, BadgeAwarding
def create_badge(label, image_filename, *, brand_id=None, description=None):
"""Introduce a new badge."""
badge = Badge(label, image_filename, brand_id=brand_id,
description=description)
db.session.add(badge)
db.session.commit()
return badge.to_tuple()
def find_badge(badge_id):
"""Return the badge with that id, or `None` if not found."""
badge = Badge.query.get(badge_id)
return badge.to_tuple()
def get_badges(badge_ids):
"""Return the badges with those IDs."""
if not badge_ids:
return []
badges = Badge.query \
.filter(Badge.id.in_(badge_ids)) \
.all()
return [badge.to_tuple() for badge in badges]
def get_badges_for_user(user_id):
"""Return all badges that have been awarded to the user."""
badges = Badge.query \
.join(BadgeAwarding).filter_by(user_id=user_id) \
.all()
return [badge.to_tuple() for badge in badges]
def get_badges_for_users(user_ids):
"""Return all badges that have been awarded to the users, indexed
by user ID.
"""
if not user_ids:
return {}
awardings = BadgeAwarding.query \
.filter(BadgeAwarding.user_id.in_(user_ids)) \
.all()
badge_ids = frozenset(awarding.badge_id for awarding in awardings)
badges = get_badges(badge_ids)
badges_by_id = {badge.id: badge for badge in badges}
badges_by_user_id = defaultdict(set)
for awarding in awardings:
badge = badges_by_id[awarding.badge_id]
badges_by_user_id[awarding.user_id].add(badge)
return dict(badges_by_user_id)
def get_all_badges():
"""Return all badges."""
badges = Badge.query.all()
return [badge.to_tuple() for badge in badges]
def award_badge_to_user(badge_id, user_id):
"""Award the badge to the user."""
awarding = BadgeAwarding(badge_id, user_id)
db.session.add(awarding)
db.session.commit()
return awarding.to_tuple()
def get_awardings_of_badge(badge_id):
"""Return the awardings (user and date) of this badge."""
awardings = BadgeAwarding.query \
.filter_by(badge_id=badge_id) \
.all()
return [awarding.to_tuple() for awarding in awardings]
| bsd-3-clause | Python |
17e4141703e8b9d2975e8c7de37729aef8905751 | fix test_playlist.test_title | pytube/pytube | tests/contrib/test_playlist.py | tests/contrib/test_playlist.py | # -*- coding: utf-8 -*-
from unittest import mock
from pytube import Playlist
@mock.patch("pytube.contrib.playlist.request.get")
def test_title(request_get):
request_get.return_value = "<title>(149) Python Tutorial for Beginners (For Absolute Beginners) - YouTube</title>"
url = "https://www.fakeurl.com/playlist?list=PLsyeobzWxl7poL9JTVyndKe62ieoN"
pl = Playlist(url)
pl_title = pl.title()
assert pl_title == "(149) Python Tutorial for Beginners (For Absolute Beginners)"
| # -*- coding: utf-8 -*-
from unittest import mock
from unittest.mock import MagicMock
from pytube import Playlist
@mock.patch("request.get")
def test_title(request_get):
request_get.return_value = ""
list_key = "PLsyeobzWxl7poL9JTVyndKe62ieoN-MZ3"
url = "https://www.fakeurl.com/playlist?list=" + list_key
pl = Playlist(url)
pl_title = pl.title()
assert pl_title == "Python Tutorial for Beginners"
| unlicense | Python |
fbbd5a7d28cdda0f38e01822b904e24d6b4f232e | Update main.py | rjagerman/aidu,MartienLagerweij/aidu,MartienLagerweij/aidu,rjagerman/aidu,MartienLagerweij/aidu,rjagerman/aidu | aidu_user_management/src/main.py | aidu_user_management/src/main.py | #!/usr/bin/env python
__author__ = 'Rolf Jagerman'
import roslib; roslib.load_manifest('aidu_user_management')
import rospy
from aidu_user_management.srv import Authenticate, AuthenticateResponse
from aidu_user_management.msg import User, Authentication
authenticate_service = None
authentication_publisher = None
def authenticate(req):
# TODO: Get real user from database
rospy.loginfo("Processing authentication request");
success = False
user = None
if req.id == 'campus-card-code':
success = True
user = User(req.id, 'Firstname', 'Lastname', [1])
#if req.id == 'campus-card-code':
# success = True
# user = User(req.id, 'Firstname', 'Lastname', [1])
authentication_publisher.publish(Authentication(req.login, success, user))
return AuthenticateResponse(req.id, success, user)
if __name__ == "__main__":
rospy.init_node('user_management')
authenticate_service = rospy.Service('authenticate', Authenticate, authenticate)
authentication_publisher = rospy.Publisher('authentication', Authentication)
rospy.spin()
| #!/usr/bin/env python
__author__ = 'Rolf Jagerman'
import roslib; roslib.load_manifest('aidu_user_management')
import rospy
from aidu_user_management.srv import Authenticate, AuthenticateResponse
from aidu_user_management.msg import User, Authentication
authenticate_service = None
authentication_publisher = None
def authenticate(req):
# TODO: Get real user from database
rospy.loginfo("Processing authentication request");
success = False
user = None
if req.id == '670056546044310CA932A80':
success = True
user = User(req.id, 'Robert', 'Peuchen', [1])
if req.id == '670047480047453BA202A80':
success = True
user = User(req.id, 'Dylan', 'de Carvalho Cruz', [2])
if req.id == '67018583304156612942A80':
success = True
user = User(req.id, 'Floris', 'Gaisser', [3])
authentication_publisher.publish(Authentication(req.login, success, user))
return AuthenticateResponse(req.id, success, user)
if __name__ == "__main__":
rospy.init_node('user_management')
authenticate_service = rospy.Service('authenticate', Authenticate, authenticate)
authentication_publisher = rospy.Publisher('authentication', Authentication)
rospy.spin()
| mit | Python |
6c5fd62262e4194a7ea95f504c01bf686955a052 | Hide root in DirDialog. | shaurz/devo | dirdialog.py | dirdialog.py | import os
import wx
from dirtree import DirTreeCtrl, DirTreeFilter, DirNode
class DirDialog(wx.Dialog):
def __init__(self, parent, size=wx.DefaultSize, message="", path="", select_path=""):
style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
if size == wx.DefaultSize:
size = wx.Size(450, 600)
wx.Dialog.__init__(self, parent, size=size, title=message, style=style)
toplevel = [DirNode(path)] if path else None
filter = DirTreeFilter(show_files=False)
self.dirtree = DirTreeCtrl(self, self, toplevel=toplevel, filter=filter)
if select_path:
self.dirtree.SelectPath(select_path)
btnsizer = wx.StdDialogButtonSizer()
btn_ok = wx.Button(self, wx.ID_OK)
btn_ok.SetDefault()
btnsizer.AddButton(btn_ok)
btnsizer.AddButton(wx.Button(self, wx.ID_CANCEL))
btnsizer.Realize()
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.dirtree, 1, wx.EXPAND)
sizer.Add(btnsizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
self.dirtree.SetFocus()
def OpenFile(self, path):
pass
def GetPath(self):
return self.dirtree.GetSelectedPath()
| import os
import wx
from dirtree import DirTreeCtrl, DirTreeFilter, DirNode
class DirDialog(wx.Dialog):
def __init__(self, parent, size=wx.DefaultSize, message="", path="", select_path=""):
style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
if size == wx.DefaultSize:
size = wx.Size(450, 600)
wx.Dialog.__init__(self, parent, size=size, title=message, style=style)
toplevel = [DirNode(path)] if path else None
filter = DirTreeFilter(show_files=False)
self.dirtree = DirTreeCtrl(self, self,
toplevel=toplevel, filter=filter, show_root=True)
if select_path:
self.dirtree.SelectPath(select_path)
btnsizer = wx.StdDialogButtonSizer()
btn_ok = wx.Button(self, wx.ID_OK)
btn_ok.SetDefault()
btnsizer.AddButton(btn_ok)
btnsizer.AddButton(wx.Button(self, wx.ID_CANCEL))
btnsizer.Realize()
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.dirtree, 1, wx.EXPAND)
sizer.Add(btnsizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
self.dirtree.SetFocus()
def OpenFile(self, path):
pass
def GetPath(self):
return self.dirtree.GetSelectedPath()
| mit | Python |
6a6682c15324efe5cc617d619f8af1275f32770a | update with ssl | fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools | databricks/databricks-jobs.py | databricks/databricks-jobs.py | import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
API_VERSION = '/api/2.1'
print("----------RUNNING JOB " + JOB_NAME )
# Run Get request with api_command param
# /jobs/list/ with api 2.0 returns all jobs, 2.1 does not
def getRequest(api_command, params={}):
if api_command == '/jobs/list':
url = "https://{}{}{}".format(INSTANCE_ID, '/api/2.0', api_command)
else:
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.get(
url = url,
json = params,
)
return response
# Start a job run
def postRequest(api_command, params):
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.post(
url = url,
json = params,
)
return response
# Get all job names and jobID's and map to dict
def getJobIds(res):
tempDict = {}
for job in res.json()['jobs']:
tempDict[job['settings']['name']] = job['job_id']
return tempDict
jobs = getJobIds(getRequest('/jobs/list'))
if( JOB_NAME in jobs ):
print("JOB ID: " + str(jobs[JOB_NAME]))
job_params = {'job_id': jobs[JOB_NAME]}
startJob = postRequest('/jobs/run-now', job_params)
run_id = startJob.json()['run_id']
run_params = { 'run_id' : run_id }
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
#Wait for job to finish running
while(job_status == "RUNNING" or job_status == "PENDING"):
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
finishedJob = getRequest('/jobs/runs/get-output', run_params)
print(json.dumps(json.loads(finishedJob.text), indent = 2))
run_url = finishedJob.json()["metadata"]["run_page_url"].replace("webapp", INSTANCE_ID+"/")
print("---------------SEE JOB RUN HERE: " + run_url)
else:
raise ValueError(sys.argv[2] + " is not a job in databricks")
| import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
# REMOVE WHEN SSL IS ENABLED
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
API_VERSION = '/api/2.1'
print("----------RUNNING JOB " + JOB_NAME )
# Run Get request with api_command param
def getRequest(api_command, params={}):
if api_command == '/jobs/list':
url = "https://{}{}{}".format(INSTANCE_ID, '/api/2.0', api_command)
else:
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.get(
url = url,
json = params,
verify = False #Needed because we dont have ssl
)
return response
# Start a job run
def postRequest(api_command, params):
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.post(
url = url,
json = params,
verify = False #Needed because we dont have ssl
)
return response
# Get all job names and jobID's and map to dict
def getJobIds(res):
tempDict = {}
for job in res.json()['jobs']:
tempDict[job['settings']['name']] = job['job_id']
return tempDict
jobs = getJobIds(getRequest('/jobs/list'))
if( JOB_NAME in jobs ):
print("JOB ID: " + str(jobs[JOB_NAME]))
job_params = {'job_id': jobs[JOB_NAME]}
startJob = postRequest('/jobs/run-now', job_params)
run_id = startJob.json()['run_id']
run_params = { 'run_id' : run_id }
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
#Wait for job to finish running
while(job_status == "RUNNING" or job_status == "PENDING"):
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
finishedJob = getRequest('/jobs/runs/get-output', run_params)
print(json.dumps(json.loads(finishedJob.text), indent = 2))
run_url = finishedJob.json()["metadata"]["run_page_url"].replace("webapp", INSTANCE_ID+"/")
print("---------------SEE JOB RUN HERE: " + run_url)
else:
raise ValueError(sys.argv[2] + " is not a job in databricks")
| cc0-1.0 | Python |
0c27d4ab049b436b1bd54d09ca457c64c1b9e3f1 | add dynamic cache expiration time | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | htdocs/json/nwstext_search.py | htdocs/json/nwstext_search.py | #!/usr/bin/env python
"""
Search for NWS Text, return JSON
"""
import memcache
import cgi
import sys
import datetime
import pytz
import json
def run(sts, ets, awipsid):
""" Actually do some work! """
import psycopg2
dbconn = psycopg2.connect(database='afos', host='iemdb', user='nobody')
cursor = dbconn.cursor()
res = {'results': []}
cursor.execute("""
SELECT data,
to_char(entered at time zone 'UTC', 'YYYY-MM-DDThh24:MIZ'),
source, wmo from products WHERE pil = %s
and entered >= %s and entered < %s ORDER by entered ASC
""", (awipsid, sts, ets))
for row in cursor:
res['results'].append(dict(ttaaii=row[3],
utcvalid=row[1],
data=row[0],
cccc=row[2]))
return json.dumps(res)
def main():
""" Do Stuff """
sys.stdout.write("Content-type: application/json\n\n")
form = cgi.FieldStorage()
awipsid = form.getfirst('awipsid')[:6]
sts = form.getfirst('sts')
ets = form.getfirst('ets')
cb = form.getfirst('callback', None)
mckey = "/json/nwstext_search/%s/%s/%s?callback=%s" % (sts, ets,
awipsid, cb)
mc = memcache.Client(['iem-memcached:11211'], debug=0)
res = mc.get(mckey)
if not res:
sts = datetime.datetime.strptime(sts, '%Y-%m-%dT%H:%MZ')
sts = sts.replace(tzinfo=pytz.timezone("UTC"))
ets = datetime.datetime.strptime(ets, '%Y-%m-%dT%H:%MZ')
ets = ets.replace(tzinfo=pytz.timezone("UTC"))
now = datetime.datetime.utcnow()
now = now.replace(tzinfo=pytz.timezone("UTC"))
cacheexpire = 0 if ets < now else 120
res = run(sts, ets, awipsid)
mc.set(mckey, res, cacheexpire)
if cb is None:
sys.stdout.write( res )
else:
sys.stdout.write("%s(%s)" % (cb, res))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Search for NWS Text, return JSON
"""
import memcache
import cgi
import sys
import datetime
import pytz
import json
def run(sts, ets, awipsid):
""" Actually do some work! """
import psycopg2
dbconn = psycopg2.connect(database='afos', host='iemdb', user='nobody')
cursor = dbconn.cursor()
res = {'results': []}
cursor.execute("""
SELECT data,
to_char(entered at time zone 'UTC', 'YYYY-MM-DDThh24:MIZ'),
source, wmo from products WHERE pil = %s
and entered >= %s and entered < %s ORDER by entered ASC
""", (awipsid, sts, ets))
for row in cursor:
res['results'].append(dict(ttaaii=row[3],
utcvalid=row[1],
data=row[0],
cccc=row[2]))
return json.dumps(res)
def main():
""" Do Stuff """
sys.stdout.write("Content-type: application/json\n\n")
form = cgi.FieldStorage()
awipsid = form.getfirst('awipsid')[:6]
sts = form.getfirst('sts')
ets = form.getfirst('ets')
cb = form.getfirst('callback', None)
mckey = "/json/nwstext_search/%s/%s/%s?callback=%s" % (sts, ets,
awipsid, cb)
mc = memcache.Client(['iem-memcached:11211'], debug=0)
res = mc.get(mckey)
if not res:
sts = datetime.datetime.strptime(sts, '%Y-%m-%dT%H:%MZ')
sts = sts.replace(tzinfo=pytz.timezone("UTC"))
ets = datetime.datetime.strptime(ets, '%Y-%m-%dT%H:%MZ')
ets = ets.replace(tzinfo=pytz.timezone("UTC"))
res = run(sts, ets, awipsid)
mc.set(mckey, res)
if cb is None:
sys.stdout.write( res )
else:
sys.stdout.write("%s(%s)" % (cb, res))
if __name__ == '__main__':
main()
| mit | Python |
71df41f93eaf80279ab32c865096269739918d66 | update to keep additional track record | pravj/Doga,KorayAgaya/Doga | Doga/statistics.py | Doga/statistics.py | # -*- coding: utf-8 -*-
"""
Doga.statistics
This module manage information about log statistics
Shows data summary periodically in each 10 seconds
"""
import threading
from collections import Counter
from thread_timer import ThreadTimer
class Statistics():
def __init__(self):
self.queue = []
self.total = 0
self.stop_event = threading.Event()
self.stats_timer = ThreadTimer(10, self.stop_event, self.update_queue)
self.stats_timer.start()
def queue_event(self, method, host, section):
""" Queue each request to be used in statistics
param: method(str) : request method type
param: host(str) : requested resource host
param: section(str) : requested resource section
"""
self.queue.append(host + section)
def max_queue(self):
""" return resource section info having maximum hits and count of total hits
"""
if (len(self.queue) > 0):
frequency = Counter(self.queue)
print frequency.most_common()[0][0], frequency.most_common()[0][1]
else:
print "No requests"
print len(self.queue)
self.total += len(self.queue)
print self.total
def update_queue(self):
""" update the queue periodically and call for 'max_queue' method
"""
self.max_queue()
self.queue = []
| # -*- coding: utf-8 -*-
"""
Doga.statistics
This module manage information about log statistics
Shows data summary periodically in each 10 seconds
"""
import time
import threading
from thread_timer import ThreadTimer
#class ThreadTimer(threading.Thread):
#
# def __init__(self, event, callback):
# threading.Thread.__init__(self)
# self.stopped = event
#
# self.callback = callback
#
# def run(self):
# while not self.stopped.wait(10):
# self.callback()
class Statistics():
def __init__(self):
self.queue = []
self.total_count = 0
stop_event = threading.Event()
thread_timer = ThreadTimer(10, stop_event, self.update_queue)
thread_timer.start()
def queue_event(self, method, host, section):
""" Queue each request to be used in statistics
"""
self.queue.append(host + section)
def update_queue(self):
print len(self.queue)
self.queue = []
| mit | Python |
85537b6bee3efdd1862726a7e68bd79f693ecd4f | Make still class available when decorating class with @page | tornado-utils/tornado-menumaker | tornado_menumaker/helper/Page.py | tornado_menumaker/helper/Page.py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
"""
"""
import inspect
from tornado.web import Application
from .Route import Route
from .Index import IndexRoute
__author__ = 'Martin Martimeo <martin@martimeo.de>'
__date__ = '16.06.13 - 23:46'
class Page(Route):
"""
A Page
"""
def __init__(self, url: str=None, **kwargs):
super().__init__(url=url, **kwargs)
self._index = None
def __call__(self, *args, **kwargs):
if isinstance(args[0], Application):
if self._index is not None:
return self._index(*args, **kwargs)
self.handler = self.cls(*args, **kwargs)
return self.handler
elif isinstance(args[0], type):
self.cls = args[0]
for n, route in inspect.getmembers(self.cls, Route.isroute):
route.url = self._url + route.url
route.cls = self.cls
for n, method in inspect.getmembers(self.cls, IndexRoute.isindex):
self._index = method
return self.cls
raise Exception()
| #!/usr/bin/python
# -*- encoding: utf-8 -*-
"""
"""
import inspect
from tornado.web import Application
from .Route import Route
from .Index import IndexRoute
__author__ = 'Martin Martimeo <martin@martimeo.de>'
__date__ = '16.06.13 - 23:46'
class Page(Route):
"""
A Page
"""
def __init__(self, url: str=None, **kwargs):
super().__init__(url=url, **kwargs)
self._index = None
def __call__(self, *args, **kwargs):
if isinstance(args[0], Application):
if self._index is not None:
return self._index(*args, **kwargs)
self.handler = self.cls(*args, **kwargs)
return self.handler
elif isinstance(args[0], type):
self.cls = args[0]
for n, route in inspect.getmembers(self.cls, Route.isroute):
route.url = self._url + route.url
route.cls = self.cls
for n, method in inspect.getmembers(self.cls, IndexRoute.isindex):
self._index = method
return self
raise Exception()
| agpl-3.0 | Python |
8d888d735b0a061f07e20531843e578aea97b770 | Make opp_player required | davidrobles/mlnd-capstone-code | capstone/rl/utils/plot.py | capstone/rl/utils/plot.py | from __future__ import division
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
from .callbacks import Callback
from ...game.players import GreedyQ, RandPlayer
from ...game.utils import play_series
class EpisodicWLDPlotter(Callback):
'''
Plots the episodic win, loss and draws of a learner
against a fixed opponent
'''
def __init__(self, game, opp_player, n_matches=1000, period=1, filepath='test.pdf'):
self.game = game
self.opp_player = opp_player
self.n_matches = n_matches
self.period = period
self.filepath = filepath
self.x = []
self.y_wins = []
self.y_draws = []
self.y_losses = []
def on_episode_end(self, episode, qf):
if episode % self.period != 0:
return
self._plot(episode, qf)
def _plot(self, episode, qf):
results = play_series(
game=self.game.copy(),
players=[GreedyQ(qf), self.opp_player],
n_matches=self.n_matches,
verbose=False
)
self.x.append(episode)
self.y_wins.append(results['W'] / self.n_matches)
self.y_draws.append(results['D'] / self.n_matches)
self.y_losses.append(results['L'] / self.n_matches)
def on_train_end(self, qf):
n_episodes = len(self.x) * self.period
self._plot(n_episodes - 1, qf)
fig = plt.figure()
ax = fig.add_subplot(111)
w_line, = ax.plot(self.x, self.y_wins, label='Win')
l_line, = ax.plot(self.x, self.y_losses, label='Loss')
d_line, = ax.plot(self.x, self.y_draws, label='Draw')
ax.set_xlim([0, n_episodes])
ax.set_ylim([0, 1.0])
plt.xlabel('Episodes')
formatter = FuncFormatter(lambda y, _: '{:d}%'.format(int(y * 100)))
plt.gca().yaxis.set_major_formatter(formatter)
plt.legend(handles=[w_line, l_line, d_line], loc=7)
plt.savefig(self.filepath)
| from __future__ import division
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
from .callbacks import Callback
from ...game.players import GreedyQ, RandPlayer
from ...game.utils import play_series
class EpisodicWLDPlotter(Callback):
'''
Plots the episodic win, loss and draws of a learner
against a fixed opponent
'''
def __init__(self, game, opp_player=None, n_matches=1000,
period=1, filepath='test.pdf'):
self.game = game
self.opp_player = opp_player
self.n_matches = n_matches
self.period = period
self.filepath = filepath
self.x = []
self.y_wins = []
self.y_draws = []
self.y_losses = []
def on_episode_end(self, episode, qf):
if episode % self.period != 0:
return
self._plot(episode, qf)
def _plot(self, episode, qf):
results = play_series(
game=self.game.copy(),
players=[GreedyQ(qf), self.opp_player],
n_matches=self.n_matches,
verbose=False
)
self.x.append(episode)
self.y_wins.append(results['W'] / self.n_matches)
self.y_draws.append(results['D'] / self.n_matches)
self.y_losses.append(results['L'] / self.n_matches)
def on_train_end(self, qf):
n_episodes = len(self.x) * self.period
self._plot(n_episodes - 1, qf)
fig = plt.figure()
ax = fig.add_subplot(111)
w_line, = ax.plot(self.x, self.y_wins, label='Win')
l_line, = ax.plot(self.x, self.y_losses, label='Loss')
d_line, = ax.plot(self.x, self.y_draws, label='Draw')
ax.set_xlim([0, n_episodes])
ax.set_ylim([0, 1.0])
plt.xlabel('Episodes')
formatter = FuncFormatter(lambda y, _: '{:d}%'.format(int(y * 100)))
plt.gca().yaxis.set_major_formatter(formatter)
plt.legend(handles=[w_line, l_line, d_line], loc=7)
plt.savefig(self.filepath)
| mit | Python |
de9e48174f921a84408d9fb0d48e59a7d0693336 | Change unnecessary Sonos coroutine to callback (#60643) | w1ll1am23/home-assistant,mezz64/home-assistant,GenericStudent/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant,rohitranjan1991/home-assistant,rohitranjan1991/home-assistant,toddeye/home-assistant,rohitranjan1991/home-assistant,home-assistant/home-assistant,GenericStudent/home-assistant,jawilson/home-assistant,home-assistant/home-assistant,mezz64/home-assistant,toddeye/home-assistant,jawilson/home-assistant,nkgilley/home-assistant | homeassistant/components/sonos/number.py | homeassistant/components/sonos/number.py | """Entity representing a Sonos number control."""
from __future__ import annotations
from homeassistant.components.number import NumberEntity
from homeassistant.const import ENTITY_CATEGORY_CONFIG
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SONOS_CREATE_LEVELS
from .entity import SonosEntity
from .helpers import soco_error
from .speaker import SonosSpeaker
LEVEL_TYPES = ("bass", "treble")
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Sonos number platform from a config entry."""
@callback
def _async_create_entities(speaker: SonosSpeaker) -> None:
entities = []
for level_type in LEVEL_TYPES:
entities.append(SonosLevelEntity(speaker, level_type))
async_add_entities(entities)
config_entry.async_on_unload(
async_dispatcher_connect(hass, SONOS_CREATE_LEVELS, _async_create_entities)
)
class SonosLevelEntity(SonosEntity, NumberEntity):
"""Representation of a Sonos level entity."""
_attr_entity_category = ENTITY_CATEGORY_CONFIG
_attr_min_value = -10
_attr_max_value = 10
def __init__(self, speaker: SonosSpeaker, level_type: str) -> None:
"""Initialize the level entity."""
super().__init__(speaker)
self.level_type = level_type
@property
def unique_id(self) -> str:
"""Return the unique ID."""
return f"{self.soco.uid}-{self.level_type}"
@property
def name(self) -> str:
"""Return the name."""
return f"{self.speaker.zone_name} {self.level_type.capitalize()}"
async def _async_poll(self) -> None:
"""Poll the value if subscriptions are not working."""
# Handled by SonosSpeaker
@soco_error()
def set_value(self, value: float) -> None:
"""Set a new value."""
setattr(self.soco, self.level_type, value)
@property
def value(self) -> float:
"""Return the current value."""
return getattr(self.speaker, self.level_type)
| """Entity representing a Sonos number control."""
from __future__ import annotations
from homeassistant.components.number import NumberEntity
from homeassistant.const import ENTITY_CATEGORY_CONFIG
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SONOS_CREATE_LEVELS
from .entity import SonosEntity
from .helpers import soco_error
from .speaker import SonosSpeaker
LEVEL_TYPES = ("bass", "treble")
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Sonos number platform from a config entry."""
async def _async_create_entities(speaker: SonosSpeaker) -> None:
entities = []
for level_type in LEVEL_TYPES:
entities.append(SonosLevelEntity(speaker, level_type))
async_add_entities(entities)
config_entry.async_on_unload(
async_dispatcher_connect(hass, SONOS_CREATE_LEVELS, _async_create_entities)
)
class SonosLevelEntity(SonosEntity, NumberEntity):
"""Representation of a Sonos level entity."""
_attr_entity_category = ENTITY_CATEGORY_CONFIG
_attr_min_value = -10
_attr_max_value = 10
def __init__(self, speaker: SonosSpeaker, level_type: str) -> None:
"""Initialize the level entity."""
super().__init__(speaker)
self.level_type = level_type
@property
def unique_id(self) -> str:
"""Return the unique ID."""
return f"{self.soco.uid}-{self.level_type}"
@property
def name(self) -> str:
"""Return the name."""
return f"{self.speaker.zone_name} {self.level_type.capitalize()}"
async def _async_poll(self) -> None:
"""Poll the value if subscriptions are not working."""
# Handled by SonosSpeaker
@soco_error()
def set_value(self, value: float) -> None:
"""Set a new value."""
setattr(self.soco, self.level_type, value)
@property
def value(self) -> float:
"""Return the current value."""
return getattr(self.speaker, self.level_type)
| apache-2.0 | Python |
93157800175722eb36ef1f8a920f0052f6c64495 | Update honeypot.py | FabioChiodini/ProjectSpawnSwarmtc,FabioChiodini/ProjectSpawnSwarmtc | honeypot/honeypot.py | honeypot/honeypot.py | from flask import Flask, jsonify, request
import os
import logging
import logstash
import sys
if 'LOG_HOST' not in os.environ:
raise(Exception("LOG_HOST NOT DEFINED"))
host = os.environ['LOG_HOST']
test_logger = logging.getLogger('python-logstash-logger')
test_logger.setLevel(logging.INFO)
test_logger.addHandler(logstash.TCPLogstashHandler(host, 5000, version=1))
app = Flask(__name__)
def log_request(req):
extra = {
'ip': request.environ.get('X-Forwarded-For', request.remote_addr),
'url': req.full_path,
}
test_logger.info('honeypot: ', extra=extra)
# data_to_log.update(req.headers)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def honey(path):
log_request(request)
return jsonify({'result': 'ok'})
if __name__ == '__main__':
app.run(host="0.0.0.0",port=8080)
| from flask import Flask, jsonify, request
import os
import requests
from pprint import pprint
import json
if 'LOG_HOST' not in os.environ or 'LOG_PORT' not in os.environ:
raise(Exception("LOG_HOST OR LOG_PORT NOT DEFINED"))
POST_URL = "http://{host}:{port}/log".format(host=os.environ['LOG_HOST'],port=os.environ['LOG_PORT'])
app = Flask(__name__)
def log_request(req):
data_to_log = {}
data_to_log.update(req.headers)
ip = request.environ.get('X-Forwarded-For', request.remote_addr)
data_to_log.update({"ip": ip})
data_to_log.update({"url": req.full_path})
try:
requests.post(POST_URL,json=json.dumps(data_to_log))
except Exception as e:
print(e)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def honey(path):
log_request(request)
return jsonify({'result': 'ok'})
if __name__ == '__main__':
app.run(host="0.0.0.0",port=8080)
| mit | Python |
d2110e629ea38c6ed7bfffd87a771ec3177c0c75 | Bump version: 0.4.1 → 0.5.2 | rwanyoike/time2relax | time2relax/__version__.py | time2relax/__version__.py | __title__ = 'time2relax'
__description__ = 'A CouchDB driver for Python.'
__url__ = 'https://github.com/rwanyoike/time2relax'
__version__ = '0.5.2'
__author__ = 'Raymond Wanyoike'
__author_email__ = 'raymond.wanyoike@gmail.com'
__license__ = 'MIT'
__copyright__ = '2017 Raymond Wanyoike'
__couch__ = u'\u2728 \U0001f6cb \u2728'
| __title__ = 'time2relax'
__description__ = 'A CouchDB driver for Python.'
__url__ = 'https://github.com/rwanyoike/time2relax'
__version__ = '0.4.1'
__author__ = 'Raymond Wanyoike'
__author_email__ = 'raymond.wanyoike@gmail.com'
__license__ = 'MIT'
__copyright__ = '2017 Raymond Wanyoike'
__couch__ = u'\u2728 \U0001f6cb \u2728'
| mit | Python |
4a36798e38703f890452c2c03b99e8abf00c8e0d | Add tornado and bokeh to get_versions (#1181) | dask/distributed,blaze/distributed,mrocklin/distributed,dask/distributed,dask/distributed,blaze/distributed,mrocklin/distributed,dask/distributed,mrocklin/distributed | distributed/versions.py | distributed/versions.py | """ utilities for package version introspection """
from __future__ import print_function, division, absolute_import
import platform
import struct
import os
import sys
import locale
import importlib
required_packages = [('dask', lambda p: p.__version__),
('distributed', lambda p: p.__version__),
('msgpack', lambda p: '.'.join([str(v) for v in p.version])),
('cloudpickle', lambda p: p.__version__),
('tornado', lambda p: p.version),
('toolz', lambda p: p.__version__)]
optional_packages = [('numpy', lambda p: p.__version__),
('pandas', lambda p: p.__version__),
('bokeh', lambda p: p.__version__),
('lz4', lambda p: p.__version__),
('blosc', lambda p: p.__version__)]
def get_versions():
""" Return basic information on our software installation,
and out installed versions of packages. """
d = {'host': get_system_info(),
'packages': {'required': get_package_info(required_packages),
'optional': get_package_info(optional_packages)}
}
return d
def get_system_info():
(sysname, nodename, release,
version, machine, processor) = platform.uname()
host = [("python", "%d.%d.%d.%s.%s" % sys.version_info[:]),
("python-bits", struct.calcsize("P") * 8),
("OS", "%s" % (sysname)),
("OS-release", "%s" % (release)),
("machine", "%s" % (machine)),
("processor", "%s" % (processor)),
("byteorder", "%s" % sys.byteorder),
("LC_ALL", "%s" % os.environ.get('LC_ALL', "None")),
("LANG", "%s" % os.environ.get('LANG', "None")),
("LOCALE", "%s.%s" % locale.getlocale()),
]
return host
def get_package_info(pkgs):
""" get package versions for the passed required & optional packages """
pversions = []
for (modname, ver_f) in pkgs:
try:
mod = importlib.import_module(modname)
ver = ver_f(mod)
pversions.append((modname, ver))
except:
pversions.append((modname, None))
return pversions
| """ utilities for package version introspection """
from __future__ import print_function, division, absolute_import
import platform
import struct
import os
import sys
import locale
import importlib
required_packages = [('dask', lambda p: p.__version__),
('distributed', lambda p: p.__version__),
('msgpack', lambda p: '.'.join([str(v) for v in p.version])),
('cloudpickle', lambda p: p.__version__),
('toolz', lambda p: p.__version__)]
optional_packages = [('numpy', lambda p: p.__version__),
('pandas', lambda p: p.__version__)]
def get_versions():
""" Return basic information on our software installation,
and out installed versions of packages. """
d = {'host': get_system_info(),
'packages': {'required': get_package_info(required_packages),
'optional': get_package_info(optional_packages)}
}
return d
def get_system_info():
(sysname, nodename, release,
version, machine, processor) = platform.uname()
host = [("python", "%d.%d.%d.%s.%s" % sys.version_info[:]),
("python-bits", struct.calcsize("P") * 8),
("OS", "%s" % (sysname)),
("OS-release", "%s" % (release)),
("machine", "%s" % (machine)),
("processor", "%s" % (processor)),
("byteorder", "%s" % sys.byteorder),
("LC_ALL", "%s" % os.environ.get('LC_ALL', "None")),
("LANG", "%s" % os.environ.get('LANG', "None")),
("LOCALE", "%s.%s" % locale.getlocale()),
]
return host
def get_package_info(pkgs):
""" get package versions for the passed required & optional packages """
pversions = []
for (modname, ver_f) in pkgs:
try:
mod = importlib.import_module(modname)
ver = ver_f(mod)
pversions.append((modname, ver))
except:
pversions.append((modname, None))
return pversions
| bsd-3-clause | Python |
2fa968d7ad9123b9b45712357ec5b844336056e7 | fix #6 | DataKind-BLR/ichangemycity | dedupe/scripts/icmc.py | dedupe/scripts/icmc.py | import json
import math
import codecs
import pandas as pd
import requests
import argparse
import sys
import time
ID_FIELD = "complaint_complaint_iid"
TEXT_FIELDS = {
"" : "title",
"complaint_description" : "description",
}
LATITUDE = "complaint_latitude"
LONGITUDE = "complaint_longitude"
HOST = "http://localhost:5000"
def load(path):
print "Loading Data into the API"
df = pd.read_csv(path)
# CLEAR UP OLD DATA
requests.delete(HOST + "/v1/points/clean")
total = len(df)
startTime = time.time()
for i, (index, row) in enumerate(df.iterrows()):
if math.isnan(row[LATITUDE]) or math.isnan(row[LONGITUDE]):
continue
data = {
"id" : str(row[ID_FIELD]),
"title" : row["complaint_title"],
"description" : str(row["complaint_description"]),
"latitude" : row[LATITUDE],
"longitude" : row[LONGITUDE],
}
#print data
response = requests.post(HOST + "/v1/point/", json=data)
if response.status_code != 200:
print response.text, data
if i % 1000 == 0:
print "{} of {} done. Elapsed Seconds: {}".format(i, total, time.time() - startTime)
print "{} done. Elapsed Seconds: {}".format(total, time.time() - startTime)
def query(path, outPath):
print "Querying the API"
df = pd.read_csv(path)
total = len(df)
startTime = time.time()
with codecs.open(outPath, "w", "utf-8") as writer:
for i, (index, row) in enumerate(df.iterrows()):
if math.isnan(row[LATITUDE]) or math.isnan(row[LONGITUDE]):
continue
data = {
"id" : str(row[ID_FIELD]),
"title" : row["complaint_title"],
"description" : str(row["complaint_description"]),
"latitude" : row[LATITUDE],
"longitude" : row[LONGITUDE],
}
url = "{}/v1/query/".format(HOST)
response = requests.post(url, json=data)
if response.status_code != 200:
print "ERR:", response.status_code, data["id"]
writer.write(json.dumps(response.json()))
writer.write("\n")
if i % 1000 == 0:
print "{} of {} done. Elapsed Seconds: {}".format(i, total, time.time() - startTime)
print "{} done. Elapsed Seconds: {}".format(total, time.time() - startTime)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("path", help="path of the complaints csv file")
parser.add_argument("-o", "--out", help="output path of the results")
parser.add_argument("-l", "--load", help="loads the data",
action="store_true")
parser.add_argument("-q", "--query", help="queries the api to fetch JSON results, stores output in OUT",
action="store_true")
args = parser.parse_args()
if args.load:
load(args.path)
if args.query:
if args.out is None:
print "OUT required if querying"
sys.exit(-1)
query(args.path, args.out)
if not args.load and not args.query:
print "No action specified" | import json
import math
import codecs
import pandas as pd
import requests
PATH = "/home/samarth/workspaces/datakind-workspace/ichangemycity/Sprint-ComplaintsDeduplication/icmyc_complaints.csv"
ID_FIELD = "complaint_complaint_iid"
TEXT_FIELDS = {
"" : "title",
"complaint_description" : "description",
}
LATITUDE = "complaint_latitude"
LONGITUDE = "complaint_longitude"
HOST = "http://localhost:5000"
def load():
df = pd.read_csv(PATH)
# CLEAR UP OLD DATA
requests.delete(HOST + "/v1/points/clean")
total = len(df)
for i, (index, row) in enumerate(df.iterrows()):
if math.isnan(row[LATITUDE]) or math.isnan(row[LONGITUDE]):
continue
data = {
"id" : str(row[ID_FIELD]),
"title" : row["complaint_title"],
"description" : str(row["complaint_description"]),
"latitude" : row[LATITUDE],
"longitude" : row[LONGITUDE],
}
#print data
response = requests.post(HOST + "/v1/point/", json=data)
if response.status_code != 200:
print response.text, data
if i % 1000 == 0:
print "{} of {} done".format(i, total)
def query():
df = pd.read_csv(PATH)
total = len(df)
for i, (index, row) in enumerate(df.iterrows()):
if math.isnan(row[LATITUDE]) or math.isnan(row[LONGITUDE]):
continue
data = {
"id" : str(row[ID_FIELD]),
"title" : row["complaint_title"],
"description" : str(row["complaint_description"]),
"latitude" : row[LATITUDE],
"longitude" : row[LONGITUDE],
}
url = "{}/v1/query/".format(HOST)
response = requests.post(url, json=data)
if response.status_code != 200:
print response.text, data["id"]
print response.json()
if i % 1000 == 0:
print "{} of {} done".format(i, total)
if __name__ == '__main__':
query() | mit | Python |
f09a870a3246462f73096f0d95a13f770aabb599 | Move logging format to settings | AustralianAntarcticDataCentre/metadata_xml_convert,AustralianAntarcticDataCentre/metadata_xml_convert | delete_old_converts.py | delete_old_converts.py | import argparse
import logging
import os
from file_checks import get_files_in_folder, get_input_path
from settings import CONVERSIONS, EXPORT_PATH, LOGGING_FORMAT
logger = logging.getLogger(__name__)
def get_arg_parser():
"""
Return an argument parser for this script.
Does not include any subparsers.
Returns
-------
argparse.ArgumentParser
Argument parser that has the `parse_args()` statement.
"""
parser = argparse.ArgumentParser(description='Delete conversions.')
parser.add_argument(
'-f',
'--force',
action='store_true',
dest='force',
default=False,
help='Force deletion of converted files.'
)
return parser
def main(args):
full_deletion_count = 0
# Loop each conversion type, getting the folder name.
for xsl_file_name, output_folder, checker in CONVERSIONS:
# Get the conversion output folder.
output_path = os.path.join(EXPORT_PATH, output_folder)
# Skip this conversion type if the folder does not exist.
if not os.path.exists(output_path):
logger.debug('Skipping %s', output_path)
continue
# Loop the converted XML files in the output folder.
logger.debug('Loop XML in %s', output_path)
deletion_count = 0
# Loop the XML files in the conversion output folder.
for output_file_path in get_files_in_folder(output_path, '.xml'):
if not args.force:
# Get the input file path from the output file path.
input_file_path = get_input_path(output_file_path)
# Skip deletion if the original file exists.
if os.path.exists(input_file_path):
continue
logger.info('Deleting %s', output_file_path)
deletion_count += 1
full_deletion_count += 1
# Remove the converted file.
os.remove(output_file_path)
logger.info('Deleted %s files in "%s".', deletion_count, output_folder)
logger.info('Deleted %s files in total.', full_deletion_count)
if '__main__' == __name__:
logging.basicConfig(format=LOGGING_FORMAT, level=logging.DEBUG)
logger.debug('File checking started.')
parser = get_arg_parser()
args = parser.parse_args()
main(args)
logger.debug('File checking complete.')
| import argparse
import logging
import os
from file_checks import get_files_in_folder, get_input_path
from settings import CONVERSIONS, EXPORT_PATH
logger = logging.getLogger(__name__)
def get_arg_parser():
"""
Return an argument parser for this script.
Does not include any subparsers.
Returns
-------
argparse.ArgumentParser
Argument parser that has the `parse_args()` statement.
"""
parser = argparse.ArgumentParser(description='Delete conversions.')
parser.add_argument(
'-f',
'--force',
action='store_true',
dest='force',
default=False,
help='Force deletion of converted files.'
)
return parser
def main(args):
full_deletion_count = 0
# Loop each conversion type, getting the folder name.
for xsl_file_name, output_folder, checker in CONVERSIONS:
# Get the conversion output folder.
output_path = os.path.join(EXPORT_PATH, output_folder)
# Skip this conversion type if the folder does not exist.
if not os.path.exists(output_path):
logger.debug('Skipping %s', output_path)
continue
# Loop the converted XML files in the output folder.
logger.debug('Loop XML in %s', output_path)
deletion_count = 0
# Loop the XML files in the conversion output folder.
for output_file_path in get_files_in_folder(output_path, '.xml'):
if not args.force:
# Get the input file path from the output file path.
input_file_path = get_input_path(output_file_path)
# Skip deletion if the original file exists.
if os.path.exists(input_file_path):
continue
logger.info('Deleting %s', output_file_path)
deletion_count += 1
full_deletion_count += 1
# Remove the converted file.
os.remove(output_file_path)
logger.info('Deleted %s files in "%s".', deletion_count, output_folder)
logger.info('Deleted %s files in total.', full_deletion_count)
yaml_fmt = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
if '__main__' == __name__:
logging.basicConfig(format=yaml_fmt, level=logging.DEBUG)
logger.debug('File checking started.')
parser = get_arg_parser()
args = parser.parse_args()
main(args)
logger.debug('File checking complete.')
| mit | Python |
6d0a6f70ddf8ee23a53e6933445f11a736a13a3c | correct worker2 | mhallett/MeDaReDa,mhallett/MeDaReDa | demos/demo1/worker2.py | demos/demo1/worker2.py | # worker2.py
import datetime
import pyrax
import medareda_lib
import os
class Worker2(object):
def __init__(self,in_table):
self.conn = medareda_lib.get_conn()
self.in_table = in_table
self.server_id = self._getServerId()
self.setStatusIdle()
def _getServerId(self):
hostname = os.uname()[1]
server_id = hostname.split('-')[1]
print server_id
#return 4
return int(server_id)
def setStatusIdle(self):
self._setStatus('idle')
def setStatusWorking(self):
self._setStatus('working')
def _setStatus(self, status):
cur = self.conn.cursor()
now = datetime.datetime.now()
sql = "UPDATE Worker SET status = '%s', since = '%s' WHERE serverId = %s" %(status,now,self.server_id)
cur.execute(sql)
#print sql
cur.execute(sql)
self.conn.commit()
self.conn.close()
def doWork(self):
print 'process in data'
# ( if status == deleting, return )
# get amount of work to to
# while work todo
# set status to work
# get work
# process work
# set status to idle
def testWorker2():
in_table = 'iPrice'
w = Worker2(in_table)
print w.server_id
w.doWork()
testWorker2()
| # worker2.py
import datetime
import pyrax
import medareda_worker_lib
import os
class Worker2(object):
def __init__(self,in_table):
self.conn = medareda_worker_lib.get_conn()
self.in_table = in_table
self.server_id = self._getServerId()
self.setStatusIdle()
def _getServerId(self):
hostname = os.uname()[1]
server_id = hostname.split('-')[1]
print server_id
#return 4
return int(server_id)
def setStatusIdle(self):
self._setStatus('idle')
def setStatusWorking(self):
self._setStatus('working')
def _setStatus(self, status):
cur = self.conn.cursor()
now = datetime.datetime.now()
sql = "UPDATE Worker SET status = '%s', since = '%s' WHERE serverId = %s" %(status,now,self.server_id)
cur.execute(sql)
#print sql
cur.execute(sql)
self.conn.commit()
self.conn.close()
def doWork(self):
print 'process in data'
# ( if status == deleting, return )
# get amount of work to to
# while work todo
# set worker status to work
# get work
# process work
# set status to idle
def testWorker2():
in_table = 'iPrice'
w = Worker2(in_table)
print w.server_id
w.doWork()
testWorker2() | mit | Python |
da37c4c7504568ede5ce12487887eceb4dd1e5c4 | Update djrill/mail/__init__.py | barseghyanartur/Djrill,janusnic/Djrill,idlweb/Djrill,janusnic/Djrill,brack3t/Djrill,idlweb/Djrill,barseghyanartur/Djrill | djrill/mail/__init__.py | djrill/mail/__init__.py | from django.core.exceptions import ImproperlyConfigured
from django.core.mail import EmailMultiAlternatives
class DjrillMessage(EmailMultiAlternatives):
alternative_subtype = "mandrill"
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None, alternatives=None,
cc=None, from_name=None, tags=None, track_opens=True,
track_clicks=True, preserve_recipients=False):
super(DjrillMessage, self).__init__(subject, body, from_email, to, bcc,
connection, attachments, headers, alternatives, cc)
self.from_name = from_name
self.tags = self._set_mandrill_tags(tags)
self.track_opens = track_opens
self.track_clicks = track_clicks
self.preserve_recipients = preserve_recipients
def _set_mandrill_tags(self, tags):
"""
Check that all tags are below 50 chars and that they do not start
with an underscore.
Raise ImproperlyConfigured if an underscore tag is passed in to
alert the user. Any tag over 50 chars is left out of the list.
"""
tag_list = []
for tag in tags:
if len(tag) <= 50 and not tag.startswith("_"):
tag_list.append(tag)
elif tag.startswith("_"):
raise ImproperlyConfigured(
"Tags starting with an underscore are reserved for "
"internal use and will cause errors with Mandill's API")
return tag_list
| from django.core.exceptions import ImproperlyConfigured
from django.core.mail import EmailMultiAlternatives
class DjrillMessage(EmailMultiAlternatives):
alternative_subtype = "mandrill"
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None, alternatives=None,
cc=None, from_name=None, tags=None, track_opens=True,
track_clicks=True, preserve_recipients=True):
super(DjrillMessage, self).__init__(subject, body, from_email, to, bcc,
connection, attachments, headers, alternatives, cc)
self.from_name = from_name
self.tags = self._set_mandrill_tags(tags)
self.track_opens = track_opens
self.track_clicks = track_clicks
self.preserve_recipients = preserve_recipients
def _set_mandrill_tags(self, tags):
"""
Check that all tags are below 50 chars and that they do not start
with an underscore.
Raise ImproperlyConfigured if an underscore tag is passed in to
alert the user. Any tag over 50 chars is left out of the list.
"""
tag_list = []
for tag in tags:
if len(tag) <= 50 and not tag.startswith("_"):
tag_list.append(tag)
elif tag.startswith("_"):
raise ImproperlyConfigured(
"Tags starting with an underscore are reserved for "
"internal use and will cause errors with Mandill's API")
return tag_list
| bsd-3-clause | Python |
4bcf35efcfc751a1c337fdcf50d23d9d06549717 | Fix typo in doc string | pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo | demo/apps/catalogue/models.py | demo/apps/catalogue/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
user oscars category as a wagtail Page.
this works becuase they both use treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
| mit | Python |
a33b05785b2b2710670b9ad974239c9cb0cffa14 | Add TwrTimelineError | tchx84/twitter-gobject | twitter/twr_error.py | twitter/twr_error.py | #!/usr/bin/env python
#
# Copyright (c) 2013 Martin Abente Lahaye. - tch@sugarlabs.org
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
class TwrStatusNotCreated(Exception):
pass
class TwrStatusAlreadyCreated(Exception):
pass
class TwrStatusNotFound(Exception):
pass
class TwrStatusError(Exception):
pass
class TwrTimelineError(Exception):
pass
| #!/usr/bin/env python
#
# Copyright (c) 2013 Martin Abente Lahaye. - tch@sugarlabs.org
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
class TwrStatusNotCreated(Exception):
pass
class TwrStatusAlreadyCreated(Exception):
pass
class TwrStatusNotFound(Exception):
pass
class TwrStatusError(Exception):
pass
| lgpl-2.1 | Python |
96cdeac4dbb1ac2e3863cbc98438750701198563 | fix imports for __init__ (CircuitListenerMixin misisng) | meejah/txtorcon,david415/txtorcon,meejah/txtorcon,david415/txtorcon | txtorcon/__init__.py | txtorcon/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import with_statement
from txtorcon._metadata import __version__, __author__, __contact__
from txtorcon._metadata import __license__, __copyright__, __url__
from txtorcon.router import Router
from txtorcon.circuit import Circuit
from txtorcon.circuit import build_timeout_circuit
from txtorcon.circuit import CircuitBuildTimedOutError
from txtorcon.stream import Stream
from txtorcon.torcontrolprotocol import TorControlProtocol
from txtorcon.torcontrolprotocol import TorProtocolError
from txtorcon.torcontrolprotocol import TorProtocolFactory
from txtorcon.torcontrolprotocol import DEFAULT_VALUE
from txtorcon.torstate import TorState
from txtorcon.torstate import build_tor_connection
from txtorcon.torstate import build_local_tor_connection
from txtorcon.torconfig import TorConfig
from txtorcon.torconfig import HiddenService
from txtorcon.torconfig import EphemeralHiddenService
from txtorcon.torconfig import TorProcessProtocol
from txtorcon.torconfig import launch_tor
from txtorcon.torconfig import TorNotFound
from txtorcon.torinfo import TorInfo
from txtorcon.addrmap import AddrMap
from txtorcon.endpoints import TorOnionAddress
from txtorcon.endpoints import TorOnionListeningPort
from txtorcon.endpoints import TCPHiddenServiceEndpoint
from txtorcon.endpoints import TCPHiddenServiceEndpointParser
from txtorcon.endpoints import TorClientEndpoint
from txtorcon.endpoints import TorClientEndpointStringParser
from txtorcon.endpoints import IHiddenService, IProgressProvider
from txtorcon.endpoints import get_global_tor
from . import util
from . import interface
from txtorcon.interface import (
ITorControlProtocol,
IStreamListener, IStreamAttacher, StreamListenerMixin,
ICircuitContainer, ICircuitListener, CircuitListenerMixin,
IRouterContainer, IAddrListener,
)
__all__ = [
"Router",
"Circuit",
"Stream",
"TorControlProtocol", "TorProtocolError", "TorProtocolFactory",
"TorState", "DEFAULT_VALUE",
"TorInfo",
"build_tor_connection", "build_local_tor_connection", "launch_tor",
"TorNotFound", "TorConfig", "HiddenService", "EphemeralHiddenService",
"TorProcessProtocol",
"TorInfo",
"TCPHiddenServiceEndpoint", "TCPHiddenServiceEndpointParser",
"TorClientEndpoint", "TorClientEndpointStringParser",
"IHiddenService", "IProgressProvider",
"TorOnionAddress", "TorOnionListeningPort",
"get_global_tor",
"build_timeout_circuit",
"CircuitBuildTimedOutError",
"AddrMap",
"util", "interface",
"ITorControlProtocol",
"IStreamListener", "IStreamAttacher", "StreamListenerMixin",
"ICircuitContainer", "ICircuitListener", "CircuitListenerMixin",
"IRouterContainer", "IAddrListener", "IProgressProvider",
"__version__", "__author__", "__contact__",
"__license__", "__copyright__", "__url__",
]
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import with_statement
from txtorcon._metadata import __version__, __author__, __contact__
from txtorcon._metadata import __license__, __copyright__, __url__
from txtorcon.router import Router
from txtorcon.circuit import Circuit
from txtorcon.circuit import build_timeout_circuit
from txtorcon.circuit import CircuitBuildTimedOutError
from txtorcon.stream import Stream
from txtorcon.torcontrolprotocol import TorControlProtocol
from txtorcon.torcontrolprotocol import TorProtocolError
from txtorcon.torcontrolprotocol import TorProtocolFactory
from txtorcon.torcontrolprotocol import DEFAULT_VALUE
from txtorcon.torstate import TorState
from txtorcon.torstate import build_tor_connection
from txtorcon.torstate import build_local_tor_connection
from txtorcon.torconfig import TorConfig
from txtorcon.torconfig import HiddenService
from txtorcon.torconfig import EphemeralHiddenService
from txtorcon.torconfig import TorProcessProtocol
from txtorcon.torconfig import launch_tor
from txtorcon.torconfig import TorNotFound
from txtorcon.torinfo import TorInfo
from txtorcon.addrmap import AddrMap
from txtorcon.endpoints import TorOnionAddress
from txtorcon.endpoints import TorOnionListeningPort
from txtorcon.endpoints import TCPHiddenServiceEndpoint
from txtorcon.endpoints import TCPHiddenServiceEndpointParser
from txtorcon.endpoints import TorClientEndpoint
from txtorcon.endpoints import TorClientEndpointStringParser
from txtorcon.endpoints import IHiddenService, IProgressProvider
from txtorcon.endpoints import get_global_tor
from . import util
from . import interface
from txtorcon.interface import (
ITorControlProtocol,
IStreamListener, IStreamAttacher, StreamListenerMixin,
ICircuitContainer, ICircuitListener,
IRouterContainer, IAddrListener,
)
__all__ = [
"Router",
"Circuit",
"Stream",
"TorControlProtocol", "TorProtocolError", "TorProtocolFactory",
"TorState", "DEFAULT_VALUE",
"TorInfo",
"build_tor_connection", "build_local_tor_connection", "launch_tor",
"TorNotFound", "TorConfig", "HiddenService", "EphemeralHiddenService",
"TorProcessProtocol",
"TorInfo",
"TCPHiddenServiceEndpoint", "TCPHiddenServiceEndpointParser",
"TorClientEndpoint", "TorClientEndpointStringParser",
"IHiddenService", "IProgressProvider",
"TorOnionAddress", "TorOnionListeningPort",
"get_global_tor",
"build_timeout_circuit",
"CircuitBuildTimedOutError",
"AddrMap",
"util", "interface",
"ITorControlProtocol",
"IStreamListener", "IStreamAttacher", "StreamListenerMixin",
"ICircuitContainer", "ICircuitListener", "CircuitListenerMixin",
"IRouterContainer", "IAddrListener", "IProgressProvider",
"__version__", "__author__", "__contact__",
"__license__", "__copyright__", "__url__",
]
| mit | Python |
b37bbf65e1211f1c398e43072c27a1a20c409d6b | Update models.py | IT-PM-OpenAdaptronik/Webapp,IT-PM-OpenAdaptronik/Webapp,IT-PM-OpenAdaptronik/Webapp | apps/projects/models.py | apps/projects/models.py | from django.db import models
from django.conf import settings
'''crates model Projects with
userId as ForeignKey from User
name as CharField
category as ForeignKey from Category
subcategory as ForeignKey from Subategory
producer as CharField
typ as CharField
note as TextField
'''
class Project(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,)
name = models.CharField(max_length=100, unique=True)
category = models.ForeignKey('Category', on_delete=models.CASCADE, related_name='projects_category_set')
subcategory = models.ForeignKey('Category', on_delete=models.CASCADE, related_name='projects_subcategory_set')
manufacturer = models.CharField(max_length=100)
typ = models.CharField(max_length=100)
description = models.TextField(max_length=500)
created = models.DateTimeField
updated = models.DateTimeField
'''creates model Category with
categoryID as PrimaryKey
name as CharField
parent as Foreignkey from itself'''
class Category(models.Model):
category = models.IntegerField(primary_key=True)
name = models.CharField(max_length=100, unique=True)
parent = models.ForeignKey('self', on_delete=models.CASCADE,)
class ProjectImage(models.Model):
project_image = models.IntegerField(primary_key=True)
project = models.ForeignKey('Project', on_delete=models.CASCADE,)
image = models.ImageField
| from django.db import models
from django.conf import settings
'''crates model Projects with
userId as ForeignKey from User
name as CharField
category as ForeignKey from Category
subcategory as ForeignKey from Subategory
producer as CharField
typ as CharField
note as TextField
'''
class Project(models.Model):
project = models.IntegerField(primary_key=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,)
name = models.CharField(max_length=100, unique=True)
category = models.ForeignKey('Category', on_delete=models.CASCADE, related_name='projects_category_set')
subcategory = models.ForeignKey('Category', on_delete=models.CASCADE, related_name='projects_subcategory_set')
manufacturer = models.CharField(max_length=100)
typ = models.CharField(max_length=100)
description = models.TextField(max_length=500)
created = models.DateTimeField
updated = models.DateTimeField
'''creates model Category with
categoryID as PrimaryKey
name as CharField
parent as Foreignkey from itself'''
class Category(models.Model):
categoryID = models.IntegerField(primary_key=True)
name = models.CharField(max_length=100, unique=True)
parent = models.ForeignKey('self', on_delete=models.CASCADE,)
class ProjectImage(models.Model):
project_imageID = models.IntegerField(primary_key=True)
projectID = models.ForeignKey('Project', on_delete=models.CASCADE,)
image = models.ImageField
| mit | Python |
0a69133e44810dd0469555f62ec49eba120e6ecc | Add utility function to convert a language code to a its full name | denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase | apps/storybase/utils.py | apps/storybase/utils.py | """Shared utility functions"""
from django.conf import settings
from django.template.defaultfilters import slugify as django_slugify
from django.utils.translation import ugettext as _
def get_language_name(language_code):
"""Convert a language code into its full (localized) name"""
languages = dict(settings.LANGUAGES)
return _(languages[language_code])
def slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
converts spaces to hyphens, and truncates to 50 characters.
"""
slug = django_slugify(value)
slug = slug[:50]
return slug.rstrip('-')
| """Shared utility functions"""
from django.template.defaultfilters import slugify as django_slugify
def slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
converts spaces to hyphens, and truncates to 50 characters.
"""
slug = django_slugify(value)
slug = slug[:50]
return slug.rstrip('-')
| mit | Python |
193327d17072324cffa6275ea4c2e2a69c2ce7c3 | Use Python 3 supported import | venmo/nose-detecthttp | detecthttp/__init__.py | detecthttp/__init__.py | from ._version import __version__ # noqa
from .plugin import DetectHTTP # noqa
| from ._version import __version__ # noqa
from plugin import DetectHTTP # noqa
| mit | Python |
0e3cad231e3010ad69b92ce9470cf09a5433d9d6 | Fix display of output in Bash kernel | Calysto/metakernel | metakernel_bash/metakernel_bash.py | metakernel_bash/metakernel_bash.py | from __future__ import print_function
from metakernel import MetaKernel
class MetaKernelBash(MetaKernel):
implementation = 'MetaKernel Bash'
implementation_version = '1.0'
language = 'bash'
language_version = '0.1'
banner = "MetaKernel Bash - interact with bash"
language_info = {
'mimetype': 'text/x-bash',
'name': 'bash',
# ------ If different from 'language':
# 'codemirror_mode': {
# "version": 2,
# "name": "ipython"
# }
# 'pygments_lexer': 'language',
# 'version' : "x.y.z",
'file_extension': '.sh',
'help_links': MetaKernel.help_links,
}
kernel_json = {
'argv': [
'python', '-m', 'metakernel_bash', '-f', '{connection_file}'],
'display_name': 'MetaKernel Bash',
'language': 'bash',
'name': 'metakernel_bash'
}
def get_usage(self):
return "This is the bash kernel."
def do_execute_direct(self, code):
if not code.strip():
return
self.log.debug('execute: %s' % code)
shell_magic = self.line_magics['shell']
resp = shell_magic.eval(code.strip())
self.log.debug('execute done')
return resp
def get_completions(self, info):
shell_magic = self.line_magics['shell']
return shell_magic.get_completions(info)
def get_kernel_help_on(self, info, level=0, none_on_fail=False):
code = info['code'].strip()
if not code or len(code.split()) > 1:
if none_on_fail:
return None
else:
return ""
shell_magic = self.line_magics['shell']
return shell_magic.get_help_on(info, level, none_on_fail)
def repr(self, data):
return data
if __name__ == '__main__':
MetaKernelBash.run_as_main()
| from __future__ import print_function
from metakernel import MetaKernel
class MetaKernelBash(MetaKernel):
implementation = 'MetaKernel Bash'
implementation_version = '1.0'
language = 'bash'
language_version = '0.1'
banner = "MetaKernel Bash - interact with bash"
language_info = {
'mimetype': 'text/x-bash',
'name': 'bash',
# ------ If different from 'language':
# 'codemirror_mode': {
# "version": 2,
# "name": "ipython"
# }
# 'pygments_lexer': 'language',
# 'version' : "x.y.z",
'file_extension': '.sh',
'help_links': MetaKernel.help_links,
}
kernel_json = {
'argv': [
'python', '-m', 'metakernel_bash', '-f', '{connection_file}'],
'display_name': 'MetaKernel Bash',
'language': 'bash',
'name': 'metakernel_bash'
}
def get_usage(self):
return "This is the bash kernel."
def do_execute_direct(self, code):
if not code.strip():
return
self.log.debug('execute: %s' % code)
shell_magic = self.line_magics['shell']
shell_magic.eval(code.strip())
self.log.debug('execute done')
def get_completions(self, info):
shell_magic = self.line_magics['shell']
return shell_magic.get_completions(info)
def get_kernel_help_on(self, info, level=0, none_on_fail=False):
code = info['code'].strip()
if not code or len(code.split()) > 1:
if none_on_fail:
return None
else:
return ""
shell_magic = self.line_magics['shell']
return shell_magic.get_help_on(info, level, none_on_fail)
def repr(self, data):
return data
if __name__ == '__main__':
MetaKernelBash.run_as_main()
| bsd-3-clause | Python |
052a14ad0af6ed0ac8eb44d2ff7a208d101aff7b | disable refreshing the cache | crate-archive/crate-site,crateio/crate.pypi,crate-archive/crate-site | crate_project/apps/pypi/models.py | crate_project/apps/pypi/models.py | from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from model_utils import Choices
from model_utils.models import TimeStampedModel
class PyPIMirrorPage(TimeStampedModel):
package = models.ForeignKey("packages.Package", unique=True)
content = models.TextField()
def __unicode__(self):
return self.package.name
def get_relative_url(self, current_url):
absolute_url_split = reverse("pypi_package_detail", kwargs={"slug": self.package.name}).split("/")
current_url_split = current_url.split("/")
relative_url_split = absolute_url_split[:]
for i, part in enumerate(absolute_url_split):
if len(current_url_split) > i and part == current_url_split[i]:
relative_url_split = relative_url_split[1:]
return "/".join(relative_url_split)
class PyPIServerSigPage(TimeStampedModel):
package = models.ForeignKey("packages.Package")
content = models.TextField()
def __unicode__(self):
return self.package.name
class PyPIIndexPage(TimeStampedModel):
content = models.TextField()
def __unicode__(self):
return "PyPI Index Page: %s" % self.created.isoformat()
class Log(TimeStampedModel):
TYPES = Choices(
("sync", "Synchronize Mirror"),
("package", "Synchronize Package"),
("version", "Synchronize Package Version"),
)
type = models.CharField(max_length=50, choices=TYPES)
index = models.CharField(max_length=255)
message = models.TextField(blank=True)
class Meta:
ordering = ["-created"]
def __unicode__(self):
return self.message
class ChangeLog(TimeStampedModel):
package = models.CharField(max_length=150)
version = models.CharField(max_length=150, null=True, blank=True)
timestamp = models.DateTimeField()
action = models.TextField(blank=True, null=True)
handled = models.BooleanField(default=False)
class Meta:
ordering = ["-timestamp"]
def __unicode__(self):
return u"%(package)s %(version)s %(timestamp)s %(action)s" % {
"package": self.package,
"version": self.version,
"timestamp": self.timestamp,
"action": self.action,
}
# @receiver(post_save, sender=PyPIMirrorPage)
# @receiver(post_delete, sender=PyPIMirrorPage)
def regenerate_simple_index(sender, **kwargs):
from pypi.tasks import refresh_pypi_package_index_cache
refresh_pypi_package_index_cache.delay()
| from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from model_utils import Choices
from model_utils.models import TimeStampedModel
class PyPIMirrorPage(TimeStampedModel):
package = models.ForeignKey("packages.Package", unique=True)
content = models.TextField()
def __unicode__(self):
return self.package.name
def get_relative_url(self, current_url):
absolute_url_split = reverse("pypi_package_detail", kwargs={"slug": self.package.name}).split("/")
current_url_split = current_url.split("/")
relative_url_split = absolute_url_split[:]
for i, part in enumerate(absolute_url_split):
if len(current_url_split) > i and part == current_url_split[i]:
relative_url_split = relative_url_split[1:]
return "/".join(relative_url_split)
class PyPIServerSigPage(TimeStampedModel):
package = models.ForeignKey("packages.Package")
content = models.TextField()
def __unicode__(self):
return self.package.name
class PyPIIndexPage(TimeStampedModel):
content = models.TextField()
def __unicode__(self):
return "PyPI Index Page: %s" % self.created.isoformat()
class Log(TimeStampedModel):
TYPES = Choices(
("sync", "Synchronize Mirror"),
("package", "Synchronize Package"),
("version", "Synchronize Package Version"),
)
type = models.CharField(max_length=50, choices=TYPES)
index = models.CharField(max_length=255)
message = models.TextField(blank=True)
class Meta:
ordering = ["-created"]
def __unicode__(self):
return self.message
class ChangeLog(TimeStampedModel):
package = models.CharField(max_length=150)
version = models.CharField(max_length=150, null=True, blank=True)
timestamp = models.DateTimeField()
action = models.TextField(blank=True, null=True)
handled = models.BooleanField(default=False)
class Meta:
ordering = ["-timestamp"]
def __unicode__(self):
return u"%(package)s %(version)s %(timestamp)s %(action)s" % {
"package": self.package,
"version": self.version,
"timestamp": self.timestamp,
"action": self.action,
}
@receiver(post_save, sender=PyPIMirrorPage)
@receiver(post_delete, sender=PyPIMirrorPage)
def regenerate_simple_index(sender, **kwargs):
from pypi.tasks import refresh_pypi_package_index_cache
refresh_pypi_package_index_cache.delay()
| bsd-2-clause | Python |
f86bf4a7f3b194cd75ca8b58bd0960f18c23ff8d | Handle exceptions with generating thumbnails on demand (return "") | ZG-Tennis/django-cropduster,ZG-Tennis/django-cropduster,ZG-Tennis/django-cropduster | cropduster/templatetags/images.py | cropduster/templatetags/images.py | from coffin import template
from coffin.template.loader import get_template
register = template.Library()
from django.conf import settings
from cropduster.models import Size
from cropduster.models import AUTO_SIZE
from os.path import exists
CROPDUSTER_CROP_ONLOAD = getattr(settings, "CROPDUSTER_CROP_ONLOAD", True)
CROPDUSTER_KITTY_MODE = getattr(settings, "CROPDUSTER_KITTY_MODE", False)
# preload a map of image sizes so it doesn"t make a DB call for each templatetag use
IMAGE_SIZE_MAP = {}
for size in Size.objects.all():
IMAGE_SIZE_MAP[(size.size_set_id, size.slug)] = size
@register.object
def get_image(image, size_name=None, template_name="image.html", retina=False, **kwargs):
""" Templatetag to get the HTML for an image from a cropduster image object """
if image:
if CROPDUSTER_CROP_ONLOAD:
# If set, will check for thumbnail existence
# if not there, will create the thumb based on predefiend crop/size settings
thumb_path = image.thumbnail_path(size_name)
if not exists(thumb_path) and exists(image.image.path):
try:
size = image.size_set.size_set.get(slug=size_name)
except Size.DoesNotExist:
return ""
try:
image.create_thumbnail(size, force_crop=True)
except:
return ""
if retina:
image_url = image.retina_thumbnail_url(size_name)
else:
image_url = image.thumbnail_url(size_name)
if not image_url:
return ""
try:
image_size = IMAGE_SIZE_MAP[(image.size_set_id, size_name)]
except KeyError:
return ""
# Set all the args that get passed to the template
kwargs["image_url"] = image_url
if hasattr(image_size, "auto_size") and image_size.auto_size != AUTO_SIZE:
kwargs["width"] = image_size.width if hasattr(image_size, "width") else ""
kwargs["height"] = image_size.height if hasattr(image_size, "height") else ""
if CROPDUSTER_KITTY_MODE:
kwargs["image_url"] = "http://placekitten.com/%s/%s" % (kwargs["width"], kwargs["height"])
kwargs["size_name"] = size_name
kwargs["attribution"] = image.attribution
if hasattr(image, "caption"): kwargs["alt"] = image.caption
if "title" not in kwargs: kwargs["title"] = kwargs["alt"]
tmpl = get_template("templatetags/" + template_name)
context = template.Context(kwargs)
return tmpl.render(context)
else:
return ""
| from coffin import template
from coffin.template.loader import get_template
register = template.Library()
from django.conf import settings
from cropduster.models import Size
from cropduster.models import AUTO_SIZE
from os.path import exists
CROPDUSTER_CROP_ONLOAD = getattr(settings, "CROPDUSTER_CROP_ONLOAD", True)
CROPDUSTER_KITTY_MODE = getattr(settings, "CROPDUSTER_KITTY_MODE", False)
# preload a map of image sizes so it doesn"t make a DB call for each templatetag use
IMAGE_SIZE_MAP = {}
for size in Size.objects.all():
IMAGE_SIZE_MAP[(size.size_set_id, size.slug)] = size
@register.object
def get_image(image, size_name=None, template_name="image.html", retina=False, **kwargs):
""" Templatetag to get the HTML for an image from a cropduster image object """
if image:
if CROPDUSTER_CROP_ONLOAD:
# If set, will check for thumbnail existence
# if not there, will create the thumb based on predefiend crop/size settings
thumb_path = image.thumbnail_path(size_name)
if not exists(thumb_path) and exists(image.image.path):
try:
size = image.size_set.size_set.get(slug=size_name)
except Size.DoesNotExist:
return ""
image.create_thumbnail(size, force_crop=True)
if retina:
image_url = image.retina_thumbnail_url(size_name)
else:
image_url = image.thumbnail_url(size_name)
if not image_url:
return ""
try:
image_size = IMAGE_SIZE_MAP[(image.size_set_id, size_name)]
except KeyError:
return ""
# Set all the args that get passed to the template
kwargs["image_url"] = image_url
if hasattr(image_size, "auto_size") and image_size.auto_size != AUTO_SIZE:
kwargs["width"] = image_size.width if hasattr(image_size, "width") else ""
kwargs["height"] = image_size.height if hasattr(image_size, "height") else ""
if CROPDUSTER_KITTY_MODE:
kwargs["image_url"] = "http://placekitten.com/%s/%s" % (kwargs["width"], kwargs["height"])
kwargs["size_name"] = size_name
kwargs["attribution"] = image.attribution
if hasattr(image, "caption"): kwargs["alt"] = image.caption
if "title" not in kwargs: kwargs["title"] = kwargs["alt"]
tmpl = get_template("templatetags/" + template_name)
context = template.Context(kwargs)
return tmpl.render(context)
else:
return ""
| bsd-2-clause | Python |
1d3e0822243d2a48d756ae8371795342767e4338 | remove unnecessary constructor | pmbarrett314/curses-menu | cursesmenu/items/external_item.py | cursesmenu/items/external_item.py | import curses
import cursesmenu.curses_menu
from cursesmenu.curses_menu import MenuItem
class ExternalItem(MenuItem):
"""
A base class for items that need to do stuff on the console outside of curses mode.
Sets the terminal back to standard mode until the action is done.
Should probably be subclassed.
"""
def set_up(self):
"""
This class overrides this method
"""
self.menu.pause()
curses.def_prog_mode()
cursesmenu.curses_menu.clear_terminal()
self.menu.clear_screen()
def clean_up(self):
"""
This class overrides this method
"""
self.menu.clear_screen()
curses.reset_prog_mode()
curses.curs_set(1) # reset doesn't do this right
curses.curs_set(0)
self.menu.resume()
| import curses
import cursesmenu.curses_menu
from cursesmenu.curses_menu import MenuItem
class ExternalItem(MenuItem):
"""
A base class for items that need to do stuff on the console outside of curses mode.
Sets the terminal back to standard mode until the action is done.
Should probably be subclassed.
"""
def __init__(self, text, menu=None, should_exit=False):
# Here so Sphinx doesn't copy extraneous info from the superclass's docstring
super(ExternalItem, self).__init__(
text=text,
menu=menu,
should_exit=should_exit,
)
def set_up(self):
"""
This class overrides this method
"""
self.menu.pause()
curses.def_prog_mode()
cursesmenu.curses_menu.clear_terminal()
self.menu.clear_screen()
def clean_up(self):
"""
This class overrides this method
"""
self.menu.clear_screen()
curses.reset_prog_mode()
curses.curs_set(1) # reset doesn't do this right
curses.curs_set(0)
self.menu.resume()
| mit | Python |
62b1c49c67c72c36d4177c657df49a4700586c06 | Change the url patters from python_social_auth to social_django | tosp/djangoTemplate,tosp/djangoTemplate | djangoTemplate/urls.py | djangoTemplate/urls.py | """djangoTemplate URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url('', include('base.urls')),
url('', include('social_django.urls', namespace='social')),
url(r'^admin/', admin.site.urls),
url(r'^tosp_auth/', include('tosp_auth.urls'))
]
| """djangoTemplate URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url('', include('base.urls')),
url('', include('social.apps.django_app.urls', namespace='social')),
url(r'^admin/', admin.site.urls),
url(r'^tosp_auth/', include('tosp_auth.urls'))
]
| mit | Python |
e14599ce03c7c326479655ead88246047eee7c16 | bump version | benjaoming/django-nyt,benjaoming/django-nyt | django_nyt/__init__.py | django_nyt/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
_disable_notifications = False
VERSION = "0.9.8"
def notify(*args, **kwargs):
"""
DEPRECATED - please access django_nyt.utils.notify
"""
from django_nyt.utils import notify
return notify(*args, **kwargs)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
_disable_notifications = False
VERSION = "0.9.7.3"
def notify(*args, **kwargs):
"""
DEPRECATED - please access django_nyt.utils.notify
"""
from django_nyt.utils import notify
return notify(*args, **kwargs)
| apache-2.0 | Python |
62f4c6b7d24176284054b13c4e1e9b6d631c7b42 | Update basic test Now uses the new format by @BookOwl. | PySlither/Slither,PySlither/Slither | basicTest.py | basicTest.py | import slither, pygame, time
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
SoExcited = slither.Sprite()
SoExcited.addCostume("SoExcited.png", "avatar")
SoExcited.setCostumeByNumber(0)
SoExcited.goTo(300, 300)
SoExcited.setScaleTo(0.33)
slither.slitherStage.setColor(40, 222, 40)
screen = slither.setup() # Begin slither
def run_a_frame():
snakey.changeXBy(1)
SoExcited.changeDirectionBy(1)
slither.runMainLoop(run_a_frame)
| import slither, pygame, time
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
SoExcited = slither.Sprite()
SoExcited.addCostume("SoExcited.png", "avatar")
SoExcited.setCostumeByNumber(0)
SoExcited.goTo(300, 300)
SoExcited.setScaleTo(0.33)
slither.slitherStage.setColor(40, 222, 40)
screen = slither.setup() # Begin slither
continueLoop = True
while continueLoop:
slither.blit(screen) # Display
snakey.changeXBy(1)
SoExcited.changeDirectionBy(1)
# Handle quitting
for event in pygame.event.get():
if event.type == pygame.QUIT:
continueLoop = False
time.sleep(0.01)
| mit | Python |
6c8cc6d25d078bd0ce6000e650338017703a76cb | mark no sleep etag test as xfail if it fails | pdav/khal,sdx23/khal,geier/khal,hobarrera/khal,pimutils/khal | tests/vdir_test.py | tests/vdir_test.py | # Copyright (c) 2013-2016 Christian Geier et al.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import time
import pytest
from khal.khalendar import vdir
@pytest.mark.xfail
def test_etag(tmpdir):
fpath = os.path.join(str(tmpdir), 'foo')
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
old_etag = vdir.get_etag_from_file(fpath)
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
new_etag = vdir.get_etag_from_file(fpath)
try:
assert old_etag != new_etag
except AssertionError:
pytest.xfail(
"Do we need to sleep?"
)
def test_etag_sync(tmpdir):
fpath = os.path.join(str(tmpdir), 'foo')
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
os.sync()
old_etag = vdir.get_etag_from_file(fpath)
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
new_etag = vdir.get_etag_from_file(fpath)
assert old_etag != new_etag
def test_etag_sleep(tmpdir):
fpath = os.path.join(str(tmpdir), 'foo')
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
old_etag = vdir.get_etag_from_file(fpath)
time.sleep(0.1)
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
new_etag = vdir.get_etag_from_file(fpath)
assert old_etag != new_etag
| # Copyright (c) 2013-2016 Christian Geier et al.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import time
import pytest
from khal.khalendar import vdir
@pytest.mark.xfail
def test_etag(tmpdir):
fpath = os.path.join(str(tmpdir), 'foo')
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
old_etag = vdir.get_etag_from_file(fpath)
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
new_etag = vdir.get_etag_from_file(fpath)
assert old_etag != new_etag
def test_etag_sync(tmpdir):
fpath = os.path.join(str(tmpdir), 'foo')
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
os.sync()
old_etag = vdir.get_etag_from_file(fpath)
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
new_etag = vdir.get_etag_from_file(fpath)
assert old_etag != new_etag
def test_etag_sleep(tmpdir):
fpath = os.path.join(str(tmpdir), 'foo')
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
old_etag = vdir.get_etag_from_file(fpath)
time.sleep(0.1)
file_ = open(fpath, 'w')
file_.write('foo')
file_.close()
new_etag = vdir.get_etag_from_file(fpath)
assert old_etag != new_etag
| mit | Python |
9492ab707d92693559fa50922fa15f9bff096f02 | Bump to 0.2.3 | alexmojaki/snoop,alexmojaki/snoop | snoop/__init__.py | snoop/__init__.py | '''
Usage:
import snoop
@snoop
def your_function(x):
...
A log will be written to stderr showing the lines executed and variables
changed in the decorated function.
For more information, see https://github.com/alexmojaki/snoop
'''
from .configuration import install, Config
from .variables import Attrs, Exploding, Indices, Keys
import collections
import sys
__VersionInfo = collections.namedtuple('VersionInfo',
('major', 'minor', 'micro'))
__version__ = '0.2.3'
__version_info__ = __VersionInfo(*(map(int, __version__.split('.'))))
config = Config()
snoop = config.snoop
pp = config.pp
spy = config.spy
install = staticmethod(install)
sys.modules['snoop'] = snoop # make the module callable
# Add all the attributes to the 'module' so things can be imported normally
for key, value in list(globals().items()):
if key in 'collections sys __VersionInfo key value config':
# Avoid polluting the namespace
continue
setattr(snoop, key, value)
| '''
Usage:
import snoop
@snoop
def your_function(x):
...
A log will be written to stderr showing the lines executed and variables
changed in the decorated function.
For more information, see https://github.com/alexmojaki/snoop
'''
from .configuration import install, Config
from .variables import Attrs, Exploding, Indices, Keys
import collections
import sys
__VersionInfo = collections.namedtuple('VersionInfo',
('major', 'minor', 'micro'))
__version__ = '0.2.2'
__version_info__ = __VersionInfo(*(map(int, __version__.split('.'))))
config = Config()
snoop = config.snoop
pp = config.pp
spy = config.spy
install = staticmethod(install)
sys.modules['snoop'] = snoop # make the module callable
# Add all the attributes to the 'module' so things can be imported normally
for key, value in list(globals().items()):
if key in 'collections sys __VersionInfo key value config':
# Avoid polluting the namespace
continue
setattr(snoop, key, value)
| mit | Python |
99aaf537197b8f652780cbbb16fb2090e448ecc5 | increment minor version to 1.8.0 | tony/tmuxp | tmuxp/__about__.py | tmuxp/__about__.py | __title__ = 'tmuxp'
__package_name__ = 'tmuxp'
__version__ = '1.8.0'
__description__ = 'tmux session manager'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/tmuxp'
__docs__ = 'https://tmuxp.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/tmuxp/issues'
__pypi__ = 'https://pypi.org/project/tmuxp/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013- Tony Narlock'
| __title__ = 'tmuxp'
__package_name__ = 'tmuxp'
__version__ = '1.7.0a3'
__description__ = 'tmux session manager'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/tmuxp'
__docs__ = 'https://tmuxp.git-pull.com'
__tracker__ = 'https://github.com/tmux-python/tmuxp/issues'
__pypi__ = 'https://pypi.org/project/tmuxp/'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013- Tony Narlock'
| bsd-3-clause | Python |
da5555301e1d837a8ca39a48913f825e772bd2de | Update to Milestone v0.4 complete. Addresses #6 & #7 | abhinavbom/Threat-Intelligence-Hunter | bin/md5vt.py | bin/md5vt.py | #stdlib imports
import json
import argparse
import sys
import requests
#local imports
from bin.parse import *
#Add your public API key before starting.
api = '100e582a15884a9c5cc37e298766065695e551fb1fc88ee05eadc85eacc3b61e'
base = 'https://www.virustotal.com/vtapi/v2/'
if api == '':
print "No API key provided. Please add your VirusTotal public API key to /bin/md5vt.py"
sys.exit(1)
def vt_md5(md5):
c=0
url = base + "file/report"
#data = urllib.urlencode(param)
#result = urllib2.urlopen(url,data)
print "Connecting to Virustotal"
while c < len(md5):
param = {'resource':md5[c],'apikey':api}
r = requests.get(url,
headers=create_basic_headers(),
proxies={'http': HTTP_PROXY, 'https': HTTPS_PROXY},
params=param)
data = r.json()
#print data
if data['response_code'] == 0:
print "\n\tResults for MD5: ", data['resource'], " -- Not Found in VT"
print data['verbose_msg']
#print r.json()
else:
print "\n\tResults for MD5: ",data['resource'],"\n\n\tDetected by: ",data['positives'],'/',data['total'],'\n'
c+=1
def vt_ip(ip):
c=0
url = base + "ip-address/report"
#data = urllib.urlencode(param)
#result = urllib2.urlopen(url,data)
print "Connecting to Virustotal"
while c < len(ip):
print "looking for IP", ip[c]
param = {'ip':ip[c],'apikey':api}
r = requests.get(url,
headers=create_basic_headers(),
proxies={'http': HTTP_PROXY, 'https': HTTPS_PROXY},
params=param)
data = r.json()
#print data
if data['response_code'] == 0:
print ip + "---Not found in VT"
elif data['response_code'] == -1:
print "Invalid IP address"
#print data
elif data['response_code'] == 1:
#print data
if 'detected_communicating_samples' in data :
for each in data['detected_communicating_samples']:
print "\n\tDetected: ",each['positives'],'/',each['total']
else:
print "\nIP is not found in VT, but here is some info\n"
print "Owner: ",data['as_owner']
print "Country: ", data['country']
c+=1
| #stdlib imports
import json
import argparse
import sys
import requests
#local imports
from bin.parse import *
#Add your public API key before starting.
api = ''
base = 'https://www.virustotal.com/vtapi/v2/'
if api == '':
print "No API key provided. Please add your VirusTotal public API key to /bin/md5vt.py"
sys.exit(1)
def vt_md5(md5):
param = {'resource':md5,'apikey':api}
url = base + "file/report"
#data = urllib.urlencode(param)
#result = urllib2.urlopen(url,data)
print "Connecting to Virustotal"
r = requests.get(url,
headers=create_basic_headers(),
proxies={'http': HTTP_PROXY, 'https': HTTPS_PROXY},
params=param)
data = r.json()
if data['response_code'] == 0:
print md5 + " -- Not Found in VT"
return 0
#print r.json()
print "\n\tResults for MD5: ",md5,"\n\n\tDetected by: ",data['positives'],'/',data['total'],'\n'
def vt_ip(ip):
param = {'ip':ip,'apikey':api}
url = base + "ip-address/report"
#data = urllib.urlencode(param)
#result = urllib2.urlopen(url,data)
print "Connecting to Virustotal"
r = requests.get(url,
headers=create_basic_headers(),
proxies={'http': HTTP_PROXY, 'https': HTTPS_PROXY},
params=param)
data = r.json()
if data['response_code'] == 0:
print ip + "---Not found in VT"
return 0
elif data['response_code'] == -1:
print "Invalid IP address"
return 0
#print data
elif data['response_code'] == 1:
#print data
for each in data['detected_communicating_samples']:
print "\n\tDetected: ",each['positives'],'/',each['total']
| mit | Python |
adfe03b431f13d6982fdb7a85e57f52d765e54f2 | handle all errors coming from an attempt to connect | ceph/ceph-deploy,ghxandsky/ceph-deploy,jumpstarter-io/ceph-deploy,branto1/ceph-deploy,Vicente-Cheng/ceph-deploy,Vicente-Cheng/ceph-deploy,ddiss/ceph-deploy,trhoden/ceph-deploy,SUSE/ceph-deploy,shenhequnying/ceph-deploy,alfredodeza/ceph-deploy,ktdreyer/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,codenrhoden/ceph-deploy,ceph/ceph-deploy,zhouyuan/ceph-deploy,jumpstarter-io/ceph-deploy,isyippee/ceph-deploy,ktdreyer/ceph-deploy,rtulke/ceph-deploy,ghxandsky/ceph-deploy,imzhulei/ceph-deploy,ddiss/ceph-deploy,rtulke/ceph-deploy,shenhequnying/ceph-deploy,SUSE/ceph-deploy,branto1/ceph-deploy,alfredodeza/ceph-deploy,zhouyuan/ceph-deploy,isyippee/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,osynge/ceph-deploy,codenrhoden/ceph-deploy,imzhulei/ceph-deploy | ceph_deploy/connection.py | ceph_deploy/connection.py | from ceph_deploy.lib.remoto import Connection
from sudo_pushy import needs_sudo # TODO move this to utils once pushy is out
def get_connection(hostname, logger):
"""
A very simple helper, meant to return a connection
that will know about the need to use sudo.
"""
try:
return Connection(
hostname,
logger=logger,
sudo=needs_sudo(),
)
except Exception as error:
msg = "connecting to host: %s " % hostname
errors = "resulted in errors: %s %s" % (error.__class__.__name__, error)
raise RuntimeError(msg + errors)
| from ceph_deploy.lib.remoto import Connection
from sudo_pushy import needs_sudo # TODO move this to utils once pushy is out
def get_connection(hostname, logger):
"""
A very simple helper, meant to return a connection
that will know about the need to use sudo.
"""
return Connection(
hostname,
logger=logger,
sudo=needs_sudo(),
)
| mit | Python |
6bdbfe74813744476681074f5578737795d9b728 | remove bootstrap-rgw key via forgetkeys | codenrhoden/ceph-deploy,osynge/ceph-deploy,Vicente-Cheng/ceph-deploy,SUSE/ceph-deploy,SUSE/ceph-deploy,zhouyuan/ceph-deploy,isyippee/ceph-deploy,isyippee/ceph-deploy,trhoden/ceph-deploy,Vicente-Cheng/ceph-deploy,zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,ceph/ceph-deploy,codenrhoden/ceph-deploy,ceph/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,shenhequnying/ceph-deploy | ceph_deploy/forgetkeys.py | ceph_deploy/forgetkeys.py | import logging
import errno
from .cliutil import priority
LOG = logging.getLogger(__name__)
def forgetkeys(args):
import os
for f in [
'mon',
'client.admin',
'bootstrap-osd',
'bootstrap-mds',
'bootstrap-rgw',
]:
try:
os.unlink('{cluster}.{what}.keyring'.format(
cluster=args.cluster,
what=f,
))
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
raise
@priority(100)
def make(parser):
"""
Remove authentication keys from the local directory.
"""
parser.set_defaults(
func=forgetkeys,
)
| import logging
import errno
from .cliutil import priority
LOG = logging.getLogger(__name__)
def forgetkeys(args):
import os
for f in [
'mon',
'client.admin',
'bootstrap-osd',
'bootstrap-mds',
]:
try:
os.unlink('{cluster}.{what}.keyring'.format(
cluster=args.cluster,
what=f,
))
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
raise
@priority(100)
def make(parser):
"""
Remove authentication keys from the local directory.
"""
parser.set_defaults(
func=forgetkeys,
)
| mit | Python |
de0e8c54827d8c79c47f28c93c6951b2921fe8e1 | create a working dev sub-command | ceph/ceph-installer,ceph/ceph-installer,ceph/mariner-installer,ceph/ceph-installer | ceph_installer/cli/dev.py | ceph_installer/cli/dev.py | from os import path
from textwrap import dedent
from tambo import Transport
from ceph_installer import process
from ceph_installer.cli import log
this_dir = path.abspath(path.dirname(__file__))
top_dir = path.dirname(path.dirname(this_dir))
playbook_path = path.join(top_dir, 'deploy/playbooks')
class Dev(object):
help = "Development options"
options = ['--user', '--branch']
_help = dedent("""
Deploying the ceph-installer HTTP service to a remote server with ansible.
This command wraps ansible and certain flags to make it easier to deploy
a development version.
Usage::
ceph-installer dev $HOST
Note: Requires a remote user with passwordless sudo. User defaults to
"vagrant".
Options:
--user Define a user to connect to the remote server. Defaults to 'vagrant'
--branch What branch to use for the deployment. Defaults to 'master'
-vvvv Enable high verbosity when running ansible
""")
def __init__(self, arguments):
self.arguments = arguments
def main(self):
parser = Transport(self.arguments, options=self.options, check_help=True)
parser.catch_help = self._help
parser.parse_args()
parser.catches_help()
branch = parser.get('branch', 'master')
user = parser.get('user', 'vagrant')
high_verbosity = '-vvvv' if parser.has('-vvvv') else '-v'
if not parser.unknown_commands:
log.error("it is required to pass a host to deploy to, but none was provided")
raise SystemExit(1)
command = [
"ansible-playbook",
"-i", "%s," % parser.unknown_commands[-1],
high_verbosity,
"-u", user,
"--extra-vars", '"branch=%s"' % branch,
"deploy.yml",
]
log.debug("Running command: %s" % ' '.join(command))
out, err, code = process.run(command, cwd=playbook_path)
log.error(err)
log.debug(out)
|
class Dev(object):
help = "Development options"
def __init__(self, arguments):
self.arguments = arguments
def main(self):
raise SystemExit(0)
| mit | Python |
ce5c26ea73ac9764dc45d6f0cef7b7958dc06a30 | FIx minor bugs and styles | chainer/chainer,chainer/chainer,niboshi/chainer,hvy/chainer,chainer/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer,chainer/chainer,hvy/chainer,wkentaro/chainer,niboshi/chainer,pfnet/chainer,wkentaro/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer | chainer/testing/matrix.py | chainer/testing/matrix.py | import numpy
from chainer.utils import argument
def generate_matrix(shape, dtype=float, **kwargs):
r"""Generates a random matrix with given singular values.
This function generates a random NumPy matrix (or a set of matrices) that
has specified singular values. It can be used to generate the inputs for a
test that can be instable when the input value behaves bad.
Notation: denote the shape of the generated array by :math:`(B..., M, N)`,
and :math:`K = min\{M, N\}`. :math:`B...` may be an empty sequence.
Args:
shape (tuple of int): Shape of the generated array, i.e.,
:math:`(B..., M, N)`.
dtype: Dtype of the generated array.
singular_values (array-like): Singular values of the generated
matrices. It must be broadcastable to shape :math:`(B..., K)`.
"""
singular_values, = argument.parse_kwargs(
kwargs, ('singular_values', None),
)
if len(shape) <= 1:
raise ValueError(
'shpae {} is invalid for matrices: too few axes'.format(shape)
)
k_shape = shape[:-2] + (min(shape[-2:]),)
# TODO(beam2d): consider supporting integer/boolean matrices
dtype = numpy.dtype(dtype)
if dtype.kind not in 'fc':
raise ValueError('dtype {} is not supported'.format(dtype))
if singular_values is None:
raise TypeError('singular_values is not given')
singular_values = numpy.asarray(singular_values)
if (singular_values < 0).any():
raise ValueError('negative singular value is given')
# Generate random matrices with given singular values. We simply generate
# orthogonal vectors using SVD on random matrices and then combine them
# with the given singular values.
a = numpy.random.randn(*shape)
if dtype.kind == 'c':
a = a + 1j * numpy.random.randn(*shape)
u, _, vh = numpy.linalg.svd(a, full_matrices=False)
a = numpy.einsum('...ik,...k,...kj->...ij', u, singular_values, vh)
return a.astype(dtype)
| import numpy
from chainer.utils import argument
def generate_matrix(shape, dtype=float, **kwargs):
"""Generates a random matrix with given singular values.
This function generates a random NumPy matrix (or a set of matrices) that
has specified singular values. It can be used to generate the inputs for a
test that can be instable when the input value behaves bad.
Notation: denote the shape of the generated array by :math:`(B..., M, N)`,
and :math:`K = min\{M, N\}`. :math:`B...` may be an empty sequence.
Args:
shape (tuple of int): Shape of the generated array, i.e.,
:math:`(B..., M, N)`.
dtype: Dtype of the generated array.
singular_values (array-like): Singular values of the generated
matrices. It must be broadcastable to shape :math:`(B..., K)`.
"""
singular_values, = argument.parse_kwargs(
kwargs, singular_values=None,
)
if len(shape) <= 1:
raise ValueError(
'shpae {} is invalid for matrices: too few axes'.format(shape))
k_shape = shape[:-2] + (min(shape[-2:]),)
# TODO(beam2d): consider supporting integer/boolean matrices
if dtype.kind not in 'fc':
raise ValueError('dtype {} is not supported'.format(dtype))
if singular_values is None:
raise TypeError('singular_values is not given')
singular_values = numpy.asarray(singular_values)
if any(singular_values < 0):
raise ValueError('negative singular value is given')
# Generate random matrices with given singular values. We simply generate
# orthogonal vectors using SVD on random matrices and then combine them
# with the given singular values.
a = numpy.random.randn(*shape)
if dtype.kind == 'c':
a += 1j * numpy.random.randn(*shape)
u, _, vh = numpy.linalg.svd(a, full_matrices=False)
a = numpy.einsum('...ik,...k,...kj->...ij', u, singular_values, vh)
return a.astype(dtype, copy=False)
| mit | Python |
ed0484931b104026ebd7ac9a411c89e50efa9bdb | Revert "fix" | chainer/chainercv,pfnet/chainercv,chainer/chainercv,yuyu2172/chainercv,yuyu2172/chainercv | chainercv/testing/attr.py | chainercv/testing/attr.py | from chainer.testing import attr
from attr import cudnn # NOQA
from attr import slow # NOQA
from attr import multi_gpu # NOQA
from attr import gpu # NOQA
try:
import pytest
disk = pytest.mark.disk
except ImportError:
disk = attr._dummy_callable
| from chainer.testing.attr import cudnn # NOQA
from chainer.testing.attr import slow # NOQA
from chainer.testing.attr import multi_gpu # NOQA
from chainer.testing.attr import gpu # NOQA
try:
import pytest
disk = pytest.mark.disk
except ImportError:
from chainer.testing.attr import _dummy_callable
disk = _dummy_callable
| mit | Python |
395fec516acdfe1a8ed6fc2ec8025e317fee311e | bump master for jenkins | istresearch/traptor,istresearch/traptor | traptor/version.py | traptor/version.py | __version__ = '1.4.13.0'
if __name__ == '__main__':
print(__version__)
| __version__ = '1.4.13.0'
if __name__ == '__main__':
print(__version__)
| mit | Python |
7bf848b17e892d9c99c2d4632d4ac1acda78d939 | make it work | whosonfirst/whosonfirst-www-iamhere,whosonfirst/whosonfirst-www-iamhere,whosonfirst/whosonfirst-www-iamhere | bin/start.py | bin/start.py | #!/usr/bin/env python
# -*-python-*-
import os
import sys
import logging
import subprocess
import signal
import time
# please rewrite me in go (so it can be cross-compiled)
if __name__ == '__main__':
import optparse
opt_parser = optparse.OptionParser()
opt_parser.add_option('-d', '--data', dest='data', action='store', default=None, help='The path to your Who\'s On First data')
opt_parser.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False, help='Be chatty (default is false)')
options, args = opt_parser.parse_args()
if options.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
whatami = sys.platform
whoami = os.path.abspath(sys.argv[0])
bin = os.path.dirname(whoami)
root = os.path.dirname(bin)
bin = os.path.join(root, "bin")
www = os.path.join(root, "www")
# please to check for linux and windows
if whatami == 'darwin':
bin = os.path.join(bin, "osx")
else:
logging.error("unsupported platform")
sys.exit()
pip_server = os.path.join(bin, "wof-pip-server")
file_server = os.path.join(bin, "wof-fileserver")
pip_cmd = [pip_server, "-cors", "-port", "8080", "-data", options.data]
pip_cmd.extend(args)
data_cmd = [file_server, "-cors", "-port", "9999", "-path", options.data]
www_cmd = [file_server, "-port", "8001", "-path", www]
logging.debug(" ".join(pip_cmd))
logging.debug(" ".join(data_cmd))
logging.debug(" ".join(www_cmd))
# sys.exit()
pip = subprocess.Popen(pip_cmd)
data = subprocess.Popen(data_cmd)
www = subprocess.Popen(www_cmd)
def signal_handler(signal, frame):
pip.terminate()
data.terminate()
www.terminate()
raise Exception, "all done"
signal.signal(signal.SIGINT, signal_handler)
try:
while True:
time.sleep(.5)
except Exception, e:
pass
logging.info("all done")
sys.exit()
| #!/usr/bin/env python
# -*-python-*-
import os
import sys
import logging
import subprocess
import signal
import time
# please rewrite me in go (so it can be cross-compiled)
if __name__ == '__main__':
import optparse
opt_parser = optparse.OptionParser()
opt_parser.add_option('-d', '--data', dest='data', action='store', default=None, help='The path to your Who\'s On First data')
opt_parser.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False, help='Be chatty (default is false)')
options, args = opt_parser.parse_args()
if options.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
whatami = sys.platform
whoami = os.path.abspath(sys.argv[0])
bin = os.path.dirname(whoami)
root = os.path.dirname(bin)
bin = os.path.join(root, "bin")
www = os.path.join(root, "www")
# please to check for linux and windows
if whatami == 'darwin':
bin = os.path.join(bin, "osx")
else:
logging.error("unsupported platform")
sys.exit()
pip_server = os.path.join(bin, "wof-pip-server")
file_server = os.path.join(bin, "wof-fileserver")
pip_cmd = [pip_server, "-cors", "-port", "8080", "-data", options.data]
pip_cmd.extend(args)
data_cmd = [file_server, "-cors", "-port", "9999", options.data]
www_cmd = [file_server, "-port", "8001", www]
logging.debug(" ".join(pip_cmd))
logging.debug(" ".join(data_cmd))
logging.debug(" ".join(www_cmd))
pip = subprocess.Popen(pip_cmd)
data = subprocess.Popen(data_cmd)
www = subprocess.Popen(www_cmd)
def signal_handler(signal, frame):
pip.terminate()
data.terminate()
www.terminate()
raise Exception, "all done"
signal.signal(signal.SIGINT, signal_handler)
try:
while True:
time.sleep(.5)
except Exception, e:
pass
logging.info("all done")
sys.exit()
| bsd-3-clause | Python |
db5072f372e7e5d68955641ff801461e4ec4038e | Change the redrawing strategy in the watch script | learning-on-chip/example,learning-on-chip/example | bin/watch.py | bin/watch.py | #!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import matplotlib.pyplot as pp
import numpy as np
import socket
def main(dimension_count, address):
print('Connecting to {}...'.format(address))
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(address)
client = client.makefile(mode="r")
plots = prepare(dimension_count)
x_limit = [0, 1]
y_limit = [-1, 1]
while True:
row = [float(number) for number in client.readline().split(',')]
sample_count = len(row) // 2
x = np.arange(0, sample_count)
y = np.reshape(np.array(row[0:sample_count]), [-1, dimension_count])
y_hat = np.reshape(np.array(row[sample_count:]), [-1, dimension_count])
x_limit[1] = sample_count - 1
y_limit[0] = min(y_limit[0], np.min(y), np.min(y_hat))
y_limit[1] = max(y_limit[1], np.max(y), np.max(y_hat))
for i in range(dimension_count):
plots[3*i + 0].set_xdata(x)
plots[3*i + 0].set_ydata(y[:, i])
plots[3*i + 1].set_xdata(x)
plots[3*i + 1].set_ydata(y_hat[:, i])
plots[3*i + 2].set_xdata(x_limit)
pp.subplot(dimension_count, 1, i + 1)
pp.xlim(x_limit)
pp.ylim(y_limit)
pp.pause(1e-3)
def prepare(dimension_count):
pp.figure(figsize=(14, 6), dpi=80, facecolor='w', edgecolor='k')
plots = []
for i in range(dimension_count):
pp.subplot(dimension_count, 1, i + 1)
plots.append(pp.plot([0, 1], [0, 0], 'b')[0])
plots.append(pp.plot([0, 1], [0, 0], 'g')[0])
plots.append(pp.plot([0, 1], [0, 0], 'r')[0])
pp.legend(['Observed', 'Predicted'])
pp.pause(1e-3)
return plots
if __name__ == '__main__':
assert(len(sys.argv) == 2)
chunks = sys.argv[1].split(':')
assert(len(chunks) == 2)
main(1, (chunks[0], int(chunks[1])))
| #!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import matplotlib.pyplot as pp
import numpy as np
import socket
def main(dimension_count, address):
print('Connecting to {}...'.format(address))
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(address)
client = client.makefile(mode="r")
pp.figure(figsize=(14, 6), dpi=80, facecolor='w', edgecolor='k')
pp.pause(1e-3)
y_limit = [-1, 1]
while True:
row = [float(number) for number in client.readline().split(',')]
half = len(row) // 2
y = np.reshape(np.array(row[0:half]), [-1, dimension_count])
y_hat = np.reshape(np.array(row[half:]), [-1, dimension_count])
y_limit[0] = min(y_limit[0], np.min(y), np.min(y_hat))
y_limit[1] = max(y_limit[1], np.max(y), np.max(y_hat))
pp.clf()
for i in range(dimension_count):
pp.subplot(dimension_count, 1, i + 1)
pp.plot(y[:, i])
pp.plot(y_hat[:, i])
pp.xlim([0, y.shape[0] - 1])
pp.ylim(y_limit)
pp.plot([0, y.shape[0] - 1], [0, 0], 'r')
pp.legend(['Observed', 'Predicted'])
pp.pause(1e-3)
if __name__ == '__main__':
assert(len(sys.argv) == 2)
chunks = sys.argv[1].split(':')
assert(len(chunks) == 2)
main(1, (chunks[0], int(chunks[1])))
| mit | Python |
9850ec3200ac80fd85f400fbbe115483ce0c3173 | Create symlink instead of copy | danielcorreia/dotfiles,danielcorreia/dotfiles | bootstrap.py | bootstrap.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import shutil
import subprocess
commands = {
'ssh_key': "ssh-keygen -t rsa -b 4096 -C '{email}'"
}
DOTFILES = [
'.aliases',
'.bash_profile',
'.bash_prompt',
'.exports',
'.functions',
'.gitconfig',
'.gitignore',
'.hushlogin',
'.inputrc',
'.pythonrc',
'.tmux.conf',
'.vimrc',
]
def question(sentence, options=None, defaults=None):
if options:
options_str = " [{}]".format('/'.join(options))
complete_question = '{question}{options}'.format(sentence, options_str)
answer = raw_input(complete_question)
def bootstrap_dotfiles(home_dir):
for dotfile in DOTFILES:
dest = os.path.join(home_dir, dotfile)
os.symlink(dotfile, dest)
print("Created symbolic links for dotfiles in `{}`".format(home_dir))
def setup_vundle():
# FIXME: this creates a folder in this directory called "~"
subprocess.call(
"git clone https://github.com/VundleVim/Vundle.vim.git "
"~/.vim/bundle/Vundle.vim".split(' ')
)
def install_virtualenvwrapper():
subprocess.call("pip install virtualenvwrapper".split(' '))
def main():
home_dir = os.path.expanduser('~')
bootstrap_dotfiles(home_dir)
install_virtualenvwrapper()
# setup_vundle()
subprocess.call("pip install --upgrade pip setuptools".split(' '))
subprocess.call("pip3 install --upgrade pip setuptools".split(' '))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import shutil
import subprocess
commands = {
'ssh_key': "ssh-keygen -t rsa -b 4096 -C '{email}'"
}
DOTFILES = [
'.aliases',
'.bash_profile',
'.bash_prompt',
'.exports',
'.functions',
'.gitconfig',
'.gitignore',
'.hushlogin',
'.inputrc',
'.pythonrc',
'.tmux.conf',
'.vimrc',
]
def question(sentence, options=None, defaults=None):
if options:
options_str = " [{}]".format('/'.join(options))
complete_question = '{question}{options}'.format(sentence, options_str)
answer = raw_input(complete_question)
def bootstrap_dotfiles(home_dir):
total = len(DOTFILES)
for index, dotfile in enumerate(DOTFILES):
dest = os.path.join(home_dir, dotfile)
shutil.copy(dotfile, dest)
print("Copied dotfiles to {}".format(home_dir))
def setup_vundle():
# FIXME: this creates a folder in this directory called "~"
subprocess.call(
"git clone https://github.com/VundleVim/Vundle.vim.git "
"~/.vim/bundle/Vundle.vim".split(' ')
)
def install_virtualenvwrapper():
subprocess.call("pip install virtualenvwrapper".split(' '))
def main():
home_dir = os.path.expanduser('~')
bootstrap_dotfiles(home_dir)
install_virtualenvwrapper()
# setup_vundle()
subprocess.call("pip install --upgrade pip setuptools".split(' '))
subprocess.call("pip3 install --upgrade pip setuptools".split(' '))
if __name__ == '__main__':
main()
| mit | Python |
8aab608efa6f7a53de061f57da64bebc54adf143 | Update to check_next_launch() | ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server | bot/tasks.py | bot/tasks.py | from bot.app.DailyDigest import DailyDigestServer
from celery.schedules import crontab
from celery.task import task, periodic_task
from celery.utils.log import get_task_logger
from bot.app.Notifications import NotificationServer
logger = get_task_logger('bot')
TAG = 'Digest Server'
@periodic_task(
run_every=(crontab(minute=0, hour=10,
day_of_week='mon-sun')),
name="run_daily",
ignore_result=True
)
def run_daily():
logger.info('Task - Running Digest - Daily...')
daily_digest = DailyDigestServer()
daily_digest.run(daily=True)
@periodic_task(
run_every=(crontab(minute=0, hour=8,
day_of_week='mon')),
name="run_weekly",
ignore_result=True
)
def run_weekly():
logger.info('Task - Running Digest - Weekly...')
daily_digest = DailyDigestServer()
daily_digest.run(weekly=True)
@periodic_task(run_every=(crontab(minute='*/5')))
def check_next_launch():
logger.info('Task - Running Notifications...')
notification = NotificationServer()
notification.check_next_launch()
| from bot.app.DailyDigest import DailyDigestServer
from celery.schedules import crontab
from celery.task import task, periodic_task
from celery.utils.log import get_task_logger
from bot.app.Notifications import NotificationServer
logger = get_task_logger('bot')
TAG = 'Digest Server'
@periodic_task(
run_every=(crontab(minute=0, hour=10,
day_of_week='mon-sun')),
name="run_daily",
ignore_result=True
)
def run_daily():
logger.info('Task - Running Digest - Daily...')
daily_digest = DailyDigestServer()
daily_digest.run(daily=True)
@periodic_task(
run_every=(crontab(minute=0, hour=8,
day_of_week='mon')),
name="run_weekly",
ignore_result=True
)
def run_weekly():
logger.info('Task - Running Digest - Weekly...')
daily_digest = DailyDigestServer()
daily_digest.run(weekly=True)
@periodic_task(run_every=(crontab(minute='*/5')))
def check_next_launch():
logger.info('Task - Running Notifications...')
notification = NotificationServer()
notification.run()
| apache-2.0 | Python |
516ea71d120eca94a76245fbcb62811b7980b63c | Fix run. | philippebeaudoin/mmomie,philippebeaudoin/mmomie | build/run.py | build/run.py | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import sys
import game_project
import vinn
def _RelPathToUnixPath(p):
return p.replace(os.sep, '/')
def RunTests():
project = game_project.GameProject()
cmd = """
loadHTML('/game.html');
"""
res = vinn.RunJsString(
cmd, source_paths=list([project.source_path]),
js_args=[], stdout=sys.stdout, stdin=sys.stdin)
return res.returncode
def Main(argv):
parser = argparse.ArgumentParser(description='Run game JS.')
args = parser.parse_args(argv[1:])
sys.exit(RunTests())
| #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import sys
import game_project
import vinn
def _RelPathToUnixPath(p):
return p.replace(os.sep, '/')
def RunTests():
project = game_project.GameProject()
cmd = """
loadHTML('/game.html');
"""
res = vinn.RunJsString(
cmd, source_paths=list(project.source_paths),
js_args=[], stdout=sys.stdout, stdin=sys.stdin)
return res.returncode
def Main(argv):
parser = argparse.ArgumentParser(description='Run game JS.')
args = parser.parse_args(argv[1:])
sys.exit(RunTests())
| apache-2.0 | Python |
974fa61b0ea26c07d7b16ff8d53eb682d3f0c21f | Fix tests for IPython.testing | ipython/ipython,ipython/ipython | IPython/testing/plugin/show_refs.py | IPython/testing/plugin/show_refs.py | """Simple script to show reference holding behavior.
This is used by a companion test case.
"""
from __future__ import print_function
import gc
class C(object):
def __del__(self):
pass
#print 'deleting object...' # dbg
if __name__ == '__main__':
c = C()
c_refs = gc.get_referrers(c)
ref_ids = list(map(id,c_refs))
print('c referrers:',list(map(type,c_refs)))
| """Simple script to show reference holding behavior.
This is used by a companion test case.
"""
from __future__ import print_function
import gc
class C(object):
def __del__(self):
pass
#print 'deleting object...' # dbg
if __name__ == '__main__':
c = C()
c_refs = gc.get_referrers(c)
ref_ids = map(id,c_refs)
print('c referrers:',map(type,c_refs))
| bsd-3-clause | Python |
a3d2cc9f86342859d7ad6d68689e03f8e1b1266e | Fix docstring style | fnielsen/dasem,fnielsen/dasem | dasem/__main__.py | dasem/__main__.py | """dasem.
Usage:
dasem
"""
def main():
"""Handle command-line interface."""
raise NotImplementedError
if __name__ == '__main__':
main()
| """
Usage:
dasem <query>
Options:
--max-n-pages=<int> Maximum number of pages
-v --verbose Verbose debug messaging
"""
def main():
raise NotImplementedError
if __name__ == '__main__':
main()
| apache-2.0 | Python |
19a7e6bab3bf497ef23d72f3d5bbecde5c7a8cc5 | Fix error message for php mess detector | hglattergotz/sfdeploy | bin/tools.py | bin/tools.py | # -*- coding: utf-8 -*-
# config.py
#
# Copyright (c) 2012-2013 Henning Glatter-Götz
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
# tools.py - Collection of configuration tasks for fabric
#
# To include it in the fabfile.py add this near the top
#
# import sys
# import tools
import os
from fabric.api import *
from fabric.colors import red, green
import pear
@task
def loc():
"""
Run phploc on the project
"""
if (pear.pear_detect('phploc')):
local('phploc --exclude app/cache,app/logs/vendor')
else:
print(red('The PEAR package phploc is not installed.', True) + '\nInstall it as follows (first command as root)\n pear config-set auto_discover 1\n pear install pear.phpunit.de/phploc')
@task
def messdetector():
"""
Run messdetector on the project
"""
if (pear.pear_detect('PHP_PMD')):
with settings(warn_only=True):
result = local('phpmd . html codesize,unusedcode,design --reportfile ../messdetector.html --exclude app/cache,app/logs,vendor', capture=True)
if result.return_code == 0 or result.return_code == 2:
local('open ../messdetector.html');
else:
abort(result)
else:
print(red('The PEAR package PHP_PMD is not installed.', True) + '\nInstall it as follows (first command as root)\n pear config-set auto_discover 1\n pear install pear.phpunit.de/PHP_PMD')
@task
def ct():
"""
Build the ctags file for this project (for vim use)
"""
local("/usr/local/bin/ctags -R --exclude=.svn --tag-relative=yes --PHP-kinds=+cf-v --regex-PHP='/abstract\s+class\s+([^ ]+)/\1/c/' --regex-PHP='/interface\s+([^ ]+)/\1/c/' --regex-PHP='/(public\s+|static\s+|abstract\s+|protected\s+|private\s+)function\s+\&?\s*([^ (]+)/\2/f/'")
| # -*- coding: utf-8 -*-
# config.py
#
# Copyright (c) 2012-2013 Henning Glatter-Götz
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
# tools.py - Collection of configuration tasks for fabric
#
# To include it in the fabfile.py add this near the top
#
# import sys
# import tools
import os
from fabric.api import *
from fabric.colors import red, green
import pear
@task
def loc():
"""
Run phploc on the project
"""
if (pear.pear_detect('phploc')):
local('phploc --exclude app/cache,app/logs/vendor')
else:
print(red('The PEAR package phploc is not installed.', True) + '\nInstall it as follows (first command as root)\n pear config-set auto_discover 1\n pear install pear.phpunit.de/phploc')
@task
def messdetector():
"""
Run messdetector on the project
"""
if (pear.pear_detect('PHP_PMD')):
with settings(warn_only=True):
result = local('phpmd . html codesize,unusedcode,design --reportfile ../messdetector.html --exclude app/cache,app/logs,vendor', capture=True)
if result.return_code == 0 or result.return_code == 2:
local('open ../messdetector.html');
else:
abort(result)
else:
print(red('The PEAR package phploc is not installed.', True) + '\nInstall it as follows (first command as root)\n pear config-set auto_discover 1\n pear install pear.phpunit.de/phploc')
@task
def ct():
"""
Build the ctags file for this project (for vim use)
"""
local("/usr/local/bin/ctags -R --exclude=.svn --tag-relative=yes --PHP-kinds=+cf-v --regex-PHP='/abstract\s+class\s+([^ ]+)/\1/c/' --regex-PHP='/interface\s+([^ ]+)/\1/c/' --regex-PHP='/(public\s+|static\s+|abstract\s+|protected\s+|private\s+)function\s+\&?\s*([^ (]+)/\2/f/'")
| mit | Python |
385f190d782c7d2edbba8b425db2418e6891ea86 | Consolidate hasattr + getter to single statement with default value. | kch8qx/osf.io,aaxelb/osf.io,chrisseto/osf.io,baylee-d/osf.io,billyhunt/osf.io,acshi/osf.io,SSJohns/osf.io,kwierman/osf.io,emetsger/osf.io,icereval/osf.io,DanielSBrown/osf.io,mattclark/osf.io,njantrania/osf.io,jmcarp/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,Nesiehr/osf.io,bdyetton/prettychart,doublebits/osf.io,lyndsysimon/osf.io,icereval/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,jolene-esposito/osf.io,GageGaskins/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,HarryRybacki/osf.io,samanehsan/osf.io,felliott/osf.io,adlius/osf.io,brianjgeiger/osf.io,mluo613/osf.io,mluke93/osf.io,cwisecarver/osf.io,acshi/osf.io,acshi/osf.io,ckc6cz/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,hmoco/osf.io,TomBaxter/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,mluo613/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,mfraezz/osf.io,Ghalko/osf.io,binoculars/osf.io,jnayak1/osf.io,hmoco/osf.io,amyshi188/osf.io,baylee-d/osf.io,mattclark/osf.io,acshi/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,sbt9uc/osf.io,HarryRybacki/osf.io,laurenrevere/osf.io,RomanZWang/osf.io,crcresearch/osf.io,dplorimer/osf,Ghalko/osf.io,Ghalko/osf.io,Ghalko/osf.io,sloria/osf.io,lyndsysimon/osf.io,samanehsan/osf.io,wearpants/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,brianjgeiger/osf.io,bdyetton/prettychart,jmcarp/osf.io,DanielSBrown/osf.io,lyndsysimon/osf.io,mluo613/osf.io,caseyrygt/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,haoyuchen1992/osf.io,TomHeatwole/osf.io,cosenal/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,kwierman/osf.io,reinaH/osf.io,cldershem/osf.io,wearpants/osf.io,HarryRybacki/osf.io,jolene-esposito/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,cldershem/osf.io,adlius/osf.io,RomanZWang/osf.io,billyhunt/osf.io,ticklemepierce/osf.io,njantrania/osf.io,hmoco/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,jnayak1/osf.io,emetsger/osf.io,saradbowman/osf.
io,petermalcolm/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,wearpants/osf.io,GageGaskins/osf.io,arpitar/osf.io,chennan47/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,ticklemepierce/osf.io,billyhunt/osf.io,mluo613/osf.io,baylee-d/osf.io,billyhunt/osf.io,laurenrevere/osf.io,sbt9uc/osf.io,samanehsan/osf.io,danielneis/osf.io,chennan47/osf.io,caseyrygt/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,abought/osf.io,TomHeatwole/osf.io,felliott/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,caseyrygt/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,KAsante95/osf.io,mattclark/osf.io,cosenal/osf.io,Nesiehr/osf.io,ckc6cz/osf.io,doublebits/osf.io,monikagrabowska/osf.io,erinspace/osf.io,alexschiller/osf.io,aaxelb/osf.io,ckc6cz/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,binoculars/osf.io,erinspace/osf.io,mluke93/osf.io,mluke93/osf.io,aaxelb/osf.io,dplorimer/osf,lyndsysimon/osf.io,rdhyee/osf.io,bdyetton/prettychart,zachjanicki/osf.io,caneruguz/osf.io,MerlinZhang/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,SSJohns/osf.io,pattisdr/osf.io,caseyrollins/osf.io,sloria/osf.io,cwisecarver/osf.io,rdhyee/osf.io,MerlinZhang/osf.io,monikagrabowska/osf.io,abought/osf.io,dplorimer/osf,adlius/osf.io,erinspace/osf.io,asanfilippo7/osf.io,arpitar/osf.io,dplorimer/osf,SSJohns/osf.io,jnayak1/osf.io,felliott/osf.io,bdyetton/prettychart,adlius/osf.io,ticklemepierce/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,sbt9uc/osf.io,cwisecarver/osf.io,KAsante95/osf.io,jnayak1/osf.io,pattisdr/osf.io,samchrisinger/osf.io,felliott/osf.io,emetsger/osf.io,aaxelb/osf.io,caneruguz/osf.io,samchrisinger/osf.io,amyshi188/osf.io,cosenal/osf.io,binoculars/osf.io,saradbowman/osf.io,caseyrygt/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,KAsante95/osf.io,danielneis/osf.io,kwierman/osf.io,chrisseto/osf.io,zachjanicki/osf.io,abought/osf.io,petermalcolm/osf.io,cslzchen/osf.io,zamattiac/osf.io,nj
antrania/osf.io,kwierman/osf.io,TomBaxter/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,danielneis/osf.io,brandonPurvis/osf.io,TomBaxter/osf.io,brandonPurvis/osf.io,doublebits/osf.io,crcresearch/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,RomanZWang/osf.io,mluo613/osf.io,kch8qx/osf.io,arpitar/osf.io,cslzchen/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,MerlinZhang/osf.io,petermalcolm/osf.io,reinaH/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,zachjanicki/osf.io,jmcarp/osf.io,KAsante95/osf.io,cldershem/osf.io,reinaH/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,cslzchen/osf.io,ckc6cz/osf.io,abought/osf.io,CenterForOpenScience/osf.io,reinaH/osf.io,arpitar/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,wearpants/osf.io,chrisseto/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,KAsante95/osf.io,crcresearch/osf.io,cldershem/osf.io,HarryRybacki/osf.io,pattisdr/osf.io,mluke93/osf.io,amyshi188/osf.io,rdhyee/osf.io,doublebits/osf.io,sloria/osf.io,njantrania/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,acshi/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,samanehsan/osf.io,danielneis/osf.io | framework/mongo/__init__.py | framework/mongo/__init__.py | # -*- coding: utf-8 -*-
from flask import request
from modularodm.storedobject import StoredObject as GenericStoredObject
from modularodm.ext.concurrency import with_proxies, proxied_members
from bson import ObjectId
from .handlers import client, database, set_up_storage
from api.base.api_globals import api_globals
class DummyRequest(object):
pass
dummy_request = DummyRequest()
def get_cache_key():
"""
Fetch a request key from either a Django or Flask request. Fall back on a process-global dummy object
if we are not in either type of request
"""
# TODO: This is ugly use of exceptions; is there a better way to track whether in a given type of request?
try:
return request._get_current_object()
except RuntimeError: # Not in a flask request context
if getattr(api_globals, 'request', None) is not None:
return api_globals.request
else: # Not in a Django request
return dummy_request
@with_proxies(proxied_members, get_cache_key)
class StoredObject(GenericStoredObject):
pass
__all__ = [
'StoredObject',
'ObjectId',
'client',
'database',
'set_up_storage',
]
| # -*- coding: utf-8 -*-
from flask import request
from modularodm.storedobject import StoredObject as GenericStoredObject
from modularodm.ext.concurrency import with_proxies, proxied_members
from bson import ObjectId
from .handlers import client, database, set_up_storage
from api.base.api_globals import api_globals
class DummyRequest(object):
pass
dummy_request = DummyRequest()
def get_cache_key():
"""
Fetch a request key from either a Django or Flask request. Fall back on a process-global dummy object
if we are not in either type of request
"""
# TODO: This is ugly use of exceptions; is there a better way to track whether in a given type of request?
try:
return request._get_current_object()
except RuntimeError: # Not in a flask request context
if hasattr(api_globals, 'request') and api_globals.request is not None:
return api_globals.request
else: # Not in a Django request
return dummy_request
@with_proxies(proxied_members, get_cache_key)
class StoredObject(GenericStoredObject):
pass
__all__ = [
'StoredObject',
'ObjectId',
'client',
'database',
'set_up_storage',
]
| apache-2.0 | Python |
2fb48c09362f30935396bde06fcdeb16f504a67f | add scene ownership tests | gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl | tests/api.py | tests/api.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 GoPro Inc.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import pynodegl as ngl
def test_backend():
viewer = ngl.Viewer()
assert viewer.configure(backend=0x1234) < 0
del viewer
def test_reconfigure():
viewer = ngl.Viewer()
assert viewer.configure(offscreen=1, width=16, height=16) == 0
scene = ngl.Render(ngl.Quad())
viewer.set_scene(scene)
viewer.draw(0)
assert viewer.configure(offscreen=1, width=16, height=16, samples=4) == 0
# FIXME: errors should be raised by the draw call so we can assert here
viewer.draw(1)
del viewer
def test_ctx_ownership():
viewer = ngl.Viewer()
viewer2 = ngl.Viewer()
assert viewer.configure(offscreen=1, width=16, height=16) == 0
assert viewer2.configure(offscreen=1, width=16, height=16) == 0
scene = ngl.Render(ngl.Quad())
viewer.set_scene(scene)
viewer.draw(0)
assert viewer2.set_scene(scene) != 0
viewer2.draw(0)
del viewer
del viewer2
def test_ctx_ownership_subgraph():
for shared in (True, False):
viewer = ngl.Viewer()
viewer2 = ngl.Viewer()
assert viewer.configure(offscreen=1, width=16, height=16) == 0
assert viewer2.configure(offscreen=1, width=16, height=16) == 0
quad = ngl.Quad()
render1 = ngl.Render(quad)
if not shared:
quad = ngl.Quad()
render2 = ngl.Render(quad)
scene = ngl.Group([render1, render2])
viewer.set_scene(render2)
viewer.draw(0)
assert viewer2.set_scene(scene) != 0
viewer2.draw(0)
del viewer
del viewer2
if __name__ == '__main__':
test_backend()
test_reconfigure()
test_ctx_ownership()
test_ctx_ownership_subgraph()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 GoPro Inc.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import pynodegl as ngl
def test_backend():
viewer = ngl.Viewer()
assert viewer.configure(backend=0x1234) < 0
del viewer
def test_reconfigure():
viewer = ngl.Viewer()
assert viewer.configure(offscreen=1, width=16, height=16) == 0
scene = ngl.Render(ngl.Quad())
viewer.set_scene(scene)
viewer.draw(0)
assert viewer.configure(offscreen=1, width=16, height=16, samples=4) == 0
# FIXME: errors should be raised by the draw call so we can assert here
viewer.draw(1)
del viewer
if __name__ == '__main__':
test_backend()
test_reconfigure()
| apache-2.0 | Python |
37771791dea67d5df88afdd2e5720731a5303ce8 | Fix test running for running from src dir. | aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments | tests/run.py | tests/run.py | # -*- coding: utf-8 -*-
"""
Pygments unit tests
~~~~~~~~~~~~~~~~~~
Usage::
python run.py [testfile ...]
:copyright: Copyright 2006-2009 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys, os
if sys.version_info >= (3,):
# copy test suite over to "build/lib" and convert it
print ('Copying and converting sources to build/lib/test...')
from distutils.util import copydir_run_2to3
testroot = os.path.dirname(__file__)
newroot = os.path.join(testroot, '..', 'build/lib/test')
copydir_run_2to3(testroot, newroot)
# make nose believe that we run from the converted dir
os.chdir(newroot)
else:
# only find tests in this directory
os.chdir(os.path.dirname(__file__))
try:
import nose
except ImportError:
print ('nose is required to run the Pygments test suite')
sys.exit(1)
try:
# make sure the current source is first on sys.path
sys.path.insert(0, '..')
import pygments
except ImportError:
print ('Cannot find Pygments to test: %s' % sys.exc_info()[1])
sys.exit(1)
else:
print ('Pygments %s test suite running (Python %s)...' %
(pygments.__version__, sys.version.split()[0]))
nose.main()
| # -*- coding: utf-8 -*-
"""
Pygments unit tests
~~~~~~~~~~~~~~~~~~
Usage::
python run.py [testfile ...]
:copyright: Copyright 2006-2009 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys, os
if sys.version_info >= (3,):
# copy test suite over to "build/lib" and convert it
print ('Copying and converting sources to build/lib/test...')
from distutils.util import copydir_run_2to3
testroot = os.path.dirname(__file__)
newroot = os.path.join(testroot, '..', 'build/lib/test')
copydir_run_2to3(testroot, newroot)
# make nose believe that we run from the converted dir
os.chdir(newroot)
__file__ = os.path.join('run.py')
try:
import nose
except ImportError:
print ('nose is required to run the Pygments test suite')
sys.exit(1)
try:
# make sure the current source is first on sys.path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import pygments
except ImportError:
print ('Cannot find Pygments to test: %s' % sys.exc_info()[1])
sys.exit(1)
else:
print ('Pygments %s test suite running (Python %s)...' %
(pygments.__version__, sys.version.split()[0]))
nose.main()
| bsd-2-clause | Python |
890190f40b9061f7bfa15bbd7e56abc0f1b7d44a | Make fitters available at main import level. | erykoff/redmapper,erykoff/redmapper | redmapper/__init__.py | redmapper/__init__.py | from __future__ import division, absolute_import, print_function
from ._version import __version__, __version_info__
version = __version__
from .configuration import Configuration
from .runcat import RunCatalog
from .solver_nfw import Solver
from .catalog import DataObject, Entry, Catalog
from .redsequence import RedSequenceColorPar
from .chisq_dist import compute_chisq
from .background import Background, ZredBackground
from .cluster import Cluster, ClusterCatalog
from .galaxy import Galaxy, GalaxyCatalog
from .mask import Mask, HPMask, get_mask
from .zlambda import Zlambda, ZlambdaCorrectionPar
from .cluster_runner import ClusterRunner
from .zred_color import ZredColor
from .centering import Centering, CenteringWcenZred, CenteringBCG
from .depthmap import DepthMap
from .color_background import ColorBackground, ColorBackgroundGenerator
from .fitters import MedZFitter, RedSequenceFitter, RedSequenceOffDiagonalFitter, CorrectionFitter, EcgmmFitter
| from __future__ import division, absolute_import, print_function
from ._version import __version__, __version_info__
version = __version__
from .configuration import Configuration
from .runcat import RunCatalog
from .solver_nfw import Solver
from .catalog import DataObject, Entry, Catalog
from .redsequence import RedSequenceColorPar
from .chisq_dist import compute_chisq
from .background import Background, ZredBackground
from .cluster import Cluster, ClusterCatalog
from .galaxy import Galaxy, GalaxyCatalog
from .mask import Mask, HPMask, get_mask
from .zlambda import Zlambda, ZlambdaCorrectionPar
from .cluster_runner import ClusterRunner
from .zred_color import ZredColor
from .centering import Centering, CenteringWcenZred, CenteringBCG
from .depthmap import DepthMap
from .color_background import ColorBackground, ColorBackgroundGenerator
| apache-2.0 | Python |
e148c2a01272b5aa2e131e6b67e21bbe6b8c37b4 | remove obsoleted python filter for `\cboxbegin` and `\cboxend` | cagix/pandoc-lecture | textohtml.py | textohtml.py | #!/usr/bin/env python
# Author: Carsten Gips <carsten.gips@fh-bielefeld.de>
# Copyright: (c) 2016 Carsten Gips
# License: MIT
"""
Pandoc filter to replace certain LaTeX macros with matching HTML tags.
In my beamer slides I use certain macros like `\blueArrow` which produces an
arrow in deep blue color. This filter translates this TeX macros into the
corresponding HTML markup.
Note, that the `html.css` must also be included in the template for proper
rendering.
"""
from pandocfilters import toJSONFilter, attributes, Span, Str, Space, RawInline, Image
import re
trans = [{'class': 'blueArrow', 're': re.compile('\\\\blueArrow'), 'cont': "=>", 'key': 'Str'}]
def textohtml(key, value, format, meta):
if key == 'RawInline':
fmt, s = value
if fmt == "tex":
for x in trans:
m = x['re'].match(s)
if m:
return [Span(attributes({'class': x['class']}),
[Str( x['cont'] if x['key']=='Str' else m.group(x['cont']) )]),
Space()]
if __name__ == "__main__":
toJSONFilter(textohtml)
| #!/usr/bin/env python
# Author: Carsten Gips <carsten.gips@fh-bielefeld.de>
# Copyright: (c) 2016 Carsten Gips
# License: MIT
"""
Pandoc filter to replace certain LaTeX macros with matching HTML tags.
In my beamer slides I use certain macros like `\blueArrow` which produces an
arrow in deep blue color. This filter translates this TeX macros into the
corresponding HTML markup.
Note, that the `html.css` must also be included in the template for proper
rendering.
"""
from pandocfilters import toJSONFilter, attributes, Span, Str, Space, RawInline, Image
import re
trans = [{'class': 'blueArrow', 're': re.compile('\\\\blueArrow'), 'cont': "=>", 'key': 'Str'}]
cboxStart = re.compile('\\\\cboxbegin')
cboxEnd = re.compile('\\\\cboxend')
def textohtml(key, value, format, meta):
if key == 'RawInline':
fmt, s = value
if fmt == "tex":
for x in trans:
m = x['re'].match(s)
if m:
return [Span(attributes({'class': x['class']}),
[Str( x['cont'] if x['key']=='Str' else m.group(x['cont']) )]),
Space()]
if cboxStart.match(s):
return RawInline("html", "<span class='cbox'>")
if cboxEnd.match(s):
return RawInline("html", "</span>")
if __name__ == "__main__":
toJSONFilter(textohtml)
| mit | Python |
cc71c674e044f31ff154b8cc7a8f55975e4ff2ad | Fix conf.py | arpras/Zopkio,pubnub/Zopkio,jdehrlich/zopkio,pubnub/Zopkio,pubnub/Zopkio,linkedin/Zopkio,jdehrlich/zopkio,arpras/Zopkio,arpras/Zopkio,jdehrlich/zopkio,linkedin/Zopkio,linkedin/Zopkio,arpras/Zopkio,jdehrlich/zopkio,pubnub/Zopkio,linkedin/Zopkio,pubnub/Zopkio | docs/conf.py | docs/conf.py | #!/usr/bin/env
# Copyright 2014 LinkedIn Corp.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# -*- coding: utf-8 -*-
import sys, os, datetime
# NOTE(review): only `datetime` is used below; `sys` and `os` appear unused
# in this configuration file.

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
# The year is computed at build time so the copyright notice stays current.
copyright = u'{year}, LinkedIn'.format(year=datetime.datetime.today().year)

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------------
html_theme = 'nature'
html_static_path = ['_static']

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| #!/usr/bin/env
# Copyright 2014 LinkedIn Corp.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# -*- coding: utf-8 -*-
import sys, os, datetime
sys.path.append("/Users/avenkatr/Zopkio")
import zopkio
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'{year}, LinkedIn'.format(year=datetime.datetime.today().year)
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------------
html_theme = 'nature'
html_static_path = ['_static']
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 | Python |
165b02d35bf88456a96c0b5e4f63f3033eb3091d | Fix for changed Need datamodel | coders4help/volunteer_planner,MRigal/volunteer_planner,klinger/volunteer_planner,tordans/volunteer_planner,slevon/volunteer_planner,coders4help/volunteer_planner,juliabiro/volunteer_planner,coders4help/volunteer_planner,volunteer-planner/volunteer_planner,FriedrichK/volunteer_planner,FriedrichK/volunteer_planner,christophmeissner/volunteer_planner,slevon/volunteer_planner,MRigal/volunteer_planner,juliabiro/volunteer_planner,FriedrichK/volunteer_planner,juliabiro/volunteer_planner,christophmeissner/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner,pitpalme/volunteer_planner,slevon/volunteer_planner,slevon/volunteer_planner,alper/volunteer_planner,jonasrk/volunteer_planner,klinger/volunteer_planner,FriedrichK/volunteer_planner,alper/volunteer_planner,pitpalme/volunteer_planner,tordans/volunteer_planner,koolfreak/volunteer_planner,koolfreak/volunteer_planner,tordans/volunteer_planner,alper/volunteer_planner,jonasrk/volunteer_planner,koolfreak/volunteer_planner,koolfreak/volunteer_planner,volunteer-planner/volunteer_planner,pitpalme/volunteer_planner,MRigal/volunteer_planner,tordans/volunteer_planner,jonasrk/volunteer_planner,koolfreak/volunteer_planner,flindenberg/volunteer_planner,flindenberg/volunteer_planner,volunteer-planner/volunteer_planner,christophmeissner/volunteer_planner,flindenberg/volunteer_planner,christophmeissner/volunteer_planner,klinger/volunteer_planner,tordans/volunteer_planner,FriedrichK/volunteer_planner,slevon/volunteer_planner,jonasrk/volunteer_planner,jonasrk/volunteer_planner,coders4help/volunteer_planner,pitpalme/volunteer_planner | registration/admin.py | registration/admin.py | from django.contrib import admin
from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from models import RegistrationProfile
from scheduler.models import Need
class RegistrationAdmin(admin.ModelAdmin):
    """Admin for RegistrationProfile.

    Adds bulk actions to activate selected users and to re-send
    activation emails, and narrows the queryset used for the 'needs'
    field so related objects are fetched in one query.
    """
    actions = ['activate_users', 'resend_activation_email']
    list_display = ('user', 'activation_key_expired', 'get_user_email')
    raw_id_fields = ['user']
    search_fields = ('user__username', 'user__first_name', 'user__last_name')
    def get_field_queryset(self, db, db_field, request):
        """Return the queryset populating the 'needs' field choices.

        For RegistrationProfile.needs, join 'topic' and 'location' via
        select_related so rendering the choices does not trigger one
        extra query per row.
        """
        if db_field.name == 'needs' \
            and db_field.model._meta.object_name == 'RegistrationProfile':
            return Need.objects.select_related('topic',
                                               'location')
        return super(RegistrationAdmin, self).get_field_queryset(db,
                                                                 db_field,
                                                                 request)
    def activate_users(self, request, queryset):
        """
        Activates the selected users, if they are not already
        activated.
        """
        for profile in queryset:
            RegistrationProfile.objects.activate_user(profile.activation_key)
    activate_users.short_description = _("Activate users")
    def resend_activation_email(self, request, queryset):
        """
        Re-sends activation emails for the selected users.

        Note that this will *only* send activation emails for users
        who are eligible to activate; emails will not be sent to users
        whose activation keys have expired or who have already
        activated.
        """
        # Determine the site object used to build the activation link.
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(request)
        for profile in queryset:
            if not profile.activation_key_expired():
                profile.send_activation_email(site)
    resend_activation_email.short_description = _("Re-send activation emails")
admin.site.register(RegistrationProfile, RegistrationAdmin)
| from django.contrib import admin
from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from models import RegistrationProfile
from scheduler.models import Need
class RegistrationAdmin(admin.ModelAdmin):
actions = ['activate_users', 'resend_activation_email']
list_display = ('user', 'activation_key_expired', 'get_user_email')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name', 'user__last_name')
def get_field_queryset(self, db, db_field, request): #
if db_field.name == 'needs' \
and db_field.model._meta.object_name == 'RegistrationProfile':
return Need.objects.select_related('time_period_from',
'time_period_to',
'topic',
'location')
return super(RegistrationAdmin, self).get_field_queryset(db,
db_field,
request)
def activate_users(self, request, queryset):
"""
Activates the selected users, if they are not alrady
activated.
"""
for profile in queryset:
RegistrationProfile.objects.activate_user(profile.activation_key)
activate_users.short_description = _("Activate users")
def resend_activation_email(self, request, queryset):
"""
Re-sends activation emails for the selected users.
Note that this will *only* send activation emails for users
who are eligible to activate; emails will not be sent to users
whose activation keys have expired or who have already
activated.
"""
if Site._meta.installed:
site = Site.objects.get_current()
else:
site = RequestSite(request)
for profile in queryset:
if not profile.activation_key_expired():
profile.send_activation_email(site)
resend_activation_email.short_description = _("Re-send activation emails")
admin.site.register(RegistrationProfile, RegistrationAdmin)
| agpl-3.0 | Python |
ae2e8cc85b6b4d2202e42d704dde8757ffd31da6 | Make make_rel_symlink.py use python3. | stb-tester/lirc,stb-tester/lirc,stb-tester/lirc,stb-tester/lirc,stb-tester/lirc,stb-tester/lirc | tools/make_rel_symlink.py | tools/make_rel_symlink.py | #!/usr/bin/env python3
import os
import os.path
import sys
import pdb
import shutil
def relative_ln_s( from_, to_ ):
"""
This is just so dirty & boring: create a relative symlink, making the
to_ path relative to from_. No errorchecks. Both arguments must be
files, a destination directory doesn't work (I think). An existing
file in to_ will be removed.
"""
prefix = os.path.commonprefix( [ to_, from_ ] )
if prefix == '':
prefix = '/'
source = from_.split( prefix )[ 1 ]
dest = to_.split( prefix )[ 1 ]
level = len( dest.split( '/' ) ) - 1
path = ( '../' * level ) + source
return path
USAGE = 'Usage: make_rel_symlink [-p] <sourcefile> <destfile>'

# Optional -p flag: print the computed link path instead of creating it.
# Fix: check len(sys.argv) before indexing, so running with no arguments
# prints the usage message instead of raising IndexError.
just_print = False
if len(sys.argv) > 1 and sys.argv[1] == "-p":
    just_print = True
    sys.argv = sys.argv[1:]
if len(sys.argv) != 3:
    print(USAGE)
    sys.exit(1)
if os.path.isdir(sys.argv[2]):
    print("Removing link target dir:" + sys.argv[2])
    shutil.rmtree(sys.argv[2])
link_path = relative_ln_s(sys.argv[1], sys.argv[2])
if just_print:
    print(link_path)
else:
    # Create the link from within the destination directory so the
    # relative path resolves correctly.  Fix: skip chdir when the
    # destination has no directory part (os.chdir('') would raise).
    dest_dir = os.path.dirname(sys.argv[2])
    if dest_dir:
        os.chdir(dest_dir)
    target = os.path.basename(sys.argv[2])
    if os.path.exists(target):
        os.unlink(target)
    os.symlink(link_path, target)
| #!/usr/bin/env python
import os
import os.path
import sys
import pdb
import shutil
def relative_ln_s( from_, to_ ):
"""
This is just so dirty & boring: create a relative symlink, making the
to_ path relative to from_. No errorchecks. Both arguments must be
files, a destination directory doesn't work (I think). An existing
file in to_ will be removed.
"""
prefix = os.path.commonprefix( [ to_, from_ ] )
if prefix == '':
prefix = '/'
source = from_.split( prefix )[ 1 ]
dest = to_.split( prefix )[ 1 ]
level = len( dest.split( '/' ) ) - 1
path = ( '../' * level ) + source
return path
USAGE = 'Usage: make_rel_symlink [-p] <sourcefile> <destfile>'
just_print = False;
if sys.argv[1] == "-p":
just_print = True;
sys.argv = sys.argv[ 1:]
if len( sys.argv ) != 3:
print USAGE
sys.exit( 1 )
if os.path.isdir( sys.argv[2] ):
print "Removing link target dir:" + sys.argv[2]
shutil.rmtree( sys.argv[2])
link_path = relative_ln_s( sys.argv[1], sys.argv[2] )
if just_print:
print link_path
else:
os.chdir( os.path.dirname( sys.argv[2]))
target = os.path.basename( sys.argv[2])
if os.path.exists( target ):
os.unlink( target)
os.symlink( link_path, target)
| mit | Python |
e4879d5cb5cf266ac8f755c947bb693ed7bebc05 | print hello | Giangblackk/python-togeojson | togeojson.py | togeojson.py | # simple create copycat functions from togeojson.js
# generate a short, numeric hash of a string
def okhash(x):
    """Generate a short, numeric hash of the string *x*.

    Port of okhash() from togeojson.js: the rolling hash
    h = (h << 5) - h + charCode, truncated to a signed 32-bit integer
    the way the JavaScript original's `| 0` does.  Returns 0 for an
    empty or falsy input.  Replaces the earlier `print('hello')` stub,
    which did not implement the documented behavior.
    """
    if not x:
        return 0
    h = 0
    for ch in x:
        # (h << 5) - h == 31 * h; the mask keeps the value in 32 bits.
        h = (31 * h + ord(ch)) & 0xFFFFFFFF
    # Reinterpret as a signed 32-bit value, matching JS `| 0`.
    return h - 0x100000000 if h >= 0x80000000 else h
| # simple create copycat functions from togeojson.js
# generate a short, numeric hash of a string
def okhash(x):
| bsd-2-clause | Python |
aa8234d1e6b4916e6945468a2bc5772df2d53e28 | Add Discord Admin for debugging. | ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server | bot/admin.py | bot/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from . import models
@admin.register(models.Notification)
class NotificationAdmin(admin.ModelAdmin):
    """Admin list view for launch Notification bookkeeping records.

    Shows, per launch: the last NET change stamp, the last tweet, the
    last push notification and its recipient count, and days to launch.
    """
    list_display = ('launch', 'last_net_stamp', 'last_twitter_post', 'last_notification_sent',
                    'last_notification_recipient_count', 'days_to_launch')
    # days_to_launch is not editable via the admin form.
    readonly_fields = ('days_to_launch',)
    ordering = ('launch__net',)
    search_fields = ('launch__name',)
@admin.register(models.DailyDigestRecord)
class DailyDigestRecordAdmin(admin.ModelAdmin):
    """Admin list view for DailyDigestRecord entries."""
    list_display = ('id', 'timestamp', 'messages', 'count', 'data')
@admin.register(models.DiscordChannel)
class DiscordBotAdmin(admin.ModelAdmin):
list_display = ('name', 'channel_id', 'server_id') | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from . import models
@admin.register(models.Notification)
class NotificationAdmin(admin.ModelAdmin):
list_display = ('launch', 'last_net_stamp', 'last_twitter_post', 'last_notification_sent',
'last_notification_recipient_count', 'days_to_launch')
readonly_fields = ('days_to_launch',)
ordering = ('launch__net',)
search_fields = ('launch__name',)
@admin.register(models.DailyDigestRecord)
class DailyDigestRecordAdmin(admin.ModelAdmin):
list_display = ('id', 'timestamp', 'messages', 'count', 'data') | apache-2.0 | Python |
716db272743944f8adc18f6af10282c91030a7ac | Fix spelling and test | python-dirbtuves/akl.lt,python-dirbtuves/akl.lt,python-dirbtuves/akl.lt,python-dirbtuves/akl.lt,python-dirbtuves/akl.lt | src/akllt/tests/test_news_import.py | src/akllt/tests/test_news_import.py | import datetime
import unittest
import pkg_resources
import django.test
from django.core.management import call_command
from wagtail.wagtailcore.models import Page
from akllt.dataimport.news import import_news
from akllt.models import NewsStory
def shorten_values(item):
    """Return a copy of *item* with long string values truncated.

    Any string value longer than 24 characters is replaced by its first
    24 characters followed by '...'; all other values pass through
    unchanged.
    """
    def clip(value):
        if isinstance(value, basestring) and len(value) > 24:
            return '%s...' % value[:24]
        return value
    return dict((key, clip(value)) for key, value in item.items())
class NewsExportReadTests(unittest.TestCase):
    """Reading the news export fixture with import_news()."""
    def test_iter_news(self):
        """import_news() yields one parsed story from the fixture folder."""
        news_folder = pkg_resources.resource_filename(
            'akllt', 'tests/fixtures/naujienos'
        )
        news = import_news(news_folder)
        # Long blurb/body strings are truncated via shorten_values so the
        # expected literal below stays readable.
        self.assertEqual(list(map(shorten_values, news)), [
            {
                'date': datetime.date(2002, 10, 15),
                'title': 'Konkursas',
                'blurb': '<p>Vilniuje, dvi dienas ...',
                'body': '<p>Vilniuje, dvi dienas ...',
            },
        ])
class NewsImportTests(django.test.TestCase):
    """Importing parsed stories as NewsStory pages under a root page."""
    def test_create_news(self):
        # Database starts empty.
        self.assertEqual(NewsStory.objects.count(), 0)
        root = Page.add_root(title='Root page')
        news_folder = pkg_resources.resource_filename(
            'akllt', 'tests/fixtures/naujienos'
        )
        news = import_news(news_folder)
        # Attach each imported story to the page tree as a child of root.
        for news_story in news:
            root.add_child(instance=NewsStory(
                title=news_story['title'],
                date=news_story['date'],
                blurb=news_story['blurb'],
                body=news_story['body'],
            ))
        # Fix: the test previously created pages without verifying the
        # result.  The fixture contains exactly one story (see the
        # sibling tests), so exactly one NewsStory must now exist.
        self.assertEqual(NewsStory.objects.count(), 1)
class NewsImportCommandTests(django.test.TestCase):
    """The akllt_importnews management command imports fixture stories."""
    def test_command(self):
        # Database starts empty.
        self.assertEqual(NewsStory.objects.count(), 0)
        call_command(
            'akllt_importnews',
            pkg_resources.resource_filename(
                'akllt',
                'tests/fixtures/naujienos'
            )
        )
        # The fixture folder contains a single story.
        self.assertEqual(NewsStory.objects.count(), 1)
| import datetime
import unittest
import pkg_resources
import django.test
from django.core.management import call_command
from wagtail.wagtailcore.models import Page
from akllt.dataimport.news import import_news
from akllt.models import NewsStory
def shorten_values(item):
shortened = {}
for key, value in item.items():
if isinstance(value, basestring) and len(value) > 24:
shortened[key] = '%s...' % value[:24]
else:
shortened[key] = value
return shortened
class NewsExportReadTests(unittest.TestCase):
def test_iter_news(self):
news_folder = pkg_resources.resource_filename(
'akllt', 'tests/fixtures/naujienos'
)
news = import_news(news_folder)
self.assertEqual(list(map(shorten_values, news)), [
{
'date': datetime.date(2002, 10, 15),
'title': 'Konkursas',
'blurb': '<p>Vilniuje, dvi dienas ...',
'body': '<p>Vilniuje, dvi dienas ...',
},
])
class NewsImportTests(django.test.TestCase):
def test_create_news(self):
self.assertEqual(NewsStory.objects.count(), 0)
root = Page.add_root(title='Root page')
news_folder = pkg_resources.resource_filename(
'akllt', 'tests/fixtures/naujienos'
)
news = import_news(news_folder)
for news_story in news:
root.add_child(instance=NewsStory(
title=news_story['title'],
date=news_story['date'],
blurb=news_story['blurb'],
body=news_story['body'],
))
class NewsImportCommandTests(django.test.TestCase):
def test_command(self):
self.assertEqual(NewsStory.objects.count(), 0)
call_command(
'akllt_importnews',
pkg_resources.resource_filename(
'akllt',
'test/fixtures/naujienos'
)
)
self.assertEqual(NewsStory.objects.count(), 2)
| agpl-3.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.