commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
ccd3a50d5518d6fe4d45f31360ebf2c7849af62c | Add OCA as author of OCA addons | Domatix/l10n-spain,factorlibre/l10n-spain,factorlibre/l10n-spain,factorlibre/l10n-spain | l10n_es_account_asset/__openerp__.py | l10n_es_account_asset/__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Gestión de activos fijos para España",
"version": "1.0",
"depends": ["account_asset"],
"author": "Serv. Tecnol. Avanzados - Pedro M. Baeza,Odoo Community Association (OCA)",
"description": """
Gestión de activos fijos española
=================================
Cambia la gestión estándar de activos fijos de OpenERP para acomodarla a las
regulaciones españolas:
* Cambia el método de cálculo para el prorrateo temporal.
* Añade un nuevo método de cálculo para porcentaje fijo por periodo.
* Añade la opción de trasladar la depreciación al final del periodo.
""",
"website": "http://www.serviciosbaeza.com",
"category": "Accounting & Finance",
"data": [
"account_asset_view.xml",
],
"active": False,
"installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Gestión de activos fijos para España",
"version": "1.0",
"depends": ["account_asset"],
"author": "Serv. Tecnol. Avanzados - Pedro M. Baeza",
"description": """
Gestión de activos fijos española
=================================
Cambia la gestión estándar de activos fijos de OpenERP para acomodarla a las
regulaciones españolas:
* Cambia el método de cálculo para el prorrateo temporal.
* Añade un nuevo método de cálculo para porcentaje fijo por periodo.
* Añade la opción de trasladar la depreciación al final del periodo.
""",
"website": "http://www.serviciosbaeza.com",
"category": "Accounting & Finance",
"data": [
"account_asset_view.xml",
],
"active": False,
"installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
edb52dcbd58e4f31dff26a6e60ae4d670619121e | correct settings for SMTP | altai/focus,altai/focus,altai/focus | C4GD_web/default_settings.py | C4GD_web/default_settings.py | # coding=utf-8
RELATIVE_TO_API_HOURS_SHIFT = 0 # our system has 13, keystone db 14 => 1
SECRET_KEY = 'g.U(\x8cQ\xbc\xdb\\\xc3\x9a\xb2\xb6,\xec\xad(\xf8"2*\xef\x0bd'
NEXT_TO_LOGIN_ARG = 'next' # GET/POST field name to store next after login URL
DEFAULT_NEXT_TO_LOGIN_VIEW = 'dashboard' # no next? redirect to this view
DEFAULT_NEXT_TO_LOGOUT_VIEW = 'dashboard'
LDAP_URI = 'ldap://ns/'
LDAP_BASEDN = 'ou=people,ou=griddynamics,dc=griddynamics,dc=net'
RO_DB_HOST = ''
RO_DB_PORT = 3306 # must be integer
RO_DB_USER = ''
RO_DB_PASS = ''
RO_DB_NAME = ''
RW_DB_HOST = ''
RW_DB_PORT = 3306 # must be integer
RW_DB_USER = ''
RW_DB_PASS = ''
RW_DB_NAME = ''
KEYSTONE_URL = 'http://172.18.41.1:5000/v2.0'
BILLING_URL = 'http://172.30.0.3:8787/v1'
DEV = False
MAIL_SERVER = 'mail.vm.griddynamics.net'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_DEBUG = True
MAIL_USERNAME = 'c4gd-focus-robot@griddynamics.com'
MAIL_PASSWORD = None
DEFAULT_MAIL_SENDER = 'DoNotReply'
| # coding=utf-8
RELATIVE_TO_API_HOURS_SHIFT = 0 # our system has 13, keystone db 14 => 1
SECRET_KEY = 'g.U(\x8cQ\xbc\xdb\\\xc3\x9a\xb2\xb6,\xec\xad(\xf8"2*\xef\x0bd'
NEXT_TO_LOGIN_ARG = 'next' # GET/POST field name to store next after login URL
DEFAULT_NEXT_TO_LOGIN_VIEW = 'dashboard' # no next? redirect to this view
DEFAULT_NEXT_TO_LOGOUT_VIEW = 'dashboard'
LDAP_URI = 'ldap://ns/'
LDAP_BASEDN = 'ou=people,ou=griddynamics,dc=griddynamics,dc=net'
RO_DB_HOST = ''
RO_DB_PORT = 3306 # must be integer
RO_DB_USER = ''
RO_DB_PASS = ''
RO_DB_NAME = ''
RW_DB_HOST = ''
RW_DB_PORT = 3306 # must be integer
RW_DB_USER = ''
RW_DB_PASS = ''
RW_DB_NAME = ''
KEYSTONE_URL = 'http://172.18.41.1:5000/v2.0'
BILLING_URL = 'http://172.30.0.3:8787/v1'
DEV = False
MAIL_SERVER = 'mail.vm.griddynamics.net'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_DEBUG = True
MAIL_USERNAME = 'c4gd-focus-robot@griddynamics.com'
MAIL_PASSWORD = None
DEFAULT_MAIL_SENDER = 'Do Not Reply'
| lgpl-2.1 | Python |
68a49aa5ad009afd5e58b88f414be898d02b2b30 | add solution for permutation sequence | SwordYoung/cutprob,SwordYoung/cutprob | leetcode/permutation-sequence/sol.py | leetcode/permutation-sequence/sol.py | #!/usr/bin/env python
class Solution:
# @return a string
def getPermutation(self, n, k):
p = {0:1, 1:1}
for i in range(2,n+1):
p[i]=p[i-1]*i
sk = []
for i in range(n):
sk.append(True)
kl = k-1
res = ""
for i in range(n,0,-1):
l = (kl/p[i-1])
kl = kl%p[i-1]
j = 0
ji = 0
while j < l+1:
if sk[ji]:
j += 1
ji += 1
ji -= 1
assert sk[ji]
sk[ji] = False
res += "%d" % (ji+1)
return res
def test(n, k):
sol = Solution()
print "input: %d %d" % (n, k)
print "output: %s" % (sol.getPermutation(n, k))
if __name__ == "__main__":
test(3, 4)
test(4, 8)
| #!/usr/bin/env python
class Solution:
# @return a string
def getPermutation(self, n, k):
p = {0:1, 1:1}
for i in range(2,n+1):
p[i]=p[i-1]*i
sk = []
for i in range(n):
sk.append(True)
kl = k-1
res = []
for i in range(n,0,-1):
l = (kl/p[i-1])
kl = kl%p[i-1]
j = 0
ji = 0
while j < l+1:
if sk[ji]:
j += 1
ji += 1
ji -= 1
assert sk[ji]
sk[ji] = False
res.append(ji+1)
return res
def test(n, k):
sol = Solution()
print "input: %d %d" % (n, k)
print "output: %s" % (sol.getPermutation(n, k))
if __name__ == "__main__":
test(3, 4)
test(4, 8)
| artistic-2.0 | Python |
90abcfc316bcd3b998ea3d120adbe49a1b89d9ec | Update tests_globals.py | RonsenbergVI/trendpy,RonsenbergVI/trendpy | trendpy/tests/tests_globals.py | trendpy/tests/tests_globals.py | # -*- coding: utf-8 -*-
# tests_globals.py
# MIT License
# Copyright (c) 2017 Rene Jean Corneille
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import inspect
import unittest
from numpy.random import randint
from numpy import inf
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(os.path.dirname(current_dir))
sys.path.insert(0,parent_dir)
import trendpy.globals
class TestGlobals(unittest.TestCase):
def setUp(self):
self.order = int(randint(low=0,high=4,size=1))
self.dim = int(randint(low=self.order+2,high=2000,size=1))
self.D = trendpy.globals.derivative_matrix(self.dim,self.order)
def tearDown(self):
self.dim = None
self.order = None
self.D = None
def test_derivative_matrix_size(self):
self.assertEqual(self.D.shape,(self.dim-self.order,self.dim))
if __name__ == "__main__":
unittest.main()
| # -*- coding: utf-8 -*-
# tests_globals.py
# MIT License
# Copyright (c) 2017 Rene Jean Corneille
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import inspect
import unittest
from numpy.random import randint
from numpy import inf
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
sys.path.insert(0,parentdir)
import trendpy.globals
class TestGlobals(unittest.TestCase):
def setUp(self):
self.order = int(randint(low=0,high=4,size=1))
self.dim = int(randint(low=self.order+2,high=2000,size=1))
self.D = trendpy.globals.derivative_matrix(self.dim,self.order)
def tearDown(self):
self.dim = None
self.order = None
self.D = None
def test_derivative_matrix_size(self):
self.assertEqual(self.D.shape,(self.dim-self.order,self.dim))
if __name__ == "__main__":
unittest.main()
| mit | Python |
ae78bd758c690e28abaae2c07e8a3890e76044e0 | Allow papers/maxout to be tested without MNIST data | KennethPierce/pylearnk,KennethPierce/pylearnk,Refefer/pylearn2,JesseLivezey/plankton,goodfeli/pylearn2,theoryno3/pylearn2,alexjc/pylearn2,pkainz/pylearn2,fulmicoton/pylearn2,alexjc/pylearn2,alexjc/pylearn2,kastnerkyle/pylearn2,ddboline/pylearn2,se4u/pylearn2,hantek/pylearn2,jeremyfix/pylearn2,nouiz/pylearn2,abergeron/pylearn2,sandeepkbhat/pylearn2,chrish42/pylearn,msingh172/pylearn2,caidongyun/pylearn2,fulmicoton/pylearn2,Refefer/pylearn2,skearnes/pylearn2,sandeepkbhat/pylearn2,mclaughlin6464/pylearn2,hantek/pylearn2,sandeepkbhat/pylearn2,fyffyt/pylearn2,bartvm/pylearn2,fulmicoton/pylearn2,jamessergeant/pylearn2,se4u/pylearn2,mkraemer67/pylearn2,TNick/pylearn2,junbochen/pylearn2,sandeepkbhat/pylearn2,hyqneuron/pylearn2-maxsom,woozzu/pylearn2,CIFASIS/pylearn2,pombredanne/pylearn2,fishcorn/pylearn2,JesseLivezey/pylearn2,ashhher3/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,lisa-lab/pylearn2,pombredanne/pylearn2,daemonmaker/pylearn2,ddboline/pylearn2,junbochen/pylearn2,JesseLivezey/plankton,hantek/pylearn2,msingh172/pylearn2,ashhher3/pylearn2,ddboline/pylearn2,matrogers/pylearn2,abergeron/pylearn2,ashhher3/pylearn2,shiquanwang/pylearn2,goodfeli/pylearn2,pombredanne/pylearn2,nouiz/pylearn2,nouiz/pylearn2,matrogers/pylearn2,cosmoharrigan/pylearn2,kastnerkyle/pylearn2,shiquanwang/pylearn2,kose-y/pylearn2,aalmah/pylearn2,abergeron/pylearn2,junbochen/pylearn2,w1kke/pylearn2,fyffyt/pylearn2,daemonmaker/pylearn2,fyffyt/pylearn2,skearnes/pylearn2,JesseLivezey/plankton,mclaughlin6464/pylearn2,nouiz/pylearn2,hantek/pylearn2,se4u/pylearn2,lamblin/pylearn2,ddboline/pylearn2,lisa-lab/pylearn2,kastnerkyle/pylearn2,aalmah/pylearn2,lamblin/pylearn2,theoryno3/pylearn2,JesseLivezey/pylearn2,TNick/pylearn2,bartvm/pylearn2,shiquanwang/pylearn2,JesseLivezey/plankton,shiquanwang/pylearn2,chrish42/pylearn,CIFASIS/pylearn2,chrish42/pylearn,mclaughlin6464/pylearn2,jamessergeant/pylearn2,lancezlin/pylearn2,lisa-lab/pylearn2,jamessergeant/pylearn2,lancezlin/pylearn2,matrogers/pylearn2,mkraemer67/pylearn2,theoryno3/pylearn2,junbochen/pylearn2,daemonmaker/pylearn2,pkainz/pylearn2,theoryno3/pylearn2,CIFASIS/pylearn2,cosmoharrigan/pylearn2,jeremyfix/pylearn2,caidongyun/pylearn2,bartvm/pylearn2,se4u/pylearn2,kose-y/pylearn2,msingh172/pylearn2,KennethPierce/pylearnk,hyqneuron/pylearn2-maxsom,hyqneuron/pylearn2-maxsom,fishcorn/pylearn2,KennethPierce/pylearnk,fyffyt/pylearn2,kose-y/pylearn2,jamessergeant/pylearn2,lancezlin/pylearn2,caidongyun/pylearn2,lamblin/pylearn2,cosmoharrigan/pylearn2,skearnes/pylearn2,goodfeli/pylearn2,matrogers/pylearn2,fishcorn/pylearn2,fulmicoton/pylearn2,w1kke/pylearn2,caidongyun/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,pkainz/pylearn2,Refefer/pylearn2,aalmah/pylearn2,chrish42/pylearn,pkainz/pylearn2,bartvm/pylearn2,woozzu/pylearn2,jeremyfix/pylearn2,ashhher3/pylearn2,pombredanne/pylearn2,jeremyfix/pylearn2,Refefer/pylearn2,lunyang/pylearn2,lisa-lab/pylearn2,JesseLivezey/pylearn2,skearnes/pylearn2,lunyang/pylearn2,TNick/pylearn2,goodfeli/pylearn2,kose-y/pylearn2,woozzu/pylearn2,lancezlin/pylearn2,fishcorn/pylearn2,aalmah/pylearn2,lamblin/pylearn2,daemonmaker/pylearn2,kastnerkyle/pylearn2,JesseLivezey/pylearn2,mclaughlin6464/pylearn2,hyqneuron/pylearn2-maxsom,lunyang/pylearn2,woozzu/pylearn2,abergeron/pylearn2,w1kke/pylearn2,cosmoharrigan/pylearn2,alexjc/pylearn2,TNick/pylearn2,mkraemer67/pylearn2,w1kke/pylearn2 | pylearn2/scripts/papers/maxout/tests/test_mnist.py | pylearn2/scripts/papers/maxout/tests/test_mnist.py | import os
import numpy as np
import pylearn2
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.termination_criteria import EpochCounter
from pylearn2.utils.serial import load_train_file
def test_mnist():
"""
Test the mnist.yaml file from the dropout
paper on random input
"""
train = load_train_file(os.path.join(pylearn2.__path__[0],
"scripts/papers/maxout/mnist.yaml"))
random_X = np.random.rand(10, 784)
random_y = np.random.randint(0, 10, (10, 1))
train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10)
train.algorithm.termination_criterion = EpochCounter(max_epochs=1)
train.algorithm._set_monitoring_dataset(train.dataset)
train.main_loop()
| import os
import numpy as np
import pylearn2
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.termination_criteria import EpochCounter
from pylearn2.utils.serial import load_train_file
def test_mnist():
"""
Test the mnist.yaml file from the dropout
paper on random input
"""
train = load_train_file(os.path.join(pylearn2.__path__[0],
"scripts/papers/maxout/mnist.yaml"))
random_X = np.random.rand(10, 784)
random_y = np.random.randint(0, 10, (10, 1))
train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10)
train.algorithm.termination_criterion = EpochCounter(max_epochs=1)
train.main_loop()
| bsd-3-clause | Python |
fca5e3d1f1a2c9a2378c4aff7b3d14c37b4de6af | Fix update_api to work with 1.8 | SteveViss/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,stevepiercy/readthedocs.org,stevepiercy/readthedocs.org,safwanrahman/readthedocs.org,pombredanne/readthedocs.org,wijerasa/readthedocs.org,clarkperkins/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,stevepiercy/readthedocs.org,rtfd/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,pombredanne/readthedocs.org,clarkperkins/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,stevepiercy/readthedocs.org,wijerasa/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org | readthedocs/core/management/commands/update_api.py | readthedocs/core/management/commands/update_api.py | import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from readthedocs.projects import tasks
from readthedocs.api.client import api
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""Custom management command to rebuild documentation for all projects on
the site. Invoked via ``./manage.py update_repos``.
"""
def add_arguments(self, parser):
parser.add_argument('--docker', action='store_true', default=False)
parser.add_argument('projects', nargs='+', type=str)
def handle(self, *args, **options):
docker = options.get('docker', False)
for slug in options['projects']:
project_data = api.project(slug).get()
p = tasks.make_api_project(project_data)
log.info("Building %s" % p)
tasks.update_docs.run(pk=p.pk, docker=docker)
| import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from readthedocs.projects import tasks
from readthedocs.api.client import api
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""Custom management command to rebuild documentation for all projects on
the site. Invoked via ``./manage.py update_repos``.
"""
def add_arguments(self, parser):
parser.add_argument('--docker', action='store_true', default=False)
def handle(self, *args, **options):
docker = options.get('docker', False)
if len(args):
for slug in args:
project_data = api.project(slug).get()
p = tasks.make_api_project(project_data)
log.info("Building %s" % p)
tasks.update_docs.run(pk=p.pk, docker=docker)
| mit | Python |
f51105e048d8f07ae5a1409b271246ae508c052a | Fix the iteration | crossbario/autobahn-testsuite,tavendo/AutobahnTestSuite,crossbario/autobahn-testsuite,mikelikespie/AutobahnTestSuite,mikelikespie/AutobahnTestSuite,mkauf/AutobahnTestSuite,mogui/AutobahnTestSuite,jgelens/AutobahnTestSuite,tavendo/AutobahnTestSuite,crossbario/autobahn-testsuite,crossbario/autobahn-testsuite,Brother-Simon/AutobahnTestSuite,normanmaurer/AutobahnTestSuite,tavendo/AutobahnTestSuite,mkauf/AutobahnTestSuite,mogui/AutobahnTestSuite,Brother-Simon/AutobahnTestSuite,normanmaurer/AutobahnTestSuite,jgelens/AutobahnTestSuite,Brother-Simon/AutobahnTestSuite,mkauf/AutobahnTestSuite | lib/python/autobahn/case/case5_16.py | lib/python/autobahn/case/case5_16.py | ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from case import Case
class Case5_16(Case):
DESCRIPTION = """Repeated 2x: Continuation Frame with FIN = false (where there is nothing to continue), then text Message fragmented into 2 fragments."""
EXPECTATION = """The connection is failed immediately, since there is no message to continue."""
def onOpen(self):
self.expected[Case.OK] = []
self.expectedClose = {"closedByMe":False,"closeCode":[self.p.CLOSE_STATUS_CODE_PROTOCOL_ERROR],"requireClean":False}
for i in xrange(0, 2):
self.p.sendFrame(opcode = 0, fin = False, payload = "fragment1")
self.p.sendFrame(opcode = 1, fin = False, payload = "fragment2")
self.p.sendFrame(opcode = 0, fin = True, payload = "fragment3")
self.p.killAfter(1)
| ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from case import Case
class Case5_16(Case):
DESCRIPTION = """Repeated 2x: Continuation Frame with FIN = false (where there is nothing to continue), then text Message fragmented into 2 fragments."""
EXPECTATION = """The connection is failed immediately, since there is no message to continue."""
def onOpen(self):
self.expected[Case.OK] = []
self.expectedClose = {"closedByMe":False,"closeCode":[self.p.CLOSE_STATUS_CODE_PROTOCOL_ERROR],"requireClean":False}
for i in range(1, 2):
self.p.sendFrame(opcode = 0, fin = False, payload = "fragment1")
self.p.sendFrame(opcode = 1, fin = False, payload = "fragment2")
self.p.sendFrame(opcode = 0, fin = True, payload = "fragment3")
self.p.killAfter(1)
| apache-2.0 | Python |
05ecc1f721d4683c2412e28a82eabe607c3a9129 | allow additional args for installing package | yejianye/fabtask | fabtask/packages.py | fabtask/packages.py | from fabric.api import env, sudo, run
from fabtask.utils import program_exists, is_linux, is_macos
def find_package_management_program():
if env.get('install_package_command'):
return
if is_linux():
if program_exists('apt-get'):
env.install_package_command = 'sudo apt-get install -y'
elif program_exists('yum'):
env.install_package_command = 'sudo yum -y install'
elif is_macos():
ensure_homebrew()
env.install_package_command = 'brew'
def ensure_homebrew():
if not program_exists('brew'):
run('/usr/bin/ruby -e "$(/usr/bin/curl -fksSL https://raw.github.com/mxcl/homebrew/master/Library/Contributions/install_homebrew.rb)"')
def ensure_package(name, install_args=""):
find_package_management_program()
cmd = '%s %s' % (env.install_package_command, name)
if install_args:
cmd += ' ' + install_args
run(cmd)
def ensure_python_pkg(name):
if not program_exists('pip'):
if program_exists('easy_install'):
sudo('easy_install pip')
else:
ensure_package('python-pip')
sudo('pip install %s' % name)
| from fabric.api import env, sudo, run
from fabtask.utils import program_exists, is_linux, is_macos
def find_package_management_program():
if env.get('install_package_command'):
return
if is_linux():
if program_exists('apt-get'):
env.install_package_command = 'sudo apt-get install -y'
elif program_exists('yum'):
env.install_package_command = 'sudo yum -y install'
elif is_macos():
ensure_homebrew()
env.install_package_command = 'brew'
def ensure_homebrew():
if not program_exists('brew'):
run('/usr/bin/ruby -e "$(/usr/bin/curl -fksSL https://raw.github.com/mxcl/homebrew/master/Library/Contributions/install_homebrew.rb)"')
def ensure_package(name):
find_package_management_program()
run('%s %s' % (env.install_package_command, name))
def ensure_python_pkg(name):
if not program_exists('pip'):
if program_exists('easy_install'):
sudo('easy_install pip')
else:
ensure_package('python-pip')
sudo('pip install %s' % name)
| mit | Python |
1670b9ae583ae151ea6aeb2ac09c468cfc30f266 | fix homebrew issues | yejianye/fabtask | fabtask/packages.py | fabtask/packages.py | from fabric.api import env, sudo, run, settings
from fabtask.utils import program_exists, is_linux, is_macos
class PackageError(Exception):
pass
def find_package_management_program():
if env.get('install_package_command'):
return
if is_linux():
if program_exists('apt-get'):
env.install_package_command = 'sudo apt-get install -y'
elif program_exists('yum'):
env.install_package_command = 'sudo yum -y install'
elif is_macos():
ensure_homebrew()
env.install_package_command = 'brew install'
def ensure_homebrew():
if not program_exists('brew'):
run('/usr/bin/ruby -e "$(/usr/bin/curl -fksSL https://raw.github.com/mxcl/homebrew/master/Library/Contributions/install_homebrew.rb)"')
def ensure_package(name, install_args=""):
find_package_management_program()
cmd = '%s %s' % (env.install_package_command, name)
if install_args:
cmd += ' ' + install_args
with settings(warn_only=True):
result = run(cmd)
if not result.succeeded and not 'already installed' in result:
raise PackageError()
def ensure_python_pkg(name):
if not program_exists('pip'):
if program_exists('easy_install'):
sudo('easy_install pip')
else:
ensure_package('python-pip')
sudo('pip install %s' % name)
| from fabric.api import env, sudo, run
from fabtask.utils import program_exists, is_linux, is_macos
def find_package_management_program():
if env.get('install_package_command'):
return
if is_linux():
if program_exists('apt-get'):
env.install_package_command = 'sudo apt-get install -y'
elif program_exists('yum'):
env.install_package_command = 'sudo yum -y install'
elif is_macos():
ensure_homebrew()
env.install_package_command = 'brew'
def ensure_homebrew():
if not program_exists('brew'):
run('/usr/bin/ruby -e "$(/usr/bin/curl -fksSL https://raw.github.com/mxcl/homebrew/master/Library/Contributions/install_homebrew.rb)"')
def ensure_package(name, install_args=""):
find_package_management_program()
cmd = '%s %s' % (env.install_package_command, name)
if install_args:
cmd += ' ' + install_args
run(cmd)
def ensure_python_pkg(name):
if not program_exists('pip'):
if program_exists('easy_install'):
sudo('easy_install pip')
else:
ensure_package('python-pip')
sudo('pip install %s' % name)
| mit | Python |
b85307aed2a2f909674734a5f5b84e353701eaaf | update version to 0.7.7 | sahlinet/fastapp,sahlinet/fastapp,sahlinet/fastapp,sahlinet/fastapp | fastapp/__init__.py | fastapp/__init__.py | __version__ = "0.7.7"
import os
from django.core.exceptions import ImproperlyConfigured
# load plugins
from django.conf import settings
try:
plugins_config = getattr(settings, "FASTAPP_PLUGINS_CONFIG", {})
plugins = plugins_config.keys()
plugins = plugins + getattr(settings, "FASTAPP_PLUGINS", [])
for plugin in list(set(plugins)):
def my_import(name):
# from http://effbot.org/zone/import-string.htm
m = __import__(name)
for n in name.split(".")[1:]:
m = getattr(m, n)
return m
amod = my_import(plugin)
except ImproperlyConfigured, e:
print e
| __version__ = "0.7.6"
import os
from django.core.exceptions import ImproperlyConfigured
# load plugins
from django.conf import settings
try:
plugins_config = getattr(settings, "FASTAPP_PLUGINS_CONFIG", {})
plugins = plugins_config.keys()
plugins = plugins + getattr(settings, "FASTAPP_PLUGINS", [])
for plugin in list(set(plugins)):
def my_import(name):
# from http://effbot.org/zone/import-string.htm
m = __import__(name)
for n in name.split(".")[1:]:
m = getattr(m, n)
return m
amod = my_import(plugin)
except ImproperlyConfigured, e:
print e
| mit | Python |
67ea1674184bd71a88019575a2cc61388b6ac26d | Move an import statement | City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers | helusers/management/commands/sync_helusers.py | helusers/management/commands/sync_helusers.py | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp
from helusers.providers.helsinki.provider import HelsinkiProvider
class Command(BaseCommand):
help = 'Create or update helusers allauth SocialApp'
def handle(self, *args, **options):
changed = False
try:
app = SocialApp.objects.get(provider=HelsinkiProvider.id)
except SocialApp.DoesNotExist:
app = SocialApp(provider=HelsinkiProvider.id)
self.stdout.write(self.style.SUCCESS('Creating new SocialApp'))
if not app.name:
app.name = 'Helsingin kaupungin työntekijät'
changed = True
client_id = secret_key = None
jwt_settings = getattr(settings, 'JWT_AUTH')
if jwt_settings:
client_id = jwt_settings.get('JWT_AUDIENCE')
secret_key = jwt_settings.get('JWT_SECRET_KEY')
if not client_id:
raise ImproperlyConfigured("You must set JWT_AUTH['JWT_AUDIENCE'] to correspond to your client ID")
if not secret_key:
raise ImproperlyConfigured("You must set JWT_AUTH['JWT_SECRET_KEY'] to correspond to your secret key")
if app.client_id != client_id:
changed = True
app.client_id = client_id
if app.secret != secret_key:
changed = True
app.secret = secret_key
if changed:
app.save()
if not app.sites.exists():
from django.contrib.sites.models import Site
app.sites.add(Site.objects.get(id=settings.SITE_ID))
changed = True
if changed:
self.stdout.write(self.style.SUCCESS('SocialApp successfully updated'))
else:
self.stdout.write(self.style.NOTICE('Already synced -- no changes needed'))
| from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from allauth.socialaccount.models import SocialApp
from helusers.providers.helsinki.provider import HelsinkiProvider
class Command(BaseCommand):
help = 'Create or update helusers allauth SocialApp'
def handle(self, *args, **options):
changed = False
try:
app = SocialApp.objects.get(provider=HelsinkiProvider.id)
except SocialApp.DoesNotExist:
app = SocialApp(provider=HelsinkiProvider.id)
self.stdout.write(self.style.SUCCESS('Creating new SocialApp'))
if not app.name:
app.name = 'Helsingin kaupungin työntekijät'
changed = True
client_id = secret_key = None
jwt_settings = getattr(settings, 'JWT_AUTH')
if jwt_settings:
client_id = jwt_settings.get('JWT_AUDIENCE')
secret_key = jwt_settings.get('JWT_SECRET_KEY')
if not client_id:
raise ImproperlyConfigured("You must set JWT_AUTH['JWT_AUDIENCE'] to correspond to your client ID")
if not secret_key:
raise ImproperlyConfigured("You must set JWT_AUTH['JWT_SECRET_KEY'] to correspond to your secret key")
if app.client_id != client_id:
changed = True
app.client_id = client_id
if app.secret != secret_key:
changed = True
app.secret = secret_key
if changed:
app.save()
if not app.sites.exists():
app.sites.add(Site.objects.get(id=settings.SITE_ID))
changed = True
if changed:
self.stdout.write(self.style.SUCCESS('SocialApp successfully updated'))
else:
self.stdout.write(self.style.NOTICE('Already synced -- no changes needed'))
| bsd-2-clause | Python |
45256880a063e99ea40944044b2acfabf4fc7af1 | change media list key to 'media_list' | Mobii/twilio-python,YeelerG/twilio-python,johannakate/twilio-python,Rosy-S/twilio-python,twilio/twilio-python,tysonholub/twilio-python,supermanheng21/twilio-python,bcorwin/twilio-python | twilio/rest/resources/media.py | twilio/rest/resources/media.py | from twilio.rest.resources import InstanceResource, ListResource
from twilio.rest.resources.util import normalize_dates, parse_date
class Media(InstanceResource):
""" Represents media associated with a :class:`Message`.
.. attribute:: sid
A 34 character string that uniquely identifies this resource.
.. attribute:: account_sid
The unique id of the Account responsible for this media.
.. attribute:: date_created
The date that this resource was created, given in RFC 2822 format.
.. attribute:: date_updated
The date that this resource was last updated, given in RFC 2822 format.
.. attribute:: parent_sid
The MessageSid of the message that created the media.
.. attribute:: content_type
The default content-type of the media, for example image/jpeg,
image/png, or image/gif.
.. attribute:: uri
The URI for this resource, relative to https://api.twilio.com
"""
def delete(self):
"""
Delete this media.
"""
return self.parent.delete_instance(self.name)
class MediaList(ListResource):
name = "Media"
key = "media_list"
instance = Media
def __call__(self, message_sid):
# `Media` is a word of ambiguous plurality. This causes issues.
# To match the rest of the library:
# `client.media` needs to return a new MediaList.
# `client.media('message_sid')` needs to return a MediaList
# for a given message.
base_uri = "%s/Messages/%s" % (self.base_uri, message_sid)
return MediaList(base_uri, self.auth, self.timeout)
def __init__(self, *args, **kwargs):
super(MediaList, self).__init__(*args, **kwargs)
@normalize_dates
def list(self, before=None, after=None, date_created=None, **kw):
"""
Returns a page of :class:`Media` resources as a list. For
paging information see :class:`ListResource`.
:param date after: Only list media created after this date.
:param date before: Only list media created before this date.
:param date date_created: Only list media created on this date.
:param sid message_sid: Only list media created by the given MessageSid
"""
kw["DateCreated<"] = before
kw["DateCreated>"] = after
kw["DateCreated"] = parse_date(date_created)
return self.get_instances(kw)
def delete(self, sid):
"""
Delete a :class:`Media`.
:param sid: String identifier for a Media resource
"""
return self.delete_instance(sid)
| from twilio.rest.resources import InstanceResource, ListResource
from twilio.rest.resources.util import normalize_dates, parse_date
class Media(InstanceResource):
""" Represents media associated with a :class:`Message`.
.. attribute:: sid
A 34 character string that uniquely identifies this resource.
.. attribute:: account_sid
The unique id of the Account responsible for this media.
.. attribute:: date_created
The date that this resource was created, given in RFC 2822 format.
.. attribute:: date_updated
The date that this resource was last updated, given in RFC 2822 format.
.. attribute:: parent_sid
The MessageSid of the message that created the media.
.. attribute:: content_type
The default content-type of the media, for example image/jpeg,
image/png, or image/gif.
.. attribute:: uri
The URI for this resource, relative to https://api.twilio.com
"""
def delete(self):
"""
Delete this media.
"""
return self.parent.delete_instance(self.name)
class MediaList(ListResource):
name = "Media"
key = "media"
instance = Media
def __call__(self, message_sid):
# `Media` is a word of ambiguous plurality. This causes issues.
# To match the rest of the library:
# `client.media` needs to return a new MediaList.
# `client.media('message_sid')` needs to return a MediaList
# for a given message.
base_uri = "%s/Messages/%s" % (self.base_uri, message_sid)
return MediaList(base_uri, self.auth, self.timeout)
def __init__(self, *args, **kwargs):
super(MediaList, self).__init__(*args, **kwargs)
@normalize_dates
def list(self, before=None, after=None, date_created=None, **kw):
"""
Returns a page of :class:`Media` resources as a list. For
paging information see :class:`ListResource`.
:param date after: Only list media created after this date.
:param date before: Only list media created before this date.
:param date date_created: Only list media created on this date.
:param sid message_sid: Only list media created by the given MessageSid
"""
kw["DateCreated<"] = before
kw["DateCreated>"] = after
kw["DateCreated"] = parse_date(date_created)
return self.get_instances(kw)
def delete(self, sid):
"""
Delete a :class:`Media`.
:param sid: String identifier for a Media resource
"""
return self.delete_instance(sid)
| mit | Python |
e3b457c141ee4258db4a1b71b8632cee4f1bc929 | Change frequency per server | UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine | upol_search_engine/__main__.py | upol_search_engine/__main__.py | from datetime import datetime
from time import sleep
from upol_search_engine.upol_crawler import tasks
def main():
blacklist = """portal.upol.cz
stag.upol.cz
library.upol.cz
adfs.upol.cz
portalbeta.upol.cz
idp.upol.cz
famaplus.upol.cz
es.upol.cz
smlouvy.upol.cz
menza.upol.cz
edis.upol.cz
courseware.upol.cz
m.zurnal.upol.cz"""
crawler_settings = {'limit_domain': 'upol.cz',
'max_depth': 10,
'connect_max_timeout': 3.05,
'read_max_timeout': 10,
'frequency_per_server': 0.7,
'blacklist': blacklist}
seed = """https://www.upol.cz
https://www.cmtf.upol.cz
https://www.lf.upol.cz
https://www.ff.upol.cz
https://www.prf.upol.cz
https://www.pdf.upol.cz
https://ftk.upol.cz
https://www.pf.upol.cz
https://www.fzv.upol.cz"""
print("Launching crawler")
feeder = tasks.feeder_task.delay(
crawler_settings=crawler_settings,
seed=seed,
batch_size=300,
delay_between_feeding=30)
start_time = datetime.now()
while feeder.status != 'SUCCESS':
print(feeder.status)
print(feeder.info)
duration = datetime.now() - start_time
print(duration)
sleep(10)
print("Crawler done")
print("Launching pagerank calculation")
pagerank = tasks.calculate_pagerank_task.delay(crawler_settings)
while pagerank.status != 'SUCCESS':
print(pagerank.status)
sleep(5)
end_time = datetime.now()
duration = end_time - start_time
print(duration)
print("Pagerank done")
if __name__ == "__main__":
main()
| from datetime import datetime
from time import sleep
from upol_search_engine.upol_crawler import tasks
def main():
blacklist = """portal.upol.cz
stag.upol.cz
library.upol.cz
adfs.upol.cz
portalbeta.upol.cz
idp.upol.cz
famaplus.upol.cz
es.upol.cz
smlouvy.upol.cz
menza.upol.cz
edis.upol.cz
courseware.upol.cz
m.zurnal.upol.cz"""
crawler_settings = {'limit_domain': 'upol.cz',
'max_depth': 10,
'connect_max_timeout': 3.05,
'read_max_timeout': 10,
'frequency_per_server': 0.5,
'blacklist': blacklist}
seed = """https://www.upol.cz
https://www.cmtf.upol.cz
https://www.lf.upol.cz
https://www.ff.upol.cz
https://www.prf.upol.cz
https://www.pdf.upol.cz
https://ftk.upol.cz
https://www.pf.upol.cz
https://www.fzv.upol.cz"""
print("Launching crawler")
feeder = tasks.feeder_task.delay(
crawler_settings=crawler_settings,
seed=seed,
batch_size=300,
delay_between_feeding=30)
start_time = datetime.now()
while feeder.status != 'SUCCESS':
print(feeder.status)
print(feeder.info)
duration = datetime.now() - start_time
print(duration)
sleep(10)
print("Crawler done")
print("Launching pagerank calculation")
pagerank = tasks.calculate_pagerank_task.delay(crawler_settings)
while pagerank.status != 'SUCCESS':
print(pagerank.status)
sleep(5)
end_time = datetime.now()
duration = end_time - start_time
print(duration)
print("Pagerank done")
if __name__ == "__main__":
main()
| mit | Python |
76abfd7914b5521cd3d59308adbb2a7049aaf50b | add comment that version is a required field | MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials,MyPureCloud/developercenter-tutorials | user-management/python/user.py | user-management/python/user.py | import time
import PureCloudPlatformClientV2, os
from PureCloudPlatformClientV2.rest import ApiException
from pprint import pprint
# Credentials
CLIENT_ID = os.environ['GENESYS_CLOUD_CLIENT_ID']
CLIENT_SECRET = os.environ['GENESYS_CLOUD_CLIENT_SECRET']
ORG_REGION = os.environ['GENESYS_CLOUD_REGION'] # eg. us_east_1
# Set environment
region = PureCloudPlatformClientV2.PureCloudRegionHosts[ORG_REGION]
PureCloudPlatformClientV2.configuration.host = region.get_api_host()
# OAuth when using Client Credentials
api_client = PureCloudPlatformClientV2.api_client.ApiClient() \
.get_client_credentials_token(CLIENT_ID, CLIENT_SECRET)
api_instance = PureCloudPlatformClientV2.UsersApi(api_client)
newuser = PureCloudPlatformClientV2.CreateUser()
newuser.name = "Tutorial User"
newuser.email = "tutorial35@example.com"
newuser.password = "230498wkjdf8asdfoiasdf"
currentuser = api_instance.post_users(newuser)
print(currentuser.id)
updateuser = PureCloudPlatformClientV2.UpdateUser()
updateuser.name = "Tutorial User New Name"
# Take note that version is a required value in updating user
updateuser.version = currentuser.version
newaddress = PureCloudPlatformClientV2.Contact()
newaddress.address = "3172222222"
newaddress.media_type = "PHONE"
newaddress.type = "WORK"
updateuser.addresses = [newaddress]
api_response = api_instance.patch_user(currentuser.id, updateuser)
| import time
import PureCloudPlatformClientV2, os
from PureCloudPlatformClientV2.rest import ApiException
from pprint import pprint
# Credentials
CLIENT_ID = os.environ['GENESYS_CLOUD_CLIENT_ID']
CLIENT_SECRET = os.environ['GENESYS_CLOUD_CLIENT_SECRET']
ORG_REGION = os.environ['GENESYS_CLOUD_REGION'] # eg. us_east_1
# Set environment
region = PureCloudPlatformClientV2.PureCloudRegionHosts[ORG_REGION]
PureCloudPlatformClientV2.configuration.host = region.get_api_host()
# OAuth when using Client Credentials
api_client = PureCloudPlatformClientV2.api_client.ApiClient() \
.get_client_credentials_token(CLIENT_ID, CLIENT_SECRET)
api_instance = PureCloudPlatformClientV2.UsersApi(api_client)
newuser = PureCloudPlatformClientV2.CreateUser()
newuser.name = "Tutorial User"
newuser.email = "tutorial35@example.com"
newuser.password = "230498wkjdf8asdfoiasdf"
currentuser = api_instance.post_users(newuser)
print(currentuser.id)
updateuser = PureCloudPlatformClientV2.UpdateUser()
updateuser.name = "Tutorial User New Name"
updateuser.version = currentuser.version
newaddress = PureCloudPlatformClientV2.Contact()
newaddress.address = "3172222222"
newaddress.media_type = "PHONE"
newaddress.type = "WORK"
updateuser.addresses = [newaddress]
api_response = api_instance.patch_user(currentuser.id, updateuser) | mit | Python |
fbabf15f1db758c732b5be3a485f039d7c2a82dd | Simplify connecting to rmake servers by defaulting in the correct port, not using the user + password when connecting to a unix socket (your unix user is used instead) and defaulting to the local connection. | sassoftware/rmake3,sassoftware/rmake3,sassoftware/rmake3,sassoftware/rmake,sassoftware/rmake,sassoftware/rmake | rmake_plugins/multinode_client/build/buildcfg.py | rmake_plugins/multinode_client/build/buildcfg.py | #
# Copyright (c) 2007 rPath, Inc. All Rights Reserved.
#
import urllib
from conary.lib import cfgtypes
from rmake.build import buildcfg
from rmake.lib import apiutils
class BuildContext(object):
rmakeUrl = (cfgtypes.CfgString, 'unix:///var/lib/rmake/socket')
rmakeUser = (buildcfg.CfgUser, None)
clientCert = (cfgtypes.CfgPath, None)
def getServerUri(self):
url = self.rmakeUrl
type, rest = urllib.splittype(url)
host, path = urllib.splithost(rest)
user, host = urllib.splituser(host)
host, port = urllib.splitport(host)
if not port:
port = 9999
user = ''
if self.rmakeUser and type != 'unix':
user = '%s:%s@' % (self.rmakeUser)
url = '%s://%s%s:%s%s' % (type, user, host, port, path)
return url
def updateConfig():
buildcfg.RmakeBuildContext.rmakeUrl = BuildContext.rmakeUrl
buildcfg.RmakeBuildContext.rmakeUser = BuildContext.rmakeUser
buildcfg.RmakeBuildContext.clientCert = BuildContext.clientCert
buildcfg.BuildConfiguration.getServerUri = getServerUri
class SanitizedBuildConfiguration(buildcfg.SanitizedBuildConfiguration):
@staticmethod
def __freeze__(cfg):
cfg = buildcfg.SanitizedBuildConfiguration.__freeze__(cfg)
if 'rmakeUser' in cfg:
del cfg['rmakeUser']
return cfg
@staticmethod
def __thaw__(cfg):
return apiutils.thaw('BuildConfiguration', cfg)
apiutils.register(SanitizedBuildConfiguration)
| #
# Copyright (c) 2007 rPath, Inc. All Rights Reserved.
#
import urllib
from conary.lib import cfgtypes
from rmake.build import buildcfg
from rmake.lib import apiutils
class BuildContext(object):
rmakeUrl = (cfgtypes.CfgString, 'https://localhost:9999')
rmakeUser = (buildcfg.CfgUser, None)
clientCert = (cfgtypes.CfgPath, None)
def getServerUri(self):
url = self.rmakeUrl
type, rest = urllib.splittype(url)
host, path = urllib.splithost(rest)
user, host = urllib.splituser(host)
if self.rmakeUser:
url = '%s://%s:%s@%s%s' % (type, self.rmakeUser[0], self.rmakeUser[1],
host, path)
else:
url = '%s://%s%s' % (type, host, path)
return url
def updateConfig():
buildcfg.RmakeBuildContext.rmakeUrl = BuildContext.rmakeUrl
buildcfg.RmakeBuildContext.rmakeUser = BuildContext.rmakeUser
buildcfg.RmakeBuildContext.clientCert = BuildContext.clientCert
buildcfg.BuildConfiguration.getServerUri = getServerUri
class SanitizedBuildConfiguration(buildcfg.SanitizedBuildConfiguration):
@staticmethod
def __freeze__(cfg):
cfg = buildcfg.SanitizedBuildConfiguration.__freeze__(cfg)
if 'rmakeUser' in cfg:
del cfg['rmakeUser']
return cfg
@staticmethod
def __thaw__(cfg):
return apiutils.thaw('BuildConfiguration', cfg)
apiutils.register(SanitizedBuildConfiguration)
| apache-2.0 | Python |
4b9e86c1547f1513e42e443668d7998b8ae03b3c | Remove unneeded code | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline | rnacentral_pipeline/databases/ensembl/databases.py | rnacentral_pipeline/databases/ensembl/databases.py | # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import itertools as it
DISALLOWED_DATABASE_TERMS = {
'mirror',
'chok1gs', # skip golden hamster genome (same taxid as CriGri)
'female', # skip female naked mole rat genome
}
def database_key(name):
parts = name.strip().split('_core_')
numbers = parts[1].split('_')
major = int(numbers[0])
match = re.match(r'^(\d+)([^\d]?)$', numbers[1])
if not match:
raise ValueError("Cannot process: " + name)
minor = int(match.group(1))
suffix = ''
if match.group(2):
suffix = match.group(2)
return (parts[0], major, minor, suffix)
def major(database):
return database_key(database)[1]
def select_max(handle):
databases = []
for line in handle:
if any(t in line for t in DISALLOWED_DATABASE_TERMS):
continue
if 'mus_musculus' in line and line.count('_') != 4:
continue # skip mouse strains Mouse 129S1/SvImJ
if '_core_' in line:
databases.append(line.strip())
max_major = max(database_key(d)[1] for d in databases)
possible = it.ifilter(lambda d: major(d) == max_major, databases)
grouped = it.groupby(possible, lambda d: database_key(d)[0])
for _, databases in grouped:
yield max(databases, key=database_key)
def write_max(handle, output):
for name in select_max(handle):
output.write(name)
output.write('\n')
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import itertools as it
DISALLOWED_DATABASE_TERMS = {
'mirror',
'chok1gs', # skip golden hamster genome (same taxid as CriGri)
'female', # skip female naked mole rat genome
}
def database_key(name):
parts = name.strip().split('_core_')
numbers = parts[1].split('_')
major = int(numbers[0])
match = re.match(r'^(\d+)([^\d]?)$', numbers[1])
if not match:
raise ValueError("Cannot process: " + name)
minor = int(match.group(1))
suffix = ''
if match.group(2):
suffix = match.group(2)
return (parts[0], major, minor, suffix)
def major(database):
return database_key(database)[1]
def select_max(handle):
databases = []
for line in handle:
if any(t in line for t in DISALLOWED_DATABASE_TERMS):
continue
if 'mus_musculus' in line and line.count('_') != 4:
continue # skip mouse strains Mouse 129S1/SvImJ
if '_core_' in line:
databases.append(line.strip())
max_major = max(database_key(d)[1] for d in databases)
possible = it.ifilter(lambda d: major(d) == max_major, databases)
grouped = it.groupby(possible, lambda d: database_key(d)[0])
for _, databases in grouped:
yield max(databases, key=database_key)
def load_known(url):
return set()
def write_max(handle, output, db_url=None):
known = load_known(db_url)
for name in select_max(handle):
if name not in known:
output.write(name)
output.write('\n')
| apache-2.0 | Python |
d25a5f7d2f3916eb4c9f047309edb22c716ce346 | Fix invalid import issue | vv-p/jira-reports,vv-p/jira-reports | filters/__init__.py | filters/__init__.py | from .filters import cleanup, fix_emoji
| from filters import cleanup, fix_emoji
| mit | Python |
9312d59cdb1e2df0a4d6142a55dbd6dc046edea5 | split long line | googlearchive/cloud-playground,silverlinings/cloud-playground,googlearchive/cloud-playground,silverlinings/cloud-playground,silverlinings/cloud-playground,googlearchive/cloud-playground,googlearchive/cloud-playground | appengine_config.py | appengine_config.py | """App Engine configuration file."""
import os
import re
import sys
# append 'mimic' directory to sys.path
DIRNAME = os.path.dirname(os.path.abspath(__file__)
sys.path.append(os.path.join(DIRNAME, 'mimic'))
from __mimic import common
from __mimic import datastore_tree
from __mimic import mimic
import caching_urlfetch_tree
import settings
from google.appengine.api import app_identity
# our current app id
app_id = app_identity.get_application_id()
urlfetch_tree_SOURCE_CODE_APP_ID = settings.PLAYGROUND_APP_ID
if common.IsDevMode() or urlfetch_tree_SOURCE_CODE_APP_ID == app_id:
mimic_CREATE_TREE_FUNC = datastore_tree.DatastoreTree
else:
mimic_CREATE_TREE_FUNC = caching_urlfetch_tree.CachingUrlFetchTree
mimic_NAMESPACE = '_playground'
mimic_PROJECT_ID_QUERY_PARAM = '_mimic_project'
mimic_PROJECT_ID_FROM_PATH_INFO_RE = re.compile('/playground/p/(.+?)/')
# pylint: disable-msg=C6409
def namespace_manager_default_namespace_for_request():
return mimic.GetNamespace()
| """App Engine configuration file."""
import os
import re
import sys
# append 'mimic' directory to sys.path
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'mimic'))
from __mimic import common
from __mimic import datastore_tree
from __mimic import mimic
import caching_urlfetch_tree
import settings
from google.appengine.api import app_identity
# our current app id
app_id = app_identity.get_application_id()
urlfetch_tree_SOURCE_CODE_APP_ID = settings.PLAYGROUND_APP_ID
if common.IsDevMode() or urlfetch_tree_SOURCE_CODE_APP_ID == app_id:
mimic_CREATE_TREE_FUNC = datastore_tree.DatastoreTree
else:
mimic_CREATE_TREE_FUNC = caching_urlfetch_tree.CachingUrlFetchTree
mimic_NAMESPACE = '_playground'
mimic_PROJECT_ID_QUERY_PARAM = '_mimic_project'
mimic_PROJECT_ID_FROM_PATH_INFO_RE = re.compile('/playground/p/(.+?)/')
# pylint: disable-msg=C6409
def namespace_manager_default_namespace_for_request():
return mimic.GetNamespace()
| apache-2.0 | Python |
03c0bc1c23e622afbe66c0ba166fe2baaddb5750 | Bump version | racker/fleece,racker/fleece | fleece/__about__.py | fleece/__about__.py | """Fleece package attributes and metadata."""
__all__ = (
'__title__',
'__summary__',
'__author__',
'__email__',
'__license__',
'__version__',
'__copyright__',
'__url__',
)
__title__ = 'fleece'
__summary__ = 'Wrap the lamb...da'
__author__ = 'Rackers'
__email__ = 'bruce.stringer@rackspace.com'
__version__ = '0.1.0'
__license__ = 'Apache License, Version 2.0'
__keywords__ = ['fleece', 'lambda']
__copyright__ = 'Copyright Rackspace US, Inc. 2016'
__url__ = 'https://github.com/racker/fleece'
| """Fleece package attributes and metadata."""
__all__ = (
'__title__',
'__summary__',
'__author__',
'__email__',
'__license__',
'__version__',
'__copyright__',
'__url__',
)
__title__ = 'fleece'
__summary__ = 'Wrap the lamb...da'
__author__ = 'Rackers'
__email__ = 'bruce.stringer@rackspace.com'
__version__ = '0.0.1'
__license__ = 'Apache License, Version 2.0'
__keywords__ = ['fleece', 'lambda']
__copyright__ = 'Copyright Rackspace US, Inc. 2016'
__url__ = 'https://github.com/racker/fleece'
| apache-2.0 | Python |
2c9c6ad9ee808fa27f1d52c280e6501f1d712921 | Prepare v1.2.234.dev | Flexget/Flexget,OmgOhnoes/Flexget,LynxyssCZ/Flexget,sean797/Flexget,Flexget/Flexget,Danfocus/Flexget,malkavi/Flexget,spencerjanssen/Flexget,OmgOhnoes/Flexget,Pretagonist/Flexget,ianstalk/Flexget,tobinjt/Flexget,tobinjt/Flexget,Danfocus/Flexget,oxc/Flexget,crawln45/Flexget,jawilson/Flexget,lildadou/Flexget,ratoaq2/Flexget,sean797/Flexget,xfouloux/Flexget,crawln45/Flexget,spencerjanssen/Flexget,cvium/Flexget,qk4l/Flexget,tarzasai/Flexget,poulpito/Flexget,Danfocus/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,thalamus/Flexget,tsnoam/Flexget,xfouloux/Flexget,vfrc2/Flexget,ratoaq2/Flexget,ibrahimkarahan/Flexget,poulpito/Flexget,cvium/Flexget,oxc/Flexget,gazpachoking/Flexget,vfrc2/Flexget,ibrahimkarahan/Flexget,v17al/Flexget,sean797/Flexget,JorisDeRieck/Flexget,ibrahimkarahan/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,OmgOhnoes/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,ZefQ/Flexget,Danfocus/Flexget,qvazzler/Flexget,dsemi/Flexget,malkavi/Flexget,ianstalk/Flexget,tobinjt/Flexget,crawln45/Flexget,lildadou/Flexget,v17al/Flexget,patsissons/Flexget,spencerjanssen/Flexget,offbyone/Flexget,jawilson/Flexget,drwyrm/Flexget,grrr2/Flexget,cvium/Flexget,grrr2/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,Flexget/Flexget,qvazzler/Flexget,tsnoam/Flexget,tarzasai/Flexget,jacobmetrick/Flexget,tobinjt/Flexget,grrr2/Flexget,drwyrm/Flexget,LynxyssCZ/Flexget,ZefQ/Flexget,xfouloux/Flexget,ianstalk/Flexget,ratoaq2/Flexget,patsissons/Flexget,tsnoam/Flexget,dsemi/Flexget,LynxyssCZ/Flexget,tarzasai/Flexget,qk4l/Flexget,gazpachoking/Flexget,jacobmetrick/Flexget,crawln45/Flexget,dsemi/Flexget,qk4l/Flexget,malkavi/Flexget,oxc/Flexget,antivirtel/Flexget,poulpito/Flexget,v17al/Flexget,lildadou/Flexget,ZefQ/Flexget,antivirtel/Flexget,drwyrm/Flexget,qvazzler/Flexget,Pretagonist/Flexget,Pretagonist/Flexget,malkavi/Flexget,offbyone/Flexget,jawilson/Flexget,thalamus/Flexget,vfrc2/Flexget,patsissons/Flexget,thalamus/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.234.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.233'
| mit | Python |
462cfe2f406deaafd320402bd8209e0261f9aa3e | Prepare v2.8.1.dev | OmgOhnoes/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,sean797/Flexget,OmgOhnoes/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,malkavi/Flexget,drwyrm/Flexget,jacobmetrick/Flexget,Flexget/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,Flexget/Flexget,malkavi/Flexget,Danfocus/Flexget,jawilson/Flexget,crawln45/Flexget,crawln45/Flexget,Danfocus/Flexget,ianstalk/Flexget,sean797/Flexget,gazpachoking/Flexget,tobinjt/Flexget,OmgOhnoes/Flexget,qk4l/Flexget,ianstalk/Flexget,jawilson/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,jacobmetrick/Flexget,ianstalk/Flexget,poulpito/Flexget,jawilson/Flexget,qk4l/Flexget,LynxyssCZ/Flexget,drwyrm/Flexget,malkavi/Flexget,tobinjt/Flexget,malkavi/Flexget,crawln45/Flexget,Danfocus/Flexget,drwyrm/Flexget,tobinjt/Flexget,jawilson/Flexget,crawln45/Flexget,poulpito/Flexget,gazpachoking/Flexget,tobinjt/Flexget,Flexget/Flexget,Danfocus/Flexget,sean797/Flexget,qk4l/Flexget,poulpito/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.8.1.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.8.0'
| mit | Python |
ec918563683f8c4c1898f1c575cce2c817a38c52 | Prepare v3.2.12.dev | crawln45/Flexget,Flexget/Flexget,crawln45/Flexget,crawln45/Flexget,Flexget/Flexget,Flexget/Flexget,crawln45/Flexget,Flexget/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '3.2.12.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '3.2.11'
| mit | Python |
500cd2b709599fbe33dd9d1df37cfaa862f4449e | Prepare v1.2.427.dev | cvium/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,Flexget/Flexget,antivirtel/Flexget,OmgOhnoes/Flexget,qvazzler/Flexget,oxc/Flexget,Danfocus/Flexget,oxc/Flexget,Flexget/Flexget,tobinjt/Flexget,sean797/Flexget,qk4l/Flexget,jacobmetrick/Flexget,jawilson/Flexget,lildadou/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,qvazzler/Flexget,Danfocus/Flexget,tobinjt/Flexget,dsemi/Flexget,LynxyssCZ/Flexget,ianstalk/Flexget,tobinjt/Flexget,lildadou/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,poulpito/Flexget,crawln45/Flexget,ianstalk/Flexget,qk4l/Flexget,dsemi/Flexget,Pretagonist/Flexget,Flexget/Flexget,antivirtel/Flexget,jacobmetrick/Flexget,cvium/Flexget,cvium/Flexget,tarzasai/Flexget,drwyrm/Flexget,OmgOhnoes/Flexget,crawln45/Flexget,jacobmetrick/Flexget,dsemi/Flexget,sean797/Flexget,drwyrm/Flexget,Pretagonist/Flexget,malkavi/Flexget,gazpachoking/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,Danfocus/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,tarzasai/Flexget,ianstalk/Flexget,antivirtel/Flexget,jawilson/Flexget,lildadou/Flexget,Flexget/Flexget,oxc/Flexget,tarzasai/Flexget,poulpito/Flexget,drwyrm/Flexget,poulpito/Flexget,jawilson/Flexget,sean797/Flexget,jawilson/Flexget,malkavi/Flexget,malkavi/Flexget,tobinjt/Flexget,Danfocus/Flexget,qvazzler/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.427.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.426'
| mit | Python |
649f496c937680a22cab63b8b327cf394610b590 | Prepare v1.2.423.dev | OmgOhnoes/Flexget,lildadou/Flexget,jawilson/Flexget,dsemi/Flexget,ianstalk/Flexget,tobinjt/Flexget,tsnoam/Flexget,Flexget/Flexget,qvazzler/Flexget,drwyrm/Flexget,drwyrm/Flexget,poulpito/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,Pretagonist/Flexget,qvazzler/Flexget,LynxyssCZ/Flexget,cvium/Flexget,oxc/Flexget,Pretagonist/Flexget,Flexget/Flexget,dsemi/Flexget,OmgOhnoes/Flexget,tarzasai/Flexget,cvium/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,malkavi/Flexget,jacobmetrick/Flexget,jacobmetrick/Flexget,tarzasai/Flexget,malkavi/Flexget,JorisDeRieck/Flexget,tarzasai/Flexget,poulpito/Flexget,tobinjt/Flexget,sean797/Flexget,Flexget/Flexget,malkavi/Flexget,gazpachoking/Flexget,Flexget/Flexget,OmgOhnoes/Flexget,qk4l/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,sean797/Flexget,tobinjt/Flexget,jacobmetrick/Flexget,crawln45/Flexget,antivirtel/Flexget,poulpito/Flexget,malkavi/Flexget,Danfocus/Flexget,sean797/Flexget,jawilson/Flexget,tsnoam/Flexget,antivirtel/Flexget,jawilson/Flexget,ianstalk/Flexget,drwyrm/Flexget,cvium/Flexget,gazpachoking/Flexget,oxc/Flexget,tobinjt/Flexget,Danfocus/Flexget,qk4l/Flexget,dsemi/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,tsnoam/Flexget,crawln45/Flexget,Pretagonist/Flexget,qvazzler/Flexget,ianstalk/Flexget,jawilson/Flexget,oxc/Flexget,antivirtel/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.423.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.422'
| mit | Python |
f5cb6c46dc27390feef032b42ea5e23bc7925246 | Add random_alphanumeric fixture | igboyes/virtool,igboyes/virtool,virtool/virtool,virtool/virtool | virtool/tests/fixtures/core.py | virtool/tests/fixtures/core.py | import pytest
import datetime
import virtool.web.dispatcher
@pytest.fixture
def test_random_alphanumeric(monkeypatch):
class RandomAlphanumericTester:
def __init__(self):
self.choices = [
"aB67nm89jL56hj34AL90",
"fX1l90Rt45JK34bA7890",
"kl84Fg067jJa109lmQ021"
]
self.last_choice = None
def __call__(self, length=6, mixed_case=False, excluded=None):
string = self.choices.pop()[:length]
if not mixed_case:
string = string.lower()
if string in excluded:
string = self.__call__(length, mixed_case, excluded)
self.last_choice = string
return string
@property
def next_choice(self):
return self.choices[-1]
tester = RandomAlphanumericTester()
monkeypatch.setattr("virtool.utils.random_alphanumeric", tester)
return tester
@pytest.fixture
def static_time(monkeypatch):
time = datetime.datetime(2017, 10, 6, 20, 0, 0, tzinfo=datetime.timezone.utc)
monkeypatch.setattr("virtool.utils.timestamp", lambda: time)
return time
@pytest.fixture
def test_dispatch(mocker, monkeypatch):
m = mocker.Mock(spec=virtool.web.dispatcher.Dispatcher())
mock_class = mocker.Mock()
mock_class.return_value = m
monkeypatch.setattr("virtool.web.dispatcher.Dispatcher", mock_class)
return m.dispatch
| import pytest
import datetime
import virtool.web.dispatcher
@pytest.fixture
def static_time(monkeypatch):
time = datetime.datetime(2017, 10, 6, 20, 0, 0, tzinfo=datetime.timezone.utc)
monkeypatch.setattr("virtool.utils.timestamp", lambda: time)
return time
@pytest.fixture
def test_dispatch(mocker, monkeypatch):
m = mocker.Mock(spec=virtool.web.dispatcher.Dispatcher())
mock_class = mocker.Mock()
mock_class.return_value = m
monkeypatch.setattr("virtool.web.dispatcher.Dispatcher", mock_class)
return m.dispatch
| mit | Python |
75f46292a8dccb04e233986c18e415dbfffb9e68 | Fix calls to reverse | takeflight/wagtailvideos,takeflight/wagtailvideos,takeflight/wagtailvideos | wagtailvideos/wagtail_hooks.py | wagtailvideos/wagtail_hooks.py | from django.conf.urls import include, url
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.urls import reverse
from django.utils.html import format_html, format_html_join
from django.utils.translation import ugettext_lazy as _
from wagtail.admin.menu import MenuItem
from wagtail.core import hooks
from wagtailvideos import urls
from wagtailvideos.forms import GroupVideoPermissionFormSet
@hooks.register('register_admin_urls')
def register_admin_urls():
return [
url(r'^videos/', include(urls)),
]
@hooks.register('insert_editor_js')
def editor_js():
js_files = [
static('wagtailvideos/js/video-chooser.js'),
]
js_includes = format_html_join(
'\n', '<script src="{0}"></script>',
((filename, ) for filename in js_files)
)
return js_includes + format_html(
"""
<script>
window.chooserUrls.videoChooser = '{0}';
</script>
""",
reverse('wagtailvideos:chooser')
)
@hooks.register('register_group_permission_panel')
def register_video_permissions_panel():
return GroupVideoPermissionFormSet
@hooks.register('register_admin_menu_item')
def register_images_menu_item():
return MenuItem(
_('Videos'), reverse('wagtailvideos:index'),
name='videos', classnames='icon icon-media', order=300
)
| from django.conf.urls import include, url
from django.contrib.staticfiles.templatetags.staticfiles import static
from django import urls
from django.utils.html import format_html, format_html_join
from django.utils.translation import ugettext_lazy as _
from wagtail.admin.menu import MenuItem
from wagtail.core import hooks
from wagtailvideos import urls
from wagtailvideos.forms import GroupVideoPermissionFormSet
@hooks.register('register_admin_urls')
def register_admin_urls():
return [
url(r'^videos/', include(urls)),
]
@hooks.register('insert_editor_js')
def editor_js():
js_files = [
static('wagtailvideos/js/video-chooser.js'),
]
js_includes = format_html_join(
'\n', '<script src="{0}"></script>',
((filename, ) for filename in js_files)
)
return js_includes + format_html(
"""
<script>
window.chooserUrls.videoChooser = '{0}';
</script>
""",
urls.reverse('wagtailvideos:chooser')
)
@hooks.register('register_group_permission_panel')
def register_video_permissions_panel():
return GroupVideoPermissionFormSet
@hooks.register('register_admin_menu_item')
def register_images_menu_item():
return MenuItem(
_('Videos'), urls.reverse('wagtailvideos:index'),
name='videos', classnames='icon icon-media', order=300
)
| bsd-3-clause | Python |
5653403e04cfcb00ebecb93659cb6e0725fa5415 | drop unnecessary constants and rename quantum -> tacker | stackforge/tacker,openstack/tacker,openstack/tacker,priya-pp/Tacker,zeinsteinz/tacker,stackforge/tacker,SripriyaSeetharam/tacker,openstack/tacker,SripriyaSeetharam/tacker,trozet/tacker,zeinsteinz/tacker,priya-pp/Tacker,trozet/tacker | tacker/common/constants.py | tacker/common/constants.py | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO(salv-orlando): Verify if a single set of operational
# status constants is achievable
EXT_NS_COMP = '_backward_comp_e_ns'
EXT_NS = '_extension_ns'
XML_NS_V20 = 'http://openstack.org/tacker/api/v2.0'
XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance"
XSI_ATTR = "xsi:nil"
XSI_NIL_ATTR = "xmlns:xsi"
ATOM_NAMESPACE = "http://www.w3.org/2005/Atom"
ATOM_XMLNS = "xmlns:atom"
ATOM_LINK_NOTATION = "{%s}link" % ATOM_NAMESPACE
TYPE_XMLNS = "xmlns:tacker"
TYPE_ATTR = "tacker:type"
VIRTUAL_ROOT_KEY = "_v_root"
TYPE_BOOL = "bool"
TYPE_INT = "int"
TYPE_LONG = "long"
TYPE_FLOAT = "float"
TYPE_LIST = "list"
TYPE_DICT = "dict"
PAGINATION_INFINITE = 'infinite'
SORT_DIRECTION_ASC = 'asc'
SORT_DIRECTION_DESC = 'desc'
| # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO(salv-orlando): Verify if a single set of operational
# status constants is achievable
NET_STATUS_ACTIVE = 'ACTIVE'
NET_STATUS_BUILD = 'BUILD'
NET_STATUS_DOWN = 'DOWN'
NET_STATUS_ERROR = 'ERROR'
PORT_STATUS_ACTIVE = 'ACTIVE'
PORT_STATUS_BUILD = 'BUILD'
PORT_STATUS_DOWN = 'DOWN'
PORT_STATUS_ERROR = 'ERROR'
FLOATINGIP_STATUS_ACTIVE = 'ACTIVE'
FLOATINGIP_STATUS_DOWN = 'DOWN'
FLOATINGIP_STATUS_ERROR = 'ERROR'
DEVICE_OWNER_ROUTER_INTF = "network:router_interface"
DEVICE_OWNER_ROUTER_GW = "network:router_gateway"
DEVICE_OWNER_FLOATINGIP = "network:floatingip"
DEVICE_OWNER_DHCP = "network:dhcp"
DEVICE_ID_RESERVED_DHCP_PORT = "reserved_dhcp_port"
FLOATINGIP_KEY = '_floatingips'
INTERFACE_KEY = '_interfaces'
METERING_LABEL_KEY = '_metering_labels'
IPv4 = 'IPv4'
IPv6 = 'IPv6'
DHCP_RESPONSE_PORT = 68
MIN_VLAN_TAG = 1
MAX_VLAN_TAG = 4094
MAX_VXLAN_VNI = 16777215
FLOODING_ENTRY = ['00:00:00:00:00:00', '0.0.0.0']
EXT_NS_COMP = '_backward_comp_e_ns'
EXT_NS = '_extension_ns'
XML_NS_V20 = 'http://openstack.org/quantum/api/v2.0'
XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance"
XSI_ATTR = "xsi:nil"
XSI_NIL_ATTR = "xmlns:xsi"
ATOM_NAMESPACE = "http://www.w3.org/2005/Atom"
ATOM_XMLNS = "xmlns:atom"
ATOM_LINK_NOTATION = "{%s}link" % ATOM_NAMESPACE
TYPE_XMLNS = "xmlns:quantum"
TYPE_ATTR = "quantum:type"
VIRTUAL_ROOT_KEY = "_v_root"
TYPE_BOOL = "bool"
TYPE_INT = "int"
TYPE_LONG = "long"
TYPE_FLOAT = "float"
TYPE_LIST = "list"
TYPE_DICT = "dict"
AGENT_TYPE_DHCP = 'DHCP agent'
AGENT_TYPE_OVS = 'Open vSwitch agent'
AGENT_TYPE_LINUXBRIDGE = 'Linux bridge agent'
AGENT_TYPE_HYPERV = 'HyperV agent'
AGENT_TYPE_NEC = 'NEC plugin agent'
AGENT_TYPE_OFA = 'OFA driver agent'
AGENT_TYPE_L3 = 'L3 agent'
AGENT_TYPE_LOADBALANCER = 'Loadbalancer agent'
AGENT_TYPE_MLNX = 'Mellanox plugin agent'
AGENT_TYPE_METERING = 'Metering agent'
AGENT_TYPE_METADATA = 'Metadata agent'
AGENT_TYPE_SDNVE = 'IBM SDN-VE agent'
L2_AGENT_TOPIC = 'N/A'
PAGINATION_INFINITE = 'infinite'
SORT_DIRECTION_ASC = 'asc'
SORT_DIRECTION_DESC = 'desc'
PORT_BINDING_EXT_ALIAS = 'binding'
L3_AGENT_SCHEDULER_EXT_ALIAS = 'l3_agent_scheduler'
DHCP_AGENT_SCHEDULER_EXT_ALIAS = 'dhcp_agent_scheduler'
LBAAS_AGENT_SCHEDULER_EXT_ALIAS = 'lbaas_agent_scheduler'
# Protocol names and numbers for Security Groups/Firewalls
PROTO_NAME_TCP = 'tcp'
PROTO_NAME_ICMP = 'icmp'
PROTO_NAME_ICMP_V6 = 'icmpv6'
PROTO_NAME_UDP = 'udp'
PROTO_NUM_TCP = 6
PROTO_NUM_ICMP = 1
PROTO_NUM_ICMP_V6 = 58
PROTO_NUM_UDP = 17
# List of ICMPv6 types that should be allowed by default:
# Multicast Listener Query (130),
# Multicast Listener Report (131),
# Multicast Listener Done (132),
# Neighbor Solicitation (135),
# Neighbor Advertisement (136)
ICMPV6_ALLOWED_TYPES = [130, 131, 132, 135, 136]
ICMPV6_TYPE_RA = 134
DHCPV6_STATEFUL = 'dhcpv6-stateful'
DHCPV6_STATELESS = 'dhcpv6-stateless'
IPV6_SLAAC = 'slaac'
IPV6_MODES = [DHCPV6_STATEFUL, DHCPV6_STATELESS, IPV6_SLAAC]
IPV6_LLA_PREFIX = 'fe80::/64'
| apache-2.0 | Python |
853a53c23b2320554c1491884c41700bbedb1214 | add explicit django_admin = False to context | henzk/django-productline,henzk/django-productline | django_productline/features/djpladmin/context_processors.py | django_productline/features/djpladmin/context_processors.py | from django.conf import settings
def django_admin(request):
'''
Adds additional information to the context:
``django_admin`` - boolean variable indicating whether the current
page is part of the django admin or not.
``ADMIN_URL`` - normalized version of settings.ADMIN_URL; starts with a slash, ends without a slash
NOTE: do not set ADMIN_URL='/' in case your application provides functionality
outside of django admin as all incoming urls are interpreted as admin urls.
'''
# ensure that adminurl always starts with a '/' but never ends with a '/'
if settings.ADMIN_URL.endswith('/'):
admin_url = settings.ADMIN_URL[:-1]
if not settings.ADMIN_URL.startswith('/'):
admin_url = '/' + settings.ADMIN_URL
# add ADMIN_URL and django_admin to context
if request.META['PATH_INFO'].startswith(admin_url):
return {
'ADMIN_URL': admin_url,
'django_admin': True
}
else:
return {
'django_admin': False
}
| from django.conf import settings
def django_admin(request):
'''
Adds additional information to the context:
``django_admin`` - boolean variable indicating whether the current
page is part of the django admin or not.
``ADMIN_URL`` - normalized version of settings.ADMIN_URL; starts with a slash, ends without a slash
NOTE: do not set ADMIN_URL='/' in case your application provides functionality
outside of django admin as all incoming urls are interpreted as admin urls.
'''
# ensure that adminurl always starts with a '/' but never ends with a '/'
if settings.ADMIN_URL.endswith('/'):
admin_url = settings.ADMIN_URL[:-1]
if not settings.ADMIN_URL.startswith('/'):
admin_url = '/' + settings.ADMIN_URL
# add ADMIN_URL and django_admin to context
if request.META['PATH_INFO'].startswith(admin_url):
return {
'ADMIN_URL': admin_url,
'django_admin': True
}
else:
return {}
| mit | Python |
4e65ec43ecdd30495e1e9da41ddfa3374d6ba7ac | remove a stray semicolon | gkralik/lightspeed | util/create_database.py | util/create_database.py | #!/usr/bin/env python
import os
import sys
import sqlite3
base_dir = os.path.dirname(os.path.realpath(os.path.join(__file__, '..')))
db_path = os.path.join(base_dir, 'db/lightspeed.db')
if len(sys.argv) == 2:
db_path = os.path.realpath(sys.argv[1])
try:
conn = sqlite3.connect(db_path)
c = conn.cursor()
c.execute('''
CREATE TABLE IF NOT EXISTS measurements (
id INTEGER PRIMARY KEY,
pingMs UNSIGNED DECIMAL(10, 3),
downloadMbit DECIMAL(5, 2),
uploadMbit DECIMAL(5, 2),
timestamp DATETIME,
durationSecs UNSIGNED DECIMAL(10, 2),
isError INTEGER DEFAULT 0
);
''')
conn.commit()
print('Database created in', db_path)
except sqlite3.Error as e:
print('Error:', e.args[0])
finally:
if conn:
conn.close()
| #!/usr/bin/env python
import os
import sys
import sqlite3
base_dir = os.path.dirname(os.path.realpath(os.path.join(__file__, '..')))
db_path = os.path.join(base_dir, 'db/lightspeed.db')
if len(sys.argv) == 2:
db_path = os.path.realpath(sys.argv[1])
try:
conn = sqlite3.connect(db_path)
c = conn.cursor();
c.execute('''
CREATE TABLE IF NOT EXISTS measurements (
id INTEGER PRIMARY KEY,
pingMs UNSIGNED DECIMAL(10, 3),
downloadMbit DECIMAL(5, 2),
uploadMbit DECIMAL(5, 2),
timestamp DATETIME,
durationSecs UNSIGNED DECIMAL(10, 2),
isError INTEGER DEFAULT 0
);
''')
conn.commit()
print('Database created in', db_path)
except sqlite3.Error as e:
print('Error:', e.args[0])
finally:
if conn:
conn.close()
| mit | Python |
13bc65adf712841375d08bf717d7e56743a3c592 | fix error 500 plugin detail if user not authenticated | ava-project/ava-website,ava-project/ava-website,ava-project/ava-website | website/apps/plugins/models.py | website/apps/plugins/models.py | from django.contrib.auth.models import User
from django.db import models
from django.urls import reverse
from model_utils.models import TimeStampedModel
from core.behaviors import Expirationable
from main.utils import generate_token
class Plugin(TimeStampedModel, models.Model):
name = models.CharField(max_length=120)
description = models.TextField()
author = models.ForeignKey(User, on_delete=models.CASCADE)
nb_download = models.IntegerField(default=0)
nb_upvote = models.IntegerField(default=0)
def update_from_manifest(self, manifest):
"""
This fonction update the plugin information from the manifest file
"""
manifest['description'] = "TODO IMPLEMENT DESCRIPTION"
self.description = manifest['description']
@property
def url(self):
return reverse('plugins:detail', args=[self.author.username, self.name])
@property
def url_download(self):
return reverse('plugins:download', args=[self.author.username, self.name])
def user_has_upvoted(self, user):
if not user.is_authenticated():
return False
query = Upvote.objects.filter(plugin=self, user=user)
return True if query.count() else False
def plugin_directory_path(instance, filename):
filename = '{}.zip'.format(str(instance.version))
return 'plugins/{}/{}/{}'.format(instance.plugin.author.username, instance.plugin.name, filename)
class Release(TimeStampedModel, models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
version = models.IntegerField(default=0)
archive = models.FileField(upload_to=plugin_directory_path)
class DownloadRelease(Expirationable, TimeStampedModel, models.Model):
NB_DAY_EXPIRE = 0
NB_MINUTE_EXPIRE = 5
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
release = models.ForeignKey(Release, on_delete=models.CASCADE)
author = models.ForeignKey(User, on_delete=models.CASCADE)
token = models.CharField(max_length=50, default=generate_token)
is_used = models.BooleanField(default=False)
@property
def url(self):
return reverse('plugins:download-link',
args=[self.token])
class UserPlugins(TimeStampedModel, models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta:
unique_together = ('plugin', 'user')
class Upvote(TimeStampedModel, models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta:
unique_together = ('plugin', 'user')
| from django.contrib.auth.models import User
from django.db import models
from django.urls import reverse
from model_utils.models import TimeStampedModel
from core.behaviors import Expirationable
from main.utils import generate_token
class Plugin(TimeStampedModel, models.Model):
name = models.CharField(max_length=120)
description = models.TextField()
author = models.ForeignKey(User, on_delete=models.CASCADE)
nb_download = models.IntegerField(default=0)
nb_upvote = models.IntegerField(default=0)
def update_from_manifest(self, manifest):
"""
This fonction update the plugin information from the manifest file
"""
manifest['description'] = "TODO IMPLEMENT DESCRIPTION"
self.description = manifest['description']
@property
def url(self):
return reverse('plugins:detail', args=[self.author.username, self.name])
@property
def url_download(self):
return reverse('plugins:download', args=[self.author.username, self.name])
def user_has_upvoted(self, user):
query = Upvote.objects.filter(plugin=self, user=user)
return True if query.count() else False
def plugin_directory_path(instance, filename):
filename = '{}.zip'.format(str(instance.version))
return 'plugins/{}/{}/{}'.format(instance.plugin.author.username, instance.plugin.name, filename)
class Release(TimeStampedModel, models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
version = models.IntegerField(default=0)
archive = models.FileField(upload_to=plugin_directory_path)
class DownloadRelease(Expirationable, TimeStampedModel, models.Model):
NB_DAY_EXPIRE = 0
NB_MINUTE_EXPIRE = 5
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
release = models.ForeignKey(Release, on_delete=models.CASCADE)
author = models.ForeignKey(User, on_delete=models.CASCADE)
token = models.CharField(max_length=50, default=generate_token)
is_used = models.BooleanField(default=False)
@property
def url(self):
return reverse('plugins:download-link',
args=[self.token])
class UserPlugins(TimeStampedModel, models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta:
unique_together = ('plugin', 'user')
class Upvote(TimeStampedModel, models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta:
unique_together = ('plugin', 'user')
| mit | Python |
6bf12c141bab4c698d0324d3ef006ae2d14e6f16 | FIX migration script | ingadhoc/product,ingadhoc/product | product_uom_prices_currency/migrations/8.0.0.5.0/post-migration.py | product_uom_prices_currency/migrations/8.0.0.5.0/post-migration.py | # -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager
def migrate(cr, version):
print 'Migrating product_uom_prices'
if not version:
return
create_product_sale_uom(cr)
def create_product_sale_uom(cr):
registry = RegistryManager.get(cr.dbname)
by_uom_template_ids = registry['product.template'].search(
cr, SUPERUSER_ID,
[('list_price_type', '=', 'by_uom_currency')], {})
for template_id in by_uom_template_ids:
template_read = registry['product.template'].read(
cr, SUPERUSER_ID, template_id, ['uom_id', 'other_currency_list_price'])
uom_id = template_read['uom_id'][0]
price = template_read['other_currency_list_price']
print 'template_id', template_id
print 'template_read', template_read
print 'uom_id', uom_id
print 'price', price
registry['product.sale.uom'].create(
cr, SUPERUSER_ID, {
'sequence': 0,
'product_tmpl_id': template_id,
'uom_id': uom_id,
'price': price,
}, {})
return True
| # -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager
def migrate(cr, version):
print 'Migrating product_uom_prices'
if not version:
return
create_product_sale_uom(cr)
def create_product_sale_uom(cr):
registry = RegistryManager.get(cr.dbname)
by_uom_template_ids = registry['product.template'].search(
cr, SUPERUSER_ID,
[('list_price_type', '=', 'by_uom_currency')], {})
for template_id in by_uom_template_ids:
template_read = registry['product.template'].read(
cr, SUPERUSER_ID, template_id, ['uom_id', 'list_price'])
uom_id = template_read['uom_id'][0]
price = template_read['other_currency_list_price']
print 'template_id', template_id
print 'template_read', template_read
print 'uom_id', uom_id
print 'price', price
registry['product.sale.uom'].create(
cr, SUPERUSER_ID, {
'sequence': 0,
'product_tmpl_id': template_id,
'uom_id': uom_id,
'price': price,
}, {})
return True
| agpl-3.0 | Python |
7b656aa3d244628a26730658468e02781cf444e6 | Add FeaturedProject to admin | OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans | private_sharing/admin.py | private_sharing/admin.py | from django.contrib import admin
from . import models
class DataRequestProjectMemberAdmin(admin.ModelAdmin):
"""
Display and make the 'created' field read-only in the admin interface.
"""
readonly_fields = ('created',)
search_fields = ('member__user__username',
'project_member_id',
'project__name')
admin.site.register(models.ProjectDataFile)
admin.site.register(models.DataRequestProject)
admin.site.register(models.OAuth2DataRequestProject)
admin.site.register(models.OnSiteDataRequestProject)
admin.site.register(models.DataRequestProjectMember,
DataRequestProjectMemberAdmin)
admin.site.register(models.FeaturedProject)
| from django.contrib import admin
from . import models
class DataRequestProjectMemberAdmin(admin.ModelAdmin):
"""
Display and make the 'created' field read-only in the admin interface.
"""
readonly_fields = ('created',)
search_fields = ('member__user__username',
'project_member_id',
'project__name')
admin.site.register(models.ProjectDataFile)
admin.site.register(models.DataRequestProject)
admin.site.register(models.OAuth2DataRequestProject)
admin.site.register(models.OnSiteDataRequestProject)
admin.site.register(models.DataRequestProjectMember,
DataRequestProjectMemberAdmin)
| mit | Python |
2fac5416468269df246c1f676838797f8326b427 | Prepare 1.3.0 release | antechrestos/cf-python-client | main/cloudfoundry_client/__init__.py | main/cloudfoundry_client/__init__.py | """
This module provides a client library for cloudfoundry_client v2.
"""
__version__ = "1.3.0"
| """
This module provides a client library for cloudfoundry_client v2.
"""
__version__ = "1.2.0"
| apache-2.0 | Python |
ff2837c53d3f43256b9d22b130eb044ac0f56949 | Update common.py | frydaykg/Pell | common.py | common.py | def checkPellSolution(x,y,n):
return x*x - n*y*y == 1
def getContinuedFraction(val):
a = [ int(val) ]
x = [ val - a[0] ]
yield a[0]
while True:
a.append(int(1/x[-1]))
x.append(1/x[-1]-a[-1])
yield a[-1]
| def checkPellSolution(x,y,n):
return x*x - n*y*y == 1
def getContinuedFraction(val):
a = [ int(val) ]
x = [ val - a[0] ]
yield a[0]
while True:
a.append(int(1/x[-1]))
xx.append(1/x[-1]-a[-1])
yield a[-1]
| mit | Python |
2cf37282b2675c27b3c7b4f4702b2c3ad57e785c | convert dbus.Struct to tuple. | wistful/pympris | common.py | common.py | #!/usr/bin/env python
# coding=utf-8
from functools import wraps
import dbus
MPRIS_NAME_PREFIX = "org.mpris.MediaPlayer2"
MPRIS_OBJECT_PATH = "/org/mpris/MediaPlayer2"
IROOT = "org.mpris.MediaPlayer2"
IPLAYER = IROOT + ".Player"
ITRACKLIST = IROOT + ".TrackList"
IPLAYLISTS = IROOT + ".PlayLists"
IPROPERTIES = "org.freedesktop.DBus.Properties"
def convert2dbus(value, signature):
"""Convert Python type to dbus type according signature"""
if len(signature) == 2 and signature.startswith('a'):
return dbus.Array(value, signature=signature[-1])
type_map = {
'b': dbus.Boolean, 'y': dbus.Byte, 'n': dbus.Int16,
'i': dbus.Int32, 'x': dbus.Int64, 'q': dbus.UInt16, 'u': dbus.UInt32,
't': dbus.UInt64, 'd': dbus.Double, 'o': dbus.ObjectPath,
'g': dbus.Signature, 's': dbus.UTF8String}
return type_map[signature](value)
def convert(dbus_obj):
if isinstance(dbus_obj, dbus.Boolean):
return bool(dbus_obj)
if filter(lambda obj_type: isinstance(dbus_obj, obj_type),
(dbus.Byte, dbus.Int16, dbus.Int32, dbus.Int64,
dbus.UInt16, dbus.UInt32, dbus.UInt64)):
return int(dbus_obj)
if isinstance(dbus_obj, dbus.Double):
return float(dbus_obj)
if filter(lambda obj_type: isinstance(dbus_obj, obj_type),
(dbus.ObjectPath, dbus.Signature, dbus.String, dbus.UTF8String)):
return unicode(dbus_obj)
if isinstance(dbus_obj, dbus.Array):
return map(convert, dbus_obj)
if isinstance(dbus_obj, dbus.Dictionary):
return {convert(key): convert(value)
for key, value in dbus_obj.items()}
if isinstance(dbus_obj, dbus.Struct):
return tuple(map(convert, dbus_obj))
def converter(f):
"""Decorator to convert from dbus type to Python type"""
@wraps(f)
def wrapper(*args, **kwds):
return convert(f(*args, **kwds))
return wrapper
def available_players():
bus = dbus.SessionBus()
players = set()
for name in filter(lambda item: item.startswith(MPRIS_NAME_PREFIX),
bus.list_names()):
owner_name = bus.get_name_owner(name)
players.add(convert(owner_name))
return players
print(available_players())
| #!/usr/bin/env python
# coding=utf-8
from functools import wraps
import dbus
MPRIS_NAME_PREFIX = "org.mpris.MediaPlayer2"
MPRIS_OBJECT_PATH = "/org/mpris/MediaPlayer2"
IROOT = "org.mpris.MediaPlayer2"
IPLAYER = IROOT + ".Player"
ITRACKLIST = IROOT + ".TrackList"
IPLAYLISTS = IROOT + ".PlayLists"
IPROPERTIES = "org.freedesktop.DBus.Properties"
def convert2dbus(value, signature):
"""Convert Python type to dbus type according signature"""
type_map = {
'b': dbus.Boolean, 'y': dbus.Byte, 'n': dbus.Int16,
'i': dbus.Int32, 'x': dbus.Int64, 'q': dbus.UInt16, 'u': dbus.UInt32,
't': dbus.UInt64, 'd': dbus.Double, 'o': dbus.ObjectPath,
'g': dbus.Signature, 's': dbus.UTF8String}
return type_map[signature](value)
def convert(dbus_obj):
if isinstance(dbus_obj, dbus.Boolean):
return bool(dbus_obj)
if filter(lambda obj_type: isinstance(dbus_obj, obj_type),
(dbus.Byte, dbus.Int16, dbus.Int32, dbus.Int64,
dbus.UInt16, dbus.UInt32, dbus.UInt64)):
return int(dbus_obj)
if isinstance(dbus_obj, dbus.Double):
return float(dbus_obj)
if filter(lambda obj_type: isinstance(dbus_obj, obj_type),
(dbus.ObjectPath, dbus.Signature, dbus.String, dbus.UTF8String)):
return unicode(dbus_obj)
if isinstance(dbus_obj, dbus.Array):
return map(convert, dbus_obj)
if isinstance(dbus_obj, dbus.Dictionary):
return {convert(key): convert(value)
for key, value in dbus_obj.items()}
def converter(f):
"""Decorator to convert from dbus type to Python type"""
@wraps(f)
def wrapper(*args, **kwds):
return convert(f(*args, **kwds))
return wrapper
def available_players():
bus = dbus.SessionBus()
players = set()
for name in filter(lambda item: item.startswith(MPRIS_NAME_PREFIX),
bus.list_names()):
owner_name = bus.get_name_owner(name)
players.add(convert(owner_name))
return players
print(available_players())
| mit | Python |
fa9aea2e3f4301dd8bdb8bb4680c6c3c669a8efb | Update adapter_16mers.py | hackseq/2017_project_6,hackseq/2017_project_6,hackseq/2017_project_6,hackseq/2017_project_6 | select_random_subset/adapter_16mers.py | select_random_subset/adapter_16mers.py |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 21 14:15:18 2017
@author: nikka.keivanfar
"""
#to do: fasta as input
P5 = 'AATGATACGGCGACCACCGA'
P7 = 'CAAGCAGAAGACGGCATACGAGAT'
read1 = 'GATCTACACTCTTTCCCTACACGACGCTC'
read2 = 'GTGACTGGAGTTCAGACGTGT'
adapters = [P5, P7, read1, read2] #to do: streamline loops for all adapters combined
P5_kmers = {}
P7_kmers = {}
read1_kmers = {}
read2_kmers = {}
k = 16
#P5 16mers
for i in range(len(P5) - k + 1):
kmer = P5[i:i+k]
if P5_kmers.has_key(kmer):
P5_kmers[kmer] += 1
else:
P5_kmers[kmer] = 1
for kmer, count in P5_kmers.items():
print kmer + "\t" + str(count)
P5mers = set(kmer)
#P7 16mers
for i in range(len(P7) - k + 1):
kmer = P7[i:i+k]
if P7_kmers.has_key(kmer):
P7_kmers[kmer] += 1
else:
P7_kmers[kmer] = 1
for kmer, count in P7_kmers.items():
print kmer + "\t" + str(count)
P7mers = set(kmer)
#read1 16mers
for i in range(len(read1) - k + 1):
kmer = read1[i:i+k]
if read1_kmers.has_key(kmer):
read1_kmers[kmer] += 1
else:
read1_kmers[kmer] = 1
for kmer, count in read1_kmers.items():
print kmer + "\t" + str(count)
read1mers = set(kmer)
#read2 16mers
for i in range(len(read2) - k + 1):
kmer = read2[i:i+k]
if read2_kmers.has_key(kmer):
read2_kmers[kmer] += 1
else:
read2_kmers[kmer] = 1
for kmer, count in read2_kmers.items():
print kmer + "\t" + str(count)
read2mers = set(kmer)
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 21 14:15:18 2017
@author: nikka.keivanfar
"""
#see also: adapters.fa
P5 = 'AATGATACGGCGACCACCGA'
P7 = 'CAAGCAGAAGACGGCATACGAGAT'
read1 = 'GATCTACACTCTTTCCCTACACGACGCTC'
read2 = 'GTGACTGGAGTTCAGACGTGT'
adapters = [P5, P7, read1, read2] #to do: streamline loops for all adapters combined
P5_kmers = {}
P7_kmers = {}
read1_kmers = {}
read2_kmers = {}
k = 16
#P5 16mers
for i in range(len(P5) - k + 1):
kmer = P5[i:i+k]
if P5_kmers.has_key(kmer):
P5_kmers[kmer] += 1
else:
P5_kmers[kmer] = 1
for kmer, count in P5_kmers.items():
print kmer + "\t" + str(count)
P5mers = set(kmer)
#P7 16mers
for i in range(len(P7) - k + 1):
kmer = P7[i:i+k]
if P7_kmers.has_key(kmer):
P7_kmers[kmer] += 1
else:
P7_kmers[kmer] = 1
for kmer, count in P7_kmers.items():
print kmer + "\t" + str(count)
P7mers = set(kmer)
#read1 16mers
for i in range(len(read1) - k + 1):
kmer = read1[i:i+k]
if read1_kmers.has_key(kmer):
read1_kmers[kmer] += 1
else:
read1_kmers[kmer] = 1
for kmer, count in read1_kmers.items():
print kmer + "\t" + str(count)
read1mers = set(kmer)
#read2 16mers
for i in range(len(read2) - k + 1):
kmer = read2[i:i+k]
if read2_kmers.has_key(kmer):
read2_kmers[kmer] += 1
else:
read2_kmers[kmer] = 1
for kmer, count in read2_kmers.items():
print kmer + "\t" + str(count)
read2mers = set(kmer)
| mit | Python |
c4809f9f43129d092235738127b90dc62f593fb8 | Remove some commented out code | tswicegood/steinie,tswicegood/steinie | steinie/app.py | steinie/app.py | from werkzeug import routing
from werkzeug import serving
from werkzeug import wrappers
from . import routes
class Steinie(routes.Router):
def __init__(self, host="127.0.0.1", port=5151, debug=False):
self.host = host
self.port = port
self.debug = debug
super(Steinie, self).__init__()
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
request = wrappers.Request(environ)
response = self.handle(request)
return wrappers.Response(response)(environ, start_response)
def run(self):
serving.run_simple(self.host, self.port, self, use_debugger=self.debug)
def use(self, route, router):
if route.startswith('/'):
route = route[1:]
submount = route
if not submount.startswith('/'):
submount = '/' + submount
rules = [a for a in router.map.iter_rules()]
mount = routing.EndpointPrefix(route, [routes.Submount(submount, rules)])
self.map.add(mount)
| from werkzeug import routing
from werkzeug import serving
from werkzeug import wrappers
from . import routes
class Steinie(routes.Router):
def __init__(self, host="127.0.0.1", port=5151, debug=False):
self.host = host
self.port = port
self.debug = debug
super(Steinie, self).__init__()
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
request = wrappers.Request(environ)
response = self.handle(request)
return wrappers.Response(response)(environ, start_response)
def run(self):
serving.run_simple(self.host, self.port, self, use_debugger=self.debug)
def use(self, route, router):
# if not route.endswith('/'):
# route += '/'
if route.startswith('/'):
route = route[1:]
submount = route
if not submount.startswith('/'):
submount = '/' + submount
rules = [a for a in router.map.iter_rules()]
mount = routing.EndpointPrefix(route, [routes.Submount(submount, rules)])
self.map.add(mount)
# import ipdb; ipdb.set_trace()
| apache-2.0 | Python |
1e931e9aac18f393de786894d9e26ecccc251135 | Fix girder_work script bug: PEP 263 is not compatible with exec | ImageMarkup/isic-archive,ImageMarkup/isic-archive,ImageMarkup/isic-archive,ImageMarkup/isic-archive | server/models/_generate_superpixels.py | server/models/_generate_superpixels.py | ###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import sys
# Worker-defined inputs
originalFile = globals()['originalFile']
segmentation_helpersPath = globals()['segmentation_helpersPath']
segmentation_helpersDirPath = os.path.dirname(segmentation_helpersPath)
if segmentation_helpersDirPath not in sys.path:
sys.path.append(segmentation_helpersDirPath)
from segmentation_helpers.scikit import ScikitSegmentationHelper # noqa
with open(originalFile, 'rb') as originalFileStream:
# Scikit-Image is ~70ms faster at decoding image data
originalImageData = ScikitSegmentationHelper.loadImage(originalFileStream)
superpixelsData = ScikitSegmentationHelper.superpixels(originalImageData)
superpixelsEncodedStream = ScikitSegmentationHelper.writeImage(
superpixelsData, 'png')
superpixelsEncodedBytes = superpixelsEncodedStream.getvalue()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import sys
# Worker-defined inputs
originalFile = globals()['originalFile']
segmentation_helpersPath = globals()['segmentation_helpersPath']
segmentation_helpersDirPath = os.path.dirname(segmentation_helpersPath)
if segmentation_helpersDirPath not in sys.path:
sys.path.append(segmentation_helpersDirPath)
from segmentation_helpers.scikit import ScikitSegmentationHelper # noqa
with open(originalFile, 'rb') as originalFileStream:
# Scikit-Image is ~70ms faster at decoding image data
originalImageData = ScikitSegmentationHelper.loadImage(originalFileStream)
superpixelsData = ScikitSegmentationHelper.superpixels(originalImageData)
superpixelsEncodedStream = ScikitSegmentationHelper.writeImage(
superpixelsData, 'png')
superpixelsEncodedBytes = superpixelsEncodedStream.getvalue()
| apache-2.0 | Python |
49cd6b9175ef529cbf0994117a25928f33a8b73e | Read simple_script | techtonik/pydotorg.pypi,techtonik/pydotorg.pypi | config.py | config.py | import ConfigParser
class Config:
''' Read in the config and set up the vars with the correct type.
'''
def __init__(self, configfile, name):
# "name" argument no longer used
c = ConfigParser.ConfigParser()
c.read(configfile)
self.database_name = c.get('database', 'name')
self.database_user = c.get('database', 'user')
if c.has_option('database', 'password'):
self.database_pw = c.get('database', 'password')
else:
self.database_pw = None
self.database_files_dir = c.get('database', 'files_dir')
self.mailhost = c.get('webui', 'mailhost')
self.adminemail = c.get('webui', 'adminemail')
self.url = c.get('webui', 'url')
self.simple_script = c.get('webui', 'simple_script')
self.files_url = c.get('webui', 'files_url')
self.rss_file = c.get('webui', 'rss_file')
self.debug_mode = c.get('webui', 'debug_mode')
self.cheesecake_password = c.get('webui', 'cheesecake_password')
self.logfile = c.get('logging', 'file')
self.mailhost = c.get('logging', 'mailhost')
self.fromaddr = c.get('logging', 'fromaddr')
self.toaddrs = c.get('logging', 'toaddrs').split(',')
| import ConfigParser
class Config:
''' Read in the config and set up the vars with the correct type.
'''
def __init__(self, configfile, name):
# "name" argument no longer used
c = ConfigParser.ConfigParser()
c.read(configfile)
self.database_name = c.get('database', 'name')
self.database_user = c.get('database', 'user')
if c.has_option('database', 'password'):
self.database_pw = c.get('database', 'password')
else:
self.database_pw = None
self.database_files_dir = c.get('database', 'files_dir')
self.mailhost = c.get('webui', 'mailhost')
self.adminemail = c.get('webui', 'adminemail')
self.url = c.get('webui', 'url')
self.files_url = c.get('webui', 'files_url')
self.rss_file = c.get('webui', 'rss_file')
self.debug_mode = c.get('webui', 'debug_mode')
self.cheesecake_password = c.get('webui', 'cheesecake_password')
self.logfile = c.get('logging', 'file')
self.mailhost = c.get('logging', 'mailhost')
self.fromaddr = c.get('logging', 'fromaddr')
self.toaddrs = c.get('logging', 'toaddrs').split(',')
| bsd-3-clause | Python |
9d0ffbe216296fac2bec6f28147431fec607d8b1 | Fix require_admin | JokerQyou/bot | config.py | config.py | # coding: utf-8
import json
from redis_wrap import get_hash, get_list
__config__ = 'config.json'
with open(__config__, 'r') as cfr:
config = json.loads(cfr.read())
PATH = '/%s' % '/'.join(config.get('server').replace('https://', '').replace('http://', '').split('/')[1:])
TOKEN = config.get('token')
SERVER = config.get('server')
PORT = config.get('port')
def get(key):
''' Get raw config from redis with a prefix '''
list_keys = ('admins', )
hash_keys = (None, )
real_key = '%s:%s' % (str(__name__), key, )
if key in list_keys:
return get_list(real_key)
elif key in hash_keys:
return get_hash(real_key)
def init_redis():
admins = get('admins')
if len(admins) == 0:
[admins.append(admin) for admin in config.get('admins')]
def require_admin(func):
def wrapper(*args, **kwargs):
msg = kwargs.get('msg', None) if len(args) == 0 else args[0]
if not msg:
return ''
if msg['from']['username'] not in get('admins'):
return u'这个功能只有管理员可以使用'
return func(*args, **kwargs)
return wrapper
init_redis()
| # coding: utf-8
import json
from redis_wrap import get_hash, get_list
__config__ = 'config.json'
with open(__config__, 'r') as cfr:
config = json.loads(cfr.read())
PATH = '/%s' % '/'.join(config.get('server').replace('https://', '').replace('http://', '').split('/')[1:])
TOKEN = config.get('token')
SERVER = config.get('server')
PORT = config.get('port')
def get(key):
''' Get raw config from redis with a prefix '''
list_keys = ('admins', )
hash_keys = (None, )
real_key = '%s:%s' % (str(__name__), key, )
if key in list_keys:
return get_list(real_key)
elif key in hash_keys:
return get_hash(real_key)
def init_redis():
admins = get('admins')
if len(admins) == 0:
[admins.append(admin) for admin in config.get('admins')]
def require_admin(func):
def wrapper(**kwargs):
msg = kwargs.get('msg', None)
if not msg:
return ''
if msg['from']['username'] not in get('admins'):
return u'这个功能只有管理员可以使用'
return func(**kwargs)
return wrapper
init_redis()
| bsd-2-clause | Python |
5db9297ced5dd310899fd3b5072c670339cffd37 | fix with syntax | fwilson42/dchacks2015,fwilson42/dchacks2015,fwilson42/dchacks2015 | config.py | config.py | import json
import utils.metro
with open("config.json") as f:
config = json.load(f)
redis_info = {"host": config["REDIS_HOST"], "password": config["REDIS_PASSWORD"]}
wmata = utils.metro.MetroApi(config["API_KEY"], **redis_info)
| import json
import utils.metro
with f as open("config.json"):
config = json.load(f)
redis_info = {"host": config["REDIS_HOST"], "password": config["REDIS_PASSWORD"]}
wmata = utils.metro.MetroApi(config["API_KEY"], **redis_info)
| mit | Python |
69ff6726b6a25b585f9f9631408bd2191d98d36f | add some docstrings | dude-pa/dude | config.py | config.py | import os.path
API_AI_TOKEN = 'caca38e8d99d4ea6bd9ffa9a8be15ff9'
API_AI_SESSION_ID = 'dd60fde7-c6ab-4f38-9487-7300c42b4916'
# this is where yoda's config will be stored
YODA_CONFIG_FILE_PATH = os.path.join(os.path.expanduser('~'), '.yodaconfig')
DEFAULT_CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.yoda')
def update_config_path(new_path):
'''Updates the path where the config files are stored.
This is done by changing the contents of YODA_CONFIG_FILE_PATH with new_path
of user's config.
'''
if len(new_path) == 0:
new_path = DEFAULT_CONFIG_PATH
with open(YODA_CONFIG_FILE_PATH, 'w') as config_file:
config_file.write(new_path)
return new_path
def get_config_file_paths():
'''Get the absolute config file paths of user
config_path_prefix is where the config files are stored.
'''
try:
with open(YODA_CONFIG_FILE_PATH) as config_file:
config_path_prefix = config_file.read().strip()
except IOError:
config_path_prefix = DEFAULT_CONFIG_PATH
return {
"USER_CONFIG_FILE_PATH": os.path.join(config_path_prefix, '.userconfig.yml'),
"LOVE_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'love/.loveconfig.yml'),
"MONEY_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'money/.moneyconfig.yml'),
"DIARY_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'diary/.diaryconfig.yml'),
"VOCABULARY_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'vocabulary/.vocabularyconfig.yml'),
"FLASHCARDS_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'flashcards/.flashcardsconfig.yml')
}
| import os.path
API_AI_TOKEN = 'caca38e8d99d4ea6bd9ffa9a8be15ff9'
API_AI_SESSION_ID = 'dd60fde7-c6ab-4f38-9487-7300c42b4916'
# this is where yoda's config will be stored
YODA_CONFIG_FILE_PATH = os.path.join(os.path.expanduser('~'), '.yodaconfig')
DEFAULT_CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.yoda')
def update_config_path(new_path):
if len(new_path) == 0:
new_path = DEFAULT_CONFIG_PATH
with open(YODA_CONFIG_FILE_PATH, 'w') as config_file:
config_file.write(new_path)
return new_path
def get_config_file_paths():
try:
with open(YODA_CONFIG_FILE_PATH) as config_file:
config_path_prefix = config_file.read().strip()
except IOError:
config_path_prefix = DEFAULT_CONFIG_PATH
return {
"USER_CONFIG_FILE_PATH": os.path.join(config_path_prefix, '.userconfig.yml'),
"LOVE_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'love/.loveconfig.yml'),
"MONEY_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'money/.moneyconfig.yml'),
"DIARY_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'diary/.diaryconfig.yml'),
"VOCABULARY_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'vocabulary/.vocabularyconfig.yml'),
"FLASHCARDS_CONFIG_FILE_PATH": os.path.join(config_path_prefix, 'flashcards/.flashcardsconfig.yml')
}
| mit | Python |
685f6e63062fcac3b7cefec40aa661c1c6fcb88a | set mongodb uri to env var | buck06191/bcmd-web,buck06191/bcmd-web,buck06191/bcmd-web,buck06191/bcmd-web,buck06191/bcmd-web | config.py | config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SECRET_KEY = os.environ['SECRET_KEY']
MONGOLAB_DB_URI = os.environ['MONGODB_URI']
class ProductionConfig(Config):
DEBUG = False
class StagingConfig(Config):
DEVELOPMENT = True
DEBUG = True
class DevelopmentConfig(Config):
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
TESTING = True
| import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SECRET_KEY = os.environ['SECRET_KEY']
class ProductionConfig(Config):
DEBUG = False
class StagingConfig(Config):
DEVELOPMENT = True
DEBUG = True
class DevelopmentConfig(Config):
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
TESTING = True
| mit | Python |
dee622f86ec8184ad5cc00f9564eee34ee8626e6 | Add color, cursor hiding. Ugly! | erikrose/conway | conway.py | conway.py | #!/usr/bin/env python
import atexit
from itertools import chain
from sys import stdout
from time import sleep
from blessings import Terminal
def main():
"""Play Conway's Game of Life on the terminal."""
def die((x, y)):
if (x < 0 or x >= term.width or
y < 0 or y >= term.height):
return None
return x, y
term = Terminal()
board = dict(((x, y), 1) for x, y in [(10, 10), (11, 10), (12, 10), (10, 11), (11, 12)])
# Hide the cursor, but show it on exit:
atexit.register(stdout.write, term.cnorm)
print term.civis,
for i in range(50):
print term.clear,
draw(board, term)
sleep(0.05)
board = next_board(board, wrap=die)
def draw(board, term):
"""Draw a set of points to the terminal."""
with term.location(): # snap back when done
for (x, y), state in board.iteritems():
print term.move(y, x) + term.on_color(state)(' '),
stdout.flush()
def next_board(board, wrap=lambda p: p):
"""Given a set of "on" (x, y) points, return the next set.
Adapted from Jack Diedrich's implementation from his 2012 PyCon talk "Stop
Writing Classes"
:arg wrap: A callable which takes a point and transforms it, for example
to wrap to the other edge of the screen
"""
new_board = {}
# We need consider only the points that are alive and their neighbors:
points_to_recalc = set(board.iterkeys()) | set(chain(*map(neighbors, board)))
for point in points_to_recalc:
count = sum((neigh in board) for neigh in neighbors(point))
x, y = point
if count == 3:
if point in board:
state = 9
else:
state = 10
elif count == 2 and point in board:
state = 11
else:
state = 0
if state:
wrapped = wrap(point)
if wrapped:
new_board[wrapped] = state
return new_board
def neighbors(point):
x, y = point
yield x + 1, y
yield x - 1, y
yield x, y + 1
yield x, y - 1
yield x + 1, y + 1
yield x + 1, y - 1
yield x - 1, y + 1
yield x - 1, y - 1
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from itertools import chain
from sys import stdout
from time import sleep
from blessings import Terminal
def main():
"""Play Conway's Game of Life on the terminal."""
def die((x, y)):
if (x < 0 or x >= term.width or
y < 0 or y >= term.height):
return None
return x, y
term = Terminal()
board = set([(10, 10), (11, 10), (12, 10), (10, 11), (11, 12)])
for i in range(50):
draw(board, term)
sleep(0.1)
board = next_board(board, wrap=die)
def draw(board, term):
"""Draw a set of points to the terminal."""
with term.location(): # snap back when done
print term.clear,
for x, y in board:
print term.move(y, x) + '#',
stdout.flush()
def next_board(board, wrap=lambda p: p):
"""Given a set of "on" (x, y) points, return the next set.
Adapted from Jack Diedrich's implementation from his 2012 PyCon talk "Stop
Writing Classes"
:arg wrap: A callable which takes a point and transforms it, for example
to wrap to the other edge of the screen
"""
new_board = set()
# We need consider only the points that are alive and their neighbors:
points_to_recalc = board | set(chain(*map(neighbors, board)))
for point in points_to_recalc:
count = sum((neigh in board) for neigh in neighbors(point))
x, y = point
if count == 3 or (count == 2 and point in board):
wrapped = wrap(point)
if wrapped:
new_board.add(wrapped)
return new_board
def neighbors(point):
x, y = point
yield x + 1, y
yield x - 1, y
yield x, y + 1
yield x, y - 1
yield x + 1, y + 1
yield x + 1, y - 1
yield x - 1, y + 1
yield x - 1, y - 1
if __name__ == '__main__':
main()
| mit | Python |
caa575a8b564518e553d803b5dfeea0995a21d7a | Add lists property to Lists | joshua-stone/DerPyBooru | derpibooru/Lists.py | derpibooru/Lists.py |
class Lists(object)
def __init__(self, lists, page=1, last="", comments=False, fav=False, key=""):
self.__parameters = {}
@property
def hostname()
return("https://derpiboo.ru")
@property
def parameters(self):
return(self.__parameters)
@property
def lists():
lists = {
0: "index",
1: "scoring_scoring",
2: "all_time_top_scoring",
3: "top_commented"
}
return(lists)
@property
def page(self):
return(self.parameters["page"])
@page.setter
def page(self, page=1):
if not isinstance(page, int):
raise TypeError("page number must be an int")
if page < 1:
raise ValueError("page number must be greater than 1")
self.__parameters["page"] = page
|
class Lists(object)
def __init__(self, page=1, last="", comments=False, fav=False, key=""):
self.__parameters = {}
@property
def hostname()
return("https://derpiboo.ru")
@property
def parameters(self):
return(self.__parameters)
@property
def page(self):
return(self.parameters["page"])
@page.setter
def page(self, page=1):
if not isinstance(page, int):
raise TypeError("page number must be an int")
if page < 1:
raise ValueError("page number must be greater than 1")
self.__parameters["page"] = page
| bsd-2-clause | Python |
7c419c1e0b34169e02d47653655a00c74cedecf1 | add a comment to test_documentation.py | freeslugs/eventum,freeslugs/eventum,freeslugs/eventum,freeslugs/eventum | test/test_documentation.py | test/test_documentation.py | import os
import unittest
from fnmatch import fnmatch
from sys import path
path.append('../')
class TestDocumentation(unittest.TestCase):
APP_ROOT = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
EXCLUDES = set(open('../.gitignore').read().split('\n') + ['.git'])
README = 'README.md'
def test_readmes(self):
"""All directories not in the gitignore should have `README.md` files
in them.
"""
for root, dirs, filenames in os.walk(self.APP_ROOT):
# Ignore all subdirectories of directories that match the EXCLUDES
dirs[:] = [d for d in dirs if not
any(fnmatch(d, pattern) for pattern in self.EXCLUDES)]
for d in dirs:
readme = os.path.join(root, d, self.README)
relpath = os.path.relpath(os.path.join(root, d), self.APP_ROOT)
self.assertTrue(
os.path.isfile(readme),
msg=self.README + " must be created in " + relpath)
if __name__ == '__main__':
unittest.main()
| import os
import unittest
from fnmatch import fnmatch
from sys import path
path.append('../')
class TestDocumentation(unittest.TestCase):
APP_ROOT = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
EXCLUDES = set(open('../.gitignore').read().split('\n') + ['.git'])
README = 'README.md'
def test_readmes(self):
"""All directories not in the gitignore should have `README.md` files
in them.
"""
for root, dirs, filenames in os.walk(self.APP_ROOT):
dirs[:] = [d for d in dirs if not
any(fnmatch(d, pattern) for pattern in self.EXCLUDES)]
for d in dirs:
readme = os.path.join(root, d, self.README)
relpath = os.path.relpath(os.path.join(root, d), self.APP_ROOT)
self.assertTrue(
os.path.isfile(readme),
msg=self.README + " must be created in " + relpath)
if __name__ == '__main__':
unittest.main()
| mit | Python |
d08c6b849d19549df9d1a179f187f44370c572a8 | bump version to 1.0.3 | uber/doubles | doubles/__init__.py | doubles/__init__.py | __version__ = '1.0.3'
from doubles.class_double import ClassDouble # noqa
from doubles.instance_double import InstanceDouble # noqa
from doubles.object_double import ObjectDouble # noqa
from doubles.targets.allowance_target import allow # noqa
from doubles.targets.expectation_target import expect # noqa
from doubles.lifecycle import teardown, verify, no_builtin_verification # noqa
| __version__ = '1.0.2'
from doubles.class_double import ClassDouble # noqa
from doubles.instance_double import InstanceDouble # noqa
from doubles.object_double import ObjectDouble # noqa
from doubles.targets.allowance_target import allow # noqa
from doubles.targets.expectation_target import expect # noqa
from doubles.lifecycle import teardown, verify, no_builtin_verification # noqa
| mit | Python |
a79a3f7c42c858ae42c618479654cd7589de05b9 | Remove unused tests for hash map | alexrudy/Zeeko,alexrudy/Zeeko | zeeko/utils/tests/test_hmap.py | zeeko/utils/tests/test_hmap.py | # -*- coding: utf-8 -*-
import pytest
from ..hmap import HashMap
@pytest.fixture(params=[0,1,5,9])
def n(request):
"""Number of items"""
return request.param
@pytest.fixture
def items(n):
"""A list of strings."""
return ["item{0:d}".format(i) for i in range(n)]
| # -*- coding: utf-8 -*-
import pytest
from ..hmap import HashMap
@pytest.fixture(params=[0,1,5,9])
def n(request):
"""Number of items"""
return request.param
@pytest.fixture
def items(n):
"""A list of strings."""
return ["item{0:d}".format(i) for i in range(n)]
@pytest.mark.skip
def test_hmap(items):
"""docstring for test"""
h = HashMap(10)
if len(items):
with pytest.raises(KeyError):
h[items[0]]
for item in items:
h.add(item)
assert len(h) == len(items)
for i, item in enumerate(items):
assert h[item] == i
assert repr(h) == "HashMap({0!r})".format(items)
if len(items):
item = items[0]
del h[item]
assert len(h) == len(items) - 1
assert item not in h
| bsd-3-clause | Python |
7b30a1036f67ef6bc41b1c65cd610346080aecff | Fix identation | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/core/authentication.py | nodeconductor/core/authentication.py | from __future__ import unicode_literals
from django.conf import settings
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
import rest_framework.authentication
from rest_framework import exceptions
import nodeconductor.logging.middleware
TOKEN_KEY = settings.NODECONDUCTOR.get('TOKEN_KEY', 'x-auth-token')
class TokenAuthentication(rest_framework.authentication.TokenAuthentication):
"""
Custom token-based authentication.
Use TOKEN_KEY from request query parameters if authentication token was not found in header.
"""
def get_authorization_value(self, request):
auth = rest_framework.authentication.get_authorization_header(request)
if not auth:
auth = request.query_params.get(TOKEN_KEY, '')
return auth
def authenticate_credentials(self, key):
try:
token = self.model.objects.select_related('user').get(key=key)
except self.model.DoesNotExist:
raise exceptions.AuthenticationFailed(_('Invalid token.'))
if not token.user.is_active:
raise exceptions.AuthenticationFailed(_('User inactive or deleted.'))
lifetime = settings.NODECONDUCTOR.get('TOKEN_LIFETIME', timezone.timedelta(hours=1))
if token.created < timezone.now() - lifetime:
raise exceptions.AuthenticationFailed(_('Token has expired.'))
else:
token.created = timezone.now()
token.save()
return token.user, token
def authenticate(self, request):
auth = self.get_authorization_value(request).split()
if not auth or auth[0].lower() != b'token':
return None
if len(auth) == 1:
msg = _('Invalid token. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
msg = _('Invalid token. Token string should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(auth[1])
def user_capturing_auth(auth):
class CapturingAuthentication(auth):
def authenticate(self, request):
result = super(CapturingAuthentication, self).authenticate(request)
if result is not None:
user, _ = result
nodeconductor.logging.middleware.set_current_user(user)
return result
return CapturingAuthentication
SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication)
TokenAuthentication = user_capturing_auth(TokenAuthentication)
| from __future__ import unicode_literals
from django.conf import settings
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
import rest_framework.authentication
from rest_framework import exceptions
import nodeconductor.logging.middleware
TOKEN_KEY = settings.NODECONDUCTOR.get('TOKEN_KEY', 'x-auth-token')
class TokenAuthentication(rest_framework.authentication.TokenAuthentication):
"""
Custom token-based authentication.
Use TOKEN_KEY from request query parameters if authentication token was not found in header.
"""
def get_authorization_value(self, request):
auth = rest_framework.authentication.get_authorization_header(request)
if not auth:
auth = request.query_params.get(TOKEN_KEY, '')
return auth
def authenticate_credentials(self, key):
try:
token = self.model.objects.select_related('user').get(key=key)
except self.model.DoesNotExist:
raise exceptions.AuthenticationFailed(_('Invalid token.'))
if not token.user.is_active:
raise exceptions.AuthenticationFailed(_('User inactive or deleted.'))
lifetime = settings.NODECONDUCTOR.get('TOKEN_LIFETIME', timezone.timedelta(hours=1))
if token.created < timezone.now() - lifetime:
raise exceptions.AuthenticationFailed(_('Token has expired.'))
else:
token.created = timezone.now()
token.save()
return token.user, token
def authenticate(self, request):
auth = self.get_authorization_value(request).split()
if not auth or auth[0].lower() != b'token':
return None
if len(auth) == 1:
msg = _('Invalid token. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
msg = _('Invalid token. Token string should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(auth[1])
def user_capturing_auth(auth):
class CapturingAuthentication(auth):
def authenticate(self, request):
result = super(CapturingAuthentication, self).authenticate(request)
if result is not None:
user, _ = result
nodeconductor.logging.middleware.set_current_user(user)
return result
return CapturingAuthentication
SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication)
TokenAuthentication = user_capturing_auth(TokenAuthentication)
| mit | Python |
9d797a9a0ad6e3e552d4cdbb385b7521c38327fe | update the example | xsank/Pyeventbus,xsank/Pyeventbus | example/myeventbus.py | example/myeventbus.py | __author__ = 'Xsank'
import time
from eventbus.eventbus import EventBus
from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener
if __name__=="__main__":
eventbus=EventBus()
eventbus.register(MyListener())
ge=GreetEvent('world')
be=ByeEvent('world')
eventbus.async_post(be)
eventbus.post(ge)
time.sleep(0.1)
eventbus.unregister(MyListener())
eventbus.destroy() | __author__ = 'Xsank'
import time
from eventbus.eventbus import EventBus
from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener
if __name__=="__main__":
eventbus=EventBus()
eventbus.register(MyListener())
ge=GreetEvent('world')
be=ByeEvent('world')
eventbus.async_post(be)
eventbus.post(ge)
time.sleep(0.1)
eventbus.unregister(MyListener()) | mit | Python |
ee954e6c221f4d78a2dcaf6607837fa62892ae37 | Add more tests | openfisca/openfisca-core,openfisca/openfisca-core | openfisca_core/tests/test_periods.py | openfisca_core/tests/test_periods.py | # -*- coding: utf-8 -*-
from nose.tools import assert_equal, raises
from openfisca_core.periods import Period, Instant, YEAR, MONTH, period
first_jan = Instant((2014, 1, 1))
first_march = Instant((2014, 3, 1))
# Test Period -> String
def test_year():
assert_equal(unicode(Period((YEAR, first_jan, 1))), u'2014')
def test_12_months_is_a_year():
assert_equal(unicode(Period((MONTH, first_jan, 12))), u'2014')
def test_rolling_year():
assert_equal(unicode(Period((MONTH, first_march, 12))), u'year:2014-03')
assert_equal(unicode(Period((YEAR, first_march, 1))), u'year:2014-03')
def test_month():
assert_equal(unicode(Period((MONTH, first_jan, 1))), u'2014-01')
def test_several_months():
assert_equal(unicode(Period((MONTH, first_jan, 3))), u'month:2014-01:3')
assert_equal(unicode(Period((MONTH, first_march, 3))), u'month:2014-03:3')
def test_several_years():
assert_equal(unicode(Period((YEAR, first_jan, 3))), u'year:2014:3')
assert_equal(unicode(Period((YEAR, first_march, 3))), u'year:2014-03:3')
# Test String -> Period
def test_parsing_year():
assert_equal(period(u'2014'), Period((YEAR, first_jan, 1)))
def test_parsing_month():
assert_equal(period(u'2014-01'), Period((MONTH, first_jan, 1)))
def test_parsing_rolling_year():
assert_equal(period(u'year:2014-03'), Period((YEAR, first_march, 1)))
def test_parsing_several_months():
assert_equal(period(u'month:2014-03:3'), Period((MONTH, first_march, 3)))
def test_parsing_several_years():
assert_equal(period(u'year:2014:2'), Period((YEAR, first_jan, 2)))
@raises(ValueError)
def test_wrong_syntax_several_years():
period(u'2014:2')
@raises(ValueError)
def test_wrong_syntax_several_months():
period(u'2014-2:2')
@raises(ValueError)
def test_daily_period():
period(u'2014-2-3')
@raises(ValueError)
def test_daily_period_2():
period(u'2014-2-3:2')
@raises(ValueError)
def test_ambiguous_period():
period(u'month:2014')
@raises(TypeError)
def test_deprecated_signature():
period(MONTH, 2014)
@raises(TypeError)
def test_wrong_argument():
period({})
@raises(TypeError)
def test_wrong_argument_1():
period([])
@raises(TypeError)
def test_none():
period(None)
@raises(ValueError)
def test_empty_string():
period('')
| # -*- coding: utf-8 -*-
from nose.tools import assert_equal, raises
from openfisca_core.periods import Period, Instant, YEAR, MONTH, period
first_jan = Instant((2014, 1, 1))
first_march = Instant((2014, 3, 1))
# Test Period -> String
def test_year():
assert_equal(unicode(Period((YEAR, first_jan, 1))), u'2014')
def test_12_months_is_a_year():
assert_equal(unicode(Period((MONTH, first_jan, 12))), u'2014')
def test_rolling_year():
assert_equal(unicode(Period((MONTH, first_march, 12))), u'year:2014-03')
assert_equal(unicode(Period((YEAR, first_march, 1))), u'year:2014-03')
def test_month():
assert_equal(unicode(Period((MONTH, first_jan, 1))), u'2014-01')
def test_several_months():
assert_equal(unicode(Period((MONTH, first_jan, 3))), u'month:2014-01:3')
assert_equal(unicode(Period((MONTH, first_march, 3))), u'month:2014-03:3')
def test_several_years():
assert_equal(unicode(Period((YEAR, first_jan, 3))), u'year:2014:3')
assert_equal(unicode(Period((YEAR, first_march, 3))), u'year:2014-03:3')
# Test String -> Period
def test_parsing_year():
assert_equal(period(u'2014'), Period((YEAR, first_jan, 1)))
def test_parsing_month():
assert_equal(period(u'2014-01'), Period((MONTH, first_jan, 1)))
def test_parsing_rolling_year():
assert_equal(period(u'year:2014-03'), Period((YEAR, first_march, 1)))
def test_parsing_several_months():
assert_equal(period(u'month:2014-03:3'), Period((MONTH, first_march, 3)))
def test_parsing_several_years():
assert_equal(period(u'year:2014:2'), Period((YEAR, first_jan, 2)))
@raises(ValueError)
def test_wrong_syntax_several_years():
period(u'2014:2')
@raises(ValueError)
def test_wrong_syntax_several_months():
period(u'2014-2:2')
@raises(ValueError)
def test_daily_period():
period(u'2014-2-3')
@raises(ValueError)
def test_daily_period_2():
period(u'2014-2-3:2')
@raises(ValueError)
def test_ambiguous_period():
period(u'month:2014')
@raises(TypeError)
def test_deprecated_signature():
period(MONTH, 2014)
@raises(TypeError)
def test_wrong_argument():
period({})
| agpl-3.0 | Python |
8481002f71c3d51d4550841d49a02d85062aabc4 | Fix examples (#357) | arviz-devs/arviz,arviz-devs/arviz,arviz-devs/arviz,arviz-devs/arviz | examples/plot_pair.py | examples/plot_pair.py | """
Pair Plot
=========
_thumb: .2, .5
"""
import arviz as az
az.style.use('arviz-darkgrid')
centered = az.load_arviz_data('centered_eight')
coords = {'school': ['Choate', 'Deerfield']}
az.plot_pair(centered, var_names=['theta', 'mu', 'tau'], coords=coords, divergences=True, textsize=22)
| """
Pair Plot
=========
_thumb: .2, .5
"""
import arviz as az
az.style.use('arviz-darkgrid')
centered = az.load_arviz_data('centered_eight')
coords = {'school': ['Choate', 'Deerfield']}
az.plot_pair(data, var_names=['theta', 'mu', 'tau'], coords=coords, divergences=True, textsize=22)
| apache-2.0 | Python |
5f193bb791947fe1195e2aebf00eb3d127247d10 | Document why title tag is omitted | ento/elm-doc,ento/elm-doc | src/elm_doc/tasks/html.py | src/elm_doc/tasks/html.py | import json
import html
from pathlib import Path
from elm_doc.utils import Namespace
# Note: title tag is omitted, as the Elm app sets the title after
# it's initialized.
PAGE_TEMPLATE = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<link rel="shortcut icon" size="16x16, 32x32, 48x48, 64x64, 128x128, 256x256" href="{mount_point}/assets/favicon.ico">
<link rel="stylesheet" href="{mount_point}/assets/style.css">
<script src="{mount_point}/artifacts/elm.js"></script>
<script src="{mount_point}/assets/highlight/highlight.pack.js"></script>
<link rel="stylesheet" href="{mount_point}/assets/highlight/styles/default.css">
</head>
<body>
<script>
try {{
const fontsLink = document.createElement("link");
fontsLink.href = "{mount_point}/assets/fonts/" + ((navigator.userAgent.indexOf("Macintosh") > -1) ? "_hints_off.css" : "_hints_on.css");
fontsLink.rel = "stylesheet";
document.head.appendChild(fontsLink);
}} catch(e) {{
// loading the font is not essential; log the error and move on
console.log(e);
}}
Elm.Main.init({init});
</script>
</body>
</html>
''' # noqa: E501
def _render(mount_point: str = ''):
if mount_point and mount_point[-1] == '/':
mount_point = mount_point[:-1]
init = {
'flags': {
'mountedAt': mount_point,
},
}
return PAGE_TEMPLATE.format(
mount_point=html.escape(mount_point),
init=json.dumps(init))
class actions(Namespace):
def write(output_path: Path, mount_point: str = ''):
output_path.parent.mkdir(parents=True, exist_ok=True)
with open(str(output_path), 'w') as f:
f.write(_render(mount_point=mount_point))
| import json
import html
from pathlib import Path
from elm_doc.utils import Namespace
PAGE_TEMPLATE = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<link rel="shortcut icon" size="16x16, 32x32, 48x48, 64x64, 128x128, 256x256" href="{mount_point}/assets/favicon.ico">
<link rel="stylesheet" href="{mount_point}/assets/style.css">
<script src="{mount_point}/artifacts/elm.js"></script>
<script src="{mount_point}/assets/highlight/highlight.pack.js"></script>
<link rel="stylesheet" href="{mount_point}/assets/highlight/styles/default.css">
</head>
<body>
<script>
try {{
const fontsLink = document.createElement("link");
fontsLink.href = "{mount_point}/assets/fonts/" + ((navigator.userAgent.indexOf("Macintosh") > -1) ? "_hints_off.css" : "_hints_on.css");
fontsLink.rel = "stylesheet";
document.head.appendChild(fontsLink);
}} catch(e) {{
// loading the font is not essential; log the error and move on
console.log(e);
}}
Elm.Main.init({init});
</script>
</body>
</html>
''' # noqa: E501
def _render(mount_point: str = ''):
if mount_point and mount_point[-1] == '/':
mount_point = mount_point[:-1]
init = {
'flags': {
'mountedAt': mount_point,
},
}
return PAGE_TEMPLATE.format(
mount_point=html.escape(mount_point),
init=json.dumps(init))
class actions(Namespace):
def write(output_path: Path, mount_point: str = ''):
output_path.parent.mkdir(parents=True, exist_ok=True)
with open(str(output_path), 'w') as f:
f.write(_render(mount_point=mount_point))
| bsd-3-clause | Python |
4951b6fdb9702d6b13ce58d91ebdfebe2d7f232f | Make tests pass | ibab/datapipe | tests/tests.py | tests/tests.py | import logging
logger = logging.getLogger('datapipe')
logger.setLevel(logging.WARN)
from datapipe import *
from datapipe.targets.mock import *
class TestTask(Task):
inp = Input()
def outputs(self):
outs = []
for i, elem in enumerate(self.inp):
outs.append(MockTarget('out_{}'.format(i)))
return outs
def run(self):
pass
# New tasks are defined by inheriting from an existing Task
class AddLines(Task):
# The inputs can be anything the task depends on:
# Local and remote files, python objects, numpy arrays, ...
infile = Input()
count = Input(default=1)
text = Input(default='This is some text')
# The outputs are defined dynamically (with access to the inputs)
def outputs(self):
return LocalFile(self.infile.path().replace('.txt', '.AddLines.txt'))
# The actual task is defined as a function with access to inputs and outputs
def run(self):
with self.infile.open() as f:
with self.outputs().open('w') as g:
g.write(f.read())
for i in range(self.count):
g.write(self.text + '\n')
def test_simpletask():
target = MockTarget('test1')
TestTask([target]).run()
def test_several_targets():
inputs = [MockTarget('input_1'), MockTarget('input_2')]
task = TestTask(inputs)
task.run()
def test_example():
# Create test file
open('/tmp/input.txt', 'w').close()
from datapipe import Task, Input, LocalFile, require
# Create initial Targets
infile = LocalFile('/tmp/input.txt')
# Define the pipeline
task1 = AddLines(infile, count=2)
task2 = AddLines(task1.outputs(), count=3, text='This is some more text')
# Require a target to execute all tasks needed to produce it
require(task2.outputs())
| import logging
logger = logging.getLogger('datapipe')
logger.setLevel(logging.WARN)
from datapipe import *
from datapipe.targets.mock import *
class TestTask(Task):
inp = Input()
def outputs(self):
outs = []
for i, elem in enumerate(self.inp):
outs.append(MockTarget('out_{}'.format(i)))
return outs
def run(self):
pass
# New tasks are defined by inheriting from an existing Task
class AddLines(Task):
# The inputs can be anything the task depends on:
# Local and remote files, python objects, numpy arrays, ...
infile = Input()
count = Input(default=1)
text = Input(default='This is some text')
# The outputs are defined dynamically (with access to the inputs)
def outputs(self):
return LocalFile(self.infile.path().replace('.txt', '.AddLines.txt'))
# The actual task is defined as a function with access to inputs and outputs
def run(self):
with self.infile.open() as f:
with self.outputs().open('w') as g:
g.write(f.read())
for i in range(self.count):
g.write(self.text + '\n')
def test_simpletask():
target = MockTarget('test1')
TestTask([target]).run()
assert hash(TestTask([target])) == hash(TestTask([target]))
assert TestTask([target]) == TestTask([target])
assert hash(MockTarget('test1')) == hash(MockTarget('test1'))
assert MockTarget('test1') == MockTarget('test1')
def test_several_targets():
inputs = [MockTarget('input_1'), MockTarget('input_2')]
task = TestTask(inputs)
task.run()
def test_example():
# Create test file
open('/tmp/input.txt', 'w').close()
from datapipe import Task, Input, LocalFile, require
# Create initial Targets
infile = LocalFile('/tmp/input.txt')
# Define the pipeline
task1 = AddLines(infile, count=2)
task2 = AddLines(task1.outputs(), count=3, text='This is some more text')
# Require a target to execute all tasks needed to produce it
require(task2.outputs())
| mit | Python |
2af686c117ce4ced82809b08457122abf7626144 | Use resolution=None | guziy/basemap,matplotlib/basemap,matplotlib/basemap,guziy/basemap | examples/warpimage.py | examples/warpimage.py | import pylab as P
import Image as I
from matplotlib.toolkits.basemap import Basemap
# shows how to warp an image from one map projection to another.
# Uses PIL.
# Download image from
# http://www.space-graphics.com/earth_topo-bathy.htm,
# convert from jpg to png.
# read in png image to rgba array of normalized floats.
try:
rgba = P.imread('e_topo_bathy_4k.png')
except:
msg = """
please download image from http://www.space-graphics.com/earth_topo-bathy.htm,
convert e_topo_bathy_4k.jpg to e_topo_bathy_4k.png is present working directory."""
raise IOError, msg
# reverse lats
rgba = rgba[::-1,:,:]
# define lat/lon grid that image spans (projection='cyl').
nlons = rgba.shape[1]; nlats = rgba.shape[0]
delta = 360./float(nlons)
lons = P.arange(-180.+0.5*delta,180.,delta)
lats = P.arange(-90.+0.5*delta,90.,delta)
# define Lambert Conformal basemap for North America.
# resolution=None means don't bother with boundary data.
m = Basemap(llcrnrlon=-145.5,llcrnrlat=1.,urcrnrlon=-2.566,urcrnrlat=46.352,\
rsphere=(6378137.00,6356752.3142),lat_1=50.,lon_0=-107.,\
resolution=None,projection='lcc')
# transform to nx x ny regularly spaced native projection grid
# nx and ny chosen to have roughly the same horizontal res as original image.
dx = 2.*P.pi*m.rmajor/float(nlons)
nx = int((m.xmax-m.xmin)/dx)+1; ny = int((m.ymax-m.ymin)/dx)+1
rgba_warped = P.zeros((ny,nx,4),'d')
# interpolate from proj='cyl' (geographic coords) to 'lcc'
for k in range(4):
rgba_warped[:,:,k],x,y = m.transform_scalar(rgba[:,:,k],lons,lats,nx,ny,returnxy=True)
# convert normalized floats to integer RGBA values between 0 and 255
rgba_warped = (255.*rgba_warped).astype('uint8')
# convert rgba values to pil image.
pilimage = I.fromstring('RGBA',(nx,ny),rgba_warped[::-1,:,:].tostring())
# plot pil image.
im = m.imshow(pilimage)
# draw parallels and meridians.
# label on left, right and bottom of map.
parallels = P.arange(0.,80,20.)
m.drawparallels(parallels,labels=[1,1,0,1])
meridians = P.arange(10.,360.,30.)
m.drawmeridians(meridians,labels=[1,1,0,1])
P.title("Global earth topo-bathy image warped from 'cyl' to 'lcc' projection",fontsize=12)
P.show()
| import pylab as P
import Image as I
from matplotlib.toolkits.basemap import Basemap
# shows how to warp an image from one map projection to another.
# Uses PIL.
# Download image from
# http://www.space-graphics.com/earth_topo-bathy.htm,
# convert from jpg to png.
# read in png image to rgba array of normalized floats.
try:
rgba = P.imread('e_topo_bathy_4k.png')
except:
msg = """
please download image from http://www.space-graphics.com/earth_topo-bathy.htm,
convert e_topo_bathy_4k.jpg to e_topo_bathy_4k.png is present working directory."""
raise IOError, msg
# reverse lats
rgba = rgba[::-1,:,:]
# define lat/lon grid that image spans (projection='cyl').
nlons = rgba.shape[1]; nlats = rgba.shape[0]
delta = 360./float(nlons)
lons = P.arange(-180.+0.5*delta,180.,delta)
lats = P.arange(-90.+0.5*delta,90.,delta)
# define Lambert Conformal basemap for North America.
m = Basemap(llcrnrlon=-145.5,llcrnrlat=1.,urcrnrlon=-2.566,urcrnrlat=46.352,\
rsphere=(6378137.00,6356752.3142),lat_1=50.,lon_0=-107.,\
resolution='l',area_thresh=1000.,projection='lcc')
# transform to nx x ny regularly spaced native projection grid
# nx and ny chosen to have roughly the same horizontal res as original image.
dx = 2.*P.pi*m.rmajor/float(nlons)
nx = int((m.xmax-m.xmin)/dx)+1; ny = int((m.ymax-m.ymin)/dx)+1
rgba_warped = P.zeros((ny,nx,4),'d')
# interpolate from proj='cyl' (geographic coords) to 'lcc'
for k in range(4):
rgba_warped[:,:,k],x,y = m.transform_scalar(rgba[:,:,k],lons,lats,nx,ny,returnxy=True)
# convert normalized floats to integer RGBA values between 0 and 255
rgba_warped = (255.*rgba_warped).astype('uint8')
# convert rgba values to pil image.
pilimage = I.fromstring('RGBA',(nx,ny),rgba_warped[::-1,:,:].tostring())
# plot pil image.
im = m.imshow(pilimage)
# draw parallels and meridians.
# label on left, right and bottom of map.
parallels = P.arange(0.,80,20.)
m.drawparallels(parallels,labels=[1,1,0,1])
meridians = P.arange(10.,360.,30.)
m.drawmeridians(meridians,labels=[1,1,0,1])
P.title("Global earth topo-bathy image warped from 'cyl' to 'lcc' projection",fontsize=12)
P.show()
| mit | Python |
faeba554cc62b80687b4fd1a7c00fcee2933ecf4 | use logger instead of logging (#14) | cosven/feeluown-core | fuocore/dispatch.py | fuocore/dispatch.py | # -*- coding: utf-8 -*-
import weakref
import logging
from weakref import WeakMethod
logger = logging.getLogger(__name__)
class Signal(object):
def __init__(self, name='', *sig):
self.sig = sig
self.receivers = set()
def emit(self, *args):
for receiver in self.receivers:
try:
receiver()(*args)
except Exception:
logger.exception('receiver %s run error' % receiver())
def _ref(self, receiver):
ref = weakref.ref
if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
ref = WeakMethod
return ref(receiver)
def connect(self, receiver):
self.receivers.add(self._ref(receiver))
def disconnect(self, receiver):
receiver = self._ref(receiver)
if receiver in self.receivers:
self.receivers.remove(receiver)
return True
return False
def receiver(signal):
def _decorator(func):
if isinstance(signal, (list, tuple)):
for s in signal:
s.connect(func)
else:
signal.connect(func)
return func
return _decorator
| # -*- coding: utf-8 -*-
import weakref
import logging
try:
from weakref import WeakMethod
except ImportError:
from fuocore.backports.weakref import WeakMethod
class Signal(object):
def __init__(self, name='', *sig):
self.sig = sig
self.receivers = set()
def emit(self, *args):
for receiver in self.receivers:
try:
receiver()(*args)
except Exception:
logging.exception('receiver %s run error' % receiver())
def _ref(self, receiver):
ref = weakref.ref
if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
ref = WeakMethod
return ref(receiver)
def connect(self, receiver):
self.receivers.add(self._ref(receiver))
def disconnect(self, receiver):
receiver = self._ref(receiver)
if receiver in self.receivers:
self.receivers.remove(receiver)
return True
return False
def receiver(signal):
def _decorator(func):
if isinstance(signal, (list, tuple)):
for s in signal:
s.connect(func)
else:
signal.connect(func)
return func
return _decorator
| mit | Python |
311a858ecbe7d34f9f68a18a3735db9da8b0e692 | Fix global test driver initialization | alisaifee/holmium.core,alisaifee/holmium.core,alisaifee/holmium.core,alisaifee/holmium.core | tests/utils.py | tests/utils.py | import atexit
import tempfile
import sys
import mock
from selenium import webdriver
import os
def build_mock_mapping(name):
mock_driver = mock.Mock()
browser_mapping = {name: mock_driver}
mock_driver.return_value.name = name
return browser_mapping
test_driver = None
def get_driver():
global test_driver
if not test_driver:
options = webdriver.ChromeOptions()
options.add_argument('headless')
test_driver = webdriver.Chrome(chrome_options=options)
atexit.register(test_driver.quit)
test_driver.delete_all_cookies()
test_driver.switch_to.default_content()
return test_driver
def make_temp_page(src):
f = tempfile.mktemp(".html")
fh = open(f, "w")
fh.write(src.replace("\n", ""))
fh.close()
atexit.register(lambda: os.remove(f))
return "file://%s" % f
def mock_open():
if sys.version_info >= (3, 0, 0):
return mock.patch("builtins.open")
return mock.patch("__builtin__.open")
| import atexit
import tempfile
import sys
import mock
from selenium import webdriver
import os
def build_mock_mapping(name):
mock_driver = mock.Mock()
browser_mapping = {name: mock_driver}
mock_driver.return_value.name = name
return browser_mapping
test_driver = None
def get_driver():
global test_driver
if not test_driver:
options = webdriver.ChromeOptions()
options.add_argument('headless')
chrome = webdriver.Chrome(chrome_options=options)
atexit.register(chrome.quit)
chrome.delete_all_cookies()
chrome.switch_to.default_content()
return chrome
def make_temp_page(src):
f = tempfile.mktemp(".html")
fh = open(f, "w")
fh.write(src.replace("\n", ""))
fh.close()
atexit.register(lambda: os.remove(f))
return "file://%s" % f
def mock_open():
if sys.version_info >= (3, 0, 0):
return mock.patch("builtins.open")
return mock.patch("__builtin__.open")
| mit | Python |
e83d8edc6c90fd91e68fc4251e7f0532b06ad6fb | Add docstring for ClusterAbstraction | studiawan/pygraphc | pygraphc/clustering/ClusterAbstraction.py | pygraphc/clustering/ClusterAbstraction.py |
class ClusterAbstraction(object):
"""Get cluster abstraction based on longest common substring.
References
----------
.. [1] jtjacques, Longest common substring from more than two strings - Python.
http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python.
"""
@staticmethod
def dp_lcs(graph, clusters):
"""The processed string are preprocessed message from raw event log messages.
Parameters
----------
graph : graph
A graph to be processed.
clusters : dict[list]
Dictionary containing a list of node identifier per cluster.
Returns
-------
abstraction : dict[str]
Dictionary of abstraction string per cluster.
"""
abstraction = {}
for cluster_id, nodes in clusters.iteritems():
data = []
for node_id in nodes:
data.append(graph.node[node_id]['preprocessed_event'])
abstraction[cluster_id] = ClusterAbstraction.lcs(data)
return abstraction
@staticmethod
def lcs(data):
"""Get longest common substring from multiple string.
Parameters
----------
data : list[str]
List of string to be processed.
Returns
-------
substr : str
A single string as longest common substring.
"""
substr = ''
if len(data) > 1 and len(data[0]) > 0:
for i in range(len(data[0])):
for j in range(len(data[0]) - i + 1):
if j > len(substr) and all(data[0][i:i + j] in x for x in data):
substr = data[0][i:i + j]
return substr
|
class ClusterAbstraction(object):
@staticmethod
def dp_lcs(graph, clusters):
abstraction = {}
for cluster_id, nodes in clusters.iteritems():
data = []
for node_id in nodes:
data.append(graph.node[node_id]['preprocessed_event'])
abstraction[cluster_id] = ClusterAbstraction.lcs(data)
return abstraction
@staticmethod
def lcs(data):
substr = ''
if len(data) > 1 and len(data[0]) > 0:
for i in range(len(data[0])):
for j in range(len(data[0]) - i + 1):
if j > len(substr) and all(data[0][i:i + j] in x for x in data):
substr = data[0][i:i + j]
return substr
| mit | Python |
395c0a9cbeceb512972b71199dd46661af7fcce2 | Update pcl-reference-assemblies.py | mono/bockbuild,mono/bockbuild | packages/pcl-reference-assemblies.py | packages/pcl-reference-assemblies.py | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2013-09-10',
sources=['http://last-hope.baulig.net/misc/mono-pcl-profiles.tgz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2013-09-10',
sources=['http://last-hope.baulig.net/misc/mono-pcl-profiles.tgz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
# Remove v4.6 until we support it
shutil.rmtree(os.path.join(dest, "v4.6"))
PCLReferenceAssembliesPackage()
| mit | Python |
de0bb3543d68b65cc61f9449a3c44d48b3920e49 | add flagged and timestamp to admin view | sunlightlabs/django-gatekeeper | gatekeeper/admin.py | gatekeeper/admin.py | from django.contrib import admin
from gatekeeper.models import ModeratedObject
class ModeratedObjectAdmin(admin.ModelAdmin):
list_display = ('object_name', 'timestamp', 'moderation_status', 'flagged')
list_editable = ('moderation_status','flagged')
list_filter = ['moderation_status','flagged','content_type']
def object_name(self, obj):
return "%s" % obj
admin.site.register(ModeratedObject, ModeratedObjectAdmin)
if not admin.site.index_template:
admin.site.index_template = "admin/gatekeeper/index.html"
| from django.contrib import admin
from gatekeeper.models import ModeratedObject
class ModeratedObjectAdmin(admin.ModelAdmin):
list_display = ('object_name', 'moderation_status',)
list_editable = ('moderation_status',)
list_filter = ['moderation_status','flagged','content_type']
def object_name(self, obj):
return "%s" % obj
admin.site.register(ModeratedObject, ModeratedObjectAdmin)
if not admin.site.index_template:
admin.site.index_template = "admin/gatekeeper/index.html"
| bsd-3-clause | Python |
b15a79e311c45cd25181b79d9657eedcf5ac3785 | Set version to v6.11.1 | explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc | thinc/about.py | thinc/about.py | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = 'thinc'
__version__ = '6.11.1'
__summary__ = "Practical Machine Learning for NLP"
__uri__ = 'https://github.com/explosion/thinc'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__title__ = "thinc"
__release__ = True
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = 'thinc'
__version__ = '6.11.1.dev20'
__summary__ = "Practical Machine Learning for NLP"
__uri__ = 'https://github.com/explosion/thinc'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__title__ = "thinc"
__release__ = False
| mit | Python |
692584f8cfeb5d75a6d38529ed1029286188a3a9 | Add features_masks array in mock model | rossant/phy,rossant/phy,kwikteam/phy,rossant/phy,kwikteam/phy,kwikteam/phy | phy/cluster/manual/tests/conftest.py | phy/cluster/manual/tests/conftest.py | # -*- coding: utf-8 -*-
"""Test fixtures."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
from pytest import yield_fixture
from phy.electrode.mea import staggered_positions
from phy.io.array import _spikes_per_cluster
from phy.io.mock import (artificial_waveforms,
artificial_features,
artificial_spike_clusters,
artificial_spike_samples,
artificial_masks,
artificial_traces,
)
from phy.utils import Bunch
#------------------------------------------------------------------------------
# Fixtures
#------------------------------------------------------------------------------
@yield_fixture
def cluster_ids():
yield [0, 1, 2, 10, 11, 20, 30]
# i, g, N, i, g, N, N
@yield_fixture
def cluster_groups():
yield {0: 'noise', 1: 'good', 10: 'mua', 11: 'good'}
@yield_fixture
def quality():
yield lambda c: c
@yield_fixture
def similarity():
yield lambda c, d: c * 1.01 + d
@yield_fixture(scope='session')
def model():
model = Bunch()
n_spikes = 51
n_samples_w = 31
n_samples_t = 20000
n_channels = 11
n_clusters = 3
n_features = 4
model.n_channels = n_channels
# TODO: test with permutation and dead channels
model.channel_order = None
model.n_spikes = n_spikes
model.sample_rate = 20000.
model.duration = n_samples_t / float(model.sample_rate)
model.spike_times = artificial_spike_samples(n_spikes) * 1.
model.spike_times /= model.spike_times[-1]
model.spike_clusters = artificial_spike_clusters(n_spikes, n_clusters)
model.channel_positions = staggered_positions(n_channels)
model.waveforms = artificial_waveforms(n_spikes, n_samples_w, n_channels)
model.masks = artificial_masks(n_spikes, n_channels)
model.traces = artificial_traces(n_samples_t, n_channels)
model.features = artificial_features(n_spikes, n_channels, n_features)
# features_masks array
f = model.features.reshape((n_spikes, -1))
m = np.repeat(model.masks, n_features, axis=1)
model.features_masks = np.dstack((f, m))
model.spikes_per_cluster = _spikes_per_cluster(model.spike_clusters)
model.n_features_per_channel = n_features
model.n_samples_waveforms = n_samples_w
model.cluster_groups = {c: None for c in range(n_clusters)}
yield model
| # -*- coding: utf-8 -*-
"""Test fixtures."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import yield_fixture
from phy.electrode.mea import staggered_positions
from phy.io.array import _spikes_per_cluster
from phy.io.mock import (artificial_waveforms,
artificial_features,
artificial_spike_clusters,
artificial_spike_samples,
artificial_masks,
artificial_traces,
)
from phy.utils import Bunch
#------------------------------------------------------------------------------
# Fixtures
#------------------------------------------------------------------------------
@yield_fixture
def cluster_ids():
yield [0, 1, 2, 10, 11, 20, 30]
# i, g, N, i, g, N, N
@yield_fixture
def cluster_groups():
yield {0: 'noise', 1: 'good', 10: 'mua', 11: 'good'}
@yield_fixture
def quality():
yield lambda c: c
@yield_fixture
def similarity():
yield lambda c, d: c * 1.01 + d
@yield_fixture(scope='session')
def model():
model = Bunch()
n_spikes = 51
n_samples_w = 31
n_samples_t = 20000
n_channels = 11
n_clusters = 3
n_features = 4
model.n_channels = n_channels
# TODO: test with permutation and dead channels
model.channel_order = None
model.n_spikes = n_spikes
model.sample_rate = 20000.
model.duration = n_samples_t / float(model.sample_rate)
model.spike_times = artificial_spike_samples(n_spikes) * 1.
model.spike_times /= model.spike_times[-1]
model.spike_clusters = artificial_spike_clusters(n_spikes, n_clusters)
model.channel_positions = staggered_positions(n_channels)
model.waveforms = artificial_waveforms(n_spikes, n_samples_w, n_channels)
model.masks = artificial_masks(n_spikes, n_channels)
model.traces = artificial_traces(n_samples_t, n_channels)
model.features = artificial_features(n_spikes, n_channels, n_features)
model.spikes_per_cluster = _spikes_per_cluster(model.spike_clusters)
model.n_features_per_channel = n_features
model.n_samples_waveforms = n_samples_w
model.cluster_groups = {c: None for c in range(n_clusters)}
yield model
| bsd-3-clause | Python |
c2054c2fb0e5af75ffa1ac1305b3fa805f73ae4a | enable persistent cache also on Windows | primiano/depot_tools,CoherentLabs/depot_tools,CoherentLabs/depot_tools,primiano/depot_tools,primiano/depot_tools | recipe_modules/infra_paths/path_config.py | recipe_modules/infra_paths/path_config.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import DEPS
CONFIG_CTX = DEPS['path'].CONFIG_CTX
@CONFIG_CTX()
def infra_common(c):
c.dynamic_paths['checkout'] = None
@CONFIG_CTX(includes=['infra_common'])
def infra_buildbot(c):
c.base_paths['root'] = c.CURRENT_WORKING_DIR[:-4]
c.base_paths['slave_build'] = c.CURRENT_WORKING_DIR
c.base_paths['cache'] = c.base_paths['root'] + (
'build', 'slave', 'cache')
c.base_paths['git_cache'] = c.base_paths['root'] + (
'build', 'slave', 'cache_dir')
c.base_paths['goma_cache'] = c.base_paths['root'] + (
'build', 'slave', 'goma_cache')
for token in ('build_internal', 'build', 'depot_tools'):
c.base_paths[token] = c.base_paths['root'] + (token,)
@CONFIG_CTX(includes=['infra_common'])
def infra_kitchen(c):
c.base_paths['root'] = c.CURRENT_WORKING_DIR
c.base_paths['slave_build'] = c.CURRENT_WORKING_DIR
# TODO(phajdan.jr): have one cache dir, let clients append suffixes.
b_dir = c.CURRENT_WORKING_DIR
while b_dir and b_dir[-1] != 'b':
b_dir = b_dir[:-1]
if c.PLATFORM in ('linux', 'mac'):
c.base_paths['cache'] = (
'/', 'b', 'cache', 'chromium')
for path in ('builder_cache', 'git_cache', 'goma_cache', 'goma_deps_cache'):
c.base_paths[path] = c.base_paths['cache'] + (path,)
elif b_dir:
c.base_paths['cache'] = b_dir + ('cache', 'chromium')
for path in ('builder_cache', 'git_cache', 'goma_cache', 'goma_deps_cache'):
c.base_paths[path] = c.base_paths['cache'] + (path,)
else: # pragma: no cover
c.base_paths['cache'] = c.base_paths['root'] + ('cache',)
c.base_paths['git_cache'] = c.base_paths['root'] + ('cache_dir',)
for path in ('builder_cache', 'goma_cache', 'goma_deps_cache'):
c.base_paths[path] = c.base_paths['cache'] + (path,)
| # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import DEPS
CONFIG_CTX = DEPS['path'].CONFIG_CTX
@CONFIG_CTX()
def infra_common(c):
c.dynamic_paths['checkout'] = None
@CONFIG_CTX(includes=['infra_common'])
def infra_buildbot(c):
c.base_paths['root'] = c.CURRENT_WORKING_DIR[:-4]
c.base_paths['slave_build'] = c.CURRENT_WORKING_DIR
c.base_paths['cache'] = c.base_paths['root'] + (
'build', 'slave', 'cache')
c.base_paths['git_cache'] = c.base_paths['root'] + (
'build', 'slave', 'cache_dir')
c.base_paths['goma_cache'] = c.base_paths['root'] + (
'build', 'slave', 'goma_cache')
for token in ('build_internal', 'build', 'depot_tools'):
c.base_paths[token] = c.base_paths['root'] + (token,)
@CONFIG_CTX(includes=['infra_common'])
def infra_kitchen(c):
c.base_paths['root'] = c.CURRENT_WORKING_DIR
c.base_paths['slave_build'] = c.CURRENT_WORKING_DIR
# TODO(phajdan.jr): have one cache dir, let clients append suffixes.
# TODO(phajdan.jr): set persistent cache path for remaining platforms.
# NOTE: do not use /b/swarm_slave here - it gets deleted on bot redeploy,
# and may happen even after a reboot.
if c.PLATFORM in ('linux', 'mac'):
c.base_paths['cache'] = (
'/', 'b', 'cache', 'chromium')
for path in ('builder_cache', 'git_cache', 'goma_cache', 'goma_deps_cache'):
c.base_paths[path] = c.base_paths['cache'] + (path,)
else:
c.base_paths['cache'] = c.base_paths['root'] + ('cache',)
c.base_paths['git_cache'] = c.base_paths['root'] + ('cache_dir',)
for path in ('builder_cache', 'goma_cache', 'goma_deps_cache'):
c.base_paths[path] = c.base_paths['cache'] + (path,)
| bsd-3-clause | Python |
525725fd1578479a4629677bee3eab20e6170839 | Remove documented code | ayushgoel/LongShot | github.py | github.py | import requests
import constants
QUERY = """
query($repository_owner:String!, $repository_name: String!, $count: Int!) {
repository(
owner: $repository_owner,
name: $repository_name) {
refs(last: $count,refPrefix:"refs/tags/") {
edges {
node{
name
}
}
}
releases(last: $count) {
edges {
node {
name
}
}
}
}
}
"""
class Github:
def __authorization_header(self):
return "token " + self.token
def __request_headers(self):
return {
'authorization': self.__authorization_header(),
}
def __init__(self, token):
self.token = token
def getTagsAndReleases(self, repository_owner, repository_name, count):
payload = {"query": QUERY,
"variables": {
"repository_owner": repository_owner,
"repository_name": repository_name,
"count": count
}}
response = requests.post(constants.GITHUB_API_URL, json=payload, headers=self.__request_headers())
print response
return response | import requests
import constants
# payload = "{\"query\": \"query($repository: String!) {repository(owner: \\\"talk-to\\\",name: $repository) " \
# "{refs(last: 10,refPrefix:\\\"refs/tags/\\\") {edges {node{name}}}}}\"," \
# "\"variables\": \"{\\\"repository\\\": \\\"Knock\\\"}\"\n\t\n}\n\n"
#
# response = requests.request("POST", url, data=payload, headers=headers)
#
# print(response.text)
QUERY = """
query($repository_owner:String!, $repository_name: String!, $count: Int!) {
repository(
owner: $repository_owner,
name: $repository_name) {
refs(last: $count,refPrefix:"refs/tags/") {
edges {
node{
name
}
}
}
releases(last: $count) {
edges {
node {
name
}
}
}
}
}
"""
class Github:
def __authorization_header(self):
return "token " + self.token
def __request_headers(self):
return {
'authorization': self.__authorization_header(),
}
def __init__(self, token):
self.token = token
def getTagsAndReleases(self, repository_owner, repository_name, count):
payload = {"query": QUERY,
"variables": {
"repository_owner": repository_owner,
"repository_name": repository_name,
"count": count
}}
response = requests.post(constants.GITHUB_API_URL, json=payload, headers=self.__request_headers())
print response
return response | mit | Python |
ffe55eca7cc58a13f8dd1b9c2da9e8fa05c9f9e5 | add comment | pzankov/hydroctrl | google.py | google.py | #!/usr/bin/env python3
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from datetime import datetime
from os import path
import settings
class GoogleSheet:
"""
Use Google Sheet as online database.
Connection is recreated for each sheet access to avoid timeout issues.
A copy of sheet contents is kept in memory.
All values are read at object creation.
It takes 1 minute to obtain 20k rows on Raspberry Pi 3.
"""
def __init__(self):
script_dir = path.dirname(path.abspath(__file__))
key_path = path.join(script_dir, 'google_key.json')
sheet_id_path = path.join(script_dir, 'google_sheet_id.txt')
with open(sheet_id_path) as f:
self.sheet_id = f.read().strip()
scope = ['https://spreadsheets.google.com/feeds']
self.credentials = ServiceAccountCredentials.from_json_keyfile_name(key_path, scope)
self.values = self._get_all_values()
def _get_all_values(self):
client = gspread.authorize(self.credentials)
sheet = client.open_by_key(self.sheet_id).sheet1
return sheet.get_all_values()
def _append_row(self, values):
client = gspread.authorize(self.credentials)
sheet = client.open_by_key(self.sheet_id).sheet1
sheet.append_row(values)
self.values.append(values)
def append(self, data):
if len(data) != len(settings.DATA_SPEC):
raise Exception('Invalid data fields count')
values = [data[k] for k in settings.DATA_SPEC]
self._append_row(values)
def main():
s = GoogleSheet()
date = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
s.append({'date': date, 'temperature_C': 25, 'pH': 6.0, 'volume_L': 250, 'nutrients_mL': 0})
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from datetime import datetime
from os import path
import settings
class GoogleSheet:
"""
Use Google Sheet as online database.
Connection is recreated for each sheet access to avoid timeout issues.
A copy of sheet contents is kept in memory.
"""
def __init__(self):
script_dir = path.dirname(path.abspath(__file__))
key_path = path.join(script_dir, 'google_key.json')
sheet_id_path = path.join(script_dir, 'google_sheet_id.txt')
with open(sheet_id_path) as f:
self.sheet_id = f.read().strip()
scope = ['https://spreadsheets.google.com/feeds']
self.credentials = ServiceAccountCredentials.from_json_keyfile_name(key_path, scope)
self.values = self._get_all_values()
def _get_all_values(self):
client = gspread.authorize(self.credentials)
sheet = client.open_by_key(self.sheet_id).sheet1
return sheet.get_all_values()
def _append_row(self, values):
client = gspread.authorize(self.credentials)
sheet = client.open_by_key(self.sheet_id).sheet1
sheet.append_row(values)
self.values.append(values)
def append(self, data):
if len(data) != len(settings.DATA_SPEC):
raise Exception('Invalid data fields count')
values = [data[k] for k in settings.DATA_SPEC]
self._append_row(values)
def main():
s = GoogleSheet()
date = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
s.append({'date': date, 'temperature_C': 25, 'pH': 6.0, 'volume_L': 250, 'nutrients_mL': 0})
if __name__ == "__main__":
main()
| mit | Python |
7e4dfdae04af881408ce623c330b7b30f9a63498 | make flake8 happy | liveblog/liveblog,superdesk/liveblog,liveblog/liveblog,superdesk/liveblog,superdesk/liveblog,hlmnrmr/liveblog,liveblog/liveblog,hlmnrmr/liveblog,liveblog/liveblog,hlmnrmr/liveblog,superdesk/liveblog,liveblog/liveblog,hlmnrmr/liveblog | server/liveblog/syndication/exceptions.py | server/liveblog/syndication/exceptions.py | class APIConnectionError(Exception):
pass
class ProducerAPIError(APIConnectionError):
pass
class ConsumerAPIError(APIConnectionError):
pass
| class APIConnectionError(Exception):
pass
class ProducerAPIError(APIConnectionError):
pass
class ConsumerAPIError(APIConnectionError):
pass | agpl-3.0 | Python |
c22ae8b38b2843f9a8c3504c9b89593908a343aa | Fix #267 (#269) | salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,salopensource/sal | server/plugins/cryptstatus/cryptstatus.py | server/plugins/cryptstatus/cryptstatus.py | import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
| import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None).rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
| apache-2.0 | Python |
e8da41193238a7c677ec7ff8339095ec3e71be3b | Fix formatting, show total and sort | tracymiranda/pc-scripts,tracymiranda/pc-scripts | track_count.py | track_count.py | from report import *
def show_track_count(S):
print "Track Count".ljust(40) + "\t\tSubmission Count"
items = S.track_count().items()
total = sum([count for (track, count) in items])
for (track, count) in sorted(items, cmp=lambda (a_track, a_count), (b_track, b_count): cmp(b_count, a_count)):
if track:
print "%s\t\t%s" % (track.ljust(40), count)
print "Total".ljust(40) + "\t\t%s" % total
if __name__ == "__main__":
# S = ALL.standard().vote_cutoff(4.0)
S = ALL.standard() #.filter(lambda s: s.accepted)
show_track_count(S)
| from report import *
def show_track_count(S):
print "Track Count\t\tSubmission Count"
for (track, count) in S.track_count().items():
if track:
print "%s\t\t%s" % (track.ljust(20), count)
if __name__ == "__main__":
# S = ALL.standard().vote_cutoff(4.0)
S = ALL.standard().filter(lambda s: s.accepted)
show_track_count(S)
| epl-1.0 | Python |
0f9f8dfe459ca98f4ea75d4358724cab3c0559bf | add minimal python version for naoth package to setup.py | BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH | Utils/py/naoth/setup.py | Utils/py/naoth/setup.py | #!/usr/bin/python
from setuptools import setup, find_packages
setup(name='naoth',
version='0.3',
author='NaoTH Berlin United',
author_email='nao-team@informatik.hu-berlin.de',
description='Python utils for the NaoTH toolchain',
packages=find_packages(),
zip_safe=False,
setup_requires=['wheel'],
install_requires=[
'protobuf', 'numpy'
],
python_requires='>=3.6.9',
)
| #!/usr/bin/python
from setuptools import setup, find_packages
setup(name='naoth',
version='0.3',
author='NaoTH Berlin United',
author_email='nao-team@informatik.hu-berlin.de',
description='Python utils for the NaoTH toolchain',
packages=find_packages(),
zip_safe=False,
setup_requires=['wheel'],
install_requires=[
'protobuf', 'numpy'
])
| apache-2.0 | Python |
e9cdd0a05a6ea144b029cb893d00aa4caf055c4b | Remove unused imports. | nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments | tests/test_examplefiles.py | tests/test_examplefiles.py | # -*- coding: utf-8 -*-
"""
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error
from pygments.util import ClassNotFound, b
# generate methods
def test_example_files():
testdir = os.path.dirname(__file__)
for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
absfn = os.path.join(testdir, 'examplefiles', fn)
if not os.path.isfile(absfn):
continue
try:
lx = get_lexer_for_filename(absfn)
except ClassNotFound:
if "_" not in fn:
raise AssertionError('file %r has no registered extension, '
'nor is of the form <lexer>_filename '
'for overriding, thus no lexer found.'
% fn)
try:
name, rest = fn.split("_", 1)
lx = get_lexer_by_name(name)
except ClassNotFound:
raise AssertionError('no lexer found for file %r' % fn)
yield check_lexer, lx, absfn
def check_lexer(lx, absfn):
text = open(absfn, 'rb').read()
text = text.replace(b('\r\n'), b('\n'))
text = text.strip(b('\n')) + b('\n')
try:
text = text.decode('utf-8')
except UnicodeError:
text = text.decode('latin1')
ntext = []
for type, val in lx.get_tokens(text):
ntext.append(val)
assert type != Error, 'lexer %s generated error token for %s' % \
(lx, absfn)
if u''.join(ntext) != text:
raise AssertionError('round trip failed for ' + absfn)
| # -*- coding: utf-8 -*-
"""
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
import unittest
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error
from pygments.util import ClassNotFound, b
# generate methods
def test_example_files():
testdir = os.path.dirname(__file__)
for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
absfn = os.path.join(testdir, 'examplefiles', fn)
if not os.path.isfile(absfn):
continue
try:
lx = get_lexer_for_filename(absfn)
except ClassNotFound:
if "_" not in fn:
raise AssertionError('file %r has no registered extension, '
'nor is of the form <lexer>_filename '
'for overriding, thus no lexer found.'
% fn)
try:
name, rest = fn.split("_", 1)
lx = get_lexer_by_name(name)
except ClassNotFound:
raise AssertionError('no lexer found for file %r' % fn)
yield check_lexer, lx, absfn
def check_lexer(lx, absfn):
text = open(absfn, 'rb').read()
text = text.replace(b('\r\n'), b('\n'))
text = text.strip(b('\n')) + b('\n')
try:
text = text.decode('utf-8')
except UnicodeError:
text = text.decode('latin1')
ntext = []
for type, val in lx.get_tokens(text):
ntext.append(val)
assert type != Error, 'lexer %s generated error token for %s' % \
(lx, absfn)
if u''.join(ntext) != text:
raise AssertionError('round trip failed for ' + absfn)
| bsd-2-clause | Python |
49feeac25b33ebf79cc103e12d0ed4623c96ec71 | Add transaction.to_hex() test | thibault/btctools | tests/test_transactions.py | tests/test_transactions.py | from __future__ import unicode_literals
import unittest
from transactions import Transaction
class TransactionTests(unittest.TestCase):
def setUp(self):
inputs = [
('b0ff74bb0dd894797153ccb862c9f9a488e657452647ada440fe1006ece95c78', 0),
('683d180645632d45f23baf2fb2897241321c1af779f3064ebd24aa517bae6a22', 0),
]
outputs = [
('1EL3y9j8rzZwa8Hxmx2scatb3bh8KKFK6v', 1000),
('115MDLurYMiExVwfTU7R4kE43zrdVoC2pz', 49585000),
]
self.tx = Transaction(inputs, outputs)
def test_transaction_in_and_out_counters(self):
self.assertEqual(self.tx.in_counter, 2)
self.assertEqual(self.tx.out_counter, 2)
def test_transaction_input_constructor(self):
self.assertEqual(
self.tx.inputs[0].tx_hash,
'b0ff74bb0dd894797153ccb862c9f9a488e657452647ada440fe1006ece95c78'
)
self.assertEqual(self.tx.inputs[0].output_id, 0)
def test_transaction_output_constructor(self):
self.assertEqual(
self.tx.outputs[0].address,
'1EL3y9j8rzZwa8Hxmx2scatb3bh8KKFK6v'
)
self.assertEqual(self.tx.outputs[0].amount, 1000)
def test_transaction_to_hex(self):
self.assertEqual(
self.tx.to_hex(),
'0100000002785ce9ec0610fe40a4ad47264557e688a4f9c962b8cc53717994d80dbb74ffb00000000000ffffffff226aae7b51aa24bd4e06f379f71a1c32417289b22faf3bf2452d634506183d680000000000ffffffff02e8030000000000001a76a94c149234042049764dbed331c7d1fc492a4eb5007c5388ac689bf402000000001a76a94c1400d289624679d48aae98137561f1f9df60791a7c88ac00000000'
)
class InputTests(unittest.TestCase):
pass
| from __future__ import unicode_literals
import unittest
from transactions import Transaction
class TransactionTests(unittest.TestCase):
def setUp(self):
inputs = [
('b0ff74bb0dd894797153ccb862c9f9a488e657452647ada440fe1006ece95c78', 0),
('683d180645632d45f23baf2fb2897241321c1af779f3064ebd24aa517bae6a22', 0),
]
outputs = [
('1EL3y9j8rzZwa8Hxmx2scatb3bh8KKFK6v', 1000),
('115MDLurYMiExVwfTU7R4kE43zrdVoC2pz', 49585000),
]
self.tx = Transaction(inputs, outputs)
def test_transaction_in_and_out_counters(self):
self.assertEqual(self.tx.in_counter, 2)
self.assertEqual(self.tx.out_counter, 2)
def test_transaction_input_constructor(self):
self.assertEqual(
self.tx.inputs[0].tx_hash,
'b0ff74bb0dd894797153ccb862c9f9a488e657452647ada440fe1006ece95c78'
)
self.assertEqual(self.tx.inputs[0].output_id, 0)
def test_transaction_output_constructor(self):
self.assertEqual(
self.tx.outputs[0].address,
'1EL3y9j8rzZwa8Hxmx2scatb3bh8KKFK6v'
)
self.assertEqual(self.tx.outputs[0].amount, 1000)
def test_transaction_to_hex(self):
pass
class InputTests(unittest.TestCase):
pass
| mit | Python |
540498265c706232d645c96c04c774c0b04ccc84 | remove test_featurewise_anomaly_score_notfitted method temporarily | Y-oHr-N/kenchi,Y-oHr-N/kenchi | kenchi/outlier_detection/tests/test_reconstruction_based.py | kenchi/outlier_detection/tests/test_reconstruction_based.py | import unittest
import matplotlib
import matplotlib.axes
import numpy as np
from sklearn.exceptions import NotFittedError
from sklearn.utils.estimator_checks import check_estimator
from kenchi.datasets import make_blobs
from kenchi.outlier_detection import PCA
matplotlib.use('Agg')
import matplotlib.pyplot as plt
class PCATest(unittest.TestCase):
def setUp(self):
self.X, self.y = make_blobs(random_state=1)
self.sut = PCA()
_, self.ax = plt.subplots()
def tearDown(self):
plt.close()
def test_check_estimator(self):
self.assertIsNone(check_estimator(self.sut))
def test_fit(self):
self.assertIsInstance(self.sut.fit(self.X), PCA)
def test_fit_predict(self):
self.assertIsInstance(self.sut.fit_predict(self.X), np.ndarray)
def test_anomaly_score_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.anomaly_score(self.X)
def test_predict_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.predict(self.X)
def test_score(self):
self.assertIsInstance(
self.sut.fit(self.X).score(self.X), float
)
def test_score_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.score(self.X)
def test_plot_anomaly_score(self):
self.assertIsInstance(
self.sut.fit(self.X).plot_anomaly_score(self.X, ax=self.ax),
matplotlib.axes.Axes
)
def test_plot_roc_curve(self):
self.assertIsInstance(
self.sut.fit(self.X).plot_roc_curve(self.X, self.y, ax=self.ax),
matplotlib.axes.Axes
)
| import unittest
import matplotlib
import matplotlib.axes
import numpy as np
from sklearn.exceptions import NotFittedError
from sklearn.utils.estimator_checks import check_estimator
from kenchi.datasets import make_blobs
from kenchi.outlier_detection import PCA
matplotlib.use('Agg')
import matplotlib.pyplot as plt
class PCATest(unittest.TestCase):
def setUp(self):
self.X, self.y = make_blobs(random_state=1)
self.sut = PCA()
_, self.ax = plt.subplots()
def tearDown(self):
plt.close()
def test_check_estimator(self):
self.assertIsNone(check_estimator(self.sut))
def test_fit(self):
self.assertIsInstance(self.sut.fit(self.X), PCA)
def test_fit_predict(self):
self.assertIsInstance(self.sut.fit_predict(self.X), np.ndarray)
def test_anomaly_score_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.anomaly_score(self.X)
def test_featurewise_anomaly_score_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.featurewise_anomaly_score(self.X)
def test_predict_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.predict(self.X)
def test_score(self):
self.assertIsInstance(
self.sut.fit(self.X).score(self.X), float
)
def test_score_notfitted(self):
with self.assertRaises(NotFittedError):
self.sut.score(self.X)
def test_plot_anomaly_score(self):
self.assertIsInstance(
self.sut.fit(self.X).plot_anomaly_score(self.X, ax=self.ax),
matplotlib.axes.Axes
)
def test_plot_roc_curve(self):
self.assertIsInstance(
self.sut.fit(self.X).plot_roc_curve(self.X, self.y, ax=self.ax),
matplotlib.axes.Axes
)
| bsd-3-clause | Python |
09ed01974e5fd921d367a6e48e6cc7e382badfe7 | bump version to 0.5.0 | ivelum/graphql-py | graphql/__init__.py | graphql/__init__.py | __version__ = '0.5.0'
| __version__ = '0.4.0'
| mit | Python |
588e1474c0ee4e3f7fc02c5dc4785f70fefa371f | Update AvailableMachinesModel to account for Resources.getLocation returning a list | onitake/Uranium,onitake/Uranium | UM/Qt/Bindings/AvailableMachinesModel.py | UM/Qt/Bindings/AvailableMachinesModel.py | from PyQt5.QtCore import Qt, pyqtSlot, pyqtProperty, pyqtSignal
from UM.Qt.ListModel import ListModel
from UM.Resources import Resources
from UM.Application import Application
from UM.Logger import Logger
from UM.Settings.MachineSettings import MachineSettings
import os
import os.path
import json
class AvailableMachinesModel(ListModel):
NameRole = Qt.UserRole + 1
TypeRole = Qt.UserRole + 2
def __init__(self, parent = None):
super().__init__(parent)
self.addRoleName(self.NameRole, "name")
self.addRoleName(self.TypeRole, "type")
self._updateModel()
@pyqtSlot(int, str)
def createMachine(self, index, name):
type = self.getItem(index)['type']
machine = MachineSettings()
machine.loadSettingsFromFile(Resources.getPath(Resources.SettingsLocation, type))
machine.setName(name)
app = Application.getInstance()
index = app.addMachine(machine)
app.setActiveMachine(app.getMachines()[index])
def _updateModel(self):
dirs = Resources.getLocation(Resources.SettingsLocation)
for dir in dirs:
if not os.path.isdir(dir):
continue
for file in os.listdir(dir):
data = None
path = os.path.join(dir, file)
if os.path.isdir(path):
continue
with open(path, 'rt', -1, 'utf-8') as f:
try:
data = json.load(f)
except ValueError as e:
Logger.log('e', "Error when loading file {0}: {1}".format(file, e))
continue
# Ignore any file that is explicitly marked as non-visible
if not data.get('visible', True):
continue
# Ignore any file that is marked as non-visible for the current application.
appname = Application.getInstance().getApplicationName()
if appname in data:
if not data[appname].get('visible', True):
continue
self.appendItem({ 'name': data['name'], 'type': file })
self.sort(lambda e: e['name'])
| from PyQt5.QtCore import Qt, pyqtSlot, pyqtProperty, pyqtSignal
from UM.Qt.ListModel import ListModel
from UM.Resources import Resources
from UM.Application import Application
from UM.Logger import Logger
from UM.Settings.MachineSettings import MachineSettings
import os
import os.path
import json
class AvailableMachinesModel(ListModel):
NameRole = Qt.UserRole + 1
TypeRole = Qt.UserRole + 2
def __init__(self, parent = None):
super().__init__(parent)
self.addRoleName(self.NameRole, "name")
self.addRoleName(self.TypeRole, "type")
self._updateModel()
@pyqtSlot(int, str)
def createMachine(self, index, name):
type = self.getItem(index)['type']
machine = MachineSettings()
machine.loadSettingsFromFile(Resources.getPath(Resources.SettingsLocation, type))
machine.setName(name)
app = Application.getInstance()
index = app.addMachine(machine)
app.setActiveMachine(app.getMachines()[index])
def _updateModel(self):
dir = Resources.getLocation(Resources.SettingsLocation)
for file in os.listdir(dir):
data = None
path = os.path.join(dir, file)
if os.path.isdir(path):
continue
with open(path, 'rt', -1, 'utf-8') as f:
try:
data = json.load(f)
except ValueError as e:
Logger.log('e', "Error when loading file {0}: {1}".format(file, e))
continue
# Ignore any file that is explicitly marked as non-visible
if not data.get('visible', True):
continue
# Ignore any file that is marked as non-visible for the current application.
appname = Application.getInstance().getApplicationName()
if appname in data:
if not data[appname].get('visible', True):
continue
self.appendItem({ 'name': data['name'], 'type': file })
self.sort(lambda e: e['name'])
| agpl-3.0 | Python |
89701f002f8831ca2133aab0cc56319e33d18eed | Fix import | explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc | thinc/layers/with_debug.py | thinc/layers/with_debug.py | from typing import Optional, Callable, Any, Tuple
from ..model import Model
do_nothing = lambda *args, **kwargs: None
def with_debug(
layer: Model,
name: Optional[str] = None,
*,
on_init: Callable[[Model, Any, Any], None] = do_nothing,
on_forward: Callable[[Model, Any, bool], None] = do_nothing,
on_backprop: Callable[[Any], None] = do_nothing,
):
"""Debugging layer that wraps any layer and allows executing callbacks
during the forward pass, backward pass and initialization. The callbacks
will receive the same arguments as the functions they're called in.
"""
name = layer.name if name is None else name
def forward(model: Model, X: Any, is_train: bool) -> Tuple[Any, Callable]:
on_forward(model, X, is_train)
layer_Y, layer_callback = layer(X, is_train=is_train)
def backprop(dY: Any) -> Any:
on_backprop(dY)
return layer_callback(dY)
return layer_Y, backprop
def init(model: Model, X: Any, Y: Any) -> Model:
on_init(model, X, Y)
return layer.initialize(X, Y)
return Model(f"debug:{name}", forward, init=init)
| from typing import Optional, Callable, Any, Tuple
from thinc.api import Model
do_nothing = lambda *args, **kwargs: None
def with_debug(
layer: Model,
name: Optional[str] = None,
*,
on_init: Callable[[Model, Any, Any], None] = do_nothing,
on_forward: Callable[[Model, Any, bool], None] = do_nothing,
on_backprop: Callable[[Any], None] = do_nothing,
):
"""Debugging layer that wraps any layer and allows executing callbacks
during the forward pass, backward pass and initialization. The callbacks
will receive the same arguments as the functions they're called in.
"""
name = layer.name if name is None else name
def forward(model: Model, X: Any, is_train: bool) -> Tuple[Any, Callable]:
on_forward(model, X, is_train)
layer_Y, layer_callback = layer(X, is_train=is_train)
def backprop(dY: Any) -> Any:
on_backprop(dY)
return layer_callback(dY)
return layer_Y, backprop
def init(model: Model, X: Any, Y: Any) -> Model:
on_init(model, X, Y)
return layer.initialize(X, Y)
return Model(f"debug:{name}", forward, init=init)
| mit | Python |
77132e94789cc1b2f7765c34545d39f4770e3aa2 | Use the local logger.info instead of logging.info. This can cause issues with the line with other loggers. | vertexproject/synapse,vertexproject/synapse,vertexproject/synapse,vivisect/synapse | synapse/lib/modules.py | synapse/lib/modules.py | '''
Module which implements the synapse module API/convention.
'''
import logging
import synapse.dyndeps as s_dyndeps
logger = logging.getLogger(__name__)
synmods = {}
modlist = []
def call(name, *args, **kwargs):
'''
Call the given function on all loaded synapse modules.
Returns a list of name,ret,exc tuples where each module
which implements the given function returns either ret on
successful execution or exc in the event of an exception.
Example:
import synapse.lib.modules as s_modules
for name,ret,exc in s_modules.call('getFooByBar',bar):
dostuff()
'''
ret = []
for sname,smod in modlist:
func = getattr(smod,name,None)
if func == None:
continue
try:
val = func(*args,**kwargs)
ret.append( (sname,val,None) )
except Exception as e:
ret.append( (sname,None,e) )
#print('call: %r %r %r %r' % (name,args,kwargs,ret))
return ret
def load(name):
'''
Load the given python module path as a synapse module.
Example:
import synapse.lib.modules as s_modules
s_modules.load('foopkg.barmod')
'''
smod = synmods.get(name)
if smod == None:
logger.info('loading syn mod: %s' % (name,))
smod = s_dyndeps.tryDynMod(name)
synmods[name] = smod
modlist.append( (name,smod) )
return smod
| '''
Module which implements the synapse module API/convention.
'''
import logging
import synapse.dyndeps as s_dyndeps
logger = logging.getLogger(__name__)
synmods = {}
modlist = []
def call(name, *args, **kwargs):
'''
Call the given function on all loaded synapse modules.
Returns a list of name,ret,exc tuples where each module
which implements the given function returns either ret on
successful execution or exc in the event of an exception.
Example:
import synapse.lib.modules as s_modules
for name,ret,exc in s_modules.call('getFooByBar',bar):
dostuff()
'''
ret = []
for sname,smod in modlist:
func = getattr(smod,name,None)
if func == None:
continue
try:
val = func(*args,**kwargs)
ret.append( (sname,val,None) )
except Exception as e:
ret.append( (sname,None,e) )
#print('call: %r %r %r %r' % (name,args,kwargs,ret))
return ret
def load(name):
'''
Load the given python module path as a synapse module.
Example:
import synapse.lib.modules as s_modules
s_modules.load('foopkg.barmod')
'''
smod = synmods.get(name)
if smod == None:
logging.info('loading syn mod: %s' % (name,))
smod = s_dyndeps.tryDynMod(name)
synmods[name] = smod
modlist.append( (name,smod) )
return smod
| apache-2.0 | Python |
650b71c637b6bd09a757791b9f9d8483eb153d58 | Prepare release 0.6b1 | flux3dp/fluxghost,flux3dp/fluxghost,flux3dp/fluxghost,flux3dp/fluxghost | fluxghost/__init__.py | fluxghost/__init__.py |
__version__ = "0.6b1"
DEBUG = False
|
__version__ = "0.6a1"
DEBUG = False
| agpl-3.0 | Python |
98e2d36d4759e24eb5c03d369d1cd0ac8c3dfdcf | Add Project.queue_name to list_filter and list_display | frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq | frigg/builds/admin.py | frigg/builds/admin.py | # -*- coding: utf8 -*-
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .models import Build, BuildResult, Project
class BuildResultInline(admin.StackedInline):
model = BuildResult
readonly_fields = ('result_log', 'succeeded')
extra = 0
max_num = 0
class BuildInline(admin.TabularInline):
model = Build
readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha')
extra = 0
max_num = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'git_repository', 'queue_name', 'average_time', 'last_build_number')
list_filter = ['owner', 'queue_name']
actions = ['sync_members']
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(Build)
class BuildAdmin(admin.ModelAdmin):
list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color')
inlines = [BuildResultInline]
list_filter = ['project']
actions = ['restart_build']
def restart_build(self, request, queryset):
for build in queryset:
build.start()
self.message_user(
request,
'{} build{} was restarted'.format(len(queryset), pluralize(len(queryset)))
)
restart_build.short_description = 'Restart selected builds'
@admin.register(BuildResult)
class BuildResultAdmin(admin.ModelAdmin):
list_display = ('__str__', 'succeeded', 'coverage')
| # -*- coding: utf8 -*-
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .models import Build, BuildResult, Project
class BuildResultInline(admin.StackedInline):
model = BuildResult
readonly_fields = ('result_log', 'succeeded')
extra = 0
max_num = 0
class BuildInline(admin.TabularInline):
model = Build
readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha')
extra = 0
max_num = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number')
list_filter = ['owner']
actions = ['sync_members']
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(Build)
class BuildAdmin(admin.ModelAdmin):
list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color')
inlines = [BuildResultInline]
list_filter = ['project']
actions = ['restart_build']
def restart_build(self, request, queryset):
for build in queryset:
build.start()
self.message_user(
request,
'{} build{} was restarted'.format(len(queryset), pluralize(len(queryset)))
)
restart_build.short_description = 'Restart selected builds'
@admin.register(BuildResult)
class BuildResultAdmin(admin.ModelAdmin):
list_display = ('__str__', 'succeeded', 'coverage')
| mit | Python |
4bd328d14f5997bb8cd605ace428f8f95d7f7ea5 | update public data in a loop | CIRCL/bgpranking-redis-api,CIRCL/bgpranking-redis-api,CIRCL/bgpranking-redis-api | example/website/scripts/generate_static_data.py | example/website/scripts/generate_static_data.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import time
from bgpranking import tools
csv_dir = os.path.join('..', 'data', 'csv')
agg_csv_dir = os.path.join('..', 'data', 'csv_agg')
js_dir = os.path.join('..', 'data', 'js')
while True:
tools.prepare_all_csv(csv_dir)
# cat LU-overall-allocv4-v6-asn.txt | grep asn | cut -d "|" -f 4
lu_raw_asns = open('lu_asns_dump', 'r').read()
lu_asns = lu_raw_asns.split('\n')
tools.aggregate_csvs(csv_dir, agg_csv_dir, **{'luxembourg': lu_asns})
tools.generate_js_for_worldmap(js_dir)
time.sleep(10000)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from bgpranking import tools
csv_dir = os.path.join('..', 'data', 'csv')
tools.prepare_all_csv(csv_dir)
agg_csv_dir = os.path.join('..', 'data', 'csv_agg')
# cat LU-overall-allocv4-v6-asn.txt | grep asn | cut -d "|" -f 4
lu_raw_asns = open('lu_asns_dump', 'r').read()
lu_asns = lu_raw_asns.split('\n')
tools.aggregate_csvs(csv_dir, agg_csv_dir, **{'luxembourg': lu_asns})
js_dir = os.path.join('..', 'data', 'js')
tools.generate_js_for_worldmap(js_dir)
| bsd-2-clause | Python |
2eff140499c66562f8a87d32531d3ecbb78b57f2 | Fix import for py3k | geoalchemy/geoalchemy2 | geoalchemy2/compat.py | geoalchemy2/compat.py | """
Python 2 and 3 compatibility:
- Py3k `memoryview()` made an alias for Py2k `buffer()`
- Py3k `bytes()` made an alias for Py2k `str()`
"""
try:
import __builtin__ as builtins
except ImportError:
import builtins
import sys
if sys.version_info[0] == 2:
buffer = getattr(builtins, 'buffer')
bytes = str
else:
# Python 2.6 flake8 workaround
buffer = getattr(builtins, 'memoryview')
bytes = bytes
| """
Python 2 and 3 compatibility:
- Py3k `memoryview()` made an alias for Py2k `buffer()`
- Py3k `bytes()` made an alias for Py2k `str()`
"""
import __builtin__
import sys
if sys.version_info[0] == 2:
buffer = getattr(__builtin__, 'buffer')
bytes = str
else:
# Python 2.6 flake8 workaround
buffer = getattr(__builtin__, 'memoryview')
bytes = bytes
| mit | Python |
3c9e87e48ab65db36e503090af7cc244354e679f | Remove multiline handing attemp, Fix #5 | vors/jupyter-powershell | powershell_kernel/powershell_repl.py | powershell_kernel/powershell_repl.py | # -*- coding: utf-8 -*-
# Copyright (c) 2011, Wojciech Bederski (wuub.net)
# All rights reserved.
# See LICENSE.txt for details.
import os
import re
from . import subprocess_repl
class PowershellRepl(subprocess_repl.SubprocessRepl):
TYPE = "powershell"
def __init__(self, encoding, **kwds):
if not encoding:
# Detect encoding
chcp = os.popen('chcp')
chcp_encoding = re.match(r'[^\d]+(\d+)', chcp.read())
if not chcp_encoding:
raise LookupError("Can't detect encoding from chcp")
encoding = "cp" + chcp_encoding.groups()[0]
print(encoding)
super(PowershellRepl, self).__init__(encoding, **kwds)
def read_bytes(self):
# this is windows specific problem, that you cannot tell if there
# are more bytes ready, so we read only 1 at a times
result = super(PowershellRepl, self).read_bytes()
return result
def write_bytes(self, bytes):
# Drop flag on new input
self.do_write(bytes)
def do_write(self, bytes):
super(PowershellRepl, self).write_bytes(bytes)
| # -*- coding: utf-8 -*-
# Copyright (c) 2011, Wojciech Bederski (wuub.net)
# All rights reserved.
# See LICENSE.txt for details.
import os
import re
from . import subprocess_repl
# PowerShell in interactive mode shows no prompt, so we must hold it by hand.
# Every command prepended with other command, which will output only one character ('.')
# When user command leads to no output (for example, 'cd ..'), we get only this character,
# and then we send command to show prompt explicitly.
# No output at all means, that PowerShell needs more input (multiline mode).
# In this case we proceeds sending user input without modifications.
class PowershellRepl(subprocess_repl.SubprocessRepl):
TYPE = "powershell"
PREPENDER = b"."
def __init__(self, encoding, **kwds):
if not encoding:
# Detect encoding
chcp = os.popen('chcp')
chcp_encoding = re.match(r'[^\d]+(\d+)', chcp.read())
if not chcp_encoding:
raise LookupError("Can't detect encoding from chcp")
encoding = "cp" + chcp_encoding.groups()[0]
print(encoding)
super(PowershellRepl, self).__init__(encoding, **kwds)
# Using this to detect whether PowerShell returns some output or it needs more input
# PowerShell in interactive mode doesn't show prompt, so we must hold it by hand
# It's a hack and, for example, we can send 'Write-Host "" -NoNewLine' with no output, but in outhr cases it may work well
self.got_output = True
self.multiline = False
def read_bytes(self):
# this is windows specific problem, that you cannot tell if there
# are more bytes ready, so we read only 1 at a times
result = super(PowershellRepl, self).read_bytes()
# Consumes output (it must be equal to PREPENDER)
if result and not self.got_output:
self.got_output = True
self.multiline = False
# Don't return PREPENDER, read another input
return self.read_bytes()
return result
def write_bytes(self, bytes):
# Drop flag on new input
self.got_output = False
if not self.multiline:
# Turn multiline mode on, it will be turned off, when PowerShell returns some output
self.multiline = True
self.prepend()
self.do_write(bytes)
def do_write(self, bytes):
super(PowershellRepl, self).write_bytes(bytes)
def prepend(self):
""" Command to prepend every output with special mark to detect multiline mode """
self.do_write(b'Write-Host "' + PowershellRepl.PREPENDER + b'" -NoNewLine; ')
| mit | Python |
e4af11394bfc81d4476af2a8fe60f82f11e674d6 | Add staticfiles URLs to urls.development by default | armstrong/armstrong.templates.standard,armstrong/armstrong.templates.standard | project_template/urls/development.py | project_template/urls/development.py | """
Add any additional URLs that should only be available when using the the
settings.development configuration.
See ``urls.defaults`` for a list of all URLs available across both
configurations.
"""
from .defaults import *
urlpatterns += patterns('',
# Examples:
# url(r'^$', '{{ project_name }}.views.debug', name='debug'),
# url(r'^{{ project_name }}/', include('{{ project_name }}.debug.urls')),
)
# Load staticfiles for testing purposes
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
| """
Add any additional URLs that should only be available when using the the
settings.development configuration.
See ``urls.defaults`` for a list of all URLs available across both
configurations.
"""
from .defaults import *
urlpatterns += patterns('',
# Examples:
# url(r'^$', '{{ project_name }}.views.debug', name='debug'),
# url(r'^{{ project_name }}/', include('{{ project_name }}.debug.urls')),
)
| apache-2.0 | Python |
5c45fbd31c61dc72ce1e620c2840f06334f1cc3c | improve 'Create' entry: use q; only if needed | dsanders11/django-autocomplete-light,luzfcb/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Perkville/django-autocomplete-light,luzfcb/django-autocomplete-light,Eraldo/django-autocomplete-light,Perkville/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,yourlabs/django-autocomplete-light,Eraldo/django-autocomplete-light,shubhamdipt/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,Eraldo/django-autocomplete-light,Perkville/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,Perkville/django-autocomplete-light,Eraldo/django-autocomplete-light,shubhamdipt/django-autocomplete-light | autocomplete_light/example_apps/create_choice_on_the_fly/autocomplete_light_registry.py | autocomplete_light/example_apps/create_choice_on_the_fly/autocomplete_light_registry.py | import autocomplete_light.shortcuts as autocomplete_light
from django import http
from .models import OnTheFly
class OnTheFlyAutocomplete(autocomplete_light.AutocompleteModelBase):
choices = OnTheFly.objects.all()
def autocomplete_html(self):
html = super(OnTheFlyAutocomplete, self).autocomplete_html()
q = self.request.REQUEST.get('q')
if q and not q in [c.name for c in self.choices]:
html += '<span data-value="create">Create "{}"</span>'.format(q)
return html
def post(self, request, *args, **kwargs):
return http.HttpResponse(
OnTheFly.objects.create(name=request.POST['name']).pk
)
autocomplete_light.register(OnTheFly, OnTheFlyAutocomplete)
| import autocomplete_light.shortcuts as autocomplete_light
from django import http
from .models import OnTheFly
class OnTheFlyAutocomplete(autocomplete_light.AutocompleteModelBase):
choices = OnTheFly.objects.all()
def autocomplete_html(self):
html = super(OnTheFlyAutocomplete, self).autocomplete_html()
html += '<span data-value="create">Create Fly</span>'
return html
def post(self, request, *args, **kwargs):
return http.HttpResponse(
OnTheFly.objects.create(name=request.POST['name']).pk
)
autocomplete_light.register(OnTheFly, OnTheFlyAutocomplete)
| mit | Python |
85cfdc004c680213883f559f4fa9aafc58690698 | revert owlbot main branch templates (#35) | googleapis/python-life-sciences,googleapis/python-life-sciences | owlbot.py | owlbot.py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import synthtool as s
import synthtool.gcp as gcp
from synthtool.languages import python
# ----------------------------------------------------------------------------
# Copy the generated client from the owl-bot staging directory
# ----------------------------------------------------------------------------
default_version = "v2beta"
for library in s.get_staging_dirs(default_version):
s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"])
s.remove_staging_dirs()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = gcp.CommonTemplates().py_library(microgenerator=True)
python.py_samples(skip_readmes=True)
s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file
# ----------------------------------------------------------------------------
# Run blacken session
# ----------------------------------------------------------------------------
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
| # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import synthtool as s
import synthtool.gcp as gcp
from synthtool.languages import python
# ----------------------------------------------------------------------------
# Copy the generated client from the owl-bot staging directory
# ----------------------------------------------------------------------------
default_version = "v2beta"
for library in s.get_staging_dirs(default_version):
s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"])
s.remove_staging_dirs()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = gcp.CommonTemplates().py_library(microgenerator=True)
python.py_samples(skip_readmes=True)
s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file
# ----------------------------------------------------------------------------
# Run blacken session
# ----------------------------------------------------------------------------
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
# ----------------------------------------------------------------------------
# Main Branch migration
# ----------------------------------------------------------------------------
s.replace(
"*.rst",
"master",
"main"
)
s.replace(
"*.rst",
"google-cloud-python/blob/main",
"google-cloud-python/blob/master"
)
s.replace(
"CONTRIBUTING.rst",
"kubernetes/community/blob/main",
"kubernetes/community/blob/master"
)
s.replace(
".kokoro/*",
"master",
"main"
)
| apache-2.0 | Python |
12a71f104e3693e73edf7f0bdfc56da19bae1bf1 | Change parser | nanalelfe/fofe-ner,nanalelfe/fofe-ner,nanalelfe/fofe-ner,nanalelfe/fofe-ner | parser.py | parser.py | import glob, os
def OntoNotes(directory):
"""
Parameters
----------
directory: str
directory in which the OntoNotes project is located
files : str
path to a file containing all of the paths to files containing NER-annotated
data
Yields
------
sentence : list of str
original sentence
ner_begin : list of int
start indices of NER, inclusive
ner_end : list of int
end indices of NER, excusive
ner_label : list of int
The entity type of sentence[ner_begin[i]:ner_end[i]] is label[i]
"""
entity2cls = {
# OntoNotes labels
'PERSON': 0,
'FAC': 1,
'ORG': 2,
'GPE': 3,
'LOC': 4,
'PRODUCT': 5,
'DATE': 6,
'TIME': 7,
'PERCENT': 8,
'MONEY': 9,
'QUANTITY': 10,
'ORDINAL': 11,
'CARDINAL': 12,
'EVENT': 13,
'WORK_OF_ART': 14,
'LAW': 15,
'LANGUAGE': 16,
'NORP': 17
}
sentence, ner_begin, ner_end, ner_label = [], [], [], []
ret = []
for filename in glob.glob(os.path.join(directory, "cnn_0160.v4_gold_conll")):
textfile = open(filename, "r")
for line in textfile:
tokens = line.strip().split
ret.append(token)
return ret
if __name__ == '__main__':
print(OntoNotes("/eecs/research/asr/quanliu/Datasets/CoNLL2012/data/development/conll")) | import glob, os
def OntoNotes(directory):
"""
Parameters
----------
directory: str
directory in which the OntoNotes project is located
files : str
path to a file containing all of the paths to files containing NER-annotated
data
Yields
------
sentence : list of str
original sentence
ner_begin : list of int
start indices of NER, inclusive
ner_end : list of int
end indices of NER, excusive
ner_label : list of int
The entity type of sentence[ner_begin[i]:ner_end[i]] is label[i]
"""
entity2cls = {
# OntoNotes labels
'PERSON': 0,
'FAC': 1,
'ORG': 2,
'GPE': 3,
'LOC': 4,
'PRODUCT': 5,
'DATE': 6,
'TIME': 7,
'PERCENT': 8,
'MONEY': 9,
'QUANTITY': 10,
'ORDINAL': 11,
'CARDINAL': 12,
'EVENT': 13,
'WORK_OF_ART': 14,
'LAW': 15,
'LANGUAGE': 16,
'NORP': 17
}
sentence, ner_begin, ner_end, ner_label = [], [], [], []
ret = []
for filename in glob.glob(os.path.join(directory, "cnn_0160.v4_gold_conll")):
textfile = open(filename, "r")
for line in textfile:
tokens = line.strip().split
ret.append(token)
return ret
if __name__ == '__name__':
print(OntoNotes("/eecs/research/asr/quanliu/Datasets/CoNLL2012/data/development/conll")) | mit | Python |
d754690afc2233dfe8fb84ea9cbb0bb820600529 | update tests | evansloan082/sports.py | tests/score_test.py | tests/score_test.py | import json
import unittest
import sports
class TestScores(unittest.TestCase):
match_data = {
'league': 'NHL',
'home_team': 'Pittsburgh Penguins',
'away_team': 'Nashville Predators',
'match_score': '2-0',
'match_date': 'Sat, 19 Aug 2017 02:12:05 GMT',
'match_time': 'Game Finished',
'match_link': 'test',
}
match = sports.models.Match(sports.HOCKEY, match_data)
matches = sports.get_sport(sports.BASEBALL)
def test_match(self):
self.assertIsNotNone(self.match)
def test_teams(self):
self.assertEqual(self.match.home_team, 'Pittsburgh Penguins')
self.assertEqual(self.match.away_team, 'Nashville Predators')
def test_score(self):
self.assertEqual(self.match.home_score, '2')
self.assertEqual(self.match.away_score, '0')
def test_date(self):
self.assertIsNotNone(self.match.match_date)
def test_sport(self):
self.assertEqual(self.match.sport, sports.HOCKEY)
if __name__ == '__main__':
unittest.main()
| import json
import unittest
import sports_py
class TestScores(unittest.TestCase):
match_data = {
'league': 'NHL',
'home_team': 'Pittsburgh Penguins',
'away_team': 'Nashville Predators',
'match_score': '2-0',
'match_date': 'Sat, 19 Aug 2017 02:12:05 GMT',
'match_time': 'Game Finished',
'match_link': 'test',
}
match = sports_py.models.Match('hockey', match_data)
matches = sports_py.get_sport_scores('baseball')
def test_match(self):
self.assertIsNotNone(self.match)
def test_teams(self):
self.assertEqual(self.match.home_team, 'Pittsburgh Penguins')
self.assertEqual(self.match.away_team, 'Nashville Predators')
def test_score(self):
self.assertEqual(self.match.home_score, '2')
self.assertEqual(self.match.away_score, '0')
def test_date(self):
self.assertIsNotNone(self.match.match_date)
def test_sport(self):
self.assertEqual(self.match.sport, 'hockey')
def test_json(self):
try:
json.loads(self.match.to_json())
for match in self.matches:
json.loads(match.to_json())
self.test = True
except ValueError:
self.test = False
self.assertEqual(self.test, True)
if __name__ == '__main__':
unittest.main()
| mit | Python |
17015ecf48ec37909de6de2c299454fc89b592e9 | Add failing test for URL without zoom | bfontaine/jinja2_maps | tests/test_gmaps.py | tests/test_gmaps.py | # -*- coding: UTF-8 -*-
from base import TestCase
from jinja2_maps.gmaps import gmaps_url
class TestGmaps(TestCase):
def test_url_dict(self):
url = "https://www.google.com/maps/place/12.34,56.78/@12.34,56.78,42z"
self.assertEquals(url,
gmaps_url(dict(latitude=12.34, longitude=56.78), zoom=42))
def test_url_dict_no_zoom(self):
url = "https://www.google.com/maps/place/12.34,56.78/@12.34,56.78,16z"
self.assertEquals(url,
gmaps_url(dict(latitude=12.34, longitude=56.78)))
| # -*- coding: UTF-8 -*-
from base import TestCase
from jinja2_maps.gmaps import gmaps_url
class TestGmaps(TestCase):
def test_url_dict(self):
url = "https://www.google.com/maps/place/12.34,56.78/@12.34,56.78,42z"
self.assertEquals(url,
gmaps_url(dict(latitude=12.34, longitude=56.78), zoom=42))
| mit | Python |
2a95db43b1294e8c7620687cd824364cc8618d22 | Fix typo | Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd | txircd/modules/cmd_kill.py | txircd/modules/cmd_kill.py | from twisted.words.protocols import irc
from txircd.modbase import Command
class KillCommand(Command):
def onUse(self, user, data):
target = data["targetuser"]
reason = "Killed by {}: {}".format(user.nickname, data["reason"])
target.sendMessage("KILL", ":{} {}".format(user.nickname, data["reason"]))
quit_to = set()
for chan in target.channels.iterkeys():
cdata = self.ircd.channels[chan]
target.leave(chan)
for u in cdata.users:
quit_to.add(u)
for u in quit_to:
u.sendMessage("QUIT", ":{}".format(reason), to=None, prefix=target.prefix())
target.sendMessage("ERROR", ":Closing Link {} [{}]".format(user.prefix(), data["reason"]), to=None, prefix=None)
del self.ircd.users[target.nickname]
target.socket.transport.loseConnection()
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTYETREGISTERED, "KILL", ":You have not registered")
return {}
if "o" not in user.mode:
user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - You do not have the correct operator privileges")
return {}
if not params or len(params) < 2:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "KILL", ":Not enough parameters")
return {}
if params[0] not in self.ircd.users:
user.sendMessage(irc.ERR_NOSUCHNICK, params[0], ":No such nick/channel")
return {}
target = self.ircd.users[params[0]]
if "o" in target.mode:
user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - You cannot KILL another oper")
return {}
return {
"user": user,
"targetuser": target,
"reason": " ".join(params[1:])
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"KILL": KillCommand()
}
}
def cleanup(self):
del self.ircd.commands["KILL"] | from twisted.words.protocols import irc
from txircd.modbase import command
class KillCommand(Command):
def onUse(self, user, data):
target = data["targetuser"]
reason = "Killed by {}: {}".format(user.nickname, data["reason"])
target.sendMessage("KILL", ":{} {}".format(user.nickname, data["reason"]))
quit_to = set()
for chan in target.channels.iterkeys():
cdata = self.ircd.channels[chan]
target.leave(chan)
for u in cdata.users:
quit_to.add(u)
for u in quit_to:
u.sendMessage("QUIT", ":{}".format(reason), to=None, prefix=target.prefix())
target.sendMessage("ERROR", ":Closing Link {} [{}]".format(user.prefix(), data["reason"]), to=None, prefix=None)
del self.ircd.users[target.nickname]
target.socket.transport.loseConnection()
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTYETREGISTERED, "KILL", ":You have not registered")
return {}
if "o" not in user.mode:
user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - You do not have the correct operator privileges")
return {}
if not params or len(params) < 2:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "KILL", ":Not enough parameters")
return {}
if params[0] not in self.ircd.users:
user.sendMessage(irc.ERR_NOSUCHNICK, params[0], ":No such nick/channel")
return {}
target = self.ircd.users[params[0]]
if "o" in target.mode:
user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - You cannot KILL another oper")
return {}
return {
"user": user,
"targetuser": target,
"reason": " ".join(params[1:])
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"KILL": KillCommand()
}
}
def cleanup(self):
del self.ircd.commands["KILL"] | bsd-3-clause | Python |
47bdbeb445c52e846f20da214fa4086cac13de0d | Update logpm.py | TingPing/plugins,TingPing/plugins | HexChat/logpm.py | HexChat/logpm.py | import hexchat
__module_name__ = "LogPMs"
__module_author__ = "TingPing"
__module_version__ = "1"
__module_description__ = "Auto log pm's"
def open_cb(word, word_eol, userdata):
chan = hexchat.get_info('channel')
# Assume nick if not prefixed with #
# Use existing pref for nicks I usually ignore (i.e. chanserv)
if chan and chan[0] != '#' and chan != '>>python<<' and chan not in hexchat.get_prefs('irc_no_hilight'):
hexchat.command('chanopt text_logging on')
hexchat.hook_print("Open Context", open_cb)
| import hexchat
__module_name__ = "LogPMs"
__module_author__ = "TingPing"
__module_version__ = "1"
__module_description__ = "Auto log pm's"
def open_cb(word, word_eol, userdata):
chan = hexchat.get_info('channel')
# Assume nick if not prefixed with #
# Use existing pref for nicks I usually ignore (i.e. chanserv)
if chan and chan[0] != '#' and chan not in hexchat.get_prefs('irc_no_hilight'):
hexchat.command('chanopt text_logging on')
hexchat.hook_print("Open Context", open_cb)
| mit | Python |
8583e1fec3fb1da321687acd37f92bd9d988bb10 | Enable adding/removing boops with command during runtime | DesertBot/DesertBot | desertbot/modules/automatic/Boops.py | desertbot/modules/automatic/Boops.py | from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule, ignore
from desertbot.modules.commandinterface import admin
from zope.interface import implementer
import random
import re
from desertbot.message import IRCMessage
from desertbot.response import IRCResponse, ResponseType
@implementer(IPlugin, IModule)
class Boops(BotModule):
def actions(self):
return super(Boops, self).actions() + [('message-channel', 1, self.respond),
('message-user', 1, self.respond),
('action-channel', 1, self.respond),
('action-user', 1, self.respond)]
def help(self, arg: list) -> str:
return f"Responds to boops. Admins may use {self.bot.commandChar}boop add/remove <url> to add and remove boops."
@ignore
def respond(self, message: IRCMessage) -> IRCResponse:
# TODO store boops in self.bot.storage['boops'] as a dict of boopName to boopUrl, for easier identification in add/remove
if message.command == "boop":
subcommand = message.parameterList[0]
if subcommand == "add":
return self._addBoop(message)
elif subcommand == "remove":
return self._removeBoop(message)
else:
return IRCResponse(ResponseType.Say, self.help(message.parameterList), message.replyTo)
else:
match = re.search('(^|[^\w])b[o0]{2,}ps?([^\w]|$)', message.messageString, re.IGNORECASE)
if match:
return IRCResponse(ResponseType.Say, f"Boop! {random.choice(self.bot.storage['boops'])}", message.replyTo)
@admin("Only my admins may add boops!")
def _addBoop(self, message: IRCMessage) -> IRCResponse:
self.bot.storage['boops'].append(message.parameterList[1])
return IRCResponse(ResponseType.Say, f"Added {message.parameterList[1]} to the list of boops!", message.replyTo)
@admin("Only my admins may remove boops!")
def _removeBoop(self, message: IRCMessage) -> IRCResponse:
if message.parameterList[1] in self.bot.storage['boops']:
self.bot.storage['boops'].remove(message.parameterList[1])
return IRCResponse(ResponseType.Say, f"Removed {message.parameterList[1]} from the list of boops!", message.replyTo)
else:
return IRCResponse(ResponseType.Say, f"Couldn't find {message.parameterList[1]} in the list of boops, did you maybe do a typo?", message.replyTo)
boop = Boops()
| from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule, ignore
from zope.interface import implementer
import random
import re
from desertbot.message import IRCMessage
from desertbot.response import IRCResponse, ResponseType
@implementer(IPlugin, IModule)
class Boops(BotModule):
def actions(self):
return super(Boops, self).actions() + [('message-channel', 1, self.respond),
('message-user', 1, self.respond),
('action-channel', 1, self.respond),
('action-user', 1, self.respond)]
def help(self, arg):
return 'Responds to boops.'
@ignore
def respond(self, message: IRCMessage) -> IRCResponse:
match = re.search('(^|[^\w])b[o0]{2,}ps?([^\w]|$)', message.messageString, re.IGNORECASE)
if match:
return IRCResponse(ResponseType.Say, f"Boop! {random.choice(self.bot.storage['boops'])}", message.replyTo)
boop = Boops()
| mit | Python |
6adae0b495f93f00da7ae42e54c14eaaceed435e | Cut down on number of imports | ioam/svn-history,ioam/svn-history,ioam/svn-history,ioam/svn-history,ioam/svn-history | topo/tk/__init__.py | topo/tk/__init__.py | # Tk based GUI support files
#
# $Id$
# For importing the tk GUI files
import topo.tk.propertiesframe
import topo.tk.taggedslider
import topo.tk.topoconsole
import topo.tk.plotpanel
# For show_cmd_prompt() and start()
import Pmw, sys, Tkinter
import topo.simulator
import topo.base
def show_cmd_prompt():
"""
Small helper to print the sys.ps1 prompt to the command-line.
Useful after a bunch of output has been displayed to stdout,
so as to let the user know that the command-line is still
active.
"""
if topo.base.min_print_level >= topo.base.MESSAGE:
print "\n", sys.ps1,
sys.stdout.flush()
def start(sim=None, mainloop=False):
"""
Startup code for GUI.
sim: Adds a simulation object into the GUI's active_simulator
variable This simulator will be the one that the GUI polls for
plots and other types of data.
mainloop: If True, then the command-line is frozen while the GUI
is open. If False, then commands can be entered at the command-line
even while the GUI is operational. Default is False.
"""
assert isinstance(sim,topo.simulator.Simulator) or sim == None, 'sim is not a Simulator object'
root = Tkinter.Tk()
root.resizable(1,1)
Pmw.initialise(root)
console = topo.tk.topoconsole.TopoConsole(parent=root)
console.pack(expand=Tkinter.YES,fill=Tkinter.BOTH)
console.set_active_simulator(sim)
# mainloop() freezes the commandline until the GUI window exits.
# Without this line the command-line remains responsive.
if mainloop:
console.mainloop()
return console
#######################
if __name__ == '__main__':
start(mainloop=True)
| # Tk based GUI support files
#
# $Id$
import propertiesframe
import taggedslider
import topoconsole
import plotpanel
# Code block-copied over from topo/gui.py These imports should
# be cleaned up.
from Tkinter import *
import Pmw, sys
import topo.simulator as simulator
from topo.tk.topoconsole import *
def show_cmd_prompt():
"""
Small helper to print the sys.ps1 prompt to the command-line.
Useful after a bunch of output has been displayed to stdout,
so as to let the user know that the command-line is still
active.
"""
print "\n", sys.ps1,
sys.stdout.flush()
def start(sim=None, mainloop=False):
"""
Startup code for GUI.
sim: Adds a simulation object into the GUI's active_simulator
variable This simulator will be the one that the GUI polls for
plots and other types of data.
mainloop: If True, then the command-line is frozen while the GUI
is open. If False, then commands can be entered at the command-line
even while the GUI is operational. Default is False.
"""
assert isinstance(sim,simulator.Simulator) or sim == None, 'sim is not a Simulator object'
root = Tk()
root.resizable(1,1)
Pmw.initialise(root)
console = TopoConsole(parent=root)
console.pack(expand=YES,fill=BOTH)
console.set_active_simulator(sim)
# mainloop() freezes the commandline until the GUI window exits.
# Without this line the command-line remains responsive.
if mainloop:
console.mainloop()
return console
#######################
if __name__ == '__main__':
start(mainloop=True)
| bsd-3-clause | Python |
8c9996ad4450de2f444bf9eb4db64136329055cc | fix auto DB finder | WilmerLab/HTSOHM-dev,WilmerLab/HTSOHM-dev | htsohm/db/__init__.py | htsohm/db/__init__.py | from datetime import datetime
from glob import glob
import os
from shutil import copy2
import sys
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import yaml
__engine__ = None
__session__ = None
def get_session():
return __session__
def get_engine():
return __engine__
def get_sqlite_dbcs(database_path=None):
if database_path is None:
dbs = glob("*.db")
if len(dbs) == 0:
raise FileNotFoundError("Cannot find sqlite DBCS in the current directory: %s" % os.getcwd())
elif len(dbs) > 1:
print("WARNING: more than one *.db file found in this directory. Using first one: %s" % dbs[0])
database_path = dbs[0]
return "sqlite:///%s" % database_path
def init_database(connection_string, backup=False):
global __engine__
global __session__
if connection_string[0:10] == "sqlite:///":
print(
'WARNING: attempting to use SQLite database! Okay for local debugging\n' +
'but will not work with multiple workers, due to lack of locking features.',
file=sys.stderr
)
db_path = connection_string[10:]
if backup and os.path.exists(db_path):
backup_path = db_path + "." + datetime.now().isoformat() + ".backup"
copy2(db_path, backup_path)
print("backing up prexisting database file %s to %s" % (db_path, backup_path))
__engine__ = create_engine(connection_string)
__session__ = sessionmaker(bind=__engine__)()
# Create tables in the engine, if they don't exist already.
Base.metadata.create_all(__engine__)
Base.metadata.bind = __engine__
# Import all models
from htsohm.db.base import Base
from htsohm.db.atom_sites import AtomSites
from htsohm.db.lennard_jones import LennardJones
from htsohm.db.gas_loading import GasLoading
from htsohm.db.surface_area import SurfaceArea
from htsohm.db.void_fraction import VoidFraction
from htsohm.db.material import Material
from htsohm.db.structure import Structure
| from datetime import datetime
from glob import glob
import os
from shutil import copy2
import sys
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import yaml
__engine__ = None
__session__ = None
def get_session():
return __session__
def get_engine():
return __engine__
def get_sqlite_dbcs(database_path=None):
if database_path is None:
dbs = glob("*.db")
if len(dbs) > 1:
print("WARNING: more than one *.db file found in this directory. Using first one: %s" % dbs[0])
path = dbs[0]
return "sqlite:///%s" % database_path
def init_database(connection_string, backup=False):
global __engine__
global __session__
if connection_string[0:10] == "sqlite:///":
print(
'WARNING: attempting to use SQLite database! Okay for local debugging\n' +
'but will not work with multiple workers, due to lack of locking features.',
file=sys.stderr
)
db_path = connection_string[10:]
if backup and os.path.exists(db_path):
backup_path = db_path + "." + datetime.now().isoformat() + ".backup"
copy2(db_path, backup_path)
print("backing up prexisting database file %s to %s" % (db_path, backup_path))
__engine__ = create_engine(connection_string)
__session__ = sessionmaker(bind=__engine__)()
# Create tables in the engine, if they don't exist already.
Base.metadata.create_all(__engine__)
Base.metadata.bind = __engine__
# Import all models
from htsohm.db.base import Base
from htsohm.db.atom_sites import AtomSites
from htsohm.db.lennard_jones import LennardJones
from htsohm.db.gas_loading import GasLoading
from htsohm.db.surface_area import SurfaceArea
from htsohm.db.void_fraction import VoidFraction
from htsohm.db.material import Material
from htsohm.db.structure import Structure
| mit | Python |
f05dc9faf43598bed5ce37d721f67b31f3e8eccb | Index parse to int | uncovertruth/django-horizon | horizon/settings.py | horizon/settings.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.utils.lru_cache import lru_cache
CONFIG_DEFAULTS = {
'GROUPS': {},
'METADATA_MODEL': None,
}
@lru_cache()
def get_config():
USER_CONFIG = getattr(settings, 'HORIZONTAL_CONFIG', {})
CONFIG = CONFIG_DEFAULTS.copy()
CONFIG.update(USER_CONFIG)
for name, horizontal_group in CONFIG['GROUPS'].items():
horizontal_group['DATABASE_SET'] = set()
for key, member in horizontal_group['DATABASES'].items():
horizontal_group['DATABASE_SET'].add(member['write'])
horizontal_group['DATABASE_SET'].update(member.get('read', []))
if 'read' not in member:
member['read'] = [member['write']]
if 'PICKABLES' not in horizontal_group:
horizontal_group['PICKABLES'] = [int(i) for i in list(horizontal_group['DATABASES'].keys())]
return CONFIG
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.utils.lru_cache import lru_cache
CONFIG_DEFAULTS = {
'GROUPS': {},
'METADATA_MODEL': None,
}
@lru_cache()
def get_config():
USER_CONFIG = getattr(settings, 'HORIZONTAL_CONFIG', {})
CONFIG = CONFIG_DEFAULTS.copy()
CONFIG.update(USER_CONFIG)
for name, horizontal_group in CONFIG['GROUPS'].items():
horizontal_group['DATABASE_SET'] = set()
for key, member in horizontal_group['DATABASES'].items():
horizontal_group['DATABASE_SET'].add(member['write'])
horizontal_group['DATABASE_SET'].update(member.get('read', []))
if 'read' not in member:
member['read'] = [member['write']]
if 'PICKABLES' not in horizontal_group:
horizontal_group['PICKABLES'] = list(horizontal_group['DATABASES'].keys())
return CONFIG
| mit | Python |
f5da38b312a730bf268ad0d8b49c5306f13081f2 | Fix leftover session query in services view | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | scoring_engine/web/views/services.py | scoring_engine/web/views/services.py | from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
from scoring_engine.db import session
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
service = session.query(Service).get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', id=id, service=service)
| from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
service = Service.query.get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', id=id, service=service)
| mit | Python |
5a1a65dfba3adbbf0942b71bc71b5141adb2c4bf | use ranking helper | knifeofdreams/poker-player-thedeadparrot | player.py | player.py | import json
import logging
from random import randint
import sys
from ranking_helper import RankingHelper
logging.basicConfig(format='%(levelname)s %(lineno)d:%(funcName)s %(message)s')
log = logging.getLogger('player.Player')
log.addHandler(logging.StreamHandler(sys.stderr))
log.setLevel(logging.DEBUG)
class Player:
VERSION = "Cautios parrot"
def betRequest(self, game_state):
in_action = game_state['in_action']
current_player = game_state['players'][in_action]
cards = current_player['hole_cards']
helper = RankingHelper(cards)
call_value = game_state['current_buy_in'] - current_player['bet'] + game_state['minimum_raise']
if helper.is_pair():
if cards[0]['rank'] in ("Q", "K", "A"):
log.info('All in (or at least 1000)')
return call_value + randint(100, 200)
log.info('Call!')
return call_value
log.info('Fold!')
return 0
def showdown(self, game_state):
log.info('showdown')
log.info("number_of_community_cards %d", len(game_state['community_cards']))
| import json
import logging
from random import randint
import sys
logging.basicConfig(format='%(levelname)s %(lineno)d:%(funcName)s %(message)s')
log = logging.getLogger('player.Player')
log.addHandler(logging.StreamHandler(sys.stderr))
log.setLevel(logging.DEBUG)
class Player:
VERSION = "Cautios parrot"
def betRequest(self, game_state):
in_action = game_state['in_action']
current_player = game_state['players'][in_action]
cards = current_player['hole_cards']
call_value = game_state['current_buy_in'] - current_player['bet'] + game_state['minimum_raise']
if cards[0]['rank'] == cards[1]['rank']:
if cards[0]['rank'] in ("Q", "K", "A"):
log.info('All in (or at least 1000)')
return call_value + randint(100, 200)
log.info('Call!')
return call_value
log.info('Fold!')
return 0
def showdown(self, game_state):
log.info('showdown')
log.info("number_of_community_cards %d", len(game_state['community_cards']))
| mit | Python |
91d7c4182de70ced147296cc47cd7be26dd48985 | speed up preload_sites.py maintenance script | wikimedia/pywikibot-core,wikimedia/pywikibot-core | scripts/maintenance/preload_sites.py | scripts/maintenance/preload_sites.py | #!/usr/bin/python
"""Script that preloads site and user info for all sites of given family.
The following parameters are supported:
-worker:<num> The number of parallel tasks to be run. Default is the
number of precessors on the machine
Usage:
python pwb.py preload_sites [{<family>}] [-worker{<num>}]
To force preloading, change the global expiry value to 0:
python pwb.py -API_config_expiry:0 preload_sites [{<family>}]
"""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from concurrent.futures import ThreadPoolExecutor, wait
from datetime import datetime
import pywikibot
from pywikibot.family import Family
# supported families by this script
families_list = [
'wikibooks',
'wikinews',
'wikipedia',
'wikiquote',
'wikisource',
'wikiversity',
'wikivoyage',
'wiktionary',
]
exceptions = {
}
def preload_family(family, executor):
"""Preload all sites of a single family file."""
msg = 'Preloading sites of {} family{}'
pywikibot.output(msg.format(family, '...'))
codes = Family.load(family).languages_by_size
for code in exceptions.get(family, []):
if code in codes:
codes.remove(code)
obsolete = Family.load(family).obsolete
futures = set()
for code in codes:
if code not in obsolete:
site = pywikibot.Site(code, family)
# page title does not care
futures.add(executor.submit(pywikibot.Page, site, 'Main page'))
wait(futures)
pywikibot.output(msg.format(family, ' completed.'))
def preload_families(families, worker):
"""Preload all sites of all given family files."""
start = datetime.now()
with ThreadPoolExecutor(worker) as executor:
futures = {executor.submit(preload_family, family, executor)
for family in families}
wait(futures)
pywikibot.output('Loading time used: {}'.format(datetime.now() - start))
if __name__ == '__main__':
fam = set()
worker = None
for arg in pywikibot.handle_args():
if arg in families_list:
fam.add(arg)
elif arg.startswith('-worker'):
worker = int(arg.partition(':')[2])
preload_families(fam or families_list, worker)
| #!/usr/bin/python
"""Script that preloads site and user info for all sites of given family.
The following parameters are supported:
-worker:<num> The number of parallel tasks to be run. Default is the
number of precessors on the machine
Usage:
python pwb.py preload_sites [{<family>}] [-worker{<num>}]
To force preloading, change the global expiry value to 0:
python pwb.py -API_config_expiry:0 preload_sites [{<family>}]
"""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from concurrent.futures import ThreadPoolExecutor, wait
from datetime import datetime
import pywikibot
from pywikibot.family import Family
# supported families by this script
families_list = [
'wikibooks',
'wikinews',
'wikipedia',
'wikiquote',
'wikisource',
'wikiversity',
'wikivoyage',
'wiktionary',
]
exceptions = {
}
def preload_family(family):
"""Preload all sites of a single family file."""
msg = 'Preloading sites of {} family{}'
pywikibot.output(msg.format(family, '...'))
codes = Family.load(family).languages_by_size
for code in exceptions.get(family, []):
if code in codes:
codes.remove(code)
obsolete = Family.load(family).obsolete
for code in codes:
if code not in obsolete:
site = pywikibot.Site(code, family)
pywikibot.Page(site, 'Main page') # title does not care
pywikibot.output(msg.format(family, ' completed.'))
def preload_families(families, worker):
"""Preload all sites of all given family files."""
start = datetime.now()
with ThreadPoolExecutor(worker) as executor:
futures = {executor.submit(preload_family, family):
family for family in families}
wait(futures)
pywikibot.output('Loading time used: {}'.format(datetime.now() - start))
if __name__ == '__main__':
fam = set()
worker = None
for arg in pywikibot.handle_args():
if arg in families_list:
fam.add(arg)
elif arg.startswith('-worker'):
worker = int(arg.partition(':')[2])
preload_families(fam or families_list, worker)
| mit | Python |
5977adef1c792c1f7afe137eff3809ef4d57dfb5 | Add live to excluded environments in migration | alphagov/notifications-api,alphagov/notifications-api | migrations/versions/0300_migrate_org_types.py | migrations/versions/0300_migrate_org_types.py | import os
"""
Revision ID: 0300_migrate_org_types
Revises: 0299_org_types_table
Create Date: 2019-07-24 16:18:27.467361
"""
from alembic import op
import sqlalchemy as sa
revision = '0300_migrate_org_types'
down_revision = '0299_org_types_table'
environment = os.environ['NOTIFY_ENVIRONMENT']
def upgrade():
if environment not in ["live", "production"]:
op.execute("""
UPDATE
organisation
SET
organisation_type = 'nhs_local'
WHERE
organisation.organisation_type = 'nhs'
""")
op.execute("""
UPDATE
services
SET
organisation_type = 'nhs_local'
WHERE
services.organisation_type = 'nhs'
""")
op.alter_column('organisation_types', 'name', existing_type=sa.VARCHAR(), type_=sa.String(length=255))
op.create_foreign_key(
'organisation_organisation_type_fkey', 'organisation', 'organisation_types', ['organisation_type'], ['name']
)
op.create_foreign_key(
'services_organisation_type_fkey', 'services', 'organisation_types', ['organisation_type'], ['name']
)
def downgrade():
op.drop_constraint('services_organisation_type_fkey', 'services', type_='foreignkey')
op.drop_constraint('organisation_organisation_type_fkey', 'organisation', type_='foreignkey')
op.alter_column('organisation_types', 'name', existing_type=sa.String(length=255), type_=sa.VARCHAR())
if environment not in ["live", "production"]:
op.execute("""
UPDATE
organisation
SET
organisation_type = 'nhs'
WHERE
organisation_type = 'nhs_local'
""")
op.execute("""
UPDATE
services
SET
organisation_type = 'nhs'
WHERE
organisation_type = 'nhs_local'
""")
| import os
"""
Revision ID: 0300_migrate_org_types
Revises: 0299_org_types_table
Create Date: 2019-07-24 16:18:27.467361
"""
from alembic import op
import sqlalchemy as sa
revision = '0300_migrate_org_types'
down_revision = '0299_org_types_table'
environment = os.environ['NOTIFY_ENVIRONMENT']
def upgrade():
if environment != "production":
op.execute("""
UPDATE
organisation
SET
organisation_type = 'nhs_local'
WHERE
organisation.organisation_type = 'nhs'
""")
op.execute("""
UPDATE
services
SET
organisation_type = 'nhs_local'
WHERE
services.organisation_type = 'nhs'
""")
op.alter_column('organisation_types', 'name', existing_type=sa.VARCHAR(), type_=sa.String(length=255))
op.create_foreign_key(
'organisation_organisation_type_fkey', 'organisation', 'organisation_types', ['organisation_type'], ['name']
)
op.create_foreign_key(
'services_organisation_type_fkey', 'services', 'organisation_types', ['organisation_type'], ['name']
)
def downgrade():
op.drop_constraint('services_organisation_type_fkey', 'services', type_='foreignkey')
op.drop_constraint('organisation_organisation_type_fkey', 'organisation', type_='foreignkey')
op.alter_column('organisation_types', 'name', existing_type=sa.String(length=255), type_=sa.VARCHAR())
if environment != "production":
op.execute("""
UPDATE
organisation
SET
organisation_type = 'nhs'
WHERE
organisation_type = 'nhs_local'
""")
op.execute("""
UPDATE
services
SET
organisation_type = 'nhs'
WHERE
organisation_type = 'nhs_local'
""")
| mit | Python |
2ca0aa4f29c20f21cce7337b851481bdfaea9a67 | Remove hubsbpot tracking URL params | dalf/searx,dalf/searx,dalf/searx,dalf/searx | searx/plugins/tracker_url_remover.py | searx/plugins/tracker_url_remover.py | '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
'''
from flask_babel import gettext
import re
from urllib.parse import urlunparse, parse_qsl, urlencode
regexes = {re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'(_hsenc|_hsmi|hsCtaTracking|__hssc|__hstc|__hsfp)[^&]*'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
description = gettext('Remove trackers arguments from the returned URL')
default_on = True
preference_section = 'privacy'
def on_result(request, search, result):
if 'parsed_url' not in result:
return True
query = result['parsed_url'].query
if query == "":
return True
parsed_query = parse_qsl(query)
changes = 0
for i, (param_name, _) in enumerate(list(parsed_query)):
for reg in regexes:
if reg.match(param_name):
parsed_query.pop(i - changes)
changes += 1
result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
result['url'] = urlunparse(result['parsed_url'])
break
return True
| '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
'''
from flask_babel import gettext
import re
from urllib.parse import urlunparse, parse_qsl, urlencode
regexes = {re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
description = gettext('Remove trackers arguments from the returned URL')
default_on = True
preference_section = 'privacy'
def on_result(request, search, result):
if 'parsed_url' not in result:
return True
query = result['parsed_url'].query
if query == "":
return True
parsed_query = parse_qsl(query)
changes = 0
for i, (param_name, _) in enumerate(list(parsed_query)):
for reg in regexes:
if reg.match(param_name):
parsed_query.pop(i - changes)
changes += 1
result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
result['url'] = urlunparse(result['parsed_url'])
break
return True
| agpl-3.0 | Python |
73673598e1998252b16b48d31b800ab0fb441392 | Add documentation for the control system | willrogers/pml,willrogers/pml | pml/cs.py | pml/cs.py | """
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
It uses channel access to comunicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
| """
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device
It uses channel access to comunicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
raise NotImplementedError()
def put(self, pv, value):
raise NotImplementedError()
class NullControlSystem(ControlSystem):
def __init__(self):
pass
def get(self, pv):
pass
def put(self, pv, value):
pass
| apache-2.0 | Python |
6157be23dce8857963150d8f162978d967f22bd2 | Move imports in cloudflare integration(#27882) | titilambert/home-assistant,leppa/home-assistant,nkgilley/home-assistant,GenericStudent/home-assistant,tchellomello/home-assistant,FreekingDean/home-assistant,titilambert/home-assistant,tboyce021/home-assistant,soldag/home-assistant,postlund/home-assistant,FreekingDean/home-assistant,robbiet480/home-assistant,home-assistant/home-assistant,qedi-r/home-assistant,pschmitt/home-assistant,turbokongen/home-assistant,soldag/home-assistant,mKeRix/home-assistant,partofthething/home-assistant,rohitranjan1991/home-assistant,tboyce021/home-assistant,qedi-r/home-assistant,kennedyshead/home-assistant,turbokongen/home-assistant,lukas-hetzenecker/home-assistant,partofthething/home-assistant,joopert/home-assistant,w1ll1am23/home-assistant,sander76/home-assistant,sdague/home-assistant,mKeRix/home-assistant,tboyce1/home-assistant,sander76/home-assistant,tchellomello/home-assistant,mezz64/home-assistant,Danielhiversen/home-assistant,mezz64/home-assistant,kennedyshead/home-assistant,Teagan42/home-assistant,mKeRix/home-assistant,home-assistant/home-assistant,leppa/home-assistant,toddeye/home-assistant,balloob/home-assistant,adrienbrault/home-assistant,pschmitt/home-assistant,tboyce1/home-assistant,GenericStudent/home-assistant,balloob/home-assistant,jawilson/home-assistant,jawilson/home-assistant,postlund/home-assistant,rohitranjan1991/home-assistant,robbiet480/home-assistant,sdague/home-assistant,mKeRix/home-assistant,balloob/home-assistant,tboyce1/home-assistant,Danielhiversen/home-assistant,adrienbrault/home-assistant,tboyce1/home-assistant,nkgilley/home-assistant,joopert/home-assistant,Teagan42/home-assistant,rohitranjan1991/home-assistant,toddeye/home-assistant,aronsky/home-assistant,lukas-hetzenecker/home-assistant,w1ll1am23/home-assistant,aronsky/home-assistant | homeassistant/components/cloudflare/__init__.py | homeassistant/components/cloudflare/__init__.py | """Update the IP addresses of your Cloudflare DNS records."""
from datetime import timedelta
import logging
from pycfdns import CloudflareUpdater
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_EMAIL, CONF_ZONE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
CONF_RECORDS = "records"
DOMAIN = "cloudflare"
INTERVAL = timedelta(minutes=60)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ZONE): cv.string,
vol.Required(CONF_RECORDS): vol.All(cv.ensure_list, [cv.string]),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Cloudflare component."""
cfupdate = CloudflareUpdater()
email = config[DOMAIN][CONF_EMAIL]
key = config[DOMAIN][CONF_API_KEY]
zone = config[DOMAIN][CONF_ZONE]
records = config[DOMAIN][CONF_RECORDS]
def update_records_interval(now):
"""Set up recurring update."""
_update_cloudflare(cfupdate, email, key, zone, records)
def update_records_service(now):
"""Set up service for manual trigger."""
_update_cloudflare(cfupdate, email, key, zone, records)
track_time_interval(hass, update_records_interval, INTERVAL)
hass.services.register(DOMAIN, "update_records", update_records_service)
return True
def _update_cloudflare(cfupdate, email, key, zone, records):
"""Update DNS records for a given zone."""
_LOGGER.debug("Starting update for zone %s", zone)
headers = cfupdate.set_header(email, key)
_LOGGER.debug("Header data defined as: %s", headers)
zoneid = cfupdate.get_zoneID(headers, zone)
_LOGGER.debug("Zone ID is set to: %s", zoneid)
update_records = cfupdate.get_recordInfo(headers, zoneid, zone, records)
_LOGGER.debug("Records: %s", update_records)
result = cfupdate.update_records(headers, zoneid, update_records)
_LOGGER.debug("Update for zone %s is complete", zone)
if result is not True:
_LOGGER.warning(result)
| """Update the IP addresses of your Cloudflare DNS records."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_EMAIL, CONF_ZONE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
CONF_RECORDS = "records"
DOMAIN = "cloudflare"
INTERVAL = timedelta(minutes=60)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ZONE): cv.string,
vol.Required(CONF_RECORDS): vol.All(cv.ensure_list, [cv.string]),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Cloudflare component."""
from pycfdns import CloudflareUpdater
cfupdate = CloudflareUpdater()
email = config[DOMAIN][CONF_EMAIL]
key = config[DOMAIN][CONF_API_KEY]
zone = config[DOMAIN][CONF_ZONE]
records = config[DOMAIN][CONF_RECORDS]
def update_records_interval(now):
"""Set up recurring update."""
_update_cloudflare(cfupdate, email, key, zone, records)
def update_records_service(now):
"""Set up service for manual trigger."""
_update_cloudflare(cfupdate, email, key, zone, records)
track_time_interval(hass, update_records_interval, INTERVAL)
hass.services.register(DOMAIN, "update_records", update_records_service)
return True
def _update_cloudflare(cfupdate, email, key, zone, records):
"""Update DNS records for a given zone."""
_LOGGER.debug("Starting update for zone %s", zone)
headers = cfupdate.set_header(email, key)
_LOGGER.debug("Header data defined as: %s", headers)
zoneid = cfupdate.get_zoneID(headers, zone)
_LOGGER.debug("Zone ID is set to: %s", zoneid)
update_records = cfupdate.get_recordInfo(headers, zoneid, zone, records)
_LOGGER.debug("Records: %s", update_records)
result = cfupdate.update_records(headers, zoneid, update_records)
_LOGGER.debug("Update for zone %s is complete", zone)
if result is not True:
_LOGGER.warning(result)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.