commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
983ba079bb99da836f64fc2667100c66a2441f0c | use python logging module. | geekan/task-manager,geekan/task-manager,geekan/task-manager,geekan/task-manager | task_manager/task_processor/views.py | task_manager/task_processor/views.py | from django.shortcuts import render
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Q
from models import ImageNeuralTask
from time import strftime, localtime
import logging
l = logging.getLogger(__name__)
# Create your views here.
def merge_dicts(*dict_args):
'''
Given any number of dicts, shallow copy and merge into a new dict,
precedence goes to key value pairs in latter dicts.
'''
result = {}
for dictionary in dict_args:
result.update(dictionary)
return result
def index(request):
tasks = list(ImageNeuralTask.objects.all().values())
l.debug(tasks)
return HttpResponse('<br/>'.join([str(task) for task in tasks]))
@csrf_exempt
def neural_task(request, *args, **kwargs):
l.info(args, kwargs, request.POST, request.GET)
good_paras = ['image_url', 'image_id', 'style_image_path', 'user_id']
para_dict = {k: request.REQUEST.get(k, '') for k in good_paras}
para_dict['create_time'] = strftime("%Y-%m-%d %H:%M:%S", localtime())
task = ImageNeuralTask(**para_dict)
task.save()
return index(request)
@csrf_exempt
def neural_task_clean(request, *args, **kwargs):
l.info(args, kwargs, request.POST, request.GET)
ImageNeuralTask.objects.filter(Q(image_id='') | Q(user_id='')).delete()
return index(request)
| from django.shortcuts import render
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Q
from models import ImageNeuralTask
from time import strftime, localtime
# Create your views here.
def merge_dicts(*dict_args):
'''
Given any number of dicts, shallow copy and merge into a new dict,
precedence goes to key value pairs in latter dicts.
'''
result = {}
for dictionary in dict_args:
result.update(dictionary)
return result
def index(request):
tasks = list(ImageNeuralTask.objects.all().values())
print(tasks)
return HttpResponse('<br/>'.join([str(task) for task in tasks]))
@csrf_exempt
def neural_task(request, *args, **kwargs):
print(args, kwargs, request.POST, request.GET)
good_paras = ['image_url', 'image_id', 'style_image_path', 'user_id']
para_dict = {k: request.REQUEST.get(k, '') for k in good_paras}
para_dict['create_time'] = strftime("%Y-%m-%d %H:%M:%S", localtime())
task = ImageNeuralTask(**para_dict)
task.save()
return index(request)
@csrf_exempt
def neural_task_clean(request, *args, **kwargs):
print(args, kwargs, request.POST, request.GET)
ImageNeuralTask.objects.filter(Q(image_id='') | Q(user_id='')).delete()
return index(request)
| mit | Python |
673f811b8325a6926ac52ffa404a74f2ec9e5e8a | Change parcellation to DKT | MadsJensen/RP_scripts,MadsJensen/RP_scripts,MadsJensen/RP_scripts | extract_ts_epochs.py | extract_ts_epochs.py | import sys
import numpy as np
import mne
from mne.minimum_norm import read_inverse_operator, apply_inverse_epochs
from my_settings import (mne_folder, epochs_folder, source_folder, conditions)
subject = sys.argv[1]
method = "dSPM"
snr = 1.
lambda2 = 1. / snr**2
# labels = mne.read_labels_from_annot(
# subject=subject, parc="PALS_B12_Brodmann", regexp="Brodmann")
labels = mne.read_labels_from_annot(subject, parc="aparc")
for condition in conditions:
inv = read_inverse_operator(mne_folder + "%s_%s-inv.fif" % (subject,
condition))
epochs = mne.read_epochs(epochs_folder + "%s_%s-epo.fif" % (subject,
condition))
stcs = apply_inverse_epochs(
epochs["press"], inv, lambda2, method=method, pick_ori=None)
ts = [
mne.extract_label_time_course(
stc, labels, inv["src"], mode="mean_flip") for stc in stcs
]
# for h, tc in enumerate(ts):
# for j, t in enumerate(tc):
# t *= np.sign(t[np.argmax(np.abs(t))])
# tc[j, :] = t
# ts[h] = tc
ts = np.asarray(ts)
# stc.save(source_folder + "%s_%s_ar_epo" % (subject, condition))
np.save(source_folder + "ave_ts/%s_%s_ts_DKT-epo.npy" %
(subject, condition), ts)
del epochs
del stcs
del ts
| import sys
import numpy as np
import mne
from mne.minimum_norm import read_inverse_operator, apply_inverse_epochs
from my_settings import (mne_folder, epochs_folder, source_folder, conditions)
subject = sys.argv[1]
method = "dSPM"
snr = 1.
lambda2 = 1. / snr**2
labels = mne.read_labels_from_annot(
subject=subject, parc="PALS_B12_Brodmann", regexp="Brodmann")
for condition in conditions:
inv = read_inverse_operator(mne_folder + "%s_%s_ar-inv.fif" % (subject,
condition))
epochs = mne.read_epochs(epochs_folder + "%s_%s_ar-epo.fif" % (subject,
condition))
# epochs.resample(500)
stcs = apply_inverse_epochs(
epochs["press"], inv, lambda2, method=method, pick_ori=None)
ts = [
mne.extract_label_time_course(
stc, labels, inv["src"], mode="mean_flip") for stc in stcs
]
# for h, tc in enumerate(ts):
# for j, t in enumerate(tc):
# t *= np.sign(t[np.argmax(np.abs(t))])
# tc[j, :] = t
# ts[h] = tc
ts = np.asarray(ts)
# stc.save(source_folder + "%s_%s_ar_epo" % (subject, condition))
np.save(source_folder + "ave_ts/%s_%s_ar_ts-epo.npy" %
(subject, condition), ts)
del epochs
del stcs
del ts
| bsd-3-clause | Python |
0a9fc9e4d8e43954b10db5668dacb96e68a4ef4f | fix add media with unknown fields | PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature | backend/geonature/core/gn_commons/schemas.py | backend/geonature/core/gn_commons/schemas.py | from marshmallow import pre_load, fields, EXCLUDE
from pypnnomenclature.schemas import NomenclatureSchema
from pypnusershub.schemas import UserSchema
from geonature.utils.env import MA
from geonature.core.gn_commons.models import TModules, TMedias, TValidations
class ModuleSchema(MA.SQLAlchemyAutoSchema):
class Meta:
model = TModules
load_instance = True
exclude = (
"module_picto",
"module_desc",
"module_group",
"module_path",
"module_external_url",
"module_target",
"module_comment",
"active_frontend",
"active_backend",
"module_doc_url",
"module_order",
)
class MediaSchema(MA.SQLAlchemyAutoSchema):
class Meta:
model = TMedias
load_instance = True
include_fk = True
unknown = EXCLUDE
meta_create_date = fields.DateTime(dump_only=True)
meta_update_date = fields.DateTime(dump_only=True)
@pre_load
def make_media(self, data, **kwargs):
if data.get("id_media") is None:
data.pop("id_media", None)
return data
class TValidationSchema(MA.SQLAlchemyAutoSchema):
class Meta:
model = TValidations
load_instance = True
include_fk = True
validation_label = fields.Nested(
NomenclatureSchema,
dump_only=True
)
validator_role = MA.Nested(
UserSchema,
dump_only=True
) | from marshmallow import pre_load, fields
from pypnnomenclature.schemas import NomenclatureSchema
from pypnusershub.schemas import UserSchema
from geonature.utils.env import MA
from geonature.core.gn_commons.models import TModules, TMedias, TValidations
class ModuleSchema(MA.SQLAlchemyAutoSchema):
class Meta:
model = TModules
load_instance = True
exclude = (
"module_picto",
"module_desc",
"module_group",
"module_path",
"module_external_url",
"module_target",
"module_comment",
"active_frontend",
"active_backend",
"module_doc_url",
"module_order",
)
class MediaSchema(MA.SQLAlchemyAutoSchema):
class Meta:
model = TMedias
load_instance = True
include_fk = True
meta_create_date = fields.DateTime(dump_only=True)
meta_update_date = fields.DateTime(dump_only=True)
@pre_load
def make_media(self, data, **kwargs):
if data.get("id_media") is None:
data.pop("id_media", None)
return data
class TValidationSchema(MA.SQLAlchemyAutoSchema):
class Meta:
model = TValidations
load_instance = True
include_fk = True
validation_label = fields.Nested(
NomenclatureSchema,
dump_only=True
)
validator_role = MA.Nested(
UserSchema,
dump_only=True
) | bsd-2-clause | Python |
2835e9bf17d435c3d60e3a9c05ab65e3801c0e77 | Add py-setuptools-scm 3.3.3 (#13183) | iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/py-setuptools-scm/package.py | var/spack/repos/builtin/packages/py-setuptools-scm/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PySetuptoolsScm(PythonPackage):
"""The blessed package to manage your versions by scm tags."""
homepage = "https://github.com/pypa/setuptools_scm"
url = "https://pypi.io/packages/source/s/setuptools_scm/setuptools_scm-3.3.3.tar.gz"
import_modules = ['setuptools_scm']
version('3.3.3', sha256='bd25e1fb5e4d603dcf490f1fde40fb4c595b357795674c3e5cb7f6217ab39ea5')
version('3.1.0', sha256='1191f2a136b5e86f7ca8ab00a97ef7aef997131f1f6d4971be69a1ef387d8b40')
version('1.15.6', sha256='49ab4685589986a42da85706b3311a2f74f1af567d39fee6cb1e088d7a75fb5f')
depends_on('py-setuptools', type='build')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PySetuptoolsScm(PythonPackage):
"""The blessed package to manage your versions by scm tags."""
homepage = "https://github.com/pypa/setuptools_scm"
url = "https://pypi.io/packages/source/s/setuptools_scm/setuptools_scm-3.1.0.tar.gz"
import_modules = ['setuptools_scm']
version('3.1.0', sha256='1191f2a136b5e86f7ca8ab00a97ef7aef997131f1f6d4971be69a1ef387d8b40')
version('1.15.6', sha256='49ab4685589986a42da85706b3311a2f74f1af567d39fee6cb1e088d7a75fb5f')
depends_on('py-setuptools', type='build')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
| lgpl-2.1 | Python |
5f6ca3bb95e371afd2bbf97c690cde393e7c4b24 | fix cooccur process script | patverga/torch-relation-extraction,patverga/torch-relation-extraction,patverga/torch-relation-extraction | bin/process/co-occuring-mentions-from-mtx.py | bin/process/co-occuring-mentions-from-mtx.py | import sys
import getopt
import itertools
import random
def file_to_ep_dict(fname):
entity_pairs = {}
with open(fname) as f:
for line in f:
fields = line.strip().split("\t")
ep = (fields[0], fields[1])
if ep not in entity_pairs:
entity_pairs[ep] = []
entity_pairs[ep].append(fields[2])
return entity_pairs
# for entity pairs with many mentions, randomly subsample the combinations
def iter_sample_fast(iterable, samplesize):
results = []
iterator = iter(iterable)
# Fill in the first samplesize elements:
try:
for _ in xrange(samplesize):
results.append(iterator.next())
except StopIteration:
random.shuffle(results)
return results
random.shuffle(results) # Randomize their positions
for i, v in enumerate(iterator, samplesize):
r = random.randint(0, i)
if r < samplesize:
results[r] = v # at a decreasing rate, replace random items
return results
def main(argv):
in_file = ''
out_file = ''
max_samples = 1000
help_msg = 'CandidateWildCardArgs.py -i <inFile> -o <outputfile>'
try:
opts, args = getopt.getopt(argv, "hi:o:m:", ["inFile=", "outFile="])
except getopt.GetoptError:
print help_msg
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print help_msg
sys.exit()
elif opt in ("-i", "--inFile"):
in_file = arg
elif opt in ("-o", "--outFile"):
out_file = arg
elif opt in ("-m", "--maxSamples"):
max_samples = int(arg)
print 'Processing lines from ' + in_file
entity_pairs = file_to_ep_dict(in_file)
print 'Exporting lines to ' + out_file
out = open(out_file, 'w')
for key, value in entity_pairs.iteritems():
[out.write('____\t' + pair[0] + '\t' + pair[1] + '\t1\n') for pair in
iter_sample_fast(itertools.combinations(value, 2), max_samples)]
out.close()
print 'Done'
if __name__ == "__main__":
main(sys.argv[1:])
| import sys
import getopt
import itertools
def file_to_ep_dict(fname):
entity_pairs = {}
with open(fname) as f:
for line in f:
fields = line.strip().split("\t")
ep = (fields[0], fields[1])
if ep not in entity_pairs:
entity_pairs[ep] = []
entity_pairs[ep] += fields[2]
return entity_pairs
def main(argv):
in_file = ''
out_file = ''
help_msg = 'CandidateWildCardArgs.py -i <inFile> -o <outputfile>'
try:
opts, args = getopt.getopt(argv, "hi:o:", ["inFile=", "outFile="])
except getopt.GetoptError:
print help_msg
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print help_msg
sys.exit()
elif opt in ("-i", "--inFile"):
in_file = arg
elif opt in ("-o", "--outFile"):
out_file = arg
print 'Processing lines from ' + in_file
entity_pairs = file_to_ep_dict(in_file)
print 'Exporting lines to ' + out_file
out = open(out_file, 'w')
for key, value in entity_pairs.iteritems():
[out.write('____\t' + pair[0] + '\t' + pair[1] + '\t1\n') for pair in itertools.product(value, repeat=2)]
out.close()
print 'Done'
if __name__ == "__main__":
main(sys.argv[1:])
| mit | Python |
e01175427f8615067730024b93aad834880456cd | update version | cjbrasher/LipidFinder | LipidFinder/__init__.py | LipidFinder/__init__.py | # Copyright (c) 2019 J. Alvarez-Jarreta and C.J. Brasher
#
# This file is part of the LipidFinder software tool and governed by the
# 'MIT License'. Please see the LICENSE file that should have been
# included as part of this software.
"""LipidFinder's software information."""
name = "LipidFinder"
__version__ = "2.0.1"
__author__ = ("Jorge Alvarez-Jarreta, Christopher J. Brasher, Eoin Fahy, Anne O'Connor and Anna Price")
__copyright__ = "Copyright 2019, LipidFinder"
__credits__ = ["Jorge Alvarez-Jarreta", "Eoin Fahy", "Christopher J. Brasher",
"Anne O'Connor", "Patricia Rodrigues", "Anna Price"
"Jade I. Hawksworth", "Maria Valdivia-Garcia", "Stuart M. Allen",
"Valerie B. O'Donnell"]
__license__ = "MIT"
__maintainer__ = "Jorge Alvarez-Jarreta"
__email__ = "lipidfinder@cardiff.ac.uk"
__status__ = "Production"
| # Copyright (c) 2019 J. Alvarez-Jarreta and C.J. Brasher
#
# This file is part of the LipidFinder software tool and governed by the
# 'MIT License'. Please see the LICENSE file that should have been
# included as part of this software.
"""LipidFinder's software information."""
name = "LipidFinder"
__version__ = "2.0"
__author__ = ("Jorge Alvarez-Jarreta, Christopher J. Brasher, Eoin Fahy, Anne O'Connor and Anna Price")
__copyright__ = "Copyright 2019, LipidFinder"
__credits__ = ["Jorge Alvarez-Jarreta", "Eoin Fahy", "Christopher J. Brasher",
"Anne O'Connor", "Patricia Rodrigues", "Anna Price"
"Jade I. Hawksworth", "Maria Valdivia-Garcia", "Stuart M. Allen",
"Valerie B. O'Donnell"]
__license__ = "MIT"
__maintainer__ = "Jorge Alvarez-Jarreta"
__email__ = "lipidfinder@cardiff.ac.uk"
__status__ = "Production"
| mit | Python |
17ddd95d19b1a737bb2b7aa89a4eb0aa17c35137 | Add contacted column. | inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,PyBossa/pybossa,jean/pybossa,Scifabric/pybossa,geotagx/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,stefanhahmann/pybossa,stefanhahmann/pybossa,jean/pybossa | alembic/versions/66594a9866c_add_updated_and_state_to_app.py | alembic/versions/66594a9866c_add_updated_and_state_to_app.py | """add updated and state to app
Revision ID: 66594a9866c
Revises: 29353a1877ba
Create Date: 2014-10-23 10:53:15.357562
"""
# revision identifiers, used by Alembic.
revision = '66594a9866c'
down_revision = '29353a1877ba'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.add_column('app', sa.Column('updated', sa.Text, default=make_timestamp))
op.add_column('app', sa.Column('contacted', sa.Boolean, default=False))
op.add_column('app', sa.Column('completed', sa.Boolean, default=False))
# Update all projects to the day the migration is run
query = "UPDATE app SET updated='%s'" % make_timestamp()
op.execute(query)
# Update the state of the projects
# Put all of them to false
query = 'UPDATE app SET completed=false'
op.execute(query)
# Update to completed those that are not included in the set
query = "UPDATE app SET completed=true WHERE id NOT IN (SELECT app_id FROM task WHERE state!='completed' OR state IS NULL GROUP BY app_id)"
op.execute(query)
# Update to not completed those that do not have any task
query = "UPDATE app SET completed=false WHERE id NOT IN (SELECT app_id FROM task group by app_id)"
op.execute(query)
def downgrade():
op.drop_column('app', 'updated')
op.drop_column('app', 'contacted')
op.drop_column('app', 'completed')
| """add updated and state to app
Revision ID: 66594a9866c
Revises: 29353a1877ba
Create Date: 2014-10-23 10:53:15.357562
"""
# revision identifiers, used by Alembic.
revision = '66594a9866c'
down_revision = '29353a1877ba'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.add_column('app', sa.Column('updated', sa.Text, default=make_timestamp))
op.add_column('app', sa.Column('completed', sa.Boolean, default=False))
# Update all projects to the day the migration is run
query = "UPDATE app SET updated='%s'" % make_timestamp()
op.execute(query)
# Update the state of the projects
# Put all of them to false
query = 'UPDATE app SET completed=false'
op.execute(query)
# Update to completed those that are not included in the set
query = "UPDATE app SET completed=true WHERE id NOT IN (SELECT app_id FROM task WHERE state!='completed' OR state IS NULL GROUP BY app_id)"
op.execute(query)
# Update to not completed those that do not have any task
query = "UPDATE app SET completed=false WHERE id NOT IN (SELECT app_id FROM task group by app_id)"
op.execute(query)
def downgrade():
op.drop_column('app', 'updated')
op.drop_column('app', 'completed')
| agpl-3.0 | Python |
591ed4b39167daea0eaceca7cf7d79802fd47bd1 | Delete call_aiohttp() (#8) | brettcannon/gidgethub,brettcannon/gidgethub | gidgethub/test/test_aiohttp.py | gidgethub/test/test_aiohttp.py | import datetime
import aiohttp
import pytest
from .. import aiohttp as gh_aiohttp
from .. import sansio
@pytest.mark.asyncio
async def test_sleep():
delay = 1
start = datetime.datetime.now()
async with aiohttp.ClientSession() as session:
gh = gh_aiohttp.GitHubAPI(session, "gidgethub")
await gh._sleep(delay)
stop = datetime.datetime.now()
assert (stop - start) > datetime.timedelta(seconds=delay)
@pytest.mark.asyncio
async def test_request():
request_headers = sansio.create_headers("gidgethub")
async with aiohttp.ClientSession() as session:
gh = gh_aiohttp.GitHubAPI(session, "gidgethub")
aio_call = await gh._request("GET", "https://api.github.com/rate_limit",
request_headers)
data, rate_limit, _ = sansio.decipher_response(*aio_call)
assert "rate" in data
| import datetime
import aiohttp
import pytest
from .. import aiohttp as gh_aiohttp
from .. import sansio
async def call_aiohttp(what, *args, **kwargs):
async with aiohttp.ClientSession() as session:
gh = gh_aiohttp.GitHubAPI(session, "gidgethub")
return await getattr(gh, what)(*args, **kwargs)
@pytest.mark.asyncio
async def test_sleep():
delay = 1
start = datetime.datetime.now()
async with aiohttp.ClientSession() as session:
gh = gh_aiohttp.GitHubAPI(session, "gidgethub")
await gh._sleep(delay)
stop = datetime.datetime.now()
assert (stop - start) > datetime.timedelta(seconds=delay)
@pytest.mark.asyncio
async def test_request():
request_headers = sansio.create_headers("gidgethub")
async with aiohttp.ClientSession() as session:
gh = gh_aiohttp.GitHubAPI(session, "gidgethub")
aio_call = await gh._request("GET", "https://api.github.com/rate_limit",
request_headers)
data, rate_limit, _ = sansio.decipher_response(*aio_call)
assert "rate" in data
| apache-2.0 | Python |
48fab607b1152b8b93cdb0cc0dc5c300dafecf4c | Update default settings to match Slurm 17.X / CentOS installation | mghpcc-projects/user_level_slurm_reservations,mghpcc-projects/user_level_slurm_reservations | common/hil_slurm_settings.py | common/hil_slurm_settings.py | """
MassOpenCloud / Hardware Isolation Layer (HIL)
Slurm / HIL Control Settings
May 2017, Tim Donahue tpd001@gmail.com
"""
DEBUG = True
SLURM_INSTALL_DIR = '/usr/bin/'
HIL_SLURMCTLD_PROLOG_LOGFILE = '/var/log/moc_hil_ulsr/hil_prolog.log'
HIL_MONITOR_LOGFILE = '/var/log/moc_hil_ulsr/hil_monitor.log'
HIL_ENDPOINT = "http://128.31.28.156:80"
HIL_USER = 'admin'
HIL_PW = 'NavedIsSleepy'
HIL_SLURM_PROJECT = 'slurm'
HIL_PARTITION_PREFIX = 'HIL_partition'
HIL_RESERVATION_DEFAULT_DURATION = 24 * 60 * 60 # Seconds
HIL_RESERVATION_GRACE_PERIOD = 4 * 60 * 60 # Seconds
# Partition validation controls
RES_CHECK_DEFAULT_PARTITION = False
RES_CHECK_EXCLUSIVE_PARTITION = False
RES_CHECK_SHARED_PARTITION = False
RES_CHECK_PARTITION_STATE = True
# EOF
| """
MassOpenCloud / Hardware Isolation Layer (HIL)
Slurm / HIL Control Settings
May 2017, Tim Donahue tpd001@gmail.com
"""
DEBUG = True
SLURM_INSTALL_DIR = '/usr/local/bin/'
HIL_SLURMCTLD_PROLOG_LOGFILE = '/var/log/moc_hil_ulsr/hil_prolog.log'
HIL_MONITOR_LOGFILE = '/var/log/moc_hil_ulsr/hil_monitor.log'
HIL_ENDPOINT = "http://128.31.28.156:80"
HIL_USER = 'admin'
HIL_PW = 'NavedIsSleepy'
HIL_SLURM_PROJECT = 'slurm'
HIL_PARTITION_PREFIX = 'HIL_partition'
HIL_PARTITION_PREFIX = 'debug'
HIL_RESERVATION_DEFAULT_DURATION = 24 * 60 * 60 # Seconds
HIL_RESERVATION_GRACE_PERIOD = 4 * 60 * 60 # Seconds
# Partition validation controls
RES_CHECK_DEFAULT_PARTITION = False
RES_CHECK_EXCLUSIVE_PARTITION = False
RES_CHECK_SHARED_PARTITION = False
RES_CHECK_PARTITION_STATE = True
# EOF
| mit | Python |
0ba9fa847a8b605363b298ecad40cb2fc5870cbb | Update build script to work correctly on macOS and linux. | treamology/panda3d-voxels,treamology/panda3d-voxels,treamology/panda3d-voxels | build_modules.py | build_modules.py | import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
from voxel_native.scripts.common import is_macos, is_windows, is_linux
if is_windows():
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
elif is_macos() or is_linux():
shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
| import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
if __name__ == "__main__":
build_modules()
| mit | Python |
182215c61de046b626f754aeec22b2845a52f103 | Call the bootstrap job. | pnomolos/greatbigcrane,pnomolos/greatbigcrane | greatbigcrane/project/views.py | greatbigcrane/project/views.py | """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from shutil import copyfile
from django.shortcuts import render_to_response, redirect
from django.views.generic.list_detail import object_list
from django.views.generic.list_detail import object_detail
from django.template import RequestContext
from django.conf import settings
from job_queue.jobs import run_job
from project.models import Project
from project.forms import ProjectForm
from preferences.models import Preference
def index(request):
'''We should move this to a different app'''
projects = Project.objects.all()
return render_to_response('index.html', RequestContext(request,
{'project_list': projects}))
def list_projects(request):
projects = Project.objects.all()
if "orderby" in request.GET:
projects.order_by(request.GET['orderby'])
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
return object_detail(request, Project.objects.all(), object_id=project_id,
template_object_name='project')
def add_project(request):
form = ProjectForm(request.POST or None)
if form.is_valid():
instance = form.save()
os.makedirs(instance.base_directory)
copyfile(os.path.join(settings.PROJECT_HOME, "../bootstrap.py"),
os.path.join(instance.base_directory, "bootstrap.py"))
run_job("BOOTSTRAP", project_id=instance.id)
return redirect(instance.get_absolute_url())
base_url = Preference.objects.get_preference("projects_directory", '')
return render_to_response("project/project_form.html",
RequestContext(request, {'form': form, 'base_url': base_url}))
| """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os.path
from shutil import copyfile
from django.shortcuts import render_to_response, redirect
from django.views.generic.list_detail import object_list
from django.views.generic.list_detail import object_detail
from django.template import RequestContext
from django.conf import settings
from project.models import Project
from project.forms import ProjectForm
from preferences.models import Preference
def index(request):
'''We should move this to a different app'''
projects = Project.objects.all()
return render_to_response('index.html', RequestContext(request,
{'project_list': projects}))
def list_projects(request):
projects = Project.objects.all()
if "orderby" in request.GET:
projects.order_by(request.GET['orderby'])
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
return object_detail(request, Project.objects.all(), object_id=project_id,
template_object_name='project')
def add_project(request):
form = ProjectForm(request.POST or None)
if form.is_valid():
instance = form.save()
os.makedirs(instance.base_directory)
copyfile(os.path.join(settings.PROJECT_HOME, "../bootstrap.py"),
os.path.join(instance.base_directory, "bootstrap.py"))
return redirect(instance.get_absolute_url())
base_url = Preference.objects.get_preference("projects_directory", '')
return render_to_response("project/project_form.html",
RequestContext(request, {'form': form, 'base_url': base_url}))
| apache-2.0 | Python |
115a71995f2ceae667c05114da8e8ba21c25c402 | Move to 1.6.6 dev for further development | alby128/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay | syncplay/__init__.py | syncplay/__init__.py | version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
| version = '1.6.5'
revision = ' release'
milestone = 'Yoitsu'
release_number = '86'
projectURL = 'https://syncplay.pl/'
| apache-2.0 | Python |
9ceddfc1a371c02d983a5b62b7d154f35a4b9006 | Bump version to 0.4.0.dev (#707) | quantumlib/Cirq,quantumlib/Cirq,balopat/Cirq,balopat/Cirq,balopat/Cirq,quantumlib/Cirq,quantumlib/Cirq,quantumlib/Cirq | cirq/_version.py | cirq/_version.py | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define version number here and read it from setup.py automatically"""
import sys
if sys.version_info.major == 2:
__version__ = "0.4.0.dev27" # coverage: ignore
else:
__version__ = "0.4.0.dev35"
| # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define version number here and read it from setup.py automatically"""
import sys
if sys.version_info.major == 2:
__version__ = "0.3.0.27" # coverage: ignore
else:
__version__ = "0.3.0.35"
| apache-2.0 | Python |
2c162b131a936c0607ec2aacbf79108aa9a78555 | Bump to version 0.8.0 | reubano/meza,reubano/tabutils,reubano/tabutils,reubano/tabutils,reubano/meza,reubano/meza | tabutils/__init__.py | tabutils/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
tabutils
~~~~~~~~
Provides methods for reading and processing data from tabular formatted files
Examples:
literal blocks::
python example_google.py
Attributes:
ENCODING (str): Default file encoding.
"""
__title__ = 'tabutils'
__package_name__ = 'tabutils'
__author__ = 'Reuben Cummings'
__description__ = 'tabular data utility methods'
__email__ = 'reubano@gmail.com'
__version__ = '0.8.0'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
tabutils
~~~~~~~~
Provides methods for reading and processing data from tabular formatted files
Examples:
literal blocks::
python example_google.py
Attributes:
ENCODING (str): Default file encoding.
"""
__title__ = 'tabutils'
__package_name__ = 'tabutils'
__author__ = 'Reuben Cummings'
__description__ = 'tabular data utility methods'
__email__ = 'reubano@gmail.com'
__version__ = '0.7.0'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
| mit | Python |
a805379908e8e94f44339a877c094e239ab97a09 | Add command to list all user commands. | Javex/qllbot,sqall01/qllbot,qll/qllbot | modules/usercmds.py | modules/usercmds.py | import contextlib
import lib.cmd
import lib.event
import lib.irc
USERCMD_CHAR = '!'
_user_cmds = {} # storage for all user commands during runtime
@lib.event.subscribe('new_db')
def create_usercmds_schema(db=None):
"""Creates the usercmds SQLite schema."""
db.execute('CREATE TABLE usercmds (cmd TEXT PRIMARY KEY, response TEXT)')
db.commit()
@lib.event.subscribe('connected')
def fetch_usercmds(bot=None):
"""Fetches all user commands at the start of the bot."""
with contextlib.closing(bot.db.cursor()) as c:
c.execute('SELECT cmd, response FROM usercmds')
for cmd, response in c:
_user_cmds[cmd] = response
@lib.event.subscribe('channel_message')
def invoke_usercmd(bot=None, msg=None):
if msg.content.startswith(USERCMD_CHAR) and msg.content[1:] in _user_cmds:
bot.send(lib.irc.say(msg.channel, _user_cmds[msg.content[1:]]))
@lib.cmd.command()
def cmd(msg):
"""Creates or changes an user command (#usercmd name response)."""
if not msg.params or ' ' not in msg.params:
return 'Usage: #usercmd name response'
cmd, response = msg.params.split(' ', 1)
if cmd in _user_cmds:
msg.bot.db.execute('UPDATE usercmds SET response=? WHERE cmd=?',
(response, cmd))
else:
msg.bot.db.execute('INSERT INTO usercmds VALUES (?, ?)',
(cmd, response))
msg.bot.db.commit()
_user_cmds[cmd] = response
return 'User command stored. Usage: %s%s' % (USERCMD_CHAR, cmd)
@lib.cmd.command()
def listcmds(msg):
"""Lists all user commands."""
return ', '.join('!%s' % k for k in _user_cmds)
| import contextlib
import lib.cmd
import lib.event
import lib.irc
USERCMD_CHAR = '!'
_user_cmds = {} # storage for all user commands during runtime
@lib.event.subscribe('new_db')
def create_usercmds_schema(db=None):
"""Creates the usercmds SQLite schema."""
db.execute('CREATE TABLE usercmds (cmd TEXT PRIMARY KEY, response TEXT)')
db.commit()
@lib.event.subscribe('connected')
def fetch_usercmds(bot=None):
"""Fetches all user commands at the start of the bot."""
with contextlib.closing(bot.db.cursor()) as c:
c.execute('SELECT cmd, response FROM usercmds')
for cmd, response in c:
_user_cmds[cmd] = response
@lib.event.subscribe('channel_message')
def invoke_usercmd(bot=None, msg=None):
if msg.content.startswith(USERCMD_CHAR) and msg.content[1:] in _user_cmds:
bot.send(lib.irc.say(msg.channel, _user_cmds[msg.content[1:]]))
@lib.cmd.command()
def cmd(msg):
"""Creates or changes an user command (#usercmd name response)."""
if not msg.params or ' ' not in msg.params:
return 'Usage: #usercmd name response'
cmd, response = msg.params.split(' ', 1)
if cmd in _user_cmds:
msg.bot.db.execute('UPDATE usercmds SET response=? WHERE cmd=?',
(response, cmd))
else:
msg.bot.db.execute('INSERT INTO usercmds VALUES (?, ?)',
(cmd, response))
msg.bot.db.commit()
_user_cmds[cmd] = response
return 'User command stored. Usage: %s%s' % (USERCMD_CHAR, cmd)
| bsd-2-clause | Python |
0bb8641efa2e0e54c782b55eb62601735b60a1ce | change error message to indicate Polygons are allowed, too | geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info | conversion_service/clipping_geometry/to_polyfile.py | conversion_service/clipping_geometry/to_polyfile.py | from django.contrib.gis.geos import MultiPolygon, Polygon
from django.template.loader import render_to_string
def create_poly_file_string(geometry):
"""
Converts a Polygon or Multipolygon to a polyfile compatible string
Args:
geometry: django.contrib.gis.geos.MultiPolygon or django.contrib.gis.geos.Polygon
Returns:
Osmosis polygon filter file format string
"""
if isinstance(geometry, Polygon):
geometry = MultiPolygon([geometry])
if not isinstance(geometry, MultiPolygon):
raise TypeError("The provided data is not a Polygon or MultiPolygon, but {}.".format(str(geometry.__class__)))
return render_to_string(template_name='clipping_geometry/polyfile_template.poly', context={'multipolygon': geometry})
| from django.contrib.gis.geos import MultiPolygon, Polygon
from django.template.loader import render_to_string
def create_poly_file_string(geometry):
"""
Converts a Polygon or Multipolygon to a polyfile compatible string
Args:
geometry: django.contrib.gis.geos.MultiPolygon or django.contrib.gis.geos.Polygon
Returns:
Osmosis polygon filter file format string
"""
if isinstance(geometry, Polygon):
geometry = MultiPolygon([geometry])
if not isinstance(geometry, MultiPolygon):
raise TypeError("The provided data is not a multi polygon, but {}.".format(str(geometry.__class__)))
return render_to_string(template_name='clipping_geometry/polyfile_template.poly', context={'multipolygon': geometry})
| mit | Python |
41263cfc401f7eb6f13da4ea1e288e2b6b674f52 | Fix preselection of paid orders filter | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | byceps/blueprints/admin/shop/order/models.py | byceps/blueprints/admin/shop/order/models.py | """
byceps.blueprints.admin.shop.order.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from enum import Enum
from typing import Optional
from .....services.shop.order.transfer.models import PaymentState
from .....util import iterables
IGNORED = None
NOT_YET_SHIPPED = False
SHIPPED = True
class OrderStateFilter(Enum):
none = (None, IGNORED)
payment_state_open = (PaymentState.open, IGNORED)
payment_state_canceled_before_paid = (PaymentState.canceled_before_paid, IGNORED)
payment_state_paid = (PaymentState.paid, IGNORED)
payment_state_canceled_after_paid = (PaymentState.canceled_after_paid, IGNORED)
waiting_for_shipping = (None, NOT_YET_SHIPPED)
def __init__(
self, payment_state: Optional[PaymentState], shipped: Optional[bool]
) -> None:
self.payment_state = payment_state
self.shipped = shipped
@classmethod
def find(
cls,
only_payment_state: Optional[PaymentState],
only_shipped: Optional[bool],
) -> OrderStateFilter:
if (only_payment_state == PaymentState.paid) and (only_shipped is not None):
return cls.waiting_for_shipping if not only_shipped else cls.none
elif only_payment_state is not None:
return cls.find_for_payment_state(only_payment_state) or cls.none
else:
return cls.none
@classmethod
def find_for_payment_state(
cls, payment_state: PaymentState
) -> Optional[OrderStateFilter]:
def match(order_state_filter):
return (
order_state_filter.payment_state == payment_state
and order_state_filter.shipped is None
)
return iterables.find(cls, match)
| """
byceps.blueprints.admin.shop.order.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from enum import Enum
from typing import Optional
from .....services.shop.order.transfer.models import PaymentState
from .....util import iterables
IGNORED = None
NOT_YET_SHIPPED = False
SHIPPED = True
class OrderStateFilter(Enum):
none = (None, IGNORED)
payment_state_open = (PaymentState.open, IGNORED)
payment_state_canceled_before_paid = (PaymentState.canceled_before_paid, IGNORED)
payment_state_paid = (PaymentState.paid, IGNORED)
payment_state_canceled_after_paid = (PaymentState.canceled_after_paid, IGNORED)
waiting_for_shipping = (None, NOT_YET_SHIPPED)
def __init__(
self, payment_state: Optional[PaymentState], shipped: Optional[bool]
) -> None:
self.payment_state = payment_state
self.shipped = shipped
@classmethod
def find(
cls,
only_payment_state: Optional[PaymentState],
only_shipped: Optional[bool],
) -> OrderStateFilter:
if only_payment_state == PaymentState.paid and not only_shipped:
return cls.waiting_for_shipping
elif only_payment_state is not None:
return cls.find_for_payment_state(only_payment_state) or cls.none
else:
return cls.none
@classmethod
def find_for_payment_state(
cls, payment_state: PaymentState
) -> Optional[OrderStateFilter]:
def match(order_state_filter):
return (
order_state_filter.payment_state == payment_state
and order_state_filter.shipped is None
)
return iterables.find(cls, match)
| bsd-3-clause | Python |
2633be5b9da5101ea9b5dd4449c6a5eb7405e932 | Set and clear colour for whole filelist output | mcgid/morenines,mcgid/morenines | morenines/output.py | morenines/output.py | import click
import sys
def info(message):
click.echo(message)
def good(message):
click.secho("Warning: " + message, fg='green')
def warning(message):
click.secho("WARNING: " + message, fg='yellow')
def error(message):
click.secho("ERROR: " + message, fg='red')
sys.exit(1)
def set_output_colour(colour):
# Print nothing except the ANSI escape sequence
click.secho('', nl=False, fg=colour, reset=False)
def clear_output_colour():
# Print nothing except the reset escape sequence
click.secho('', nl=False, reset=True)
def print_filelist(header, filelist, colour=None):
set_output_colour(colour)
click.echo(header)
for line in sorted(filelist):
click.echo(" {}".format(line))
clear_output_colour()
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
good("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| import click
import sys
def info(message):
click.echo(message)
def good(message):
click.secho("Warning: " + message, fg='green')
def warning(message):
click.secho("WARNING: " + message, fg='yellow')
def error(message):
click.secho("ERROR: " + message, fg='red')
sys.exit(1)
def print_filelist(header, filelist, colour=None):
click.echo(header)
for line in sorted(filelist):
if colour:
line = click.style(line, fg=colour)
click.echo(" {}".format(line))
def print_filelists(new_files, changed_files, missing_files):
if not any([new_files, changed_files, missing_files]):
good("Index is up-to-date (no changes)")
return
if new_files:
print_filelist("Added files (not in index):", new_files, 'green')
# Print a blank space between sections
if changed_files or missing_files:
click.echo()
if changed_files:
print_filelist("Changed files (hash differs from index):", changed_files, 'red')
# Print a blank space between sections
if missing_files:
click.echo()
if missing_files:
print_filelist("Missing files:", missing_files, 'red')
| mit | Python |
65b8484810305ea19a363e14860f2f516733c244 | use django.utils.safestring.mark_safe with html templatetag html output | rsalmaso/django-babeljs,rsalmaso/django-babeljs | babeljs/templatetags/babeljs.py | babeljs/templatetags/babeljs.py | # -*- coding: utf-8 -*-
# Copyright (C) 2007-2015, Raffaele Salmaso <raffaele@salmaso.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
from django import template
from django.template import loader
from django.utils.encoding import iri_to_uri
from django.utils.safestring import mark_safe
import babeljs
from babeljs import conf as settings
def is_installed(module):
try:
from django.apps import apps
return apps.is_installed(module)
except ImportError:
return module in settings.INSTALLED_APPS
if is_installed('django.contrib.staticfiles'):
from django.contrib.staticfiles.templatetags.staticfiles import static as _static
else:
from django.templatetags.static import static as _static
register = template.Library()
MINIFIED = ".min" if settings.MINIFIED else ""
@register.simple_tag(takes_context=True, name="babeljs")
def tag_babeljs(context, version=settings.VERSION, minified=MINIFIED):
return mark_safe("""<script type="text/javascript" src="{babel}"></script>""".format(
babel=_static(iri_to_uri(
"babeljs/{script}-{version}{minified}.js".format(
script="browser",
version=version,
minified=minified,
)
)),
))
@register.simple_tag(takes_context=True, name="babel")
def babel(context, template_name):
from babeljs import execjs
template = loader.render_to_string(
template_name=template_name,
dictionary={},
context_instance=context,
)
try:
tag, js = "<script>", babeljs.transpile(template)
except (babeljs.TransformError, execjs.RuntimeError):
tag, js = """<script type="text/babel">""", template
return mark_safe("".join([tag, js, "</script>"]))
| # -*- coding: utf-8 -*-
# Copyright (C) 2007-2015, Raffaele Salmaso <raffaele@salmaso.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
from django import template
from django.template import loader
from django.utils.encoding import iri_to_uri
import babeljs
from babeljs import conf as settings
def is_installed(module):
try:
from django.apps import apps
return apps.is_installed(module)
except ImportError:
return module in settings.INSTALLED_APPS
if is_installed('django.contrib.staticfiles'):
from django.contrib.staticfiles.templatetags.staticfiles import static as _static
else:
from django.templatetags.static import static as _static
register = template.Library()
MINIFIED = ".min" if settings.MINIFIED else ""
@register.simple_tag(takes_context=True, name="babeljs")
def tag_babeljs(context, version=settings.VERSION, minified=MINIFIED):
return """<script type="text/javascript" src="{babel}"></script>""".format(
babel=_static(iri_to_uri(
"babeljs/{script}-{version}{minified}.js".format(
script="browser",
version=version,
minified=minified,
)
)),
)
@register.simple_tag(takes_context=True, name="babel")
def babel(context, template_name):
from babeljs import execjs
template = loader.render_to_string(
template_name=template_name,
dictionary={},
context_instance=context,
)
try:
tag, js = "<script>", babeljs.transpile(template)
except (babeljs.TransformError, execjs.RuntimeError):
tag, js = """<script type="text/babel">""", template
return "".join([tag, js, "</script>"])
| mit | Python |
bf1d97366802602952734a1e8874eab3b063179d | Update StructuredGrid example per https://github.com/pyvista/pyvista-support/issues/16 | akaszynski/vtkInterface | examples/00-load/create-structured-surface.py | examples/00-load/create-structured-surface.py | """
.. _ref_create_structured:
Creating a Structured Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a StructuredGrid surface from NumPy arrays
"""
# sphinx_gallery_thumbnail_number = 2
import pyvista as pv
import numpy as np
################################################################################
# From NumPy Meshgrid
# +++++++++++++++++++
#
# Create a simple meshgrid using NumPy
# Make data
x = np.arange(-10, 10, 0.25)
y = np.arange(-10, 10, 0.25)
x, y = np.meshgrid(x, y)
r = np.sqrt(x**2 + y**2)
z = np.sin(r)
################################################################################
# Now pass the NumPy meshgrid to PyVista
# Create and plot structured grid
grid = pv.StructuredGrid(x, y, z)
grid.plot()
################################################################################
# Plot mean curvature as well
grid.plot_curvature(clim=[-1, 1])
################################################################################
# Generating a structured grid is a one liner in this module, and the points from
# the resulting surface can be accessed as a NumPy array:
print(grid.points)
################################################################################
# From XYZ Points
# +++++++++++++++
#
# Quite often, you might be given a set of coordinates (XYZ points) in a simple
# tabular format where there exists some structure such that grid could be built
# between the nodes you have. A great example is found in `pyvista-support#16`_
# where a strucutred grid that is rotated from the cartesian reference frame is
# given as just XYZ points. In these cases, all that is needed to recover the
# grid is the dimensions of the grid (`nx` by `ny` by `nz`) and that the
# coordinates are ordered appropriately.
#
# .. _pyvista-support#16: https://github.com/pyvista/pyvista-support/issues/16
#
# For this example, we will create a small dataset and rotate the
# coordinates such that they are not on orthogonal to cartesian reference frame.
# Define a simple Gaussian surface
def make_point_set():
"""Ignore the contents of this function. Just know that it returns an
n by 3 numpy array of structured coordinates."""
n, m = 29, 32
x = np.linspace(-200,200, num=n) + np.random.uniform(-5, 5, size=n)
y = np.linspace(-200,200, num=m) + np.random.uniform(-5, 5, size=m)
xx, yy = np.meshgrid(x, y)
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
foo = pv.PolyData(points)
foo.rotate_z(36.6)
return foo.points
# Get the points as a 2D NumPy array (N by 3)
points = make_point_set()
print(points[0:5, :])
################################################################################
# Now pretend that the (n by 3) NumPy array above are coordinates that you have,
# possibly from a file with three columns of XYZ points.
#
# We simply need to recover the dimensions of the grid that these points make
# and then we can generate a :class:`pyvista.StructuredGrid` mesh.
#
# Let's preview the points to see what we are dealing with:
import matplotlib.pyplot as plt
plt.figure(figsize=(10,10))
plt.scatter(points[:,0], points[:,1], c=points[:,2])
plt.axis('image')
plt.xlabel('X Coordinate')
plt.ylabel('Y Coordinate')
plt.show()
################################################################################
# In the figure above, we can see some inherit structure to the points and thus
# we could connect the points as a structured gird. All we need to know are the
# dimensions of the grid present. In this case, we know (because we made this
# dataset) the dimensions are ``[29, 32, 1]``, but you might not know the
# dimensions of your pointset. There are a few ways to figure out the
# dimensionality of structured grid including:
# * manually conting the the nodes along the edges of the pointset
# * using a technique like principle component analysis to strip the rotation from the dataset and count the unique values along each axis for the new;y projected dataset.
# Once you've figured out your grid's dimensions, simple create the
# :class:`pyvista.StructuredGrid` as follows:
mesh = pv.StructuredGrid()
# Set the coordinates from the numpy array
mesh.points = points
# set the dimensions
mesh.dimensions = [29, 32, 1]
# and then inspect it!
mesh.plot(show_edges=True, show_grid=True, cpos='xy')
| """
.. _ref_create_structured:
Creating a Structured Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a StructuredGrid surface from NumPy arrays
"""
# sphinx_gallery_thumbnail_number = 2
import pyvista as pv
import numpy as np
################################################################################
# Create a simple meshgrid using NumPy
# Make data
x = np.arange(-10, 10, 0.25)
y = np.arange(-10, 10, 0.25)
x, y = np.meshgrid(x, y)
r = np.sqrt(x**2 + y**2)
z = np.sin(r)
################################################################################
# Now pass the NumPy meshgrid to PyVista
# Create and plot structured grid
grid = pv.StructuredGrid(x, y, z)
grid.plot()
################################################################################
# Plot mean curvature as well
grid.plot_curvature(clim=[-1, 1])
################################################################################
# Generating a structured grid is a one liner in this module, and the points from
# the resulting surface can be accessed as a NumPy array:
print(grid.points)
| mit | Python |
c20202a23d689e75d640c684f8c3f4f9dc599316 | Refactor passlib password hasing example | ets-labs/python-dependency-injector,ets-labs/dependency_injector,rmk135/dependency_injector,rmk135/objects | examples/miniapps/password_hashing/example.py | examples/miniapps/password_hashing/example.py | """Example of dependency injection and password hashing in Python."""
import passlib.hash
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class UsersService:
"""Users service."""
def __init__(self, password_hasher):
"""Initialize instance."""
self._password_hasher = password_hasher
def create_user(self, name, password):
"""Create user with hashed password."""
hashed_password = self._password_hasher(password)
return dict(name=name, password=hashed_password)
class Container(containers.DeclarativeContainer):
"""Inversion of control container."""
password_hasher = providers.Callable(
passlib.hash.sha256_crypt.hash,
salt_size=16,
rounds=10000)
users_service = providers.Factory(
UsersService,
password_hasher=password_hasher.provider)
if __name__ == '__main__':
container = Container()
users_service = container.users_service()
user1 = users_service.create_user(name='Roman', password='secret1')
user2 = users_service.create_user(name='Vitaly', password='secret2')
print(user1, user2)
| """Example of dependency injection and password hashing in Python."""
import passlib.hash
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class UsersService:
"""Users service."""
def __init__(self, password_hasher):
"""Initialize instance."""
self._password_hasher = password_hasher
def create_user(self, name, password):
"""Create user with hashed password."""
hashed_password = self._password_hasher(password)
return dict(name=name, password=hashed_password)
class Container(containers.DeclarativeContainer):
"""Inversion of control container."""
password_hasher = providers.Callable(
passlib.hash.sha256_crypt.encrypt,
salt_size=16,
rounds=10000)
users_service = providers.Factory(
UsersService,
password_hasher=password_hasher.provider)
if __name__ == '__main__':
container = Container()
users_service = container.users_service()
user1 = users_service.create_user(name='Roman', password='secret1')
user2 = users_service.create_user(name='Vitaly', password='secret2')
print(user1, user2)
| bsd-3-clause | Python |
8fff2f030502e011c051c26e8376ed735478d2ef | Fix example for evt readout | tamasgal/km3pipe,tamasgal/km3pipe | examples/offline_analysis/plot_evt_readout.py | examples/offline_analysis/plot_evt_readout.py | # -*- coding: utf-8 -*-
"""
=============================
Reading and Parsing EVT files
=============================
This example shows how to read and parse EVT files, which are used in our
Monte Carlo productions.
"""
# Author: Tamas Gal <tgal@km3net.de>, Moritz Lotze >mlotze@km3net.de>
# License: BSD-3
import matplotlib.pyplot as plt
import numpy as np
from km3modules.common import StatusBar
from km3pipe import Module, Pipeline
from km3pipe.dataclasses import Table
from km3pipe.calib import Calibration
from km3pipe.io import EvtPump
from km3pipe.math import pld3
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "../data/numu_cc.evt"
detx = "../data/km3net_jul13_90m_r1494_corrected.detx"
class VertexHitDistanceCalculator(Module):
"""Calculate vertex-hit-distances"""
def configure(self):
self.distances = []
def process(self, blob):
tracks = blob["TrackIns"]
muons = tracks[tracks.type == 5]
muon = Table(muons[np.argmax(muons.energy)])
hits = blob["CalibHits"]
dist = pld3(hits.pos, muon.pos, muon.dir)
self.distances.append(dist)
return blob
def finish(self):
dist_flat = np.concatenate(self.distances)
plt.hist(dist_flat)
plt.savefig("dists.png")
pipe = Pipeline()
pipe.attach(EvtPump, filename=filename, parsers=["km3"])
pipe.attach(StatusBar, every=100)
pipe.attach(Calibration, filename=detx)
pipe.attach(VertexHitDistanceCalculator)
pipe.drain(5)
| # -*- coding: utf-8 -*-
"""
=============================
Reading and Parsing EVT files
=============================
This example shows how to read and parse EVT files, which are used in our
Monte Carlo productions.
"""
# Author: Tamas Gal <tgal@km3net.de>, Moritz Lotze >mlotze@km3net.de>
# License: BSD-3
import matplotlib.pyplot as plt
import numpy as np
from km3modules.common import StatusBar
from km3pipe import Module, Pipeline
from km3pipe.dataclasses import Table
from km3pipe.calib import Calibration
from km3pipe.io import EvtPump
from km3pipe.math import pld3
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "../data/numu_cc.evt"
detx = "../data/km3net_jul13_90m_r1494_corrected.detx"
class VertexHitDistanceCalculator(Module):
"""Calculate vertex-hit-distances"""
def configure(self):
self.distances = []
def process(self, blob):
tracks = blob["TrackIns"]
muons = tracks[tracks.type == 5]
muon = Table(muons[np.argmax(muons.energy)])
hits = blob["CalibHits"]
dist = pld3(hits.pos, muon.pos, muon.dir)
self.distances.append(dist)
return blob
def finish(self):
dist_flat = np.concatenate(self.distances)
plt.hist(dist_flat)
plt.savefig("dists.pdf")
pipe = Pipeline()
pipe.attach(EvtPump, filename=filename, parsers=["km3"])
pipe.attach(StatusBar, every=100)
pipe.attach(Calibration, filename=detx)
pipe.attach(VertexHitDistanceCalculator)
pipe.drain(5)
| mit | Python |
4ddbbcbe25b7e24fcf4f342642d204a7b3442828 | Update module description to fix outdated info | treveradams/connector-telephony,brain-tec/connector-telephony,treveradams/connector-telephony,treveradams/connector-telephony,brain-tec/connector-telephony,brain-tec/connector-telephony | base_phone_popup/__openerp__.py | base_phone_popup/__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Base Phone Pop-up module for Odoo/OpenERP
# Copyright (C) 2014 Alexis de Lattre <alexis@via.ecp.fr>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base Phone Pop-up',
'version': '8.0.0.4.0',
'category': 'Phone',
'license': 'AGPL-3',
'summary': 'Pop-up the related form view to the user on incoming calls',
'description': """
Base Phone Pop-up
=================
When the user receives a phone call, Odoo can automatically open the
corresponding partner/lead/employee/... in a pop-up without any action from the
user.
The module *web_action_request* can be downloaded with Mercurial:
hg clone http://bitbucket.org/anybox/web_action_request
Warning : proxying WebSockets is only supported since Nginx 1.3.13 ; the
feature provided by this module won't work with older versions of Nginx.
""",
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com/',
'depends': ['base_phone', 'web_action_request'],
'data': ['res_users_view.xml'],
}
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Base Phone Pop-up module for Odoo/OpenERP
# Copyright (C) 2014 Alexis de Lattre <alexis@via.ecp.fr>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base Phone Pop-up',
'version': '8.0.0.4.0',
'category': 'Phone',
'license': 'AGPL-3',
'summary': 'Pop-up the related form view to the user on incoming calls',
'description': """
Base Phone Pop-up
=================
When the user receives a phone call, OpenERP can automatically open the
corresponding partner/lead/employee/... in a pop-up without any action from the
user.
The module *web_action_request* can be downloaded with Mercurial:
hg clone http://bitbucket.org/anybox/web_action_request
It depends on 2 other modules, *web_longpolling* and *web_socketio*, that can
be downloaded with this command:
hg clone http://bitbucket.org/anybox/web_socketio
You will find some hints in this documentation :
https://bitbucket.org/anybox/web_action_request
Warning : proxying WebSockets is only supported since Nginx 1.3.13 ; the
feature provided by this module won't work with older versions of Nginx.
TODO : document this new feature on the Akretion Web site :
http://www.akretion.com/products-and-services/openerp-asterisk-voip-connector
""",
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com/',
'depends': ['base_phone', 'web_action_request'],
'data': [
'res_users_view.xml',
],
'images': [],
'active': False,
}
| agpl-3.0 | Python |
df7b9879a194c2909467340b72ac7c0c87f6fd77 | Remove camera configuration. | microy/PyStereoVisionToolkit,microy/StereoVision,microy/StereoVision,microy/PyStereoVisionToolkit,microy/VisionToolkit,microy/VisionToolkit | VisionToolkit/Camera.py | VisionToolkit/Camera.py | # -*- coding:utf-8 -*-
#
# Module to capture images from USB cameras
#
#
# External dependencies
#
import threading
import cv2
#
# Thread to read the images from a USB camera
#
class UsbCamera( threading.Thread ) :
	"""Thread that continuously reads frames from USB camera 0.

	Each captured frame is handed to image_callback( image ). Set
	self.running to False to stop the loop; the camera is released on exit.
	"""
	#
	# Initialisation
	#
	def __init__( self, image_callback ) :
		# Initialize the thread
		super( UsbCamera, self ).__init__()
		# Function called when an image is received
		self.image_callback = image_callback
		# Initialize the camera (device 0)
		self.camera = cv2.VideoCapture( 0 )
		# Get camera parameters
		self.width = self.camera.get( cv2.cv.CV_CAP_PROP_FRAME_WIDTH )
		self.height = self.camera.get( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT )
	#
	# Thread main loop
	#
	def run( self ) :
		# Thread running
		self.running = True
		while self.running :
			# Capture image from the camera
			ok, image = self.camera.read()
			# Bug fix: a failed read used to forward None to the callback;
			# skip the frame instead when the capture did not succeed
			if not ok :
				continue
			# Send the image via the external callback function
			self.image_callback( image )
		# Release the camera
		self.camera.release()
#
# Thread to read images from two USB cameras
#
class StereoUsbCamera( threading.Thread ) :
	"""Thread that reads frame pairs from USB cameras 0 and 1.

	Each pair is handed to image_callback( image_left, image_right ).
	Set self.running to False to stop the loop; both cameras are released
	on exit.

	NOTE(review): the grab()/retrieve() return values are ignored, so a
	failed capture delivers None images to the callback - confirm whether
	callers tolerate that.
	"""
	#
	# Initialisation
	#
	def __init__( self, image_callback ) :
		# Initialize the thread
		super( StereoUsbCamera, self ).__init__()
		# Function called when the images are received
		self.image_callback = image_callback
		# Initialize the cameras
		self.camera_left = cv2.VideoCapture( 0 )
		self.camera_right = cv2.VideoCapture( 1 )
		# Get camera parameters
		self.width = self.camera_left.get( cv2.cv.CV_CAP_PROP_FRAME_WIDTH )
		self.height = self.camera_left.get( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT )
	#
	# Thread main loop
	#
	def run( self ) :
		# Thread running
		self.running = True
		while self.running :
			# Capture images (grab both first so the two frames are close in time)
			self.camera_left.grab()
			self.camera_right.grab()
			# Get the images
			_, image_left = self.camera_left.retrieve()
			_, image_right = self.camera_right.retrieve()
			# Send the image via the external callback function
			self.image_callback( image_left, image_right )
		# Release the cameras
		self.camera_left.release()
		self.camera_right.release()
| # -*- coding:utf-8 -*-
#
# Module to capture images from USB cameras
#
#
# External dependencies
#
import threading
import cv2
#
# Thread to read the images from a USB camera
#
class UsbCamera( threading.Thread ) :
	"""Thread that continuously reads frames from USB camera 0.

	Forces 640x480 at 25 FPS on the device, then hands every captured
	frame to image_callback( image ). Set self.running to False to stop.
	"""
	#
	# Initialisation
	#
	def __init__( self, image_callback ) :
		# Initialize the thread
		super( UsbCamera, self ).__init__()
		# Function called when an image is received
		self.image_callback = image_callback
		# Initialize the camera
		self.camera = cv2.VideoCapture( 0 )
		# Set camera resolution
		self.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
		self.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
		# Set camera frame rate
		self.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
	#
	# Thread main loop
	#
	def run( self ) :
		# Thread running
		self.running = True
		while self.running :
			# Capture image from the camera
			# NOTE(review): the success flag is discarded, so a failed read
			# passes None to the callback
			_, image = self.camera.read()
			# Send the image via the external callback function
			self.image_callback( image )
		# Release the camera
		self.camera.release()
#
# Thread to read images from two USB cameras
#
class StereoUsbCamera( threading.Thread ) :
	"""Thread that reads frame pairs from USB cameras 0 and 1.

	Lowers both cameras to 640x480 at 5 FPS, then hands every pair to
	image_callback( image_left, image_right ). Set self.running to False
	to stop; both cameras are released on exit.
	"""
	#
	# Initialisation
	#
	def __init__( self, image_callback ) :
		# Initialize the thread
		super( StereoUsbCamera, self ).__init__()
		# Function called when the images are received
		self.image_callback = image_callback
		# Initialize the cameras
		self.camera_left = cv2.VideoCapture( 0 )
		self.camera_right = cv2.VideoCapture( 1 )
		# Lower the camera frame rate and resolution
		self.camera_left.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
		self.camera_left.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
		self.camera_right.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
		self.camera_right.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
		self.camera_left.set( cv2.cv.CV_CAP_PROP_FPS, 5 )
		self.camera_right.set( cv2.cv.CV_CAP_PROP_FPS, 5 )
	#
	# Thread main loop
	#
	def run( self ) :
		# Thread running
		self.running = True
		while self.running :
			# Capture images (grab both first so the frames are close in time)
			self.camera_left.grab()
			self.camera_right.grab()
			# Get the images
			_, image_left = self.camera_left.retrieve()
			_, image_right = self.camera_right.retrieve()
			# Send the image via the external callback function
			self.image_callback( image_left, image_right )
		# Release the cameras
		self.camera_left.release()
		self.camera_right.release()
| mit | Python |
cadfdadc14f0bf41456ef32a3e339e76b887eea4 | Fix missing self.shape | toslunar/chainerrl,toslunar/chainerrl | chainerrl/explorers/additive_ou.py | chainerrl/explorers/additive_ou.py | from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from logging import getLogger
from chainer import cuda
import numpy as np
from chainerrl import explorer
class AdditiveOU(explorer.Explorer):
    """Additive Ornstein-Uhlenbeck process.
    Used in https://arxiv.org/abs/1509.02971 for exploration.
    Args:
        mu (float): Mean of the OU process
        theta (float): Friction to pull towards the mean
        sigma (float): Scale of noise
    """
    def __init__(self, mu=0.0, theta=0.15, sigma=0.3,
                 dt=1.0, logger=getLogger(__name__)):
        self.mu = mu
        self.theta = theta
        self.sigma = sigma
        self.dt = dt
        self.logger = logger
        # Lazily initialised to the greedy action's shape on first use
        self.ou_state = None
    def evolve(self):
        """Advance the OU state by one Euler-Maruyama step of size dt."""
        # For a Wiener process, dW ~ N(0,u)
        dW = np.random.normal(size=self.ou_state.shape, loc=0,
                              scale=np.sqrt(self.dt))
        # dx = theta (mu - x) + sigma dW
        self.ou_state += self.theta * \
            (self.mu - self.ou_state) * self.dt + self.sigma * dW
    def select_action(self, t, greedy_action_func):
        """Return the greedy action plus the current OU noise."""
        a = greedy_action_func()
        if self.ou_state is None:
            self.ou_state = np.full(a.shape, self.mu, dtype=np.float32)
        self.evolve()
        noise = self.ou_state
        self.logger.debug('t:%s noise:%s', t, noise)
        if isinstance(a, cuda.cupy.ndarray):
            noise = cuda.to_gpu(noise)
        return a + noise
    # NOTE(review): dt is omitted from the repr - confirm if intentional.
    def __repr__(self):
        return 'AdditiveOU(mu={}, theta={}, sigma={})'.format(
            self.mu, self.theta, self.sigma)
| from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from logging import getLogger
from chainer import cuda
import numpy as np
from chainerrl import explorer
class AdditiveOU(explorer.Explorer):
    """Additive Ornstein-Uhlenbeck process.
    Used in https://arxiv.org/abs/1509.02971 for exploration.
    Args:
        mu (float): Mean of the OU process
        theta (float): Friction to pull towards the mean
        sigma (float): Scale of noise
    """
    def __init__(self, mu=0.0, theta=0.15, sigma=0.3,
                 dt=1.0, logger=getLogger(__name__)):
        self.mu = mu
        self.theta = theta
        self.sigma = sigma
        self.dt = dt
        self.logger = logger
        # Lazily initialised to the greedy action's shape on first use
        self.ou_state = None
    def evolve(self):
        """Advance the OU state by one Euler-Maruyama step of size dt."""
        # For a Wiener process, dW ~ N(0,u)
        # Bug fix: the noise must match the OU state's shape; this object
        # has no attribute 'shape', so the old size=self.shape raised
        # AttributeError on every call.
        dW = np.random.normal(size=self.ou_state.shape, loc=0,
                              scale=np.sqrt(self.dt))
        # dx = theta (mu - x) + sigma dW
        self.ou_state += self.theta * \
            (self.mu - self.ou_state) * self.dt + self.sigma * dW
    def select_action(self, t, greedy_action_func):
        """Return the greedy action plus the current OU noise."""
        a = greedy_action_func()
        if self.ou_state is None:
            self.ou_state = np.full(a.shape, self.mu, dtype=np.float32)
        self.evolve()
        noise = self.ou_state
        self.logger.debug('t:%s noise:%s', t, noise)
        if isinstance(a, cuda.cupy.ndarray):
            noise = cuda.to_gpu(noise)
        return a + noise
    def __repr__(self):
        return 'AdditiveOU(mu={}, theta={}, sigma={})'.format(
            self.mu, self.theta, self.sigma)
| mit | Python |
7b581c76eab5ae50ec830b2b306d61a4775fa393 | improve comment | coreruleset/coreruleset,SpiderLabs/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs,umarfarook882/owasp-modsecurity-crs,umarfarook882/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs,coreruleset/coreruleset,coreruleset/coreruleset,coreruleset/coreruleset,umarfarook882/owasp-modsecurity-crs,coreruleset/coreruleset,umarfarook882/owasp-modsecurity-crs,coreruleset/coreruleset,SpiderLabs/owasp-modsecurity-crs,umarfarook882/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs | util/regexp-assemble/regexp-cmdline.py | util/regexp-assemble/regexp-cmdline.py | #!/usr/bin/env python
#
# Convert a word list to a list of regexps usable by Regexp::Assemble.
#
# Examples:
# cat regexp-932100.txt | ./regexp-cmdline.py unix | ./regexp-assemble.pl
# cat regexp-932110.txt | ./regexp-cmdline.py windows | ./regexp-assemble.pl
# cat regexp-932150.txt | ./regexp-cmdline.py unix | ./regexp-assemble.pl
#
# Refer to rule 932100, 932110, 932150 for documentation.
#
import fileinput, string, sys
# Convert a single line to regexp format, and insert anti-cmdline
# evasions between characters.
def regexp_str(str, evasion):
# By convention, if the line starts with ' char, copy the rest
# verbatim.
if str[0] == "'":
return str[1:]
result = ''
for i, char in enumerate(str):
if i > 0:
result += evasion
result += regexp_char(char, evasion)
return result
# Ensure that some special characters are quoted
def regexp_char(char, evasion):
char = string.replace(char, ' ', '\s')
char = string.replace(char, '.', '\.')
char = string.replace(char, '-', '\-')
char = string.replace(char, '*', '.*')
return char
# Insert these sequences between characters to prevent evasion.
# This emulates the relevant parts of t:cmdLine.
evasions = {
'unix': r'''[\\\\'\"]*''',
'windows': r'''[\"\^]*''',
}
# Parse arguments
if len(sys.argv) <= 1 or not sys.argv[1] in evasions:
print sys.argv[0] + ' unix|windows [infile]'
sys.exit(1)
evasion = evasions[sys.argv[1]]
del sys.argv[1]
# Process lines from input file, or if not specified, standard input
for line in fileinput.input():
line = line.rstrip('\n')
line = line.split('#')[0]
if line != '':
print regexp_str(line, evasion)
| #!/usr/bin/env python
#
# Convert a word list to a list of regexps usable by Regexp::Assemble.
#
# Examples:
# cat regexp-932100.txt | ./regexp-cmdline.py unix | ./regexp-assemble.pl
# cat regexp-932110.txt | ./regexp-cmdline.py windows | ./regexp-assemble.pl
# cat regexp-932150.txt | ./regexp-cmdline.py unix | ./regexp-assemble.pl
#
# Refer to rule 932100, 932110, 932150 for documentation.
#
import fileinput, string, sys
# Convert a single line to regexp format, and pad it with evasions.
def regexp_str(str, evasion):
# By convention, if the line starts with '\', keep it untouched.
if str[0] == "'":
return str[1:]
result = ''
for i, char in enumerate(str):
if i > 0:
result += evasion
result += regexp_char(char, evasion)
return result
# Ensure that some special characters are quoted
def regexp_char(char, evasion):
char = string.replace(char, ' ', '\s')
char = string.replace(char, '.', '\.')
char = string.replace(char, '-', '\-')
char = string.replace(char, '*', '.*')
return char
# Insert these sequences between characters to prevent evasion.
# This emulates the relevant parts of t:cmdLine.
evasions = {
'unix': r'''[\\\\'\"]*''',
'windows': r'''[\"\^]*''',
}
# Parse arguments
if len(sys.argv) <= 1 or not sys.argv[1] in evasions:
print sys.argv[0] + ' unix|windows [infile]'
sys.exit(1)
evasion = evasions[sys.argv[1]]
del sys.argv[1]
# Process lines from input file, or if not specified, standard input
for line in fileinput.input():
line = line.rstrip('\n')
line = line.split('#')[0]
if line != '':
print regexp_str(line, evasion)
| apache-2.0 | Python |
cf399d6daa7df64516e7fe34dceda70c8d634d57 | add formatter to error log file handler. | iDigBio/idigbio-media-appliance,iDigBio/idigbio-media-appliance,iDigBio/idigbio-media-appliance,iDigBio/idigbio-media-appliance | idigbio_media_appliance/app.py | idigbio_media_appliance/app.py | from __future__ import absolute_import, print_function, division, unicode_literals
import os
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask
from . import config
import logging
# from gevent import monkey
# monkey.patch_all()
# Global logging: everything at DEBUG on the root logger, plus a
# WARNING-level file handler attached to the Flask app logger.
logging.root.setLevel(logging.DEBUG)
FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=FORMAT)
file_handler = logging.FileHandler(config.USER_DATA + "/error.log")
file_handler.setLevel(logging.WARNING)
# Bug fix: Handler.setFormatter() expects a logging.Formatter instance;
# passing the raw format string raises AttributeError ('str' object has
# no attribute 'format') the first time a record is emitted.
file_handler.setFormatter(logging.Formatter(FORMAT))
app = Flask(__name__)
app.logger.addHandler(file_handler)
app.config.from_object(config)
db = SQLAlchemy(app)
from .models import * # noqa
from .create_db import create_or_update_db # noqa
def init_routes():
    """Register the application's API blueprints and views on the app.

    The imports are done inside the function, presumably to avoid
    circular imports with modules that import ``app``/``db`` from this
    module - confirm before moving them to module level.
    """
    from .api.media import media_api
    from .api.appuser import appuser_api
    from .api.services import service_api
    from .views.index import index_view
    app.register_blueprint(media_api.blueprint, url_prefix='/api')
    app.register_blueprint(appuser_api.blueprint, url_prefix='/api')
    app.register_blueprint(service_api, url_prefix='/api')
    app.register_blueprint(index_view)
import os
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask
from . import config
import logging
# from gevent import monkey
# monkey.patch_all()
# Global logging: everything at DEBUG on the root logger, plus a
# WARNING-level file handler attached to the Flask app logger.
logging.root.setLevel(logging.DEBUG)
FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=FORMAT)
# NOTE(review): no Formatter is attached to this handler, so records in
# error.log use the default "%(message)s" layout rather than FORMAT.
file_handler = logging.FileHandler(config.USER_DATA + "/error.log")
file_handler.setLevel(logging.WARNING)
app = Flask(__name__)
app.logger.addHandler(file_handler)
app.config.from_object(config)
db = SQLAlchemy(app)
from .models import * # noqa
from .create_db import create_or_update_db # noqa
def init_routes():
    """Register the application's API blueprints and views on the app.

    The imports are done inside the function, presumably to avoid
    circular imports with modules that import ``app``/``db`` from this
    module - confirm before moving them to module level.
    """
    from .api.media import media_api
    from .api.appuser import appuser_api
    from .api.services import service_api
    from .views.index import index_view
    app.register_blueprint(media_api.blueprint, url_prefix='/api')
    app.register_blueprint(appuser_api.blueprint, url_prefix='/api')
    app.register_blueprint(service_api, url_prefix='/api')
    app.register_blueprint(index_view)
0e3006bee1e23deb2b4b1ea9410d6b61991534dc | fix import | ndawe/rootpy,kreczko/rootpy,ndawe/rootpy,rootpy/rootpy,kreczko/rootpy,rootpy/rootpy,ndawe/rootpy,kreczko/rootpy,rootpy/rootpy | benchmarks/tree/io/tree_read.py | benchmarks/tree/io/tree_read.py | #!/usr/bin/env python
# this import is required to register the Tree class
import rootpy.tree
from rootpy.io import open
from time import time
from ROOT import TTreeCache
import sys
for cached in (False, True):
try:
f = open("test.root")
except IOError:
sys.exit("test.root does not exist. Please run tree_write.py first.")
tree = f.test
if cached:
TTreeCache.SetLearnEntries(1)
tree.SetCacheSize(10000000)
tree.use_cache(cached)
start_time = time()
for event in tree:
event.x
end_time = time()
print "%.2fsec to read one branch" % (end_time - start_time)
start_time = time()
for event in tree:
event.x
event.y
end_time = time()
print "%.2fsec to read two branches" % (end_time - start_time)
start_time = time()
for event in tree:
event.x
event.y
event.z
end_time = time()
print "%.2fsec to read three branches" % (end_time - start_time)
print "Reading %i bytes in %i transactions" % (f.GetBytesRead(), f.GetReadCalls())
f.close()
| #!/usr/bin/env python
from rootpy.io import open
from time import time
from ROOT import TTreeCache
import sys
for cached in (False, True):
try:
f = open("test.root")
except IOError:
sys.exit("test.root does not exist. Please run tree_write.py first.")
tree = f.test
if cached:
TTreeCache.SetLearnEntries(1)
tree.SetCacheSize(10000000)
tree.use_cache(cached)
start_time = time()
for event in tree:
event.x
end_time = time()
print "%.2fsec to read one branch" % (end_time - start_time)
start_time = time()
for event in tree:
event.x
event.y
end_time = time()
print "%.2fsec to read two branches" % (end_time - start_time)
start_time = time()
for event in tree:
event.x
event.y
event.z
end_time = time()
print "%.2fsec to read three branches" % (end_time - start_time)
print "Reading %i bytes in %i transactions" % (f.GetBytesRead(), f.GetReadCalls())
f.close()
| bsd-3-clause | Python |
b7a082752c4012ff66ed6baf76cb467960c456be | add herokuapp domain to allowed hosts in production | akurihara/impulse,akurihara/impulse | impulse/settings/production.py | impulse/settings/production.py | from .base import *
import dj_database_url
DEBUG = False
ALLOWED_HOSTS = ['.herokuapp.com']
db_from_envronment = dj_database_url.config()
DATABASES = {
'default': db_from_envronment
}
| from .base import *
import dj_database_url
DEBUG = False
db_from_envronment = dj_database_url.config()
DATABASES = {
'default': db_from_envronment
}
| mit | Python |
2f9e92c884984bb816c7996db12d84023810b073 | Disable syntax for large log files | drivnal/drivnal,drivnal/drivnal,drivnal/drivnal | drivnal/handlers/log.py | drivnal/handlers/log.py | from drivnal.constants import *
from drivnal.client import Client
import drivnal.utils as utils
from drivnal import server
@server.app.route('/log/<type>/<volume_id>/<type_id>', methods=['GET'])
def log_get(type, volume_id, type_id):
client = Client()
volume = client.get_volume(volume_id)
data = None
if not volume:
return utils.jsonify({
'error': VOLUME_NOT_FOUND,
'error_msg': VOLUME_NOT_FOUND_MSG,
}, 404)
if type == 'snapshot':
snapshot_id = type_id
snapshot = volume.get_snapshot(int(snapshot_id))
if not snapshot:
return utils.jsonify({
'error': SNAPSHOT_NOT_FOUND,
'error_msg': SNAPSHOT_NOT_FOUND_MSG,
}, 404)
# Log files > 1mb load slow with syntax highlighter
if snapshot.log_size() > 1048576:
syntax = ''
else:
syntax = 'shell'
data = {
'id': snapshot.id,
'syntax': syntax,
'data': snapshot.log_read(),
}
elif type == 'task':
task_id = type_id
# TODO
if not data:
return utils.jsonify({
'error': FILE_NOT_FOUND,
'error_msg': FILE_NOT_FOUND_MSG,
}, 404)
return utils.jsonify(data)
| from drivnal.constants import *
from drivnal.client import Client
import drivnal.utils as utils
from drivnal import server
@server.app.route('/log/<type>/<volume_id>/<type_id>', methods=['GET'])
def log_get(type, volume_id, type_id):
    """Return a snapshot's log (or, eventually, a task's) as JSON.

    Responds 404 when the volume, snapshot or log data is missing.
    NOTE(review): ``type`` shadows the builtin; its name is dictated by
    the URL rule above.
    """
    client = Client()
    volume = client.get_volume(volume_id)
    data = None
    if not volume:
        return utils.jsonify({
            'error': VOLUME_NOT_FOUND,
            'error_msg': VOLUME_NOT_FOUND_MSG,
        }, 404)
    if type == 'snapshot':
        snapshot_id = type_id
        snapshot = volume.get_snapshot(int(snapshot_id))
        if not snapshot:
            return utils.jsonify({
                'error': SNAPSHOT_NOT_FOUND,
                'error_msg': SNAPSHOT_NOT_FOUND_MSG,
            }, 404)
        data = {
            'id': snapshot.id,
            'syntax': 'shell',
            'data': snapshot.read_log(),
        }
    elif type == 'task':
        task_id = type_id
        # TODO: task logs are not implemented yet; falls through to 404.
    if not data:
        return utils.jsonify({
            'error': FILE_NOT_FOUND,
            'error_msg': FILE_NOT_FOUND_MSG,
        }, 404)
    return utils.jsonify(data)
| agpl-3.0 | Python |
f30c36fbc644b42af8280885d04bb71c0829bc66 | Bump version. | vmlaker/coils | coils/version.py | coils/version.py | __version__ = '2.0.1'
| __version__ = '2.0.0'
| mit | Python |
f49cc86901f4670f28abfbd6c6c0a7ac88f218f1 | support list in runlist | Fewbytes/cloudify-plugin-chef,Fewbytes/cloudify-plugin-chef | cloudify_plugin_chef/operations.py | cloudify_plugin_chef/operations.py | from cloudify.decorators import operation as _operation
from cloudify_plugin_chef.chef_client import run_chef
EXPECTED_OP_PREFIX = 'cloudify.interfaces.lifecycle'
def _extract_op(ctx):
prefix, _, op = ctx.operation.rpartition('.')
if prefix != EXPECTED_OP_PREFIX:
ctx.warn("Node operation is expected to start with '{0}' "
"but starts with '{1}'".format(EXPECTED_OP_PREFIX, prefx))
if op not in ctx.properties['chef_config']['runlists']:
raise ValueError("chef_config.runlists does not have an entry for operation '{0}', "
"only {1}".format(op, ','.join(ctx.properties['chef_config']['runlists'].keys())))
return op
# Remember: attributes
@_operation
def operation(ctx, **kwargs):
if 'runlist' in ctx.properties['chef_config']:
ctx.logger.info("Using explicitly provided Chef runlist")
runlist = ctx.properties['chef_config']['runlist']
else:
op = _extract_op(ctx)
ctx.logger.info("Using Chef runlist for operation {0}".format(op))
runlist = ctx.properties['chef_config']['runlists'][op]
if isinstance(runlist, list):
runlist = ','.join(runlist)
ctx.logger.info("Chef runlist: {0}".format(runlist))
run_chef(ctx, runlist)
| from cloudify.decorators import operation as _operation
from cloudify_plugin_chef.chef_client import run_chef
EXPECTED_OP_PREFIX = 'cloudify.interfaces.lifecycle'
def _extract_op(ctx):
prefix, _, op = ctx.operation.rpartition('.')
if prefix != EXPECTED_OP_PREFIX:
ctx.warn("Node operation is expected to start with '{0}' "
"but starts with '{1}'".format(EXPECTED_OP_PREFIX, prefx))
if op not in ctx.properties['chef_config']['runlists']:
raise ValueError("chef_config.runlists does not have an entry for operation '{0}', "
"only {1}".format(op, ','.join(ctx.properties['chef_config']['runlists'].keys())))
return op
# Remember: attributes
@_operation
def operation(ctx, **kwargs):
if 'runlist' in ctx.properties['chef_config']:
runlist = ctx.properties['chef_config']['runlist']
else:
op = _extract_op(ctx)
ctx.logger.info("Using Chef runlist for operation {0}".format(op))
runlist = ctx.properties['chef_config']['runlists'][op]
ctx.logger.info("Chef runlist: {0}".format(runlist))
run_chef(ctx, runlist)
| apache-2.0 | Python |
b8ca9de33bbb9aab87f4f620143e858154264460 | Define a default file for the input data file | ric2b/Mutual_Debt_Simplification | mutual_debt/main.py | mutual_debt/main.py | """Mutual Debt Simplification
Usage:
simplify-debts [ <data_file> ]
simplify-debts (-h | --help)
Options:
-h --help Show this screen.
"""
import json
import sys
from mutual_debt.simplification import debt_list_to_graph, \
simplify_debt_graph, draw_graph
def print_error(*args, sep=' ', end='\n'):
""" Prints values to the stderr stream """
print("ERROR:", *args, sep, end, file=sys.stderr)
DEFAULT_DATA_FILE = 'debts.json'
def main():
if len(sys.argv) > 2:
print(__doc__)
sys.exit(1)
if len(sys.argv) == 1:
print("INFO: using default data file `%s`" % DEFAULT_DATA_FILE)
data_file = DEFAULT_DATA_FILE
else:
if sys.argv[1] == '-h' or sys.argv[1] == '--help':
print(__doc__)
return
else:
data_file = sys.argv[1]
# Try to load debts from data file
# On failure: show error and exit using error code
try:
with open(data_file) as file:
debts = json.load(file)
except IOError as error:
print_error("failed to read data file:", str(error))
sys.exit(1)
initial_debt_graph = debt_list_to_graph(debts['debt_list'], debts['names'])
draw_graph(initial_debt_graph, 'Initial_Mutual_Debt', open_file=False)
simplified_debt_graph = simplify_debt_graph(initial_debt_graph)
draw_graph(simplified_debt_graph, 'Simplified_Mutual_Debt')
if __name__ == '__main__':
main()
| """Mutual Debt Simplification
Usage:
simplify-debts <data_file>
simplify-debts (-h | --help)
Options:
-h --help Show this screen.
"""
import json
import sys
from mutual_debt.simplification import debt_list_to_graph, \
simplify_debt_graph, draw_graph
def print_error(*args, sep=' ', end='\n'):
""" Prints values to the stderr stream """
print("ERROR:", *args, sep, end, file=sys.stderr)
def main():
if len(sys.argv) != 2:
print(__doc__)
sys.exit(1)
if sys.argv[1] == '-h' or sys.argv[1] == '--help':
print(__doc__)
return
data_file = sys.argv[1]
# Try to load debts from data file
# On failure: show error and exit using error code
try:
with open(data_file) as file:
debts = json.load(file)
except IOError as error:
print_error("failed to read data file:", str(error))
sys.exit(1)
initial_debt_graph = debt_list_to_graph(debts['debt_list'], debts['names'])
draw_graph(initial_debt_graph, 'Initial_Mutual_Debt', open_file=False)
simplified_debt_graph = simplify_debt_graph(initial_debt_graph)
draw_graph(simplified_debt_graph, 'Simplified_Mutual_Debt')
if __name__ == '__main__':
main()
| mit | Python |
499e4cd6ef85269881622e27417e7ff5caa62464 | change test data | soazig/project-epsilon-1,timothy1191xa/project-epsilon-1,ye-zhi/project-epsilon,berkeley-stat159/project-epsilon | code/utils/tests/test_smoothing.py | code/utils/tests/test_smoothing.py | """ Tests for smoothvoxels in smooth module
Run at the tests directory with:
nosetests test_smoothing.py
"""
import os
import sys
import numpy as np
import itertools
import scipy.ndimage
from scipy.ndimage.filters import gaussian_filter
import matplotlib.pyplot as plt
import nibabel as nib
from numpy.testing import assert_almost_equal
from nose.tools import assert_not_equals
project_path = '../../../'
# Add path to functions to the system path.
sys.path.append(os.path.join(os.path.dirname(__file__), "../functions/"))
# Load smoothing function.
from smoothing import smoothing
def test_smooth():
# Read in the image data.
img = nib.load(project_path +'data/ds114/sub009/BOLD/task002_run001/ds114_sub009_t2r1.nii')
data = img.get_data()
# Run the smoothing function with sigma 0 at time 12
non_smoothed_data = smoothing(data, 0, 12)
# assert that data at time 12 and non_smoothed_data are equal since sigma = 0
assert_almost_equal(data[..., 12], non_smoothed_data)
# Run the smoothvoxels function with sigma 1 at time 100
smoothed_data = smoothing(data, 1, 100)
# assert that data at time 16 and smoothed_data are not equal
assert_not_equals(data[..., 100].all(), smoothed_data.all())
| """ Tests for smoothvoxels in smooth module
Run at the tests directory with:
nosetests test_smoothing.py
"""
import os
import sys
import numpy as np
import itertools
import scipy.ndimage
from scipy.ndimage.filters import gaussian_filter
import matplotlib.pyplot as plt
import nibabel as nib
from numpy.testing import assert_almost_equal
from nose.tools import assert_not_equals
project_path = '../../../'
# Add path to functions to the system path.
sys.path.append(os.path.join(os.path.dirname(__file__), "../functions/"))
# Load smoothing function.
from smoothing import smoothing
def test_smooth():
    """smoothing() is identity for sigma=0 and changes data for sigma=1."""
    # Read in the image data.
    img = nib.load(project_path +'data/ds005/sub011/BOLD/task001_run003/bold.nii')
    data = img.get_data()
    # Run the smoothing function with sigma 0 at time 12
    non_smoothed_data = smoothing(data, 0, 12)
    # assert that data at time 12 and non_smoothed_data are equal since sigma = 0
    assert_almost_equal(data[..., 12], non_smoothed_data)
    # Run the smoothvoxels function with sigma 1 at time 100
    smoothed_data = smoothing(data, 1, 100)
    # assert that data at time 16 and smoothed_data are not equal
    # NOTE(review): .all() reduces each side to one boolean, so this only
    # compares two bools - a weak check; consider comparing the arrays.
    assert_not_equals(data[..., 100].all(), smoothed_data.all())
| bsd-3-clause | Python |
d6b1f7c03ec2b32823fe2c4214e6521e8074cd9f | Make sure the 'channel' argument is not Unicode when we send it, because Twisted doesn't like that | Didero/DideRobot | commands/join.py | commands/join.py | from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['join']
helptext = "Makes me join another channel, if I'm allowed to at least"
def execute(self, message):
"""
:type message: IrcMessage
"""
replytext = u""
if message.messagePartsLength < 1:
replytext = u"Please provide a channel for me to join"
else:
allowedChannels = message.bot.factory.settings.get('connection', 'allowedChannels').split(',')
channel = message.messageParts[0].encode('utf8') #Make sure it's a str and not unicode, otherwise Twisted chokes on it
if channel.startswith('#'):
channel = channel[1:]
if channel not in allowedChannels and not message.bot.factory.isUserAdmin(message.user):
replytext = u"I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
else:
channel = '#' + channel
replytext = u"All right, I'll go to {}. See you there!".format(channel)
message.bot.join(channel)
message.bot.say(message.source, replytext) | from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['join']
helptext = "Makes me join another channel, if I'm allowed to at least"
def execute(self, message):
"""
:type message: IrcMessage
"""
replytext = u""
if message.messagePartsLength < 1:
replytext = u"Please provide a channel for me to join"
else:
allowedChannels = message.bot.factory.settings.get('connection', 'allowedChannels').split(',')
channel = message.messageParts[0]
if channel.startswith('#'):
channel = channel[1:]
if channel not in allowedChannels and not message.bot.factory.isUserAdmin(message.user):
replytext = u"I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
else:
channel = '#' + channel
replytext = u"All right, I'll go to {}. See you there!".format(channel)
message.bot.join(channel)
message.bot.say(message.source, replytext) | mit | Python |
6b04b44a2d54aa557ae60caff443f1cb0e248cdf | Update __init__.py | isponline/netmiko,nvoron23/netmiko,fooelisa/netmiko,jinesh-patel/netmiko,ktbyers/netmiko,rumo/netmiko,mzbenami/netmiko,enzzzy/netmiko,isponline/netmiko,ivandgreat/netmiko,isidroamv/netmiko,shamanu4/netmiko,brutus333/netmiko,ivandgreat/netmiko,isidroamv/netmiko,mileswdavis/netmiko,MikeOfNoTrades/netmiko,jumpojoy/netmiko,enzzzy/netmiko,mileswdavis/netmiko,shsingh/netmiko,mzbenami/netmiko,fooelisa/netmiko,shsingh/netmiko,brutus333/netmiko,rumo/netmiko,MikeOfNoTrades/netmiko,jumpojoy/netmiko,jinesh-patel/netmiko,rdezavalia/netmiko,rdezavalia/netmiko,nitzmahone/netmiko,nitzmahone/netmiko,ktbyers/netmiko,shamanu4/netmiko | netmiko/__init__.py | netmiko/__init__.py | from ssh_dispatcher import ssh_dispatcher
from cisco import CiscoIosSSH
from cisco import CiscoAsaSSH
from arista import AristaSSH
from f5 import F5LtmSSH
__version__ = '0.1.2'
| from ssh_dispatcher import ssh_dispatcher
from cisco import CiscoIosSSH
from cisco import CiscoAsaSSH
from arista import AristaSSH
__version__ = '0.1.2'
| mit | Python |
263180cdc550d0114bd2b981d0950f807d314f17 | add import | mqingyn/peewee-manager | peeweemgr/__init__.py | peeweemgr/__init__.py | __author__ = 'mqingyn'
__version__ = '1.0.3'
version = tuple(map(int, __version__.split('.')))
| __author__ = 'mqingyn'
__version__ = '1.0.2'
version = tuple(map(int, __version__.split('.')))
| mit | Python |
8060fec64a8e73c6745a904f4f43f953f94e4f0d | Update Ch. 17 PracticeQuestions: imported specific function | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/CrackingCodesWithPython/Chapter17/PracticeQuestions.py | books/CrackingCodesWithPython/Chapter17/PracticeQuestions.py | # Chapter 17 Practice Questions
# 1. What is the word pattern for the word hello?
from books.CrackingCodesWithPython.Chapter17.makeWordPatterns import getWordPattern
wordPat = getWordPattern('hello')
print(wordPat)
# 2. Do mammoth and goggles have the same word pattern?
wordPat1 = getWordPattern('mammoth')
wordPat2 = getWordPattern('goggles')
if wordPat1 == wordPat2:
print("Yes: " + wordPat1)
else:
print("No: " + wordPat1 + " and " + wordPat2)
# 3. Which word could be the possible plaintext word for the cipherword
# PYYACAO? Alleged, efficiently, or poodle?
wordPat = []
words = ["PYYACAO", "Alleged", "efficiently", "poodle"]
for word in words:
wordPat.append(getWordPattern(word))
for index in range(1, len(wordPat)):
if wordPat[0] == wordPat[index]:
print("It's gotta be %s!" % words[index])
break
elif index == len(wordPat) - 1:
print("Match not found (-_-)")
| # Chapter 17 Practice Questions
# 1. What is the word pattern for the word hello?
import books.CrackingCodesWithPython.Chapter17.makeWordPatterns
wordPat = books.CrackingCodesWithPython.Chapter17.makeWordPatterns.getWordPattern('hello')
print(wordPat)
# 2. Do mammoth and goggles have the same word pattern?
wordPat1 = books.CrackingCodesWithPython.Chapter17.makeWordPatterns.getWordPattern('mammoth')
wordPat2 = books.CrackingCodesWithPython.Chapter17.makeWordPatterns.getWordPattern('goggles')
if wordPat1 == wordPat2:
    print("Yes: " + wordPat1)
else:
    print("No: " + wordPat1 + " and " + wordPat2)
# 3. Which word could be the possible plaintext word for the cipherword
# PYYACAO? Alleged, efficiently, or poodle?
wordPat = []
words = ["PYYACAO", "Alleged", "efficiently", "poodle"]
for word in words:
    wordPat.append(books.CrackingCodesWithPython.Chapter17.makeWordPatterns.getWordPattern(word))
# Compare the cipherword's pattern (index 0) against each candidate's.
for index in range(1, len(wordPat)):
    if wordPat[0] == wordPat[index]:
        print("It's gotta be %s!" % words[index])
        break
    elif index == len(wordPat) - 1:
        print("Match not found (-_-)")
| mit | Python |
eaea688fb9e291c83d45b08c1a34591cdca21312 | Update Ch. 18 PracticeQuestions: changed Question 2 solution | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/CrackingCodesWithPython/Chapter18/PracticeQuestions.py | books/CrackingCodesWithPython/Chapter18/PracticeQuestions.py | # Chapter 18 Practice Questions
# 1. Which cipher is the Vigenere cipher similar to, except that the Vigenere
# cipher uses multiple keys instead of just one key?
# Hint: Check page 248
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
message = "Tuw Rmxeawni Ticzav zs faimcae lk xye Psawrr pallvr." # Encrypted with ANSWER
#print(decryptMessage(blank, blank)) # Fill in the blanks
# 2. How many possible keys are there for a Vigenere key with a key length
# of 10?
# Hint: Check page 250
#
# a. Hundreds
# b. Thousands
# c. Millions
# d. More than a trillion
from math import pow # Don't do this - imports should be at the top of the file
solution = pow(26, 10)
answer = "Undefined"
if solution < 999:
answer = "a. Hundreds"
elif 999 < solution < 9999:
answer = "b. Thousands"
elif 999999 < solution < 999999999:
answer = "c. Millions"
elif solution > 1000000000:
answer = "d. More than a trillion"
else:
print("404: Answer not found (;_;)")
#print("%s: %s" % (answer, solution))
# 3. What kind of cipher is the Vigenere cipher?
# Hint: Check page 248
message = "Mft Zbetrxpt Gbnwik gh e imactjeltztxba hyuqimmsimhl rmiftv." # Encrypted with TYPE
#print(decryptMessage(blank, blank)) # Fill in the blanks
| # Chapter 18 Practice Questions
# 1. Which cipher is the Vigenere cipher similar to, except that the Vigenere
# cipher uses multiple keys instead of just one key?
# Hint: Check page 248
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
message = "Tuw Rmxeawni Ticzav zs faimcae lk xye Psawrr pallvr." # Encrypted with ANSWER
#print(decryptMessage(blank, blank)) # Fill in the blanks
# 2. How many possible keys are there for a Vigenere key with a key length
# of 10?
# Hint: Check page 250
#
# a. Hundreds
# b. Thousands
# c. Millions
# d. More than a trillion
from math import pow # Don't do this - imports should be at the top of the file
print(pow(26, 10))
# 3. What kind of cipher is the Vigenere cipher?
# Hint: Check page 248
message = "Mft Zbetrxpt Gbnwik gh e imactjeltztxba hyuqimmsimhl rmiftv." # Encrypted with TYPE
#print(decryptMessage(blank, blank)) # Fill in the blanks
| mit | Python |
fb44db3b03274efff025cb19c312d2230085033d | Update representation of exception | infoxchange/ixprofile-client,infoxchange/ixprofile-client | ixprofile_client/exceptions.py | ixprofile_client/exceptions.py | """
Exceptions raised when interacting with the profile server
"""
class ProfileServerException(Exception):
"""
Base exception for all profile server errors
"""
def __init__(self, response=None):
super(ProfileServerFailure, self).__init__()
self.response = response
try:
self.json = self.response.json()
except (AttributeError, ValueError):
pass
def __str__(self):
"""
String representation of the exception
"""
return self.__unicode__()
def __unicode__(self):
"""
Unicode representation of the exception
"""
try:
return "Profile server failure: %d %s." % (
self.response.status_code, self.response.reason)
except (AttributeError, KeyError):
return "Profile server failure: %s." % self.response
class EmailNotUnique(ProfileServerException):
"""
An email used in an interaction with the profile server is not unique
"""
def __init__(self, response, email):
super(EmailNotUnique, self).__init__(response)
self.email = email
def __str__(self):
"""
String representation of the exception
"""
return self.__unicode__()
def __unicode__(self):
"""
Unicode representation of the exception
"""
return ("Email %s is not unique on the profile server. "
"Consider installing the "
"PrintEmailNotUniqueMessage Middleware." % self.email
)
def __repr__(self):
"""
Unique representation of the exception
"""
return "EmailNotUnique('%s')" % self.email
ProfileServerFailure = ProfileServerException
| """
Exceptions raised when interacting with the profile server
"""
class ProfileServerException(Exception):
"""
Base exception for all profile server errors
"""
def __init__(self, response=None):
super(ProfileServerFailure, self).__init__()
self.response = response
try:
self.json = self.response.json()
except (AttributeError, ValueError):
pass
def __str__(self):
"""
String representation of the exception
"""
return self.__unicode__()
def __unicode__(self):
"""
Unicode representation of the exception
"""
return "Profile server failure: %d %s." % (
self.response.status_code, self.response.reason)
class EmailNotUnique(ProfileServerException):
"""
An email used in an interaction with the profile server is not unique
"""
def __init__(self, response, email):
super(EmailNotUnique, self).__init__(response)
self.email = email
def __str__(self):
"""
String representation of the exception
"""
return self.__unicode__()
def __unicode__(self):
"""
Unicode representation of the exception
"""
return ("Email %s is not unique on the profile server. "
"Consider installing the "
"PrintEmailNotUniqueMessage Middleware." % self.email
)
def __repr__(self):
"""
Unique representation of the exception
"""
return "EmailNotUnique('%s')" % self.email
ProfileServerFailure = ProfileServerException
| mit | Python |
0144b24a2482c3c23691a474e647d8be79641e12 | Update test case to new return format. | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/client/standard.py | tests/integration/client/standard.py | # -*- coding: utf-8 -*-
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class StdTest(integration.ModuleCase):
'''
Test standard client calls
'''
def test_cli(self):
'''
Test cli function
'''
cmd_iter = self.client.cmd_cli(
'minion',
'test.ping',
)
for ret in cmd_iter:
self.assertTrue(ret['minion'])
def test_iter(self):
'''
test cmd_iter
'''
cmd_iter = self.client.cmd_iter(
'minion',
'test.ping',
)
for ret in cmd_iter:
self.assertTrue(ret['minion'])
def test_iter_no_block(self):
'''
test cmd_iter_no_block
'''
cmd_iter = self.client.cmd_iter_no_block(
'minion',
'test.ping',
)
for ret in cmd_iter:
if ret is None:
continue
self.assertTrue(ret['minion'])
def test_full_returns(self):
'''
test cmd_iter
'''
ret = self.client.cmd_full_return(
'minion',
'test.ping',
)
self.assertIn('minion', ret)
self.assertEqual({'ret': True, 'success': True}, ret['minion'])
ret = self.client.cmd_full_return(
'minion',
'test.pong',
)
self.assertIn('minion', ret)
self.assertEqual(
{'out': 'nested', 'ret': '\'test.pong\' is not available.', 'success': False},
ret['minion']
)
if __name__ == '__main__':
from integration import run_tests
run_tests(StdTest)
| # -*- coding: utf-8 -*-
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class StdTest(integration.ModuleCase):
'''
Test standard client calls
'''
def test_cli(self):
'''
Test cli function
'''
cmd_iter = self.client.cmd_cli(
'minion',
'test.ping',
)
for ret in cmd_iter:
self.assertTrue(ret['minion'])
def test_iter(self):
'''
test cmd_iter
'''
cmd_iter = self.client.cmd_iter(
'minion',
'test.ping',
)
for ret in cmd_iter:
self.assertTrue(ret['minion'])
def test_iter_no_block(self):
'''
test cmd_iter_no_block
'''
cmd_iter = self.client.cmd_iter_no_block(
'minion',
'test.ping',
)
for ret in cmd_iter:
if ret is None:
continue
self.assertTrue(ret['minion'])
def test_full_returns(self):
'''
test cmd_iter
'''
ret = self.client.cmd_full_return(
'minion',
'test.ping',
)
self.assertIn('minion', ret)
self.assertEqual(ret['minion'], {'ret': True, 'success': True})
ret = self.client.cmd_full_return(
'minion',
'test.pong',
)
self.assertIn('minion', ret)
self.assertEqual(
ret['minion'],
{'ret': '\'test.pong\' is not available.', 'success': False}
)
if __name__ == '__main__':
from integration import run_tests
run_tests(StdTest)
| apache-2.0 | Python |
fb7117f2acdb54560048d319235b5f3d35ae989b | Use salt.utils.versions.LooseVersion instead of distutils | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/states/test_npm.py | tests/integration/states/test_npm.py | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Erik Johnson (erik@saltstack.com)`
tests.integration.states.npm
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest, requires_network
from tests.support.mixins import SaltReturnAssertsMixin
# Import salt libs
import salt.utils
import salt.modules.cmdmod as cmd
from salt.utils.versions import LooseVersion
MAX_NPM_VERSION = '5.0.0'
@skipIf(salt.utils.which('npm') is None, 'npm not installed')
class NpmStateTest(ModuleCase, SaltReturnAssertsMixin):
@requires_network()
@destructiveTest
def test_npm_installed_removed(self):
'''
Basic test to determine if NPM module was successfully installed and
removed.
'''
ret = self.run_state('npm.installed', name='pm2')
self.assertSaltTrueReturn(ret)
ret = self.run_state('npm.removed', name='pm2')
self.assertSaltTrueReturn(ret)
@requires_network()
@destructiveTest
def test_npm_install_url_referenced_package(self):
'''
Determine if URL-referenced NPM module can be successfully installed.
'''
ret = self.run_state('npm.installed', name='git://github.com/request/request')
self.assertSaltTrueReturn(ret)
ret = self.run_state('npm.removed', name='git://github.com/request/request')
self.assertSaltTrueReturn(ret)
@requires_network()
@destructiveTest
def test_npm_installed_pkgs(self):
'''
Basic test to determine if NPM module successfully installs multiple
packages.
'''
ret = self.run_state('npm.installed', name=None, pkgs=['pm2', 'grunt'])
self.assertSaltTrueReturn(ret)
@skipIf(salt.utils.which('npm') and LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION),
'Skip with npm >= 5.0.0 until #41770 is fixed')
@destructiveTest
def test_npm_cache_clean(self):
'''
Basic test to determine if NPM successfully cleans its cached packages.
'''
ret = self.run_state('npm.cache_cleaned', name=None, force=True)
self.assertSaltTrueReturn(ret)
| # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Erik Johnson (erik@saltstack.com)`
tests.integration.states.npm
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import Python libs
from __future__ import absolute_import
from distutils.version import LooseVersion
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest, requires_network
from tests.support.mixins import SaltReturnAssertsMixin
# Import salt libs
import salt.utils
import salt.modules.cmdmod as cmd
MAX_NPM_VERSION = '5.0.0'
@skipIf(salt.utils.which('npm') is None, 'npm not installed')
class NpmStateTest(ModuleCase, SaltReturnAssertsMixin):
@requires_network()
@destructiveTest
def test_npm_installed_removed(self):
'''
Basic test to determine if NPM module was successfully installed and
removed.
'''
ret = self.run_state('npm.installed', name='pm2')
self.assertSaltTrueReturn(ret)
ret = self.run_state('npm.removed', name='pm2')
self.assertSaltTrueReturn(ret)
@requires_network()
@destructiveTest
def test_npm_install_url_referenced_package(self):
'''
Determine if URL-referenced NPM module can be successfully installed.
'''
ret = self.run_state('npm.installed', name='git://github.com/request/request')
self.assertSaltTrueReturn(ret)
ret = self.run_state('npm.removed', name='git://github.com/request/request')
self.assertSaltTrueReturn(ret)
@requires_network()
@destructiveTest
def test_npm_installed_pkgs(self):
'''
Basic test to determine if NPM module successfully installs multiple
packages.
'''
ret = self.run_state('npm.installed', name=None, pkgs=['pm2', 'grunt'])
self.assertSaltTrueReturn(ret)
@skipIf(salt.utils.which('npm') and LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION),
'Skip with npm >= 5.0.0 until #41770 is fixed')
@destructiveTest
def test_npm_cache_clean(self):
'''
Basic test to determine if NPM successfully cleans its cached packages.
'''
ret = self.run_state('npm.cache_cleaned', name=None, force=True)
self.assertSaltTrueReturn(ret)
| apache-2.0 | Python |
d69aaff42cd707c9930cac37a329c898523b9cbf | fix a bug where the first line is ignored | MirkoDziadzka/ofxstatement-1822direkt | src/ofxstatement/plugins/germany_1822direkt.py | src/ofxstatement/plugins/germany_1822direkt.py | from ofxstatement.plugin import Plugin
from ofxstatement.parser import CsvStatementParser
from ofxstatement.statement import StatementLine
class FrankfurterSparkasse1822Plugin(Plugin):
def get_parser(self, filename):
encoding = self.settings.get('charset', 'iso-8859-1')
f = open(filename, 'r', encoding=encoding)
parser = FrankfurterSparkasse1822Parser(f)
parser.statement.account_id = self.settings['account']
parser.statement.bank_id = self.settings.get('bank', '50050201')
parser.statement.currency = self.settings.get('currency', 'EUR')
return parser
class FrankfurterSparkasse1822Parser(CsvStatementParser):
"""
This plugin tries to parse the provided CSV data into the same
format, as the discontinued OFX export
"""
date_format = "%d.%m.%Y"
def parse_float(self, f):
# convert a number in german localization (e.g. 1.234,56) into a float
return float(f.replace('.','').replace(',','.'))
def parse_record(self, line):
# FIXME: add header validation
#print(self.cur_record, line)
if self.cur_record < 2:
return None
if len(line) < 3:
"""e.g.: ['# 1 vorgemerkte Umsätze nicht angezeigt']"""
return None
if not line[2]:
return None
sl = StatementLine()
sl.id = line[1]
sl.date = self.parse_datetime(line[2])
sl.amount = self.parse_float(line[4])
sl.trntype = 'DEBIT' if sl.amount < 0 else 'CREDIT'
sl.payee = line[7]
sl.memo = "(%s/%s): %s" % (line[8],line[9], " ".join(line[15:]).strip())
return sl
| from ofxstatement.plugin import Plugin
from ofxstatement.parser import CsvStatementParser
from ofxstatement.statement import StatementLine
class FrankfurterSparkasse1822Plugin(Plugin):
def get_parser(self, filename):
encoding = self.settings.get('charset', 'iso-8859-1')
f = open(filename, 'r', encoding=encoding)
parser = FrankfurterSparkasse1822Parser(f)
parser.statement.account_id = self.settings['account']
parser.statement.bank_id = self.settings.get('bank', '50050201')
parser.statement.currency = self.settings.get('currency', 'EUR')
return parser
class FrankfurterSparkasse1822Parser(CsvStatementParser):
"""
This plugin tries to parse the provided CSV data into the same
format, as the discontinued OFX export
"""
date_format = "%d.%m.%Y"
def parse_float(self, f):
# convert a number in german localization (e.g. 1.234,56) into a float
return float(f.replace('.','').replace(',','.'))
def parse_record(self, line):
# FIXME: add header validation
if self.cur_record <= 2:
return None
if len(line) < 3:
"""e.g.: ['# 1 vorgemerkte Umsätze nicht angezeigt']"""
return None
if not line[2]:
return None
sl = StatementLine()
sl.id = line[1]
sl.date = self.parse_datetime(line[2])
sl.amount = self.parse_float(line[4])
sl.trntype = 'DEBIT' if sl.amount < 0 else 'CREDIT'
sl.payee = line[7]
sl.memo = "(%s/%s): %s" % (line[8],line[9], " ".join(line[15:]).strip())
return sl
| bsd-2-clause | Python |
703cc73fca458898151632739128d65edd85da2e | Change Regular expression to match Game Bots | Onapsis/sandboxed-game-engine | turnboxed/basebot.py | turnboxed/basebot.py | import os
import json
import re
import traceback
class BaseBot(object):
def __init__(self):
self._turn_cookie = None
def log_exception(self, excpt):
os.write(123456789, json.dumps({"TURN_COOKIE": self._turn_cookie,
"EXCEPTION": excpt.__class__.__name__ + " : " + str(excpt),
"TRACEBACK": traceback.format_exc()}))
def on_turn(self, msg):
raise NotImplementedError
def _get_turns(self):
self._turn_cookie = None
# Now wait for the turn
msg = json.loads(os.read(123456789, 1024))
if msg['MSG'] == "QUIT":
return
else:
self._turn_cookie = msg['TURN_COOKIE']
try:
try:
feedback = msg["DATA"]
except:
feedback = None
turn_response = self.on_turn(feedback)
os.write(123456789, json.dumps({"TURN_COOKIE": self._turn_cookie,
"MSG": turn_response}))
except Exception, e:
self.log_exception(e)
self._get_turns()
if __name__ == "__main__":
import script
from script import *
with open(script.__file__, 'r') as f:
script_content = f.read()
cs = re.findall('class\ (.*?)\(GameBot', script_content)
if len(cs) > 0:
klass = globals()[cs[-1]]
bot_instance = klass()
bot_instance._get_turns()
else:
raise Exception("No valid bot found")
| import os
import json
import re
import traceback
class BaseBot(object):
def __init__(self):
self._turn_cookie = None
def log_exception(self, excpt):
os.write(123456789, json.dumps({"TURN_COOKIE": self._turn_cookie,
"EXCEPTION": excpt.__class__.__name__ + " : " + str(excpt),
"TRACEBACK": traceback.format_exc()}))
def on_turn(self, msg):
raise NotImplementedError
def _get_turns(self):
self._turn_cookie = None
# Now wait for the turn
msg = json.loads(os.read(123456789, 1024))
if msg['MSG'] == "QUIT":
return
else:
self._turn_cookie = msg['TURN_COOKIE']
try:
try:
feedback = msg["DATA"]
except:
feedback = None
turn_response = self.on_turn(feedback)
os.write(123456789, json.dumps({"TURN_COOKIE": self._turn_cookie,
"MSG": turn_response}))
except Exception, e:
self.log_exception(e)
self._get_turns()
if __name__ == "__main__":
import script
from script import *
with open(script.__file__, 'r') as f:
script_content = f.read()
cs = re.findall('class\ (.*?)\(BaseBot', script_content)
if len(cs) > 0:
klass = globals()[cs[-1]]
bot_instance = klass()
bot_instance._get_turns()
else:
raise Exception("No valid bot found")
| mit | Python |
c5331de78f9f8496f6400b3816f31bee073abf32 | Fix alignment and close an #ifdef | skeuomorf/cryptography,skeuomorf/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,dstufft/cryptography,sholsapp/cryptography,Ayrx/cryptography,kimvais/cryptography,skeuomorf/cryptography,bwhmather/cryptography,kimvais/cryptography,Hasimir/cryptography,Hasimir/cryptography,kimvais/cryptography,bwhmather/cryptography,dstufft/cryptography,Ayrx/cryptography,dstufft/cryptography,bwhmather/cryptography,Hasimir/cryptography,kimvais/cryptography,dstufft/cryptography,Ayrx/cryptography,sholsapp/cryptography,Ayrx/cryptography,sholsapp/cryptography,bwhmather/cryptography,skeuomorf/cryptography | cryptography/hazmat/bindings/openssl/ecdh.py | cryptography/hazmat/bindings/openssl/ecdh.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#ifdef OPENSSL_NO_ECDH
#include <openssl/ecdh.h>
#endif
"""
TYPES = """
static const int Cryptography_HAS_ECDH;
typedef ... ECDH_METHOD;
"""
FUNCTIONS = """
int ECDH_compute_key(void *, size_t, const EC_POINT *, EC_KEY *,
void *(*)(const void *, size_t, void *, size_t *));
int ECDH_get_ex_new_index(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *,
CRYPTO_EX_free *);
int ECDH_set_ex_data(EC_KEY *, int, void *);
void *ECDH_get_ex_data(EC_KEY *, int);
"""
MACROS = """
"""
CUSTOMIZATIONS = """
#ifdef OPENSSL_NO_ECDH
static const Cryptography_HAS_ECDH = 0;
typedef void ECDH_METHOD;
int (*ECDH_compute_key)(void *, size_t, const EC_POINT *, EC_KEY *,
void *(*)(const void *, size_t, void *,
size_t *)) = NULL;
int (*ECDH_get_ex_new_index)(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *,
CRYPTO_EX_free *) = NULL;
int (*ECDH_set_ex_data)(EC_KEY *, int, void *) = NULL;
void *(*ECDH_get_ex_data)(EC_KEY *, int) = NULL;
#else
static const Cryptography_HAS_ECDH = 1;
#endif
"""
CONDITIONAL_NAMES = {
"Cryptography_HAS_ECDH": [
"ECDH_compute_key",
"ECDH_get_ex_new_index",
"ECDH_set_ex_data",
"ECDH_get_ex_data",
],
}
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#ifdef OPENSSL_NO_ECDH
#include <openssl/ecdh.h>
#endif
"""
TYPES = """
static const int Cryptography_HAS_ECDH;
typedef ... ECDH_METHOD;
"""
FUNCTIONS = """
int ECDH_compute_key(void *, size_t, const EC_POINT *, EC_KEY *,
void *(*)(const void *, size_t, void *, size_t *));
int ECDH_get_ex_new_index(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *,
CRYPTO_EX_free *);
int ECDH_set_ex_data(EC_KEY *, int, void *);
void *ECDH_get_ex_data(EC_KEY *, int);
"""
MACROS = """
"""
CUSTOMIZATIONS = """
#ifdef OPENSSL_NO_ECDH
static const Cryptography_HAS_ECDH = 0;
typedef void ECDH_METHOD;
int (*ECDH_compute_key)(void *, size_t, const EC_POINT *, EC_KEY *,
void *(*)(const void *, size_t, void *, size_t *)) = NULL;
int (*ECDH_get_ex_new_index)(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *,
CRYPTO_EX_free *) = NULL;
int (*ECDH_set_ex_data)(EC_KEY *, int, void *) = NULL;
void *(*ECDH_get_ex_data)(EC_KEY *, int) = NULL;
"""
CONDITIONAL_NAMES = {
"Cryptography_HAS_ECDH": [
"ECDH_compute_key",
"ECDH_get_ex_new_index",
"ECDH_set_ex_data",
"ECDH_get_ex_data",
],
}
| bsd-3-clause | Python |
3ab661fbd7fdb7f977afca5d38eb8f0ab65deaeb | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/d1ce4aeadb85fda49399d922630164ea24ce92a2. | paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "d1ce4aeadb85fda49399d922630164ea24ce92a2"
TFRT_SHA256 = "9ec2555b5ae76773634a29cef3e3bebbfd20de6cdc35c80fa0aee81c786a570e"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "4b7d8e901b5598f3739512f022e9dfe218e605cd"
TFRT_SHA256 = "e8de656c387fa8b73e461e5534e56036397dead43ae974051f539a905736e8ac"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
7dc88b6620682d3338d6a08ff4a255f32c65aae8 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/9b140cb259147e01ecfba6df57c745b6a71fd950. | tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow
/tensorflow,yongtang/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "9b140cb259147e01ecfba6df57c745b6a71fd950"
TFRT_SHA256 = "3a6ec4127fb2d6011a298c6bb806001418c3573510bff80e57097f589fa29987"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "372edb969ec4aee247c451daf0902078cf8a3fce"
TFRT_SHA256 = "5010fb9576a1ef1c73d867a20dba3ac845796e4d3a9e03f4a9ef72323ac387f7"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
8508b747acb287898651274d8ee3e800f0a13824 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/a1b0f3671864f65c4f6270ccbbf320a8454a74e1. | tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessar
d/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
    """Imports TFRT.

    Downloads the TensorFlow Runtime sources pinned at TFRT_COMMIT,
    verifies the archive against TFRT_SHA256, and exposes it as the
    @tf_runtime external repository (mirror URLs via tf_mirror_urls).
    """
    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "a1b0f3671864f65c4f6270ccbbf320a8454a74e1"
    TFRT_SHA256 = "a10660324c96be592a5bf7ed4bfba41c6886644470f4a868d82f5ce3d56ee113"
    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
    """Imports TFRT.

    Downloads the TensorFlow Runtime sources pinned at TFRT_COMMIT,
    verifies the archive against TFRT_SHA256, and exposes it as the
    @tf_runtime external repository (mirror URLs via tf_mirror_urls).
    """
    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "226dc422f0cf7636b8357e56b4fe701d969c29b7"
    TFRT_SHA256 = "0c4f12c4c4fd9232f2392ebceafc9ef2b7b71af7198066b179cfd2d755ff22fe"
    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
| apache-2.0 | Python |
5d637e92bfb00f21cefe628e933df2618b1f14ed | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/ecd68c32546434bf80f90d2ad8f32095c4cc066e. | frreiss/tensorflow-fred,frreiss/tensorflow-fred,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,sarvex/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,sarvex/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,sarvex/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap
_saved_model,paolodedios/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,yongtang/tensorflow,sarvex/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,sarvex/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow | 
third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
    """Imports TFRT.

    Downloads the TensorFlow Runtime sources pinned at TFRT_COMMIT,
    verifies the archive against TFRT_SHA256, and exposes it as the
    @tf_runtime external repository; the tensorflow.org mirror is
    listed before GitHub.
    """
    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "ecd68c32546434bf80f90d2ad8f32095c4cc066e"
    TFRT_SHA256 = "222283f436df9ca6c2d50ed3d2f1045c593457cecb130b33f731dd1c96280cfc"
    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = [
            "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
            "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
        ],
    )
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
    """Imports TFRT.

    Downloads the TensorFlow Runtime sources pinned at TFRT_COMMIT,
    verifies the archive against TFRT_SHA256, and exposes it as the
    @tf_runtime external repository; the tensorflow.org mirror is
    listed before GitHub.
    """
    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "232bc420a3d766bb0d257319f03f2ab15bf3b443"
    TFRT_SHA256 = "28e14a29c90871fcdd61f0d5fc7c890f49943528ea66ec53c8019b1fdaf5f367"
    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = [
            "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
            "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
        ],
    )
| apache-2.0 | Python |
cfd1581a8b8913df350e5ee09f1e7ae9cdb39850 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/0a2cdcfc4f9409e586290aff06d27d848fd46fe7. | karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessar
d/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,gautam1858/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
    """Imports TFRT.

    Downloads the TensorFlow Runtime sources pinned at TFRT_COMMIT,
    verifies the archive against TFRT_SHA256, and exposes it as the
    @tf_runtime external repository (mirror URLs via tf_mirror_urls).
    """
    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "0a2cdcfc4f9409e586290aff06d27d848fd46fe7"
    TFRT_SHA256 = "dadace8820329b088a2bc302fdde1291056921b005bc154844d1826848fd1489"
    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
    """Imports TFRT.

    Downloads the TensorFlow Runtime sources pinned at TFRT_COMMIT,
    verifies the archive against TFRT_SHA256, and exposes it as the
    @tf_runtime external repository (mirror URLs via tf_mirror_urls).
    """
    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "e48f4cd1e8c2de3dacfac21835e1b6b070c0e00c"
    TFRT_SHA256 = "e4d8cda2f6e10c85dee5ec3d133b4f662200fa01a9c1f69043eab8614b3039a3"
    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
| apache-2.0 | Python |
d26ec5ec25920362f59769cb0435846c28af4fba | Remove uneeded button index map | furbrain/tingbot-python | tingbot/platform_specific/tingbot.py | tingbot/platform_specific/tingbot.py | import os
def fixup_env():
    """Point SDL at the Tingbot hardware via environment variables.

    Sets SDL_FBDEV to the secondary framebuffer /dev/fb1 and, if an
    "ADS7846 Touchscreen" input device is found via evdev, configures
    SDL to read it as the mouse through the tslib driver.  If no
    touchscreen is found, a warning is printed and touch is disabled.
    """
    import evdev
    os.environ["SDL_FBDEV"] = "/dev/fb1"
    mouse_path = None
    # Scan all evdev input devices for the ADS7846 touchscreen controller.
    for device_path in evdev.list_devices():
        device = evdev.InputDevice(device_path)
        if device.name == "ADS7846 Touchscreen":
            mouse_path = device_path
    if mouse_path:
        os.environ["SDL_MOUSEDRV"] = "TSLIB"
        os.environ["SDL_MOUSEDEV"] = mouse_path
    else:
        print 'Mouse input device not found in /dev/input. Touch support not available.'
def create_main_surface():
    """Initialise pygame and return the Tingbot's 320x240 display surface.

    The system mouse cursor is hidden, since the device is driven by a
    touchscreen rather than a visible pointer.
    """
    import pygame
    import pygame.mouse

    pygame.init()
    screen = pygame.display.set_mode((320, 240))
    pygame.mouse.set_visible(0)
    return screen
# The application-level button callback; None until one is registered.
button_callback = None
def register_button_callback(callback):
    """Register *callback* to receive button events.

    The callback is invoked as callback(button_index, action) where
    action is 'down' or 'up' (see GPIO_callback).  GPIO setup is
    performed lazily, before the callback is stored, so the ISR can
    never fire with a half-registered handler.
    """
    global button_callback
    ensure_button_setup()
    button_callback = callback
# One-shot flag: True once the button GPIO hardware has been configured.
button_setup_done = False

def ensure_button_setup():
    """Idempotently initialise the button GPIO hardware.

    The first call runs button_setup(); every later call returns
    immediately.
    """
    global button_setup_done
    if button_setup_done:
        return
    button_setup()
    button_setup_done = True
# GPIO pins (BCM numbering, per wiringPiSetupGpio) of the four buttons.
# A pin's position in this tuple is the button_index reported to the
# registered callback.
button_pins = (17, 23, 24, 18)
def button_setup():
    """Configure each button pin as an input and attach the shared ISR.

    GPIO_callback is registered on both rising and falling edges, so a
    single handler services presses and releases for all buttons.
    """
    import wiringpi
    wiringpi.wiringPiSetupGpio()
    for button_pin in button_pins:
        wiringpi.pinMode(button_pin, wiringpi.INPUT)
        wiringpi.wiringPiISR(button_pin, wiringpi.INT_EDGE_BOTH, GPIO_callback)
# Last observed level of each button, aligned with button_pins.
button_previous_states = [0, 0, 0, 0]

def GPIO_callback():
    """Interrupt handler shared by all button pins.

    Re-samples every button, reports each changed pin to the registered
    callback as (button_index, 'down'|'up'), and remembers the new
    levels for the next invocation.
    """
    import wiringpi
    global button_previous_states

    current_states = [wiringpi.digitalRead(pin) for pin in button_pins]
    changes = [
        (index, level)
        for index, (previous, level)
        in enumerate(zip(button_previous_states, current_states))
        if previous != level
    ]
    for index, level in changes:
        action = 'down' if level == 1 else 'up'
        button_callback(index, action)
    button_previous_states = current_states
| import os
def fixup_env():
    """Point SDL at the Tingbot hardware via environment variables.

    Sets SDL_FBDEV to the secondary framebuffer /dev/fb1 and, if an
    "ADS7846 Touchscreen" input device is found via evdev, configures
    SDL to read it as the mouse through the tslib driver.  If no
    touchscreen is found, a warning is printed and touch is disabled.
    """
    import evdev
    os.environ["SDL_FBDEV"] = "/dev/fb1"
    mouse_path = None
    # Scan all evdev input devices for the ADS7846 touchscreen controller.
    for device_path in evdev.list_devices():
        device = evdev.InputDevice(device_path)
        if device.name == "ADS7846 Touchscreen":
            mouse_path = device_path
    if mouse_path:
        os.environ["SDL_MOUSEDRV"] = "TSLIB"
        os.environ["SDL_MOUSEDEV"] = mouse_path
    else:
        print 'Mouse input device not found in /dev/input. Touch support not available.'
def create_main_surface():
    """Initialise pygame, hide the mouse cursor, and return the 320x240
    display surface used as the Tingbot screen."""
    import pygame
    pygame.init()
    surface = pygame.display.set_mode((320, 240))
    import pygame.mouse
    pygame.mouse.set_visible(0)
    return surface
# The application-level button callback; None until one is registered.
button_callback = None
def register_button_callback(callback):
    """Register *callback* to be invoked as callback(button_index, action)
    with action 'down' or 'up' (see GPIO_callback).  GPIO setup happens
    lazily on the first registration."""
    global button_callback
    ensure_button_setup()
    button_callback = callback
# One-shot flag guarding the GPIO initialisation below.
button_setup_done = False
def ensure_button_setup():
    """Run button_setup() exactly once; subsequent calls are no-ops."""
    global button_setup_done
    if not button_setup_done:
        button_setup()
        button_setup_done = True
# GPIO pins (BCM numbering, per wiringPiSetupGpio) of the four buttons.
# A pin's position in this tuple is the button_index reported to the
# registered callback.
button_pins = (17, 23, 24, 18)
# NOTE(review): this map appears unused in this module -- GPIO_callback
# derives the index from the position within button_pins -- candidate
# for removal; confirm no external callers rely on it.
button_pin_to_index = {
    17: 0,
    23: 1,
    24: 2,
    18: 3
}
def button_setup():
    """Configure each button pin as an input and attach GPIO_callback as
    an interrupt handler on both rising and falling edges."""
    import wiringpi
    wiringpi.wiringPiSetupGpio()
    for button_pin in button_pins:
        wiringpi.pinMode(button_pin, wiringpi.INPUT)
        wiringpi.wiringPiISR(button_pin, wiringpi.INT_EDGE_BOTH, GPIO_callback)
# Last observed level of each button, aligned with button_pins.
button_previous_states = [0, 0, 0, 0]
def GPIO_callback():
    """Interrupt handler: re-sample every button and invoke the registered
    callback with (button_index, 'down'|'up') for each pin whose level
    changed since the previous sample."""
    import wiringpi
    global button_previous_states
    button_states = [wiringpi.digitalRead(pin) for pin in button_pins]
    for button_index, (old, new) in enumerate(zip(button_previous_states, button_states)):
        if old != new:
            button_callback(button_index, 'down' if (new == 1) else 'up')
    button_previous_states = button_states
| bsd-2-clause | Python |
521e24fa115e69bca39d7cca89ce42e8efa3b077 | Use full pathname to perf_expectations in test. | meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser | tools/perf_expectations/PRESUBMIT.py | tools/perf_expectations/PRESUBMIT.py | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for perf_expectations.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
# Tests to run when the perf expectations file is touched.
UNIT_TESTS = [
  'tests.perf_expectations_unittest',
]
# Full repository-relative path of the expectations file.
PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
  """Upload check: run the perf unit tests when perf_expectations.json
  is part of the change."""
  results = []
  touched = any(
      path == PERF_EXPECTATIONS for path in input_api.LocalPaths())
  if touched:
    results.extend(input_api.canned_checks.RunPythonUnitTests(
        input_api, output_api, UNIT_TESTS))
  return results
def CheckChangeOnCommit(input_api, output_api):
  """Commit check: run the perf unit tests when perf_expectations.json
  is part of the change, then always run the DO NOT SUBMIT check."""
  results = []
  touched = any(
      path == PERF_EXPECTATIONS for path in input_api.LocalPaths())
  if touched:
    results.extend(input_api.canned_checks.RunPythonUnitTests(
        input_api, output_api, UNIT_TESTS))
  results.extend(input_api.canned_checks.CheckDoNotSubmit(
      input_api, output_api))
  return results
| #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for perf_expectations.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
# Tests to run when the perf expectations file is touched.
UNIT_TESTS = [
  'tests.perf_expectations_unittest',
]
# Basename of the expectations file that should trigger the unit tests.
PERF_EXPECTATIONS = 'perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
  """Upload check: run the perf unit tests when the expectations file is
  part of the change.

  NOTE(review): matching on the basename means ANY file named
  perf_expectations.json, in any directory, triggers the tests --
  confirm this is intended rather than matching the full path.
  """
  run_tests = False
  for path in input_api.LocalPaths():
    if PERF_EXPECTATIONS == input_api.os_path.basename(path):
      run_tests = True
  output = []
  if run_tests:
    output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
                                                             output_api,
                                                             UNIT_TESTS))
  return output
def CheckChangeOnCommit(input_api, output_api):
  """Commit check: run the perf unit tests when the expectations file is
  part of the change, then always run the DO NOT SUBMIT check.

  NOTE(review): like CheckChangeOnUpload, this matches on the basename,
  so any file named perf_expectations.json triggers the tests.
  """
  run_tests = False
  for path in input_api.LocalPaths():
    if PERF_EXPECTATIONS == input_api.os_path.basename(path):
      run_tests = True
  output = []
  if run_tests:
    output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
                                                             output_api,
                                                             UNIT_TESTS))
  output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
                                                         output_api))
  return output
| bsd-3-clause | Python |
9dd71fe94b57d56a422e784c10c463c22add90c3 | Fix absolute reference to logfile location | interactomix/iis,interactomix/iis | configuration/development.py | configuration/development.py | import pathlib
# Development configuration for the iis Flask application.

# Project root: one directory above this configuration/ package.
_basedir = pathlib.Path(__file__).parents[1]

# SQLite database file stored at the project root.
SQLALCHEMY_DATABASE_URI = (
    'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve()))
SQLALCHEMY_TRACK_MODIFICATIONS = True

# Development-only secret; never use this value in production.
SECRET_KEY = 'INSECURE'

# Local SMTP relay for outgoing mail.
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = 'no-reply@localhost.localdomain'

# dictConfig-style logging: the "iis" logger writes DEBUG and above to
# ./iis.log through a lock-protected file handler.
_VERBOSE_FORMAT = ('%(asctime)s %(levelname)s: %(message)s '
                   '[in %(pathname)s:%(lineno)d]')
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {"format": _VERBOSE_FORMAT},
    },
    "handlers": {
        "file": {
            "level": "DEBUG",
            "formatter": "verbose",
            "class": "iis.log.LockingFileHandler",
            "filename": "./iis.log",
        },
    },
    "loggers": {
        "iis": {"level": "DEBUG", "handlers": ["file"]},
    },
}
LOGGER_NAME = "iis"
| import pathlib
# Development configuration for the iis Flask application.

# Project root: one directory above this configuration/ package.
_basedir = pathlib.Path(__file__).parents[1]

# SQLite database file stored at the project root.
SQLALCHEMY_DATABASE_URI = (
    'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SQLALCHEMY_TRACK_MODIFICATIONS = True

# Development-only secret; never use this value in production.
SECRET_KEY = 'INSECURE'

# Local SMTP relay for outgoing mail.
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = 'no-reply@localhost.localdomain'

# dictConfig-style logging configuration for the "iis" logger.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "format": '%(asctime)s %(levelname)s: %(message)s '
                      '[in %(pathname)s:%(lineno)d]'
        },
    },
    "handlers": {
        "file": {
            "level": "DEBUG",
            "formatter": "verbose",
            "class": "iis.log.LockingFileHandler",
            # Fix: derive the log path from the project root instead of a
            # developer-specific absolute path (/home/max/...), so the
            # config works on any checkout location.
            "filename": str(_basedir.joinpath('iis.log')),
        },
    },
    "loggers": {
        "iis": {
            "level": "DEBUG",
            "handlers": ["file"]
        },
    }
}
LOGGER_NAME = "iis"
| agpl-3.0 | Python |
013d6896e883845aebf03ea5830b518c918f8b1a | fix typo | ihsanudin/odoo,camptocamp/ngo-addons-backport,osvalr/odoo,Bachaco-ve/odoo,cloud9UG/odoo,ecosoft-odoo/odoo,SAM-IT-SA/odoo,BT-rmartin/odoo,bealdav/OpenUpgrade,blaggacao/OpenUpgrade,srsman/odoo,klunwebale/odoo,oihane/odoo,ehirt/odoo,collex100/odoo,nitinitprof/odoo,alhashash/odoo,cysnake4713/odoo,VitalPet/odoo,omprakasha/odoo,idncom/odoo,jfpla/odoo,jaxkodex/odoo,rgeleta/odoo,Eric-Zhong/odoo,kittiu/odoo,funkring/fdoo,pplatek/odoo,bwrsandman/OpenUpgrade,dllsf/odootest,lgscofield/odoo,OpenPymeMx/OCB,realsaiko/odoo,poljeff/odoo,QianBIG/odoo,Adel-Magebinary/odoo,simongoffin/website_version,lgscofield/odoo,x111ong/odoo,gdgellatly/OCB1,sysadminmatmoz/OCB,bplancher/odoo,BT-ojossen/odoo,fuselock/odoo,slevenhagen/odoo-npg,tvibliani/odoo,pedrobaeza/OpenUpgrade,kybriainfotech/iSocioCRM,JonathanStein/odoo,jfpla/odoo,pplatek/odoo,leoliujie/odoo,tvibliani/odoo,windedge/odoo,synconics/odoo,Maspear/odoo,rgeleta/odoo,guewen/OpenUpgrade,ShineFan/odoo,collex100/odoo,joshuajan/odoo,sv-dev1/odoo,0k/OpenUpgrade,tinkerthaler/odoo,n0m4dz/odoo,slevenhagen/odoo,andreparames/odoo,Endika/odoo,frouty/odoo_oph,TRESCLOUD/odoopub,fevxie/odoo,gavin-feng/odoo,Maspear/odoo,jusdng/odoo,ujjwalwahi/odoo,dkubiak789/odoo,hmen89/odoo,sebalix/OpenUpgrade,diagramsoftware/odoo,hbrunn/OpenUpgrade,odoo-turkiye/odoo,frouty/odoogoeen,ecosoft-odoo/odoo,guewen/OpenUpgrade,cedk/odoo,shaufi/odoo,xujb/odoo,apocalypsebg/odoo,luistorresm/odoo,dariemp/odoo,dalegregory/odoo,spadae22/odoo,bkirui/odoo,janocat/odoo,dkubiak789/odoo,BT-ojossen/odoo,makinacorpus/odoo,Noviat/odoo,guerrerocarlos/odoo,apanju/odoo,mkieszek/odoo,savoirfairelinux/odoo,fevxie/odoo,luiseduardohdbackup/odoo,Antiun/odoo,ingadhoc/odoo,Elico-Corp/odoo_OCB,rdeheele/odoo,zchking/odoo,Nick-OpusVL/odoo,gavin-feng/odoo,BT-fgarbely/odoo,Nowheresly/odoo,joariasl/odoo,nuuuboo/odoo,mustafat/odoo-1,sergio-incaser/odoo,papouso/odoo,ccomb/OpenUpgrade,deKupini/erp,ramadhane/odoo,virgree/odoo,gvb/odoo,odoo-turkiye/odoo,ki
rca/OpenUpgrade,fgesora/odoo,slevenhagen/odoo-npg,Endika/odoo,factorlibre/OCB,jusdng/odoo,havt/odoo,sebalix/OpenUpgrade,GauravSahu/odoo,codekaki/odoo,kybriainfotech/iSocioCRM,bakhtout/odoo-educ,Noviat/odoo,shingonoide/odoo,waytai/odoo,Noviat/odoo,leoliujie/odoo,nuuuboo/odoo,camptocamp/ngo-addons-backport,OpenPymeMx/OCB,bguillot/OpenUpgrade,Danisan/odoo-1,dfang/odoo,Adel-Magebinary/odoo,makinacorpus/odoo,OSSESAC/odoopubarquiluz,nuuuboo/odoo,NeovaHealth/odoo,grap/OpenUpgrade,thanhacun/odoo,luistorresm/odoo,oihane/odoo,tinkhaven-organization/odoo,SAM-IT-SA/odoo,csrocha/OpenUpgrade,leorochael/odoo,erkrishna9/odoo,BT-astauder/odoo,vnsofthe/odoo,gorjuce/odoo,Maspear/odoo,waytai/odoo,brijeshkesariya/odoo,cdrooom/odoo,alqfahad/odoo,idncom/odoo,joshuajan/odoo,podemos-info/odoo,luistorresm/odoo,ThinkOpen-Solutions/odoo,ygol/odoo,goliveirab/odoo,ThinkOpen-Solutions/odoo,BT-rmartin/odoo,ramadhane/odoo,hassoon3/odoo,odootr/odoo,tangyiyong/odoo,ingadhoc/odoo,numerigraphe/odoo,lsinfo/odoo,NeovaHealth/odoo,demon-ru/iml-crm,cloud9UG/odoo,aviciimaxwell/odoo,codekaki/odoo,alqfahad/odoo,odootr/odoo,guerrerocarlos/odoo,ygol/odoo,rubencabrera/odoo,agrista/odoo-saas,ovnicraft/odoo,sysadminmatmoz/OCB,jfpla/odoo,syci/OCB,apanju/GMIO_Odoo,KontorConsulting/odoo,cedk/odoo,sadleader/odoo,doomsterinc/odoo,jiachenning/odoo,shivam1111/odoo,credativUK/OCB,abdellatifkarroum/odoo,rdeheele/odoo,kittiu/odoo,aviciimaxwell/odoo,tarzan0820/odoo,florian-dacosta/OpenUpgrade,sadleader/odoo,numerigraphe/odoo,gsmartway/odoo,frouty/odoogoeen,luistorresm/odoo,fjbatresv/odoo,lsinfo/odoo,agrista/odoo-saas,wangjun/odoo,virgree/odoo,VitalPet/odoo,markeTIC/OCB,srsman/odoo,bobisme/odoo,0k/OpenUpgrade,addition-it-solutions/project-all,mszewczy/odoo,optima-ict/odoo,apocalypsebg/odoo,GauravSahu/odoo,BT-ojossen/odoo,mlaitinen/odoo,FlorianLudwig/odoo,kifcaliph/odoo,ingadhoc/odoo,jusdng/odoo,SAM-IT-SA/odoo,damdam-s/OpenUpgrade,bplancher/odoo,avoinsystems/odoo,ccomb/OpenUpgrade,csrocha/OpenUpgrade,factorlibre/OCB,ygol/odoo,n
umerigraphe/odoo,CatsAndDogsbvba/odoo,pedrobaeza/OpenUpgrade,naousse/odoo,charbeljc/OCB,steedos/odoo,QianBIG/odoo,provaleks/o8,fgesora/odoo,poljeff/odoo,BT-astauder/odoo,shivam1111/odoo,ihsanudin/odoo,havt/odoo,jiangzhixiao/odoo,luiseduardohdbackup/odoo,Bachaco-ve/odoo,shaufi10/odoo,odooindia/odoo,joariasl/odoo,csrocha/OpenUpgrade,gavin-feng/odoo,javierTerry/odoo,CatsAndDogsbvba/odoo,ubic135/odoo-design,omprakasha/odoo,hopeall/odoo,funkring/fdoo,jeasoft/odoo,shingonoide/odoo,ThinkOpen-Solutions/odoo,Grirrane/odoo,Endika/odoo,sysadminmatmoz/OCB,fossoult/odoo,Nick-OpusVL/odoo,ingadhoc/odoo,thanhacun/odoo,xujb/odoo,podemos-info/odoo,CopeX/odoo,mlaitinen/odoo,srimai/odoo,fuhongliang/odoo,datenbetrieb/odoo,frouty/odoogoeen,bkirui/odoo,OpenUpgrade/OpenUpgrade,rgeleta/odoo,OSSESAC/odoopubarquiluz,zchking/odoo,xujb/odoo,apanju/GMIO_Odoo,blaggacao/OpenUpgrade,cysnake4713/odoo,bealdav/OpenUpgrade,SAM-IT-SA/odoo,TRESCLOUD/odoopub,dalegregory/odoo,mustafat/odoo-1,dgzurita/odoo,jesramirez/odoo,avoinsystems/odoo,colinnewell/odoo,nhomar/odoo-mirror,VitalPet/odoo,apanju/odoo,nexiles/odoo,sinbazhou/odoo,OpenUpgrade-dev/OpenUpgrade,fossoult/odoo,elmerdpadilla/iv,abstract-open-solutions/OCB,ingadhoc/odoo,virgree/odoo,guewen/OpenUpgrade,spadae22/odoo,vrenaville/ngo-addons-backport,blaggacao/OpenUpgrade,pplatek/odoo,Endika/OpenUpgrade,dezynetechnologies/odoo,storm-computers/odoo,OpenUpgrade/OpenUpgrade,ecosoft-odoo/odoo,erkrishna9/odoo,credativUK/OCB,Gitlab11/odoo,odootr/odoo,janocat/odoo,andreparames/odoo,ChanduERP/odoo,matrixise/odoo,bwrsandman/OpenUpgrade,incaser/odoo-odoo,pedrobaeza/odoo,rowemoore/odoo,synconics/odoo,NL66278/OCB,KontorConsulting/odoo,hassoon3/odoo,mvaled/OpenUpgrade,florentx/OpenUpgrade,tinkhaven-organization/odoo,tvtsoft/odoo8,grap/OCB,CopeX/odoo,alqfahad/odoo,Bachaco-ve/odoo,Adel-Magebinary/odoo,mmbtba/odoo,jiangzhixiao/odoo,hifly/OpenUpgrade,rowemoore/odoo,steedos/odoo,JGarcia-Panach/odoo,storm-computers/odoo,nexiles/odoo,hanicker/odoo,deKupini/erp,jolevq/odoopub
,srsman/odoo,shingonoide/odoo,rowemoore/odoo,kybriainfotech/iSocioCRM,hbrunn/OpenUpgrade,CubicERP/odoo,damdam-s/OpenUpgrade,Endika/odoo,zchking/odoo,grap/OCB,hubsaysnuaa/odoo,frouty/odoo_oph,avoinsystems/odoo,tangyiyong/odoo,andreparames/odoo,massot/odoo,srimai/odoo,rowemoore/odoo,gavin-feng/odoo,storm-computers/odoo,Drooids/odoo,Endika/odoo,diagramsoftware/odoo,alexcuellar/odoo,vnsofthe/odoo,osvalr/odoo,ovnicraft/odoo,joshuajan/odoo,rahuldhote/odoo,Antiun/odoo,fevxie/odoo,bplancher/odoo,Drooids/odoo,javierTerry/odoo,ubic135/odoo-design,acshan/odoo,tvibliani/odoo,0k/odoo,dfang/odoo,idncom/odoo,colinnewell/odoo,mustafat/odoo-1,Eric-Zhong/odoo,ramitalat/odoo,goliveirab/odoo,fjbatresv/odoo,Danisan/odoo-1,hassoon3/odoo,takis/odoo,nhomar/odoo,hanicker/odoo,xzYue/odoo,goliveirab/odoo,aviciimaxwell/odoo,Grirrane/odoo,tangyiyong/odoo,inspyration/odoo,Noviat/odoo,takis/odoo,bkirui/odoo,nuncjo/odoo,acshan/odoo,spadae22/odoo,RafaelTorrealba/odoo,n0m4dz/odoo,datenbetrieb/odoo,JGarcia-Panach/odoo,Nick-OpusVL/odoo,grap/OpenUpgrade,fevxie/odoo,x111ong/odoo,nagyistoce/odoo-dev-odoo,osvalr/odoo,jaxkodex/odoo,SerpentCS/odoo,Antiun/odoo,Adel-Magebinary/odoo,jusdng/odoo,OpenUpgrade/OpenUpgrade,bobisme/odoo,hubsaysnuaa/odoo,ojengwa/odoo,Endika/OpenUpgrade,draugiskisprendimai/odoo,podemos-info/odoo,nexiles/odoo,Maspear/odoo,guerrerocarlos/odoo,shaufi/odoo,gsmartway/odoo,oliverhr/odoo,Maspear/odoo,apanju/odoo,avoinsystems/odoo,chiragjogi/odoo,mvaled/OpenUpgrade,doomsterinc/odoo,jiachenning/odoo,lightcn/odoo,luiseduardohdbackup/odoo,rgeleta/odoo,kybriainfotech/iSocioCRM,idncom/odoo,jesramirez/odoo,stonegithubs/odoo,agrista/odoo-saas,odoousers2014/odoo,sysadminmatmoz/OCB,apanju/GMIO_Odoo,zchking/odoo,thanhacun/odoo,demon-ru/iml-crm,tarzan0820/odoo,sve-odoo/odoo,mvaled/OpenUpgrade,hassoon3/odoo,cedk/odoo,juanalfonsopr/odoo,BT-rmartin/odoo,addition-it-solutions/project-all,takis/odoo,jusdng/odoo,xzYue/odoo,odootr/odoo,sinbazhou/odoo,mustafat/odoo-1,Drooids/odoo,kittiu/odoo,mkieszek/odoo,sve-o
doo/odoo,CatsAndDogsbvba/odoo,0k/OpenUpgrade,VielSoft/odoo,bakhtout/odoo-educ,ecosoft-odoo/odoo,provaleks/o8,steedos/odoo,poljeff/odoo,florentx/OpenUpgrade,x111ong/odoo,microcom/odoo,cedk/odoo,BT-rmartin/odoo,hifly/OpenUpgrade,sergio-incaser/odoo,christophlsa/odoo,ccomb/OpenUpgrade,shaufi10/odoo,gorjuce/odoo,xzYue/odoo,ygol/odoo,dariemp/odoo,minhtuancn/odoo,FlorianLudwig/odoo,Kilhog/odoo,luistorresm/odoo,leorochael/odoo,ThinkOpen-Solutions/odoo,bwrsandman/OpenUpgrade,nuncjo/odoo,frouty/odoo_oph,CatsAndDogsbvba/odoo,dkubiak789/odoo,xzYue/odoo,hip-odoo/odoo,salaria/odoo,ubic135/odoo-design,srimai/odoo,ojengwa/odoo,shaufi10/odoo,klunwebale/odoo,shivam1111/odoo,sergio-incaser/odoo,jpshort/odoo,lightcn/odoo,tangyiyong/odoo,avoinsystems/odoo,blaggacao/OpenUpgrade,x111ong/odoo,grap/OCB,hifly/OpenUpgrade,arthru/OpenUpgrade,BT-ojossen/odoo,hbrunn/OpenUpgrade,sergio-incaser/odoo,acshan/odoo,stephen144/odoo,patmcb/odoo,Endika/odoo,gavin-feng/odoo,mlaitinen/odoo,mmbtba/odoo,fevxie/odoo,christophlsa/odoo,Gitlab11/odoo,dkubiak789/odoo,nagyistoce/odoo-dev-odoo,sinbazhou/odoo,KontorConsulting/odoo,rgeleta/odoo,slevenhagen/odoo-npg,csrocha/OpenUpgrade,luiseduardohdbackup/odoo,laslabs/odoo,ThinkOpen-Solutions/odoo,grap/OCB,nagyistoce/odoo-dev-odoo,Kilhog/odoo,slevenhagen/odoo-npg,shaufi/odoo,stonegithubs/odoo,OpenPymeMx/OCB,aviciimaxwell/odoo,shivam1111/odoo,QianBIG/odoo,VitalPet/odoo,pedrobaeza/OpenUpgrade,guewen/OpenUpgrade,syci/OCB,lombritz/odoo,Codefans-fan/odoo,OpenPymeMx/OCB,odoousers2014/odoo,sv-dev1/odoo,rschnapka/odoo,joariasl/odoo,credativUK/OCB,hanicker/odoo,AuyaJackie/odoo,camptocamp/ngo-addons-backport,Bachaco-ve/odoo,tinkhaven-organization/odoo,srsman/odoo,rahuldhote/odoo,nitinitprof/odoo,fdvarela/odoo8,nhomar/odoo,steedos/odoo,OSSESAC/odoopubarquiluz,Ichag/odoo,ovnicraft/odoo,ShineFan/odoo,mustafat/odoo-1,Eric-Zhong/odoo,ovnicraft/odoo,laslabs/odoo,nuuuboo/odoo,alqfahad/odoo,apocalypsebg/odoo,JonathanStein/odoo,chiragjogi/odoo,microcom/odoo,optima-ict/odoo,Ichag/odoo,j
avierTerry/odoo,diagramsoftware/odoo,BT-fgarbely/odoo,poljeff/odoo,realsaiko/odoo,fuselock/odoo,frouty/odoogoeen,AuyaJackie/odoo,fossoult/odoo,Elico-Corp/odoo_OCB,provaleks/o8,arthru/OpenUpgrade,apanju/odoo,xujb/odoo,PongPi/isl-odoo,dezynetechnologies/odoo,florian-dacosta/OpenUpgrade,Antiun/odoo,abenzbiria/clients_odoo,ClearCorp-dev/odoo,hip-odoo/odoo,leorochael/odoo,vrenaville/ngo-addons-backport,nagyistoce/odoo-dev-odoo,apanju/odoo,codekaki/odoo,feroda/odoo,JonathanStein/odoo,hubsaysnuaa/odoo,bkirui/odoo,Elico-Corp/odoo_OCB,shaufi10/odoo,bguillot/OpenUpgrade,camptocamp/ngo-addons-backport,nhomar/odoo-mirror,steedos/odoo,odoo-turkiye/odoo,waytai/odoo,dariemp/odoo,dariemp/odoo,glovebx/odoo,dsfsdgsbngfggb/odoo,fuselock/odoo,x111ong/odoo,sysadminmatmoz/OCB,mmbtba/odoo,janocat/odoo,markeTIC/OCB,gdgellatly/OCB1,Codefans-fan/odoo,apanju/GMIO_Odoo,Ichag/odoo,ojengwa/odoo,NeovaHealth/odoo,inspyration/odoo,prospwro/odoo,mkieszek/odoo,ClearCorp-dev/odoo,matrixise/odoo,leoliujie/odoo,osvalr/odoo,prospwro/odoo,hubsaysnuaa/odoo,bplancher/odoo,rdeheele/odoo,Nowheresly/odoo,zchking/odoo,SerpentCS/odoo,abstract-open-solutions/OCB,Eric-Zhong/odoo,idncom/odoo,RafaelTorrealba/odoo,jeasoft/odoo,fossoult/odoo,bakhtout/odoo-educ,ojengwa/odoo,kittiu/odoo,dgzurita/odoo,JGarcia-Panach/odoo,shingonoide/odoo,cpyou/odoo,funkring/fdoo,draugiskisprendimai/odoo,addition-it-solutions/project-all,collex100/odoo,gsmartway/odoo,papouso/odoo,CatsAndDogsbvba/odoo,BT-ojossen/odoo,JonathanStein/odoo,OpenUpgrade/OpenUpgrade,jfpla/odoo,gdgellatly/OCB1,mmbtba/odoo,Drooids/odoo,PongPi/isl-odoo,lsinfo/odoo,oihane/odoo,ygol/odoo,ihsanudin/odoo,Antiun/odoo,ehirt/odoo,tvibliani/odoo,rubencabrera/odoo,xujb/odoo,slevenhagen/odoo-npg,QianBIG/odoo,OpenPymeMx/OCB,lombritz/odoo,hoatle/odoo,bkirui/odoo,PongPi/isl-odoo,Nick-OpusVL/odoo,abdellatifkarroum/odoo,lightcn/odoo,jesramirez/odoo,lombritz/odoo,optima-ict/odoo,gvb/odoo,mvaled/OpenUpgrade,papouso/odoo,erkrishna9/odoo,bkirui/odoo,MarcosCommunity/odoo,windedge/odoo,
markeTIC/OCB,highco-groupe/odoo,christophlsa/odoo,tinkhaven-organization/odoo,VitalPet/odoo,ecosoft-odoo/odoo,synconics/odoo,lightcn/odoo,spadae22/odoo,Ernesto99/odoo,spadae22/odoo,juanalfonsopr/odoo,florentx/OpenUpgrade,andreparames/odoo,srimai/odoo,collex100/odoo,sinbazhou/odoo,tvibliani/odoo,jiangzhixiao/odoo,lsinfo/odoo,incaser/odoo-odoo,stephen144/odoo,damdam-s/OpenUpgrade,savoirfairelinux/odoo,mlaitinen/odoo,factorlibre/OCB,odoo-turkiye/odoo,eino-makitalo/odoo,Grirrane/odoo,ramitalat/odoo,pedrobaeza/OpenUpgrade,goliveirab/odoo,fgesora/odoo,virgree/odoo,dgzurita/odoo,virgree/odoo,JGarcia-Panach/odoo,pedrobaeza/odoo,NeovaHealth/odoo,osvalr/odoo,matrixise/odoo,OpusVL/odoo,optima-ict/odoo,chiragjogi/odoo,Maspear/odoo,microcom/odoo,sergio-incaser/odoo,wangjun/odoo,bobisme/odoo,VielSoft/odoo,rubencabrera/odoo,bobisme/odoo,hanicker/odoo,havt/odoo,ihsanudin/odoo,csrocha/OpenUpgrade,codekaki/odoo,elmerdpadilla/iv,tinkerthaler/odoo,oihane/odoo,brijeshkesariya/odoo,jiachenning/odoo,ApuliaSoftware/odoo,gsmartway/odoo,nhomar/odoo,charbeljc/OCB,mvaled/OpenUpgrade,agrista/odoo-saas,leoliujie/odoo,cpyou/odoo,fjbatresv/odoo,ojengwa/odoo,sebalix/OpenUpgrade,SerpentCS/odoo,tangyiyong/odoo,makinacorpus/odoo,JCA-Developpement/Odoo,jiachenning/odoo,incaser/odoo-odoo,mszewczy/odoo,odootr/odoo,realsaiko/odoo,hopeall/odoo,tvtsoft/odoo8,charbeljc/OCB,ehirt/odoo,kirca/OpenUpgrade,alexteodor/odoo,slevenhagen/odoo,sv-dev1/odoo,n0m4dz/odoo,markeTIC/OCB,dariemp/odoo,pedrobaeza/OpenUpgrade,guewen/OpenUpgrade,mmbtba/odoo,dalegregory/odoo,fgesora/odoo,alhashash/odoo,grap/OCB,bobisme/odoo,dezynetechnologies/odoo,aviciimaxwell/odoo,0k/odoo,Eric-Zhong/odoo,inspyration/odoo,frouty/odoogoeen,glovebx/odoo,klunwebale/odoo,florian-dacosta/OpenUpgrade,takis/odoo,papouso/odoo,jiangzhixiao/odoo,eino-makitalo/odoo,credativUK/OCB,jolevq/odoopub,brijeshkesariya/odoo,realsaiko/odoo,CubicERP/odoo,simongoffin/website_version,storm-computers/odoo,microcom/odoo,RafaelTorrealba/odoo,Maspear/odoo,naousse/odoo,maki
nacorpus/odoo,hifly/OpenUpgrade,agrista/odoo-saas,tvibliani/odoo,apanju/odoo,datenbetrieb/odoo,wangjun/odoo,gdgellatly/OCB1,simongoffin/website_version,Daniel-CA/odoo,Nowheresly/odoo,SerpentCS/odoo,mvaled/OpenUpgrade,ChanduERP/odoo,kirca/OpenUpgrade,slevenhagen/odoo,ovnicraft/odoo,CatsAndDogsbvba/odoo,eino-makitalo/odoo,gorjuce/odoo,jiachenning/odoo,christophlsa/odoo,fdvarela/odoo8,andreparames/odoo,hanicker/odoo,sve-odoo/odoo,tinkerthaler/odoo,gdgellatly/OCB1,MarcosCommunity/odoo,abdellatifkarroum/odoo,takis/odoo,Kilhog/odoo,nuncjo/odoo,Endika/OpenUpgrade,VielSoft/odoo,elmerdpadilla/iv,bakhtout/odoo-educ,slevenhagen/odoo-npg,Ichag/odoo,waytai/odoo,fuselock/odoo,nitinitprof/odoo,acshan/odoo,abenzbiria/clients_odoo,kirca/OpenUpgrade,lsinfo/odoo,JCA-Developpement/Odoo,provaleks/o8,oasiswork/odoo,SerpentCS/odoo,bguillot/OpenUpgrade,hubsaysnuaa/odoo,alqfahad/odoo,vnsofthe/odoo,florian-dacosta/OpenUpgrade,mmbtba/odoo,n0m4dz/odoo,ihsanudin/odoo,fuhongliang/odoo,tinkerthaler/odoo,aviciimaxwell/odoo,tarzan0820/odoo,abdellatifkarroum/odoo,n0m4dz/odoo,jaxkodex/odoo,gdgellatly/OCB1,CatsAndDogsbvba/odoo,ubic135/odoo-design,Ichag/odoo,joshuajan/odoo,CopeX/odoo,markeTIC/OCB,shivam1111/odoo,cdrooom/odoo,mszewczy/odoo,BT-rmartin/odoo,hoatle/odoo,rahuldhote/odoo,provaleks/o8,cedk/odoo,dsfsdgsbngfggb/odoo,oihane/odoo,andreparames/odoo,joariasl/odoo,Endika/OpenUpgrade,CopeX/odoo,odooindia/odoo,tinkerthaler/odoo,fjbatresv/odoo,sv-dev1/odoo,rgeleta/odoo,Danisan/odoo-1,xzYue/odoo,eino-makitalo/odoo,ubic135/odoo-design,abstract-open-solutions/OCB,abenzbiria/clients_odoo,hip-odoo/odoo,elmerdpadilla/iv,bguillot/OpenUpgrade,charbeljc/OCB,fuhongliang/odoo,omprakasha/odoo,prospwro/odoo,colinnewell/odoo,abstract-open-solutions/OCB,NeovaHealth/odoo,shaufi10/odoo,SerpentCS/odoo,FlorianLudwig/odoo,fuselock/odoo,dgzurita/odoo,frouty/odoogoeen,matrixise/odoo,savoirfairelinux/OpenUpgrade,gvb/odoo,datenbetrieb/odoo,optima-ict/odoo,thanhacun/odoo,VielSoft/odoo,jpshort/odoo,windedge/odoo,pplatek/odoo,fo
ssoult/odoo,kittiu/odoo,acshan/odoo,ramitalat/odoo,MarcosCommunity/odoo,0k/odoo,Noviat/odoo,luiseduardohdbackup/odoo,massot/odoo,waytai/odoo,hbrunn/OpenUpgrade,nexiles/odoo,glovebx/odoo,tarzan0820/odoo,oliverhr/odoo,kybriainfotech/iSocioCRM,Adel-Magebinary/odoo,guerrerocarlos/odoo,alexteodor/odoo,srimai/odoo,n0m4dz/odoo,ramitalat/odoo,alexcuellar/odoo,gvb/odoo,sinbazhou/odoo,ChanduERP/odoo,dezynetechnologies/odoo,nhomar/odoo,javierTerry/odoo,kybriainfotech/iSocioCRM,JCA-Developpement/Odoo,oliverhr/odoo,pplatek/odoo,JonathanStein/odoo,tarzan0820/odoo,rschnapka/odoo,laslabs/odoo,VitalPet/odoo,juanalfonsopr/odoo,dkubiak789/odoo,xujb/odoo,doomsterinc/odoo,ramadhane/odoo,jpshort/odoo,tinkerthaler/odoo,provaleks/o8,rowemoore/odoo,lgscofield/odoo,brijeshkesariya/odoo,stonegithubs/odoo,BT-ojossen/odoo,vrenaville/ngo-addons-backport,oasiswork/odoo,ApuliaSoftware/odoo,ApuliaSoftware/odoo,bealdav/OpenUpgrade,guerrerocarlos/odoo,diagramsoftware/odoo,odoousers2014/odoo,jaxkodex/odoo,tinkerthaler/odoo,windedge/odoo,nitinitprof/odoo,nexiles/odoo,ccomb/OpenUpgrade,joshuajan/odoo,grap/OpenUpgrade,BT-fgarbely/odoo,Kilhog/odoo,odooindia/odoo,makinacorpus/odoo,havt/odoo,Gitlab11/odoo,incaser/odoo-odoo,prospwro/odoo,incaser/odoo-odoo,tvtsoft/odoo8,podemos-info/odoo,damdam-s/OpenUpgrade,ChanduERP/odoo,wangjun/odoo,florentx/OpenUpgrade,sadleader/odoo,slevenhagen/odoo,dsfsdgsbngfggb/odoo,naousse/odoo,Drooids/odoo,NL66278/OCB,MarcosCommunity/odoo,nhomar/odoo-mirror,jfpla/odoo,ujjwalwahi/odoo,storm-computers/odoo,Kilhog/odoo,glovebx/odoo,hip-odoo/odoo,BT-astauder/odoo,ccomb/OpenUpgrade,Codefans-fan/odoo,OpusVL/odoo,diagramsoftware/odoo,nagyistoce/odoo-dev-odoo,vrenaville/ngo-addons-backport,numerigraphe/odoo,factorlibre/OCB,mlaitinen/odoo,bealdav/OpenUpgrade,brijeshkesariya/odoo,datenbetrieb/odoo,pplatek/odoo,x111ong/odoo,florian-dacosta/OpenUpgrade,klunwebale/odoo,Adel-Magebinary/odoo,windedge/odoo,guerrerocarlos/odoo,glovebx/odoo,rschnapka/odoo,srsman/odoo,Ichag/odoo,nuncjo/odoo,JGarcia-Pa
nach/odoo,nexiles/odoo,abstract-open-solutions/OCB,Nick-OpusVL/odoo,camptocamp/ngo-addons-backport,stonegithubs/odoo,kifcaliph/odoo,ramadhane/odoo,sergio-incaser/odoo,rgeleta/odoo,jiangzhixiao/odoo,synconics/odoo,fjbatresv/odoo,windedge/odoo,salaria/odoo,tinkhaven-organization/odoo,xzYue/odoo,mkieszek/odoo,jiangzhixiao/odoo,Nowheresly/odoo,minhtuancn/odoo,kifcaliph/odoo,CubicERP/odoo,factorlibre/OCB,Ernesto99/odoo,gavin-feng/odoo,rubencabrera/odoo,markeTIC/OCB,optima-ict/odoo,gvb/odoo,bkirui/odoo,rdeheele/odoo,laslabs/odoo,addition-it-solutions/project-all,virgree/odoo,fuhongliang/odoo,slevenhagen/odoo,fevxie/odoo,fuhongliang/odoo,alhashash/odoo,Noviat/odoo,joariasl/odoo,patmcb/odoo,numerigraphe/odoo,srsman/odoo,ClearCorp-dev/odoo,podemos-info/odoo,laslabs/odoo,dsfsdgsbngfggb/odoo,savoirfairelinux/OpenUpgrade,AuyaJackie/odoo,hassoon3/odoo,fevxie/odoo,bobisme/odoo,bealdav/OpenUpgrade,fdvarela/odoo8,feroda/odoo,ihsanudin/odoo,shingonoide/odoo,frouty/odoo_oph,ujjwalwahi/odoo,oliverhr/odoo,Nick-OpusVL/odoo,vrenaville/ngo-addons-backport,janocat/odoo,apocalypsebg/odoo,Ernesto99/odoo,incaser/odoo-odoo,Drooids/odoo,nuuuboo/odoo,sebalix/OpenUpgrade,luiseduardohdbackup/odoo,doomsterinc/odoo,shaufi10/odoo,alexcuellar/odoo,vnsofthe/odoo,cloud9UG/odoo,steedos/odoo,sinbazhou/odoo,deKupini/erp,draugiskisprendimai/odoo,cdrooom/odoo,leoliujie/odoo,janocat/odoo,doomsterinc/odoo,alexcuellar/odoo,oliverhr/odoo,hifly/OpenUpgrade,BT-astauder/odoo,mmbtba/odoo,codekaki/odoo,QianBIG/odoo,abenzbiria/clients_odoo,gdgellatly/OCB1,AuyaJackie/odoo,fuselock/odoo,dkubiak789/odoo,omprakasha/odoo,rschnapka/odoo,hubsaysnuaa/odoo,gorjuce/odoo,CopeX/odoo,oasiswork/odoo,csrocha/OpenUpgrade,shaufi/odoo,KontorConsulting/odoo,colinnewell/odoo,hoatle/odoo,charbeljc/OCB,Noviat/odoo,rschnapka/odoo,dezynetechnologies/odoo,chiragjogi/odoo,cloud9UG/odoo,dalegregory/odoo,cysnake4713/odoo,ehirt/odoo,hopeall/odoo,odoo-turkiye/odoo,makinacorpus/odoo,thanhacun/odoo,Nowheresly/odoo,juanalfonsopr/odoo,odoousers2014/od
oo,joariasl/odoo,nagyistoce/odoo-dev-odoo,odoo-turkiye/odoo,KontorConsulting/odoo,havt/odoo,Codefans-fan/odoo,alexteodor/odoo,oihane/odoo,rubencabrera/odoo,lgscofield/odoo,avoinsystems/odoo,thanhacun/odoo,lombritz/odoo,fjbatresv/odoo,RafaelTorrealba/odoo,nitinitprof/odoo,cloud9UG/odoo,leorochael/odoo,glovebx/odoo,abdellatifkarroum/odoo,dsfsdgsbngfggb/odoo,SerpentCS/odoo,Daniel-CA/odoo,0k/OpenUpgrade,mszewczy/odoo,massot/odoo,oasiswork/odoo,oasiswork/odoo,codekaki/odoo,mlaitinen/odoo,vnsofthe/odoo,fuhongliang/odoo,pedrobaeza/odoo,gvb/odoo,OpenPymeMx/OCB,jaxkodex/odoo,papouso/odoo,fgesora/odoo,hopeall/odoo,dgzurita/odoo,draugiskisprendimai/odoo,tvtsoft/odoo8,minhtuancn/odoo,tangyiyong/odoo,dezynetechnologies/odoo,kittiu/odoo,ShineFan/odoo,odoousers2014/odoo,Daniel-CA/odoo,Bachaco-ve/odoo,GauravSahu/odoo,hmen89/odoo,dgzurita/odoo,Elico-Corp/odoo_OCB,sebalix/OpenUpgrade,omprakasha/odoo,jeasoft/odoo,kirca/OpenUpgrade,glovebx/odoo,sv-dev1/odoo,lsinfo/odoo,joariasl/odoo,NL66278/OCB,arthru/OpenUpgrade,jeasoft/odoo,bwrsandman/OpenUpgrade,salaria/odoo,jesramirez/odoo,acshan/odoo,frouty/odoogoeen,syci/OCB,spadae22/odoo,Elico-Corp/odoo_OCB,ThinkOpen-Solutions/odoo,ingadhoc/odoo,Grirrane/odoo,Ernesto99/odoo,goliveirab/odoo,demon-ru/iml-crm,shivam1111/odoo,lgscofield/odoo,dalegregory/odoo,kybriainfotech/iSocioCRM,frouty/odoo_oph,pedrobaeza/odoo,massot/odoo,sadleader/odoo,janocat/odoo,slevenhagen/odoo-npg,JGarcia-Panach/odoo,naousse/odoo,QianBIG/odoo,bakhtout/odoo-educ,lgscofield/odoo,pedrobaeza/OpenUpgrade,grap/OCB,acshan/odoo,kittiu/odoo,OpenUpgrade/OpenUpgrade,dllsf/odootest,tinkhaven-organization/odoo,guewen/OpenUpgrade,brijeshkesariya/odoo,leorochael/odoo,nitinitprof/odoo,florentx/OpenUpgrade,oihane/odoo,odooindia/odoo,SAM-IT-SA/odoo,slevenhagen/odoo,fossoult/odoo,nexiles/odoo,odootr/odoo,JonathanStein/odoo,savoirfairelinux/OpenUpgrade,0k/odoo,fdvarela/odoo8,lsinfo/odoo,hoatle/odoo,dfang/odoo,Nowheresly/odoo,omprakasha/odoo,hoatle/odoo,AuyaJackie/odoo,Danisan/odoo-1,wangjun/o
doo,Daniel-CA/odoo,naousse/odoo,vnsofthe/odoo,MarcosCommunity/odoo,tvtsoft/odoo8,draugiskisprendimai/odoo,arthru/OpenUpgrade,Antiun/odoo,nuuuboo/odoo,savoirfairelinux/odoo,vrenaville/ngo-addons-backport,stonegithubs/odoo,Adel-Magebinary/odoo,hmen89/odoo,PongPi/isl-odoo,lightcn/odoo,lombritz/odoo,ShineFan/odoo,jesramirez/odoo,hoatle/odoo,takis/odoo,janocat/odoo,shingonoide/odoo,provaleks/o8,nagyistoce/odoo-dev-odoo,alexcuellar/odoo,Endika/OpenUpgrade,Danisan/odoo-1,hoatle/odoo,zchking/odoo,rubencabrera/odoo,alqfahad/odoo,oasiswork/odoo,credativUK/OCB,vrenaville/ngo-addons-backport,grap/OpenUpgrade,rowemoore/odoo,erkrishna9/odoo,hopeall/odoo,aviciimaxwell/odoo,BT-fgarbely/odoo,makinacorpus/odoo,Daniel-CA/odoo,Ernesto99/odoo,dsfsdgsbngfggb/odoo,datenbetrieb/odoo,gsmartway/odoo,gdgellatly/OCB1,damdam-s/OpenUpgrade,feroda/odoo,jpshort/odoo,synconics/odoo,apanju/GMIO_Odoo,srsman/odoo,fossoult/odoo,camptocamp/ngo-addons-backport,jeasoft/odoo,diagramsoftware/odoo,eino-makitalo/odoo,charbeljc/OCB,Daniel-CA/odoo,xujb/odoo,Ichag/odoo,windedge/odoo,ramadhane/odoo,nuncjo/odoo,srimai/odoo,addition-it-solutions/project-all,PongPi/isl-odoo,shaufi10/odoo,hanicker/odoo,syci/OCB,Gitlab11/odoo,kifcaliph/odoo,Ernesto99/odoo,Endika/odoo,luistorresm/odoo,diagramsoftware/odoo,nhomar/odoo-mirror,florian-dacosta/OpenUpgrade,chiragjogi/odoo,Gitlab11/odoo,MarcosCommunity/odoo,tarzan0820/odoo,collex100/odoo,BT-astauder/odoo,thanhacun/odoo,ramadhane/odoo,mszewczy/odoo,VitalPet/odoo,abstract-open-solutions/OCB,mvaled/OpenUpgrade,ehirt/odoo,ApuliaSoftware/odoo,doomsterinc/odoo,CopeX/odoo,jpshort/odoo,takis/odoo,savoirfairelinux/OpenUpgrade,salaria/odoo,ojengwa/odoo,TRESCLOUD/odoopub,alexteodor/odoo,alexcuellar/odoo,odoousers2014/odoo,AuyaJackie/odoo,jaxkodex/odoo,savoirfairelinux/OpenUpgrade,sinbazhou/odoo,pedrobaeza/OpenUpgrade,stephen144/odoo,storm-computers/odoo,idncom/odoo,ccomb/OpenUpgrade,markeTIC/OCB,ehirt/odoo,kirca/OpenUpgrade,hassoon3/odoo,dllsf/odootest,gavin-feng/odoo,rubencabrera/odoo
,sadleader/odoo,frouty/odoogoeen,feroda/odoo,xzYue/odoo,dalegregory/odoo,OpusVL/odoo,csrocha/OpenUpgrade,Endika/OpenUpgrade,odootr/odoo,chiragjogi/odoo,bguillot/OpenUpgrade,shingonoide/odoo,ChanduERP/odoo,Kilhog/odoo,VielSoft/odoo,joshuajan/odoo,realsaiko/odoo,deKupini/erp,ramitalat/odoo,bobisme/odoo,draugiskisprendimai/odoo,sebalix/OpenUpgrade,deKupini/erp,fgesora/odoo,doomsterinc/odoo,hanicker/odoo,leorochael/odoo,savoirfairelinux/OpenUpgrade,apocalypsebg/odoo,christophlsa/odoo,FlorianLudwig/odoo,dariemp/odoo,slevenhagen/odoo,hubsaysnuaa/odoo,tarzan0820/odoo,alexcuellar/odoo,pplatek/odoo,ApuliaSoftware/odoo,papouso/odoo,elmerdpadilla/iv,dariemp/odoo,sebalix/OpenUpgrade,andreparames/odoo,grap/OpenUpgrade,patmcb/odoo,mszewczy/odoo,frouty/odoo_oph,alqfahad/odoo,pedrobaeza/odoo,FlorianLudwig/odoo,cedk/odoo,mszewczy/odoo,mkieszek/odoo,arthru/OpenUpgrade,rahuldhote/odoo,ccomb/OpenUpgrade,javierTerry/odoo,VielSoft/odoo,prospwro/odoo,JGarcia-Panach/odoo,NL66278/OCB,KontorConsulting/odoo,juanalfonsopr/odoo,gorjuce/odoo,avoinsystems/odoo,grap/OCB,srimai/odoo,steedos/odoo,codekaki/odoo,alexteodor/odoo,Danisan/odoo-1,goliveirab/odoo,GauravSahu/odoo,hopeall/odoo,hifly/OpenUpgrade,erkrishna9/odoo,jeasoft/odoo,charbeljc/OCB,gvb/odoo,PongPi/isl-odoo,guerrerocarlos/odoo,microcom/odoo,christophlsa/odoo,sve-odoo/odoo,patmcb/odoo,fgesora/odoo,bwrsandman/OpenUpgrade,dezynetechnologies/odoo,jfpla/odoo,BT-rmartin/odoo,KontorConsulting/odoo,ujjwalwahi/odoo,nuuuboo/odoo,MarcosCommunity/odoo,dllsf/odootest,nhomar/odoo,Codefans-fan/odoo,JonathanStein/odoo,salaria/odoo,simongoffin/website_version,n0m4dz/odoo,fjbatresv/odoo,rschnapka/odoo,ChanduERP/odoo,ovnicraft/odoo,waytai/odoo,sysadminmatmoz/OCB,SAM-IT-SA/odoo,RafaelTorrealba/odoo,rahuldhote/odoo,lombritz/odoo,ujjwalwahi/odoo,jpshort/odoo,TRESCLOUD/odoopub,matrixise/odoo,blaggacao/OpenUpgrade,Eric-Zhong/odoo,GauravSahu/odoo,hbrunn/OpenUpgrade,florentx/OpenUpgrade,apocalypsebg/odoo,spadae22/odoo,stonegithubs/odoo,klunwebale/odoo,prospwro/od
oo,cysnake4713/odoo,stephen144/odoo,laslabs/odoo,jolevq/odoopub,funkring/fdoo,alhashash/odoo,dllsf/odootest,gsmartway/odoo,cloud9UG/odoo,CubicERP/odoo,BT-rmartin/odoo,ChanduERP/odoo,credativUK/OCB,ShineFan/odoo,ehirt/odoo,cysnake4713/odoo,collex100/odoo,bealdav/OpenUpgrade,bwrsandman/OpenUpgrade,abstract-open-solutions/OCB,zchking/odoo,funkring/fdoo,minhtuancn/odoo,shaufi/odoo,tvtsoft/odoo8,ThinkOpen-Solutions/odoo,poljeff/odoo,syci/OCB,highco-groupe/odoo,minhtuancn/odoo,abdellatifkarroum/odoo,podemos-info/odoo,shivam1111/odoo,RafaelTorrealba/odoo,ecosoft-odoo/odoo,chiragjogi/odoo,bakhtout/odoo-educ,shaufi/odoo,patmcb/odoo,odooindia/odoo,leoliujie/odoo,BT-fgarbely/odoo,ujjwalwahi/odoo,cpyou/odoo,collex100/odoo,ecosoft-odoo/odoo,dkubiak789/odoo,rschnapka/odoo,kifcaliph/odoo,PongPi/isl-odoo,sve-odoo/odoo,dsfsdgsbngfggb/odoo,jeasoft/odoo,ojengwa/odoo,lombritz/odoo,savoirfairelinux/odoo,mustafat/odoo-1,oliverhr/odoo,ovnicraft/odoo,ygol/odoo,bwrsandman/OpenUpgrade,CubicERP/odoo,dalegregory/odoo,feroda/odoo,shaufi/odoo,jiangzhixiao/odoo,feroda/odoo,osvalr/odoo,lgscofield/odoo,0k/odoo,jusdng/odoo,highco-groupe/odoo,datenbetrieb/odoo,sv-dev1/odoo,BT-fgarbely/odoo,idncom/odoo,Daniel-CA/odoo,Codefans-fan/odoo,OpenPymeMx/OCB,omprakasha/odoo,highco-groupe/odoo,dgzurita/odoo,CubicERP/odoo,ihsanudin/odoo,javierTerry/odoo,jaxkodex/odoo,nuncjo/odoo,massot/odoo,RafaelTorrealba/odoo,tvibliani/odoo,OSSESAC/odoopubarquiluz,ClearCorp-dev/odoo,damdam-s/OpenUpgrade,fuselock/odoo,abenzbiria/clients_odoo,ygol/odoo,MarcosCommunity/odoo,mustafat/odoo-1,naousse/odoo,jiachenning/odoo,blaggacao/OpenUpgrade,oliverhr/odoo,BT-ojossen/odoo,vnsofthe/odoo,jfpla/odoo,ShineFan/odoo,christophlsa/odoo,SAM-IT-SA/odoo,rdeheele/odoo,AuyaJackie/odoo,camptocamp/ngo-addons-backport,Drooids/odoo,havt/odoo,OpusVL/odoo,guewen/OpenUpgrade,inspyration/odoo,arthru/OpenUpgrade,klunwebale/odoo,microcom/odoo,Grirrane/odoo,credativUK/OCB,CubicERP/odoo,0k/OpenUpgrade,rschnapka/odoo,Gitlab11/odoo,synconics/odoo,apanju/GMIO
_Odoo,incaser/odoo-odoo,alhashash/odoo,hip-odoo/odoo,apanju/odoo,patmcb/odoo,0k/OpenUpgrade,NeovaHealth/odoo,OpenUpgrade-dev/OpenUpgrade,wangjun/odoo,kirca/OpenUpgrade,JCA-Developpement/Odoo,syci/OCB,grap/OpenUpgrade,demon-ru/iml-crm,factorlibre/OCB,synconics/odoo,lightcn/odoo,gsmartway/odoo,ingadhoc/odoo,funkring/fdoo,hifly/OpenUpgrade,damdam-s/OpenUpgrade,jeasoft/odoo,juanalfonsopr/odoo,ramadhane/odoo,wangjun/odoo,juanalfonsopr/odoo,GauravSahu/odoo,hmen89/odoo,Bachaco-ve/odoo,nhomar/odoo-mirror,fuhongliang/odoo,apocalypsebg/odoo,apanju/GMIO_Odoo,GauravSahu/odoo,hmen89/odoo,eino-makitalo/odoo,OpenUpgrade-dev/OpenUpgrade,hbrunn/OpenUpgrade,bplancher/odoo,gorjuce/odoo,cpyou/odoo,javierTerry/odoo,klunwebale/odoo,salaria/odoo,papouso/odoo,Grirrane/odoo,cdrooom/odoo,nuncjo/odoo,leoliujie/odoo,leorochael/odoo,Nowheresly/odoo,nitinitprof/odoo,colinnewell/odoo,feroda/odoo,odoo-turkiye/odoo,mkieszek/odoo,ShineFan/odoo,ApuliaSoftware/odoo,FlorianLudwig/odoo,draugiskisprendimai/odoo,dfang/odoo,VitalPet/odoo,luistorresm/odoo,ujjwalwahi/odoo,bguillot/OpenUpgrade,addition-it-solutions/project-all,gorjuce/odoo,bakhtout/odoo-educ,rowemoore/odoo,jusdng/odoo,stephen144/odoo,oasiswork/odoo,minhtuancn/odoo,OpenUpgrade/OpenUpgrade,NL66278/OCB,dfang/odoo,dfang/odoo,OpenPymeMx/OCB,OpenUpgrade-dev/OpenUpgrade,highco-groupe/odoo,brijeshkesariya/odoo,stephen144/odoo,lightcn/odoo,savoirfairelinux/odoo,jolevq/odoopub,salaria/odoo,CopeX/odoo,tinkhaven-organization/odoo,hopeall/odoo,factorlibre/OCB,NeovaHealth/odoo,TRESCLOUD/odoopub,funkring/fdoo,ApuliaSoftware/odoo,x111ong/odoo,nhomar/odoo,numerigraphe/odoo,sysadminmatmoz/OCB,credativUK/OCB,cedk/odoo,tangyiyong/odoo,bplancher/odoo,codekaki/odoo,demon-ru/iml-crm,savoirfairelinux/odoo,OpenUpgrade-dev/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,ramitalat/odoo,eino-makitalo/odoo,grap/OCB,podemos-info/odoo,waytai/odoo,hip-odoo/odoo,cpyou/odoo,Eric-Zhong/odoo,colinnewell/odoo,ClearCorp-dev/odoo,minhtuancn/odoo,sv-dev1/odoo,mlaitinen/odoo,patmcb/odoo,Flo
rianLudwig/odoo,vrenaville/ngo-addons-backport,grap/OpenUpgrade,rahuldhote/odoo,simongoffin/website_version,alhashash/odoo,cloud9UG/odoo,Antiun/odoo,camptocamp/ngo-addons-backport,prospwro/odoo,blaggacao/OpenUpgrade,Endika/OpenUpgrade,colinnewell/odoo,abdellatifkarroum/odoo,virgree/odoo,naousse/odoo,Gitlab11/odoo,JCA-Developpement/Odoo,luiseduardohdbackup/odoo,Bachaco-ve/odoo,OpenUpgrade/OpenUpgrade,Nick-OpusVL/odoo,Danisan/odoo-1,poljeff/odoo,fdvarela/odoo8,Elico-Corp/odoo_OCB,havt/odoo,pedrobaeza/odoo,OSSESAC/odoopubarquiluz,OSSESAC/odoopubarquiluz,jpshort/odoo,Codefans-fan/odoo,jolevq/odoopub,stonegithubs/odoo,rahuldhote/odoo,numerigraphe/odoo,poljeff/odoo,BT-fgarbely/odoo,Ernesto99/odoo,goliveirab/odoo,Kilhog/odoo,bguillot/OpenUpgrade,osvalr/odoo,VielSoft/odoo | addons/portal/portal.py | addons/portal/portal.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
class portal(osv.osv):
    """OpenERP model for a portal: a named set of users plus the groups
    those users are automatically made members of (see the field help
    texts below)."""
    _name = 'res.portal'
    _description = 'Portal'
    _columns = {
        # Human-readable portal name.
        'name': fields.char(string='Name', size=64, required=True),
        # Users attached to this portal (inverse of res.users.portal_id).
        'user_ids': fields.one2many('res.users', 'portal_id', string='Portal users',
            help='Gives the set of users associated to this portal'),
        # Groups automatically granted to the portal's users.
        'group_ids': fields.many2many('res.groups', 'portal_group', 'portal_id', 'group_id',
            string='Groups', help='Users of this portal automatically belong to those groups'),
    }
portal()  # legacy OpenERP idiom: instantiating registers the model
class users(osv.osv):
    """Extension of res.users adding an optional link to a portal."""
    _name = 'res.users'
    _inherit = 'res.users'
    _columns = {
        # Portal this user belongs to (inverse of res.portal.user_ids).
        'portal_id': fields.many2one('res.portal', string='Portal',
            help='If given, the portal defines customized menu and access rules'),
    }
users()  # legacy OpenERP idiom: instantiating registers the model
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
class portal(osv.osv):
    """OpenERP model for a portal: a named set of users plus the groups
    those users are automatically made members of.

    Bug fix: ``group_ids`` was declared with ``field.many2many`` but the
    module only imports ``fields`` (``from osv import osv, fields``), so
    loading this module raised ``NameError: name 'field' is not defined``
    at class-definition time. Corrected to ``fields.many2many``, matching
    the other columns.
    """
    _name = 'res.portal'
    _description = 'Portal'
    _columns = {
        # Human-readable portal name.
        'name': fields.char(string='Name', size=64, required=True),
        # Users attached to this portal (inverse of res.users.portal_id).
        'user_ids': fields.one2many('res.users', 'portal_id', string='Portal users',
            help='Gives the set of users associated to this portal'),
        # Groups automatically granted to the portal's users.
        'group_ids': fields.many2many('res.groups', 'portal_group', 'portal_id', 'group_id',
            string='Groups', help='Users of this portal automatically belong to those groups'),
    }
portal()  # legacy OpenERP idiom: instantiating registers the model
class users(osv.osv):
    """Extension of res.users adding an optional link to a portal."""
    _name = 'res.users'
    _inherit = 'res.users'
    _columns = {
        # Portal this user belongs to (inverse of res.portal.user_ids).
        'portal_id': fields.many2one('res.portal', string='Portal',
            help='If given, the portal defines customized menu and access rules'),
    }
users()  # legacy OpenERP idiom: instantiating registers the model
| agpl-3.0 | Python |
b0b74348c54eb94c1dd1f5f1c146f2a9fa7c0534 | remove unused import | opendatazurich/ckanext-stadtzh-dwhdropzone | ckanext/stadtzhdwhdropzone/harvesters/stadtzhdwhdropzoneharvester.py | ckanext/stadtzhdwhdropzone/harvesters/stadtzhdwhdropzoneharvester.py | # coding: utf-8
from ckanext.stadtzhharvest.harvester import StadtzhHarvester
import logging
log = logging.getLogger(__name__)
class StadtzhdwhdropzoneHarvester(StadtzhHarvester):
    '''
    Harvester for the Stadt ZH DWH dropzone.

    The three harvest stages simply delegate to the shared helpers
    inherited from ``StadtzhHarvester``; this subclass only pins down
    the dropzone location and metadata directory and identifies the
    harvester by name.
    '''
    # Dropzone location and metadata directory, presumably consumed by
    # the StadtzhHarvester base-class helpers.
    DATA_PATH = '/usr/lib/ckan/DWH'
    METADATA_DIR = 'dwh-metadata'
    def info(self):
        '''
        Return some general info about this harvester.
        '''
        description = {
            'name': 'stadtzhdwhdropzone',
            'title': 'Stadtzhdwhdropzone',
            'description': 'Harvests the Stadtzhdwhdropzone data',
            'form_config_interface': 'Text'
        }
        return description
    def gather_stage(self, harvest_job):
        log.debug('In StadtzhdwhdropzoneHarvester gather_stage')
        return self._gather_datasets(harvest_job)
    def fetch_stage(self, harvest_object):
        log.debug('In StadtzhdwhdropzoneHarvester fetch_stage')
        return self._fetch_datasets(harvest_object)
    def import_stage(self, harvest_object):
        log.debug('In StadtzhdwhdropzoneHarvester import_stage')
        return self._import_datasets(harvest_object)
| # coding: utf-8
from ckanext.stadtzhharvest.harvester import StadtzhHarvester
import os
import logging
log = logging.getLogger(__name__)
class StadtzhdwhdropzoneHarvester(StadtzhHarvester):
    '''
    The harvester for the Stadt ZH DWH Dropzone
    '''
    # Dropzone location and metadata directory -- presumably consumed by
    # the StadtzhHarvester base-class helpers (confirm in base class).
    DATA_PATH = '/usr/lib/ckan/DWH'
    METADATA_DIR = 'dwh-metadata'
    def info(self):
        '''
        Return some general info about this harvester
        '''
        return {
            'name': 'stadtzhdwhdropzone',
            'title': 'Stadtzhdwhdropzone',
            'description': 'Harvests the Stadtzhdwhdropzone data',
            'form_config_interface': 'Text'
        }
    def gather_stage(self, harvest_job):
        # All three stages delegate to shared helpers on StadtzhHarvester.
        log.debug('In StadtzhdwhdropzoneHarvester gather_stage')
        return self._gather_datasets(harvest_job)
    def fetch_stage(self, harvest_object):
        log.debug('In StadtzhdwhdropzoneHarvester fetch_stage')
        return self._fetch_datasets(harvest_object)
    def import_stage(self, harvest_object):
        log.debug('In StadtzhdwhdropzoneHarvester import_stage')
        return self._import_datasets(harvest_object)
| agpl-3.0 | Python |
0124161cfa0064ff5517c5dff108e74d2066974e | Use cupyx.scipy._scipy_available to import scipy.linalg conditionally in test_decomp_lu.py | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | tests/cupyx_tests/scipy_tests/linalg_tests/test_decomp_lu.py | tests/cupyx_tests/scipy_tests/linalg_tests/test_decomp_lu.py | import unittest
import numpy
import cupy
from cupy import cuda
from cupy import testing
import cupyx.scipy.linalg
if cupyx.scipy._scipy_available:
import scipy.linalg
@testing.gpu
@testing.parameterize(*testing.product({
    'shape': [(1, 1), (2, 2), (3, 3), (5, 5)],
}))
@testing.fix_random()
@unittest.skipUnless(
    cuda.cusolver_enabled, 'Only cusolver in CUDA 8.0 is supported')
@testing.with_requires('scipy')
class TestLUFactor(unittest.TestCase):
    # Checks cupyx.scipy.linalg.lu_factor against SciPy's reference
    # implementation on random square matrices of various sizes.
    @testing.for_float_dtypes(no_float16=True)
    def test_lu_factor(self, dtype):
        # Same random matrix on the host (numpy) and the device (cupy).
        data = numpy.random.randn(*self.shape)
        host_a = numpy.asarray(data, dtype=dtype)
        dev_a = cupy.asarray(data, dtype=dtype)
        ref = scipy.linalg.lu_factor(host_a)
        out = cupyx.scipy.linalg.lu_factor(dev_a)
        # Both results are indexed as (lu, piv) pairs below; they must
        # have the same length, dtypes, and (near-)equal values.
        self.assertEqual(len(ref), len(out))
        for part in (0, 1):
            self.assertEqual(ref[part].dtype, out[part].dtype)
        cupy.testing.assert_allclose(ref[0], out[0], atol=1e-5)
        cupy.testing.assert_array_equal(ref[1], out[1])
@testing.gpu
@testing.parameterize(*testing.product({
    'trans': [0, 1, 2],
    'shapes': [((4, 4), (4,)), ((5, 5), (5, 2))],
}))
@testing.fix_random()
@unittest.skipUnless(
    cuda.cusolver_enabled, 'Only cusolver in CUDA 8.0 is supported')
@testing.with_requires('scipy')
class TestLUSolve(unittest.TestCase):
    # `xp`/`scp` are injected by numpy_cupy_allclose: the method runs
    # once with numpy/scipy and once with cupy/cupyx.scipy, and the two
    # returned solutions must agree to atol=1e-5.
    @testing.for_float_dtypes(no_float16=True)
    @testing.numpy_cupy_allclose(atol=1e-5, scipy_name='scp')
    def test_lu_solve(self, xp, scp, dtype):
        lhs_shape, rhs_shape = self.shapes
        lhs = testing.shaped_random(lhs_shape, xp, dtype=dtype)
        rhs = testing.shaped_random(rhs_shape, xp, dtype=dtype)
        factorization = scp.linalg.lu_factor(lhs)
        return scp.linalg.lu_solve(factorization, rhs, trans=self.trans)
| import unittest
import numpy
try:
import scipy.linalg
scipy_available = True
except ImportError:
scipy_available = False
import cupy
from cupy import cuda
from cupy import testing
import cupyx.scipy.linalg
@testing.gpu
@testing.parameterize(*testing.product({
    'shape': [(1, 1), (2, 2), (3, 3), (5, 5)],
}))
@testing.fix_random()
@unittest.skipUnless(
    cuda.cusolver_enabled, 'Only cusolver in CUDA 8.0 is supported')
@testing.with_requires('scipy')
class TestLUFactor(unittest.TestCase):
    """Compare cupyx.scipy.linalg.lu_factor against scipy.linalg.lu_factor."""
    @testing.for_float_dtypes(no_float16=True)
    def test_lu_factor(self, dtype):
        # Same random square matrix on host (numpy) and device (cupy).
        array = numpy.random.randn(*self.shape)
        a_cpu = numpy.asarray(array, dtype=dtype)
        a_gpu = cupy.asarray(array, dtype=dtype)
        result_cpu = scipy.linalg.lu_factor(a_cpu)
        result_gpu = cupyx.scipy.linalg.lu_factor(a_gpu)
        # Results are indexed as pairs below: [0] is the LU matrix,
        # [1] the pivot indices. Lengths, dtypes and values must match.
        self.assertEqual(len(result_cpu), len(result_gpu))
        self.assertEqual(result_cpu[0].dtype, result_gpu[0].dtype)
        self.assertEqual(result_cpu[1].dtype, result_gpu[1].dtype)
        cupy.testing.assert_allclose(result_cpu[0], result_gpu[0], atol=1e-5)
        cupy.testing.assert_array_equal(result_cpu[1], result_gpu[1])
@testing.gpu
@testing.parameterize(*testing.product({
    'trans': [0, 1, 2],
    'shapes': [((4, 4), (4,)), ((5, 5), (5, 2))],
}))
@testing.fix_random()
@unittest.skipUnless(
    cuda.cusolver_enabled, 'Only cusolver in CUDA 8.0 is supported')
@testing.with_requires('scipy')
class TestLUSolve(unittest.TestCase):
    """Compare lu_solve between scipy and cupyx.scipy for all trans modes
    and both vector and matrix right-hand sides."""
    # `xp`/`scp` are injected by numpy_cupy_allclose (numpy/scipy on one
    # run, cupy/cupyx.scipy on the other); the returned solutions from
    # the two runs must agree to atol=1e-5.
    @testing.for_float_dtypes(no_float16=True)
    @testing.numpy_cupy_allclose(atol=1e-5, scipy_name='scp')
    def test_lu_solve(self, xp, scp, dtype):
        a_shape, b_shape = self.shapes
        A = testing.shaped_random(a_shape, xp, dtype=dtype)
        b = testing.shaped_random(b_shape, xp, dtype=dtype)
        lu = scp.linalg.lu_factor(A)
        return scp.linalg.lu_solve(lu, b, trans=self.trans)
| mit | Python |
a0840998821b78dfeae8df8e5d031089393581ed | Fix module dependencies | eicher31/compassion-modules,CompassionCH/compassion-modules,Secheron/compassion-modules,CompassionCH/compassion-modules,eicher31/compassion-modules,ecino/compassion-modules,emgirardin/compassion-modules,philippe89/compassion-modules,emgirardin/compassion-modules,maxime-beck/compassion-modules,emgirardin/compassion-modules,ndtran/compassion-modules,maxime-beck/compassion-modules,eicher31/compassion-modules,MickSandoz/compassion-modules,ecino/compassion-modules,ndtran/compassion-modules,eicher31/compassion-modules,philippe89/compassion-modules,philippe89/compassion-modules,Secheron/compassion-modules,ecino/compassion-modules,MickSandoz/compassion-modules,maxime-beck/compassion-modules,maxime-beck/compassion-modules,MickSandoz/compassion-modules,ecino/compassion-modules,Secheron/compassion-modules,eicher31/compassion-modules,ndtran/compassion-modules,CompassionCH/compassion-modules,ecino/compassion-modules,CompassionCH/compassion-modules | contract_compassion/__openerp__.py | contract_compassion/__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# ______ Releasing children from poverty _
# / ____/___ ____ ___ ____ ____ ___________(_)___ ____
# / / / __ \/ __ `__ \/ __ \/ __ `/ ___/ ___/ / __ \/ __ \
# / /___/ /_/ / / / / / / /_/ / /_/ (__ |__ ) / /_/ / / / /
# \____/\____/_/ /_/ /_/ .___/\__,_/____/____/_/\____/_/ /_/
# /_/
# in Jesus' name
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# @author: David Coninckx
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest: a plain dict literal read by the loader.
{
    'name': 'Compassion Contracts',
    'version': '1.2',
    'category': 'Other',
    'author': 'Compassion CH',
    'website': 'http://www.compassion.ch',
    # Modules that must be installed before this one.
    'depends': ['recurring_contract', 'account_banking_mandate',
                'child_compassion', 'account_analytic_compassion',
                'l10n_ch_payment_slip'],
    # XML/CSV resources loaded at install/update time.
    'data': [
        'view/end_contract_wizard_view.xml',
        'view/contract_group_view.xml',
        'view/contract_origin_view.xml',
        'view/contract_view.xml',
        'view/activate_contract_view.xml',
        'workflow/contract_workflow.xml',
        'workflow/invoice_workflow.xml',
        'security/ir.model.access.csv',
        'data/friday_invoicer_cron.xml',
        'data/product.xml',
        'data/payment_terms.xml',
    ],
    'demo': [],
    'installable': True,
    'auto_install': False,
}
| # -*- encoding: utf-8 -*-
##############################################################################
#
# ______ Releasing children from poverty _
# / ____/___ ____ ___ ____ ____ ___________(_)___ ____
# / / / __ \/ __ `__ \/ __ \/ __ `/ ___/ ___/ / __ \/ __ \
# / /___/ /_/ / / / / / / /_/ / /_/ (__ |__ ) / /_/ / / / /
# \____/\____/_/ /_/ /_/ .___/\__,_/____/____/_/\____/_/ /_/
# /_/
# in Jesus' name
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# @author: David Coninckx
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest: a plain dict literal read by the loader.
{
    'name': 'Compassion Contracts',
    'version': '1.2',
    'category': 'Other',
    'author': 'Compassion CH',
    'website': 'http://www.compassion.ch',
    # Modules that must be installed before this one.
    'depends': ['recurring_contract', 'account_banking_mandate',
                'child_compassion', 'account_analytic_compassion',
                'account_accountant', 'l10n_ch_payment_slip'],
    # XML/CSV resources loaded at install/update time.
    'data': [
        'view/end_contract_wizard_view.xml',
        'view/contract_group_view.xml',
        'view/contract_origin_view.xml',
        'view/contract_view.xml',
        'view/activate_contract_view.xml',
        'workflow/contract_workflow.xml',
        'workflow/invoice_workflow.xml',
        'security/ir.model.access.csv',
        'data/friday_invoicer_cron.xml',
        'data/product.xml',
        'data/payment_terms.xml',
    ],
    'demo': [],
    'installable': True,
    'auto_install': False,
}
| agpl-3.0 | Python |
3c2216db8c9caa09cbdacf094739b7bcf869c068 | Use Plotter.datastore = Propagate | tkf/compapp | compapp/utils.py | compapp/utils.py | from .core import Executable
from .plugins import PluginWrapper, AutoDump, Figure, \
SubDataStore, HashDataStore
from .properties import Propagate
class Plotter(Executable):
    """
    Base plotter class.
    Example
    -------
    ::
        class DensityPlotter(Plotter):
            pass
        class CumulativeDistributionPlotter(Plotter):
            pass
        class MyApp(SimulationApp):
            density = DensityPlotter
            dist = CumulativeDistributionPlotter
            def run(self):
                self.simulator.execute()
                self.density.plot(self.simulator)
                self.dist.plot(self.simulator)
    Then ``myapp.py --datastore.dir out/`` saves density plot in
    ``out/density/0.png`` and cumulative distribution in
    ``out/dist/0.png``.
    """
    # Plugin slots; classes imported from compapp.plugins / .properties.
    figure = Figure
    datastore = Propagate
    # `Propagate` is used here so that figures are saved as
    # ``PLOTTER_NAME/FIGURE_NAME.png`` rather than
    # ``PLOTTER_NAME/figure/FIGURE_NAME.png`` (which would be the
    # case if ``datastore = SubDataStore`` is used).
class Simulator(Executable):
"""
Base simulator class.
.. todo:: Do I need it? Why not just use `.SimulationApp`?
.. todo:: If I were to use this class, it's better to have
`result_names` stuff implemented in `.Application`.
Maybe separate that part into a class `Calculator` and
inherit from `Simulator` and `.Application`.
"""
datastore = SubDataStore
class plugins(PluginWrapper):
autodump = AutoDump
class Analyzer(Simulator):
datastore = HashDataStore
| from .core import Executable
from .plugins import PluginWrapper, AutoDump, Figure, \
SubDataStore, HashDataStore
class Plotter(Executable):
"""
Base plotter class.
Example
-------
::
class DensityPlotter(Plotter):
pass
class CumulativeDistributionPlotter(Plotter):
pass
class MyApp(SimulationApp):
density = DensityPlotter
dist = CumulativeDistributionPlotter
def run(self):
self.simulator.execute()
self.density.plot(self.simulator)
self.dist.plot(self.simulator)
Then ``myapp.py --datastore.dir out/`` saves density plot in
``out/density/0.png`` and cumulative distribution in
``out/dist/0.png``.
"""
figure = Figure
datastore = SubDataStore
class Simulator(Executable):
"""
Base simulator class.
.. todo:: Do I need it? Why not just use `.SimulationApp`?
.. todo:: If I were to use this class, it's better to have
`result_names` stuff implemented in `.Application`.
Maybe separate that part into a class `Calculator` and
inherit from `Simulator` and `.Application`.
"""
datastore = SubDataStore
class plugins(PluginWrapper):
autodump = AutoDump
class Analyzer(Simulator):
datastore = HashDataStore
| bsd-2-clause | Python |
189f7d81f31abf20bbd56e098bf8dacd8a933d05 | Remove unused function casp_model_names. | deepmind/alphafold,deepmind/alphafold | alphafold/model/data.py | alphafold/model/data.py | # Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convenience functions for reading data."""
import io
import os
from alphafold.model import utils
import haiku as hk
import numpy as np
# Internal import (7716).
def get_model_haiku_params(model_name: str, data_dir: str) -> hk.Params:
"""Get the Haiku parameters from a model name."""
path = os.path.join(data_dir, 'params', f'params_{model_name}.npz')
with open(path, 'rb') as f:
params = np.load(io.BytesIO(f.read()), allow_pickle=False)
return utils.flat_params_to_haiku(params)
| # Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convenience functions for reading data."""
import io
import os
from typing import List
from alphafold.model import utils
import haiku as hk
import numpy as np
# Internal import (7716).
def casp_model_names(data_dir: str) -> List[str]:
params = os.listdir(os.path.join(data_dir, 'params'))
return [os.path.splitext(filename)[0] for filename in params]
def get_model_haiku_params(model_name: str, data_dir: str) -> hk.Params:
"""Get the Haiku parameters from a model name."""
path = os.path.join(data_dir, 'params', f'params_{model_name}.npz')
with open(path, 'rb') as f:
params = np.load(io.BytesIO(f.read()), allow_pickle=False)
return utils.flat_params_to_haiku(params)
| apache-2.0 | Python |
82aa492f43b7b7c2a9aa935d4fe101743c64e144 | Add function to get stdin/stdout/stderr on MSVC | seibert/numba,stonebig/numba,IntelLabs/numba,cpcloud/numba,ssarangi/numba,stuartarchibald/numba,gmarkall/numba,jriehl/numba,ssarangi/numba,shiquanwang/numba,stuartarchibald/numba,numba/numba,pombredanne/numba,sklam/numba,sklam/numba,gmarkall/numba,stonebig/numba,stonebig/numba,sklam/numba,gmarkall/numba,gmarkall/numba,numba/numba,pombredanne/numba,stefanseefeld/numba,seibert/numba,shiquanwang/numba,jriehl/numba,shiquanwang/numba,jriehl/numba,stefanseefeld/numba,cpcloud/numba,gdementen/numba,pitrou/numba,pitrou/numba,gmarkall/numba,gdementen/numba,stefanseefeld/numba,sklam/numba,pitrou/numba,pombredanne/numba,GaZ3ll3/numba,pombredanne/numba,cpcloud/numba,GaZ3ll3/numba,gdementen/numba,IntelLabs/numba,stonebig/numba,ssarangi/numba,cpcloud/numba,ssarangi/numba,numba/numba,pombredanne/numba,stefanseefeld/numba,stuartarchibald/numba,numba/numba,IntelLabs/numba,stonebig/numba,stuartarchibald/numba,jriehl/numba,IntelLabs/numba,GaZ3ll3/numba,IntelLabs/numba,seibert/numba,numba/numba,stefanseefeld/numba,GaZ3ll3/numba,gdementen/numba,cpcloud/numba,jriehl/numba,ssarangi/numba,seibert/numba,sklam/numba,gdementen/numba,stuartarchibald/numba,pitrou/numba,pitrou/numba,seibert/numba,GaZ3ll3/numba | numba/stdio_util.py | numba/stdio_util.py | #! /usr/bin/env python
# ______________________________________________________________________
import ctypes
import ctypes.util
from numba import *
# ______________________________________________________________________
c_void_pp = ctypes.POINTER(ctypes.c_void_p)
def get_libc ():
return ctypes.CDLL(ctypes.util.find_library('c'))
def get_stdio_streams ():
'''
Returns file pointers (FILE *) as Python integers for the C stdio
stdin, stdout, and stderr streams.
'''
ret_val = None
if hasattr(ctypes.pythonapi, 'stdin'):
# Linux
_stdio_files = (ctypes.c_void_p.in_dll(ctypes.pythonapi, sym)
for sym in ('stdin', 'stdout', 'stderr'))
ret_val = tuple(c_void_pp(file_p)[0] for file_p in _stdio_files)
elif hasattr(ctypes.pythonapi, '__stdinp'):
# OSX
_stdio_files = (ctypes.c_void_p.in_dll(ctypes.pythonapi, sym)
for sym in ('__stdinp', '__stdoutp', '__stderrp'))
ret_val = tuple(c_void_pp(file_p)[0] for file_p in _stdio_files)
else:
libc = get_libc()
if hasattr(libc, '__getreent'):
# Cygwin
ret_val = tuple(ctypes.cast(libc.__getreent(), c_void_pp)[1:4])
elif hasattr(libc, '__iob_func'):
# MSVC
ret_val = tuple(ctypes.cast(libc.__iob_func(), c_void_pp)[0:3])
else:
raise NotImplementedError("Unsupported platform, don't know how to "
"find pointers to stdio streams!")
return ret_val
def get_stream_as_node(fp):
return nodes.CoercionNode(nodes.ConstNode(fp, Py_ssize_t),
void.pointer())
# ______________________________________________________________________
def main ():
_, stdout, _ = get_stdio_streams()
PyObject_Print = ctypes.pythonapi.PyObject_Print
PyObject_Print.restype = ctypes.c_int
PyObject_Print.argtypes = ctypes.py_object, ctypes.c_void_p, ctypes.c_int
PyObject_Print(get_stdio_streams, stdout, 1)
PyObject_Print('\n\n', stdout, 1)
# ______________________________________________________________________
if __name__ == "__main__":
main()
# ______________________________________________________________________
# End of stdio_util.py
| #! /usr/bin/env python
# ______________________________________________________________________
import ctypes
import ctypes.util
from numba import *
# ______________________________________________________________________
c_void_pp = ctypes.POINTER(ctypes.c_void_p)
def get_libc ():
return ctypes.CDLL(ctypes.util.find_library('c'))
def get_stdio_streams ():
'''
Returns file pointers (FILE *) as Python integers for the C stdio
stdin, stdout, and stderr streams.
'''
ret_val = None
if hasattr(ctypes.pythonapi, 'stdin'):
# Linux
_stdio_files = (ctypes.c_void_p.in_dll(ctypes.pythonapi, sym)
for sym in ('stdin', 'stdout', 'stderr'))
ret_val = tuple(c_void_pp(file_p)[0] for file_p in _stdio_files)
elif hasattr(ctypes.pythonapi, '__stdinp'):
# OSX
_stdio_files = (ctypes.c_void_p.in_dll(ctypes.pythonapi, sym)
for sym in ('__stdinp', '__stdoutp', '__stderrp'))
ret_val = tuple(c_void_pp(file_p)[0] for file_p in _stdio_files)
else:
libc = get_libc()
if hasattr(libc, '__getreent'):
# Cygwin
ret_val = tuple(ctypes.cast(libc.__getreent(), c_void_pp)[1:4])
else:
raise NotImplementedError("Unsupported platform, don't know how to "
"find pointers to stdio streams!")
return ret_val
def get_stream_as_node(fp):
return nodes.CoercionNode(nodes.ConstNode(fp, Py_ssize_t),
void.pointer())
# ______________________________________________________________________
def main ():
_, stdout, _ = get_stdio_streams()
PyObject_Print = ctypes.pythonapi.PyObject_Print
PyObject_Print.restype = ctypes.c_int
PyObject_Print.argtypes = ctypes.py_object, ctypes.c_void_p, ctypes.c_int
PyObject_Print(get_stdio_streams, stdout, 1)
PyObject_Print('\n\n', stdout, 1)
# ______________________________________________________________________
if __name__ == "__main__":
main()
# ______________________________________________________________________
# End of stdio_util.py
| bsd-2-clause | Python |
7d46b70f0201417a8abd2debed7780834d710897 | Add masked_as_lists function | cfobel/numpy_helpers | numpy_helpers/ma.py | numpy_helpers/ma.py | import numpy as np
def masked_as_lists(masked_array):
rank = len(masked_array.shape)
slices = tuple([slice(0, masked_array.shape[i]) for i in xrange(rank - 1)])
digits = int(np.ceil(np.log10(masked_array.max())))
f = '%%%dd' % digits
masked_lists = [', '.join([f % block_id if not m else digits * '-'
for block_id, m in zip(row, row.mask)])
for row in masked_array[slices]]
return masked_lists
def pformat(masked_array):
rank = len(masked_array.shape)
masked_lists = masked_as_lists(masked_array)
line = ']\n' + ((rank - 1) * ' ') + '['
return rank * '[' + line.join(masked_lists) + rank * ']'
| import numpy as np
def pformat(masked_array):
rank = len(masked_array.shape)
slices = tuple([slice(0, masked_array.shape[i]) for i in xrange(rank - 1)])
digits = int(np.ceil(np.log10(masked_array.max())))
f = '%%%dd' % digits
line = ']\n' + ((rank - 1) * ' ') + '['
formatted_masked = line.join([', '.join([f % block_id
if not m else digits * '-'
for block_id, m in zip(row,
row.mask)])
for row in masked_array[slices]])
return rank * '[' + formatted_masked + rank * ']'
| mit | Python |
f07204ba55442e13e71c28535714fb0c024aba52 | Add unique_id to ohmconnect (#78479) | w1ll1am23/home-assistant,nkgilley/home-assistant,nkgilley/home-assistant,w1ll1am23/home-assistant,mezz64/home-assistant,mezz64/home-assistant | homeassistant/components/ohmconnect/sensor.py | homeassistant/components/ohmconnect/sensor.py | """Support for OhmConnect."""
from __future__ import annotations
from datetime import timedelta
import logging
import defusedxml.ElementTree as ET
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_ID, CONF_NAME
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "OhmConnect Status"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the OhmConnect sensor."""
name = config.get(CONF_NAME)
ohmid = config.get(CONF_ID)
add_entities([OhmconnectSensor(name, ohmid)], True)
class OhmconnectSensor(SensorEntity):
"""Representation of a OhmConnect sensor."""
def __init__(self, name, ohmid):
"""Initialize the sensor."""
self._name = name
self._ohmid = ohmid
self._data = {}
self._attr_unique_id = ohmid
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def native_value(self):
"""Return the state of the sensor."""
if self._data.get("active") == "True":
return "Active"
return "Inactive"
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {"Address": self._data.get("address"), "ID": self._ohmid}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self) -> None:
"""Get the latest data from OhmConnect."""
try:
url = f"https://login.ohmconnect.com/verify-ohm-hour/{self._ohmid}"
response = requests.get(url, timeout=10)
root = ET.fromstring(response.text)
for child in root:
self._data[child.tag] = child.text
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to host/endpoint: %s", url)
self._data = {}
| """Support for OhmConnect."""
from __future__ import annotations
from datetime import timedelta
import logging
import defusedxml.ElementTree as ET
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_ID, CONF_NAME
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "OhmConnect Status"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the OhmConnect sensor."""
name = config.get(CONF_NAME)
ohmid = config.get(CONF_ID)
add_entities([OhmconnectSensor(name, ohmid)], True)
class OhmconnectSensor(SensorEntity):
"""Representation of a OhmConnect sensor."""
def __init__(self, name, ohmid):
"""Initialize the sensor."""
self._name = name
self._ohmid = ohmid
self._data = {}
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def native_value(self):
"""Return the state of the sensor."""
if self._data.get("active") == "True":
return "Active"
return "Inactive"
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {"Address": self._data.get("address"), "ID": self._ohmid}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self) -> None:
"""Get the latest data from OhmConnect."""
try:
url = f"https://login.ohmconnect.com/verify-ohm-hour/{self._ohmid}"
response = requests.get(url, timeout=10)
root = ET.fromstring(response.text)
for child in root:
self._data[child.tag] = child.text
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to host/endpoint: %s", url)
self._data = {}
| apache-2.0 | Python |
9cc290f6b5f1fd73840d02e2dc390e151ecff2db | Fix typo. | opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind | src/waldur_mastermind/analytics/serializers.py | src/waldur_mastermind/analytics/serializers.py | from datetime import timedelta
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from waldur_core.core.serializers import GenericRelatedField
from waldur_core.structure.models import Customer, Project
class DailyHistoryQuotaSerializer(serializers.Serializer):
scope = GenericRelatedField(related_models=(Project, Customer))
quota_names = serializers.ListField(child=serializers.CharField(), required=False)
start = serializers.DateField(format='%Y-%m-%d', required=False)
end = serializers.DateField(format='%Y-%m-%d', required=False)
def validate(self, attrs):
if 'quota_names' not in attrs:
attrs['quota_names'] = attrs['scope'].get_quotas_names()
if 'end' not in attrs:
attrs['end'] = timezone.now().date()
if 'start' not in attrs:
attrs['start'] = timezone.now().date() - timedelta(days=30)
if attrs['start'] >= attrs['end']:
raise serializers.ValidationError(
_('Invalid period specified. `start` should be lesser than `end`.')
)
return attrs
| from datetime import timedelta
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from waldur_core.core.serializers import GenericRelatedField
from waldur_core.structure.models import Customer, Project
class DailyHistoryQuotaSerializer(serializers.Serializer):
scope = GenericRelatedField(related_models=(Project, Customer))
quota_names = serializers.ListField(child=serializers.CharField(), required=False)
start = serializers.DateField(format='%Y-%m-%d', required=False)
end = serializers.DateField(format='%Y-%m-%d', required=False)
def validate(self, attrs):
if 'quota_names' not in attrs:
attrs['quota_names'] = attrs['scope'].get_quotas_names
if 'end' not in attrs:
attrs['end'] = timezone.now().date()
if 'start' not in attrs:
attrs['start'] = timezone.now().date() - timedelta(days=30)
if attrs['start'] >= attrs['end']:
raise serializers.ValidationError(
_('Invalid period specified. `start` should be lesser than `end`.')
)
return attrs
| mit | Python |
c88cefb1a12f35a8554fc783074883cbc558100a | Use request.access_route for determining origin of request to get through the Openshift servers. | bilderbuchi/ofCodeStyleGuard,bilderbuchi/ofCodeStyleGuard | ofCodeStyleGuard.py | ofCodeStyleGuard.py | #!/usr/bin/python
"""Make sure openFrameworks Pull Requests conform to the code style"""
import styleguard
import logging
import json
import os
from flask import Flask, request
LOGGER = logging.getLogger('webserver')
logging.basicConfig(level=styleguard.cfg['logging_level'])
APP = Flask(__name__)
APP.logger.setLevel(styleguard.cfg['logging_level'])
logging.getLogger('urllib3').setLevel(logging.WARNING)
@APP.route('/', methods=['POST'])
def api_pr():
""" React to a received POST request"""
LOGGER.info("Received POST request.")
LOGGER.debug('Access route: ' + str(request.access_route[:]))
origin = request.access_route[0]
# was using request.remote_addr. access_route could possibly be spoofed
if origin not in styleguard.cfg['github_ips']:
LOGGER.warning("Origin of request UNKNOWN: " + origin)
return 'Error'
# TODO: 'proper' return objects here and below
else:
LOGGER.debug("Origin of request: " + origin)
try:
payload = json.loads(request.form['payload'])
except KeyError:
# crutch: if an invalid request arrives locally, load a json file directly
if origin == '127.0.0.1':
location = os.getenv('OPENSHIFT_REPO_DIR', '')
with open(os.path.join(location, 'sample_payload.json'), 'r') as sample:
payload = json.load(sample)
else:
raise
styleguard.handle_payload(payload)
return 'OK'
def main():
"""Main function"""
# Instantiate a PrHandler, which start waiting on styleguard.MY_QUEUE
LOGGER.debug('In ofCodeStyleGuard main function')
_threaded_pr_worker = styleguard.PrHandler()
APP.run(host='0.0.0.0', port=styleguard.cfg['local_port'])
styleguard.MY_QUEUE.join()
if __name__ == "__main__":
main()
| #!/usr/bin/python
"""Make sure openFrameworks Pull Requests conform to the code style"""
import styleguard
import logging
import json
import os
from flask import Flask, request
LOGGER = logging.getLogger('webserver')
logging.basicConfig(level=styleguard.cfg['logging_level'])
APP = Flask(__name__)
APP.logger.setLevel(styleguard.cfg['logging_level'])
logging.getLogger('urllib3').setLevel(logging.WARNING)
@APP.route('/', methods=['POST'])
def api_pr():
""" React to a received POST request"""
LOGGER.info("Received POST request.")
if request.remote_addr not in styleguard.cfg['github_ips']:
LOGGER.warning("Origin of request UNKNOWN: " + request.remote_addr)
return 'Error'
# TODO: 'proper' return objects here and below
else:
LOGGER.debug("Origin of request: " + request.remote_addr)
try:
payload = json.loads(request.form['payload'])
except KeyError:
# crutch: if an invalid request arrives locally, load a json file directly
if request.remote_addr == '127.0.0.1':
location = os.getenv('OPENSHIFT_REPO_DIR', '')
with open(os.path.join(location, 'sample_payload.json'), 'r') as sample:
payload = json.load(sample)
else:
raise
styleguard.handle_payload(payload)
return 'OK'
def main():
"""Main function"""
# Instantiate a PrHandler, which start waiting on styleguard.MY_QUEUE
LOGGER.debug('In ofCodeStyleGuard main function')
_threaded_pr_worker = styleguard.PrHandler()
APP.run(host='0.0.0.0', port=styleguard.cfg['local_port'])
styleguard.MY_QUEUE.join()
if __name__ == "__main__":
main()
| mit | Python |
b326d43a94058390a559c4c9f55e9cd88dcac747 | Set propper mimetype for image attachment | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | adhocracy4/emails/mixins.py | adhocracy4/emails/mixins.py | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
if filename.endswith('.png'):
imagetype = 'png'
else:
imagetype = 'svg+xml'
with open(filename, 'rb') as f:
logo = MIMEImage(f.read(), imagetype)
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
| from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
with open(filename, 'rb') as f:
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
| agpl-3.0 | Python |
3afd4e86eb665ddd33538875ba69cf2e7a558b79 | put the pid in the formatter | plockaby/dart,plockaby/dart,plockaby/dart,plockaby/dart | agent/lib/dart/agent/cli.py | agent/lib/dart/agent/cli.py | #!/usr/bin/env python3
"""
This is a supervisord event listener. It does things such as:
* Forward state changes to the distributed data store.
* Forward active and pending configurations to the distributed data store.
* Forward host configuration information to the distributed data store.
* Listen for commands to execute against supervisord.
* Start processes on a schedule.
* Update the scheduler configuration from the distributed data store.
* Update the supervisord configuration from the distributed data store.
* Update the monitor configuration from the distributed data store.
* Monitors process logs and generates events from matching lines.
* Monitors process state and generates events from those changes.
"""
import sys
import argparse
import logging
import logging.handlers
import traceback
from .app import DartAgent
def main():
parser = argparse.ArgumentParser(
prog="dart-agent",
formatter_class=argparse.RawTextHelpFormatter,
description=__doc__,
)
parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False, help="send verbose output to the console")
args = parser.parse_args()
# configure logging
logging.captureWarnings(True)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_handler = logging.handlers.SysLogHandler(address="/dev/log")
log_handler.setFormatter(logging.Formatter("dart-agent[%(process)d]: %(asctime)s %(levelname)-8s - %(message)s"))
logger.addHandler(log_handler)
# change the level and output format if we're going to be verbose
if args.verbose:
logger.setLevel(logging.DEBUG)
log_handler.setFormatter(logging.Formatter("dart-agent[%(process)d]: %(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] - %(message)s"))
# start the main program
try:
options = vars(args)
configuration = dict(verbose=options.pop("verbose"))
runnable = DartAgent(**configuration)
runnable.run(**options)
return 0
except Exception as e:
logger.error(str(e))
logger.debug(traceback.format_exc())
return 1
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python3
"""
This is a supervisord event listener. It does things such as:
* Forward state changes to the distributed data store.
* Forward active and pending configurations to the distributed data store.
* Forward host configuration information to the distributed data store.
* Listen for commands to execute against supervisord.
* Start processes on a schedule.
* Update the scheduler configuration from the distributed data store.
* Update the supervisord configuration from the distributed data store.
* Update the monitor configuration from the distributed data store.
* Monitors process logs and generates events from matching lines.
* Monitors process state and generates events from those changes.
"""
import sys
import argparse
import logging
import logging.handlers
import traceback
from .app import DartAgent
def main():
parser = argparse.ArgumentParser(
prog="dart-agent",
formatter_class=argparse.RawTextHelpFormatter,
description=__doc__,
)
parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False, help="send verbose output to the console")
args = parser.parse_args()
# configure logging
logging.captureWarnings(True)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_handler = logging.handlers.SysLogHandler(address="/dev/log")
log_handler.setFormatter(logging.Formatter("dart-agent: %(asctime)s %(levelname)-8s - %(message)s"))
logger.addHandler(log_handler)
# change the level and output format if we're going to be verbose
if args.verbose:
logger.setLevel(logging.DEBUG)
log_handler.setFormatter(logging.Formatter("dart-agent: %(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] - %(message)s"))
# start the main program
try:
options = vars(args)
configuration = dict(verbose=options.pop("verbose"))
runnable = DartAgent(**configuration)
runnable.run(**options)
return 0
except Exception as e:
logger.error(str(e))
logger.debug(traceback.format_exc())
return 1
if __name__ == "__main__":
sys.exit(main())
| artistic-2.0 | Python |
ee95736e1218e3fe7139777775019e38ec6c0edb | Change tests to new syntax. | frac/django-test-utils,ericholscher/django-test-utils,acdha/django-test-utils,acdha/django-test-utils,frac/django-test-utils,ericholscher/django-test-utils | test_project/test_app/tests/testmaker_tests.py | test_project/test_app/tests/testmaker_tests.py | """
This file is to test testmaker. It will run over the polls app and with the crawler and with test maker outputting things. Hopefully this will provide a sane way to test testmaker.
"""
from django.test.testcases import TestCase
from django.template import Context, Template
from django.contrib.auth.models import User
from test_utils.crawler.base import Crawler
from test_utils.testmaker import Testmaker
from django.conf import settings
import logging, os, sys, re
class CrawlerTests(TestCase):
"""
Tests to test the Crawler API
"""
urls = "polls.urls"
fixtures = ['polls_testmaker.json']
def test_basic_crawling(self):
conf_urls = {}
verbosity = 1
c = Crawler('/', conf_urls=conf_urls, verbosity=verbosity)
c.run()
self.assertEqual(c.crawled, {'/': True, u'/1': True, u'/2': True})
class TestMakerTests(TestCase):
"""
Tests to test basic testmaker functionality.
"""
urls = "polls.urls"
fixtures = ['polls_testmaker.json']
def setUp(self):
self.tm = Testmaker()
self.tm.setup_logging('test_file', 'serialize_file')
def tearDown(self):
#Teardown logging somehow?
os.remove('test_file')
os.remove('serialize_file')
def test_basic_testmaker(self):
settings.MIDDLEWARE_CLASSES += ('test_utils.testmaker.middleware.testmaker.TestMakerMiddleware',)
self.client.get('/')
logs = open('test_file')
output = logs.read()
self.assertTrue(output.find('[<Poll: What\'s up?>, <Poll: Test poll>]') != -1)
| """
This file is to test testmaker. It will run over the polls app and with the crawler and with test maker outputting things. Hopefully this will provide a sane way to test testmaker.
"""
from django.test.testcases import TestCase
from django.template import Context, Template
from django.contrib.auth.models import User
from test_utils.crawler.base import Crawler
from test_utils.testmaker import setup_logging
from django.conf import settings
import logging, os, sys, re
class CrawlerTests(TestCase):
"""
Tests to test the Crawler API
"""
urls = "polls.urls"
fixtures = ['polls_testmaker.json']
def test_basic_crawling(self):
conf_urls = {}
verbosity = 1
c = Crawler('/', conf_urls=conf_urls, verbosity=verbosity)
c.run()
self.assertEqual(c.crawled, {'/': True, u'/1': True, u'/2': True})
class TestMakerTests(TestCase):
"""
Tests to test basic testmaker functionality.
"""
urls = "polls.urls"
fixtures = ['polls_testmaker.json']
def setUp(self):
setup_logging('test_file', 'serialize_file')
def tearDown(self):
#Teardown logging somehow?
os.remove('test_file')
os.remove('serialize_file')
def test_basic_testmaker(self):
settings.MIDDLEWARE_CLASSES += ('test_utils.testmaker.middleware.testmaker.TestMakerMiddleware',)
self.client.get('/')
logs = open('test_file')
output = logs.read()
self.assertTrue(output.find('[<Poll: What\'s up?>, <Poll: Test poll>]') != -1)
| mit | Python |
876af95676eeb75ce2077e3fc94078131153df10 | Fix dynprefs import | c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend | ctfbackend/backend/templatetags/backend_extras.py | ctfbackend/backend/templatetags/backend_extras.py | from django import template
from dynamic_preferences.registries import global_preferences_registry
register = template.Library()
@register.simple_tag
def navactive(request, urls):
if request.resolver_match.url_name in urls.split():
return "active"
return ""
@register.simple_tag
def dynprefs(prefname):
prefs = global_preferences_registry.manager()
return prefs[prefname]
@register.filter
def splitlist(l, n):
return list((l[i:i+n] for i in range(0, len(l), n)))
@register.filter
def catfilter(l, cat):
return [i for i in l if cat in i.categories.all()]
@register.filter
def isequal(i, comp):
return i == comp
@register.filter
def difficulty(i):
return ('success', 'info', 'warning', 'danger')[i-1]
@register.filter
def difficulty_pgbar(i):
return ('success', 'primary', 'warning', 'danger')[i-1]
| from django import template
from dynamic_preferences.registries import \
global_preferences_registry as dynprefs
register = template.Library()
@register.simple_tag
def navactive(request, urls):
if request.resolver_match.url_name in urls.split():
return "active"
return ""
@register.simple_tag
def dynprefs(prefname):
prefs = global_preferences_registry.manager()
return prefs[prefname]
@register.filter
def splitlist(l, n):
return list((l[i:i+n] for i in range(0, len(l), n)))
@register.filter
def catfilter(l, cat):
return [i for i in l if cat in i.categories.all()]
@register.filter
def isequal(i, comp):
return i == comp
@register.filter
def difficulty(i):
return ('success', 'info', 'warning', 'danger')[i-1]
@register.filter
def difficulty_pgbar(i):
return ('success', 'primary', 'warning', 'danger')[i-1]
| agpl-3.0 | Python |
4cecd527b62b0258c44398edf2e666adaae2db7e | Upgrade to v2.7.13 | biolink/ontobio,biolink/ontobio | ontobio/__init__.py | ontobio/__init__.py | from __future__ import absolute_import
__version__ = '2.7.13'
from .ontol_factory import OntologyFactory
from .ontol import Ontology, Synonym, TextDefinition
from .assoc_factory import AssociationSetFactory
from .io.ontol_renderers import GraphRenderer
import logging
import logging.handlers
from logging.config import dictConfig
logging.getLogger("ontobio")
DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
def configure_logging():
"""
Initialize logging defaults for Project.
:param logfile_path: logfile used to the logfile
:type logfile_path: string
This function does:
- Assign INFO and DEBUG level to logger file handler and console handler
"""
dictConfig(DEFAULT_LOGGING)
default_formatter = logging.Formatter(
"%(asctime)s [%(levelname)s] [PID:%(process)d TID:%(thread)d] [%(filename)s:%(lineno)s in `%(funcName)s`] %(message)s",
"%Y-%m-%d %H:%M:%S")
# file_handler = logging.handlers.RotatingFileHandler(logfile_path, maxBytes=10485760,backupCount=300, encoding='utf-8')
# file_handler.setLevel(logging.INFO)
if len(logging.getLogger().handlers) > 0:
for h in logging.getLogger().handlers:
if isinstance(h, logging.StreamHandler):
# Then we found a logger to the terminal
h.setLevel(logging.DEBUG)
h.setFormatter(default_formatter)
else:
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(default_formatter)
logging.root.addHandler(console_handler)
logging.root.setLevel(logging.WARNING)
configure_logging()
| from __future__ import absolute_import
__version__ = '2.7.12'
from .ontol_factory import OntologyFactory
from .ontol import Ontology, Synonym, TextDefinition
from .assoc_factory import AssociationSetFactory
from .io.ontol_renderers import GraphRenderer
import logging
import logging.handlers
from logging.config import dictConfig
logging.getLogger("ontobio")
DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
def configure_logging():
"""
Initialize logging defaults for Project.
:param logfile_path: logfile used to the logfile
:type logfile_path: string
This function does:
- Assign INFO and DEBUG level to logger file handler and console handler
"""
dictConfig(DEFAULT_LOGGING)
default_formatter = logging.Formatter(
"%(asctime)s [%(levelname)s] [PID:%(process)d TID:%(thread)d] [%(filename)s:%(lineno)s in `%(funcName)s`] %(message)s",
"%Y-%m-%d %H:%M:%S")
# file_handler = logging.handlers.RotatingFileHandler(logfile_path, maxBytes=10485760,backupCount=300, encoding='utf-8')
# file_handler.setLevel(logging.INFO)
if len(logging.getLogger().handlers) > 0:
for h in logging.getLogger().handlers:
if isinstance(h, logging.StreamHandler):
# Then we found a logger to the terminal
h.setLevel(logging.DEBUG)
h.setFormatter(default_formatter)
else:
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(default_formatter)
logging.root.addHandler(console_handler)
logging.root.setLevel(logging.WARNING)
configure_logging()
| bsd-3-clause | Python |
bb7c9e117a17e31afc103192c7cd13b68c7232c3 | Fix dependency_manager path hacking. | catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult | dependency_manager/dependency_manager/__init__.py | dependency_manager/dependency_manager/__init__.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
CATAPULT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))))
CATAPULT_THIRD_PARTY_PATH = os.path.join(CATAPULT_PATH, 'third_party')
DEPENDENCY_MANAGER_PATH = os.path.join(CATAPULT_PATH, 'dependency_manager')
def _AddDirToPythonPath(*path_parts):
path = os.path.abspath(os.path.join(*path_parts))
if os.path.isdir(path) and path not in sys.path:
sys.path.insert(0, path)
_AddDirToPythonPath(CATAPULT_PATH, 'common', 'py_utils')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'mock')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'six')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'pyfakefs')
_AddDirToPythonPath(DEPENDENCY_MANAGER_PATH)
# pylint: disable=unused-import,wrong-import-position
from .archive_info import ArchiveInfo
from .base_config import BaseConfig
from .cloud_storage_info import CloudStorageInfo
from .dependency_info import DependencyInfo
from .exceptions import CloudStorageError
from .exceptions import CloudStorageUploadConflictError
from .exceptions import EmptyConfigError
from .exceptions import FileNotFoundError
from .exceptions import NoPathFoundError
from .exceptions import ReadWriteError
from .exceptions import UnsupportedConfigFormatError
from .local_path_info import LocalPathInfo
from .manager import DependencyManager
# pylint: enable=unused-import
| # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
CATAPULT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))))
CATAPULT_THIRD_PARTY_PATH = os.path.join(CATAPULT_PATH, 'third_party')
DEPENDENCY_MANAGER_PATH = os.path.join(CATAPULT_PATH, 'dependency_manager')
def _AddDirToPythonPath(*path_parts):
path = os.path.abspath(os.path.join(*path_parts))
if os.path.isdir(path) and path not in sys.path:
sys.path.append(path)
_AddDirToPythonPath(CATAPULT_PATH, 'common', 'py_utils')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'mock')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'six')
_AddDirToPythonPath(CATAPULT_THIRD_PARTY_PATH, 'pyfakefs')
_AddDirToPythonPath(DEPENDENCY_MANAGER_PATH)
# pylint: disable=unused-import,wrong-import-position
from .archive_info import ArchiveInfo
from .base_config import BaseConfig
from .cloud_storage_info import CloudStorageInfo
from .dependency_info import DependencyInfo
from .exceptions import CloudStorageError
from .exceptions import CloudStorageUploadConflictError
from .exceptions import EmptyConfigError
from .exceptions import FileNotFoundError
from .exceptions import NoPathFoundError
from .exceptions import ReadWriteError
from .exceptions import UnsupportedConfigFormatError
from .local_path_info import LocalPathInfo
from .manager import DependencyManager
# pylint: enable=unused-import
| bsd-3-clause | Python |
4163401029515ba49159356fd293b8472e2d1bff | add admin for message | elnappo/Baulicht,elnappo/Baulicht,elnappo/Baulicht,elnappo/Baulicht | Webinterface/main/admin.py | Webinterface/main/admin.py | from django.contrib import admin
from main.models import Message
def to_dbus(modeladmin, request, queryset):
for message in queryset:
message.send_to_dbus()
to_dbus.short_description = "Mark selected messages as published"
def remove_from_dbus(modeladmin, request, queryset):
for message in queryset:
message.remove_from_dbus()
remove_from_dbus.short_description = "Mark selected messages as published"
class MessageAdmin(admin.ModelAdmin):
list_display = ("name", "text", "accepted", "is_active")
list_display_links = ("name", "text")
list_filter = ("name", "email", "accepted", "is_active", "created", )
# Register your models here.
admin.site.register(Message, MessageAdmin)
| from django.contrib import admin
from main.models import Message
# Register your models here.
admin.site.register(Message)
| mit | Python |
f4c83a4dd5e8f774e976ea72fc9c4369d4e96d39 | Add exception handling for when Pi disassociates | jasonsbrooks/ysniff-software,jasonsbrooks/ysniff-software | ysniff.py | ysniff.py | #!/usr/bin/env python
import boto.rds
import fileinput
import sys
import os
from subprocess import call
mac_index = 12
time_index = 1
start_t_us = 0
start_u_us = 0
MAC_LEN = 17
SAMPLE_PERIOD = 30 # Seconds.
PUSH_TO_AWS_PERIOD = 300 # Seconds.
maclist = set()
buffer = {}
try:
domain=conn.get_domain('tmp_ysniff')
conn=boto.connect_sdb()
except:
reconnect()
# TODO: Upload buffer to AWS every collection period.
for line in fileinput.input():
splitline = line.split(" ")
if mac_index < len(splitline):
mac = splitline[mac_index]
if mac == "DA:Broadcast":
mac = splitline[mac_index+1]
ts = int(splitline[time_index][:-2])
mac = mac[len(mac)-MAC_LEN:]
# Make list of timestamps for each mac
if mac not in buffer:
buffer[mac]=[]
# Only pair timestamp to mac address once
if start_t_us not in buffer[mac]:
buffer[mac].append(start_t_us)
# Update start_t_us every SAMPLE_PERIOD
if start_t_us is 0 or ts - start_t_us > (SAMPLE_PERIOD * 1000000):
start_t_us = ts
# upload buffer to AWS every PUSH_TO_AWS_PERIOD
if start_u_us is 0:
start_u_us = ts
elif ts - start_u_us > (PUSH_TO_AWS_PERIOD * 1000000):
for key in buffer:
try:
item = domain.get_item(key)
except:
reconnect()
for timestamp in buffer[key]:
item[timestamp] = os.environ['PI_LOCATION']
try:
item.save()
except:
reconnect()
buffer = {}
start_t_us = ts
#print buffer, len(buffer)
def reconnect():
call(["sudo","iwconfig","wlan0","essid","YaleGuest"])
call(["sleep","5"])
call(["curl", "--data", "\"email=YaleGuest@yale.edu&cmd=cmd\"", "http://10.160.252.249/auth/index.html/u"])
| #!/usr/bin/env python
import boto.rds
import fileinput
import sys
import os
mac_index = 12
time_index = 1
start_t_us = 0
start_u_us = 0
MAC_LEN = 17
SAMPLE_PERIOD = 30 # Seconds.
PUSH_TO_AWS_PERIOD = 300 # Seconds.
maclist = set()
buffer = {}
conn=boto.connect_sdb()
domain=conn.get_domain('tmp_ysniff')
# TODO: Upload buffer to AWS every collection period.
for line in fileinput.input():
splitline = line.split(" ")
if mac_index < len(splitline):
mac = splitline[mac_index]
if mac == "DA:Broadcast":
mac = splitline[mac_index+1]
ts = int(splitline[time_index][:-2])
mac = mac[len(mac)-MAC_LEN:]
# Make list of timestamps for each mac
if mac not in buffer:
buffer[mac]=[]
# Only pair timestamp to mac address once
if start_t_us not in buffer[mac]:
buffer[mac].append(start_t_us)
# Update start_t_us every SAMPLE_PERIOD
if start_t_us is 0 or ts - start_t_us > (SAMPLE_PERIOD * 1000000):
start_t_us = ts
# upload buffer to AWS every PUSH_TO_AWS_PERIOD
if start_u_us is 0:
start_u_us = ts
elif ts - start_u_us > (PUSH_TO_AWS_PERIOD * 1000000):
for key in buffer:
item = domain.get_item(key)
for timestamp in buffer[key]:
item[timestamp] = os.environ['PI_LOCATION']
item.save()
buffer = {}
start_t_us = ts
#print buffer, len(buffer)
| mit | Python |
3c81d6708730f237d8b2e1462e80eba6bd5411f3 | Add logout function. | supasate/PythonZabbixApi | zabbix.py | zabbix.py | import requests
class ZabbixError(Exception):
pass
class Api(object):
def __init__(self, server='http://localhost/zabbix'):
self.session = requests.Session()
self.session.headers.update({
'Content-Type': 'application/json'
})
self.url = server + '/api_jsonrpc.php'
self.auth = ''
self.id = 0
def login(self, user='', password=''):
json_response = self.do_request('user.login', {'user': user, 'password': password})
self.auth = json_response['result']
print 'Log in successful. Welcome %s.' % (user)
def logout(self):
json_response = self.do_request('user.logout')
if (json_response['result'] == True):
print 'Logged out. Good bye'
else:
print 'Log out failed. You might already log out.'
def do_request(self, method, params=None):
json_payload = {
'jsonrpc': '2.0',
'method': method,
'params': params or {},
'auth': self.auth,
'id': self.id,
}
self.id += 1
response = self.session.post(self.url, data = json.dumps(json_payload))
if response.status_code != 200:
raise ZabbixError("HTTP ERROR %S: %S" % (response.status, response.reason))
if response.text == '':
raise ZabbixError("Received empty response")
return response.json()
| import requests
class ZabbixError(Exception):
pass
class Api(object):
def __init__(self, server='http://localhost/zabbix'):
self.session = requests.Session()
self.session.headers.update({
'Content-Type': 'application/json'
})
self.url = server + '/api_jsonrpc.php'
self.auth = ''
self.id = 0
def login(self, user='', password=''):
json_response = self.do_request('user.login', {'user': user, 'password': password})
self.auth = json_response['result']
print 'Log in successful. Welcome %s.' % (user)
def do_request(self, method, params=None):
json_payload = {
'jsonrpc': '2.0',
'method': method,
'params': params or {},
'auth': self.auth,
'id': self.id,
}
self.id += 1
response = self.session.post(self.url, data = json.dumps(json_payload))
if response.status_code != 200:
raise ZabbixError("HTTP ERROR %S: %S" % (response.status, response.reason))
if response.text == '':
raise ZabbixError("Received empty response")
return response.json()
| apache-2.0 | Python |
331be7f72afe69403208f9e936bb8ff2483eefa8 | Revert accidental revision to local.py | texastribune/emissions-database,texastribune/emissions-database,texastribune/emissions-database,texastribune/emissions-database | emission_events/emission_events/settings/local.py | emission_events/emission_events/settings/local.py | ##################
# LOCAL SETTINGS #
##################
import os
from .base import *
#######################
# DEBUG CONFIGURATION #
#######################
# https://docs.djangoproject.com/en/1.7/ref/settings/#debug
DEBUG = True
# https://docs.djangoproject.com/en/1.7/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
##########################
# DATABASE CONFIGURATION #
##########################
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(
default=os.environ.get('EMISSIONS_DATABASE')
)
}
#######################
# CACHE CONFIGURATION #
#######################
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
######################################
# DJANGO DEBUG TOOLBAR CONFIGURATION #
######################################
INSTALLED_APPS += (
'debug_toolbar',
)
| ##################
# LOCAL SETTINGS #
##################
import os
from .base import *
#######################
# DEBUG CONFIGURATION #
#######################
# https://docs.djangoproject.com/en/1.7/ref/settings/#debug
DEBUG = True
# https://docs.djangoproject.com/en/1.7/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
##########################
# DATABASE CONFIGURATION #
##########################
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'emission_events',
'USER': 'django',
'PASSWORD': 'django',
'HOST': '0.0.0.0',
'PORT': 5432,
}
}
#######################
# CACHE CONFIGURATION #
#######################
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
######################################
# DJANGO DEBUG TOOLBAR CONFIGURATION #
######################################
INSTALLED_APPS += (
'debug_toolbar',
)
| mit | Python |
279215dae02b455bee0244cd7dcd26e3ce12f39c | fix type and add functool.wraps | johnbywater/eventsourcing,johnbywater/eventsourcing | eventsourcing/domain/services/event_subscriber.py | eventsourcing/domain/services/event_subscriber.py | from functools import wraps
from eventsourcing.domain.model.events import subscribe
def subscribe_to(event_class):
""" Decorator for making a custom event handler function subscribe to a certain event type
Args:
event_class: DomainEvent class or its child classes that the handler function should subscribe to
Example usage:
# this example shows a custom handler that reacts to Todo.Created event and saves a projection of a Todo model object
@subscribe_to(Todo.Created)
def new_todo_projection(event):
todo = TodoProjection(id=event.entity_id, title=event.title)
todo.save()
"""
def create_type_predicate():
def event_type_predicate(event):
return isinstance(event, event_class)
return event_type_predicate
def wrap(handler_func):
subscribe(create_type_predicate(), handler_func)
@wraps(handler_func)
def handler_func_wrapper(*args, **kwargs):
handler_func(*args, **kwargs)
return handler_func_wrapper
return wrap | from eventsourcing.domain.model.events import subscribe
def subscribe_to(event_class):
""" Annotation for wrapping up a custom event handler function and subscribe to a certain event type
Args:
event_class: DomainEvent class or its child classes that the handler function should subscribe to
Example usage:
# this example shows a custom handler that reacts to Todo.Created event and saves a projection of a Todo model object
@subscribe_to(Todo.Created)
def new_todo_projection(event):
todo = TodoProjection(id=event.entity_id, title=event.title)
todo.save()
"""
def create_type_predicate():
def event_type_predicate(event):
return isinstance(event, event_class)
return event_type_predicate
def wrap(handler_func):
subscribe(create_type_predicate(), handler_func)
def handler_func_wrapper(*args, **kwargs):
handler_func(*args, **kwargs)
return handler_func_wrapper
return wrap | bsd-3-clause | Python |
d62ec44e98aab1f58084b21d4c9b4c61ffd6fe36 | Add tests to cover using the JWTAuth class using ES256 in addition to RS256. | atlassian/asap-authentication-python | atlassian_jwt_auth/contrib/tests/test_requests.py | atlassian_jwt_auth/contrib/tests/test_requests.py | import unittest
import jwt
from requests import Request
import atlassian_jwt_auth
from atlassian_jwt_auth.tests import utils
from atlassian_jwt_auth.contrib.requests import JWTAuth, create_jwt_auth
class BaseRequestsTest(object):
""" tests for the contrib.requests.JWTAuth class """
def setUp(self):
self._private_key_pem = self.get_new_private_key_in_pem_format()
self._public_key_pem = utils.get_public_key_pem_for_private_key_pem(
self._private_key_pem)
def assert_authorization_header_is_valid(self, request):
""" asserts that the given request contains a valid Authorization
header.
"""
auth_header = request.headers['Authorization']
bearer = auth_header.split(b' ')[1]
# Decode the JWT (verifying the signature and aud match)
# an exception is thrown if this fails
jwt.decode(bearer, self._public_key_pem.decode(), audience='audience')
def test_JWTAuth_make_authenticated_request(self):
"""Verify a valid Authorization header is added by JWTAuth"""
jwt_auth_signer = atlassian_jwt_auth.create_signer(
'issuer',
'issuer/key',
self._private_key_pem.decode(),
algorithm=self.algorithm)
auth = JWTAuth(jwt_auth_signer, 'audience')
req = auth(Request())
self.assert_authorization_header_is_valid(req)
class RequestsRS256Test(BaseRequestsTest,
utils.RS256KeyTestMixin,
unittest.TestCase):
def test_create_jwt_auth(self):
"""Verify a valid Authorization header is added by JWTAuth"""
auth = create_jwt_auth('issuer', 'issuer/key',
self._private_key_pem.decode(), 'audience')
req = auth(Request())
self.assert_authorization_header_is_valid(req)
class RequestsES256Test(BaseRequestsTest,
utils.ES256KeyTestMixin,
unittest.TestCase):
pass
| import unittest
import jwt
from requests import Request
from atlassian_jwt_auth.tests import utils
from atlassian_jwt_auth.contrib.requests import create_jwt_auth
class RequestsTest(unittest.TestCase, utils.RS256KeyTestMixin):
""" tests for the contrib.requests.JWTAuth class """
def setUp(self):
self._private_key_pem = self.get_new_private_key_in_pem_format()
self._public_key_pem = utils.get_public_key_pem_for_private_key_pem(
self._private_key_pem)
def test_JWTAuth_make_authenticated_request(self):
"""Verify a valid Authorization header is added by JWTAuth"""
auth = create_jwt_auth('issuer', 'issuer/key',
self._private_key_pem.decode(), 'audience')
req = auth(Request())
auth_header = req.headers['Authorization']
bearer = auth_header.split(b' ')[1]
# Decode the JWT (verifying the signature and aud match)
# an exception is thrown if this fails
jwt.decode(bearer, self._public_key_pem.decode(), audience='audience')
| mit | Python |
d587fbbd962e0682c08c5b9ee49891efafc675a1 | add basic parameter filtering (requires url and label) | noxan/django-bootstrap-navtags | bootstrapnavtags/templatetags/bootstrapnavtags.py | bootstrapnavtags/templatetags/bootstrapnavtags.py | from django import template
from django.template.base import TemplateSyntaxError
register = template.Library()
@register.tag
def navitem(parser, token):
bits = token.split_contents()
template_tag = bits[0]
if len(bits) < 3:
raise template.TemplateSyntaxError, "%r tag requires at least two argument" % template_tag
try:
label = parser.compile_filter(bits[1])
except TemplateSyntaxError as exc:
exc.args = (exc.args[0] + ". The label argument has be be in single quotes.",)
raise
try:
viewname = parser.compile_filter(bits[2])
except TemplateSyntaxError as exc:
exc.args = (exc.args[0] + ". The url argument has be be in single quotes, like the url tag in Django 1.5.",)
raise
bits = bits[3:]
| from django import template
register = template.Library()
@register.tag
def navitem(parser, token):
pass
| bsd-3-clause | Python |
4df0318354119b0e7531352b5dbce2a5bb12da01 | add some quotations and ready for sql | yangshaoshun/OMOOC2py,yangshaoshun/OMOOC2py | _src/om2py4w/4wex0/main.py | _src/om2py4w/4wex0/main.py | # -*- coding: utf-8 -*-
from bottle import *
import sys
from jinja2 import Template
reload(sys)
sys.setdefaultencoding( "utf-8" )
# 打开历史文件
def print_log():
open_file = open("mydaily.log")
log = open_file.read()
return log
# 向历史文件写入数据
def new(txt_add):
target = open("mydaily.log", 'a')
target.write(txt_add + '\n')
target.close()
@route('/')
@route('/text') #text 页面
def input(): #把text页面与 input 函数绑定,每次打开text 页面,得到 input 函数的返回值
log = print_log()
return template('in.tpl', history = log)
@route('/text', method ='POST')
def do_input():
data = request.forms.get('indata')
new(data)
log = print_log()
return template('in.tpl', history = log)
@route('/log')
def history():
log = print_log()
# 使用 jinja2 的模板
# template = Template('{{history}}')
# return template.render( history = log )
# 使用 bottle 默认的模板
# return template('{{history}}', history = log)
return template('boot.tpl', history = log)
if __name__ == '__main__':
debug(True)
run(host='localhost', port=8080, reloader=True) | # -*- coding: utf-8 -*-
from bottle import *
import sys
from jinja2 import Template
reload(sys)
sys.setdefaultencoding( "utf-8" )
def print_log():
open_file = open("mydaily.log")
log = open_file.read()
return log
def new(txt_add):
target = open("mydaily.log", 'a')
target.write(txt_add + '\n')
target.close()
@route('/')
@route('/text') #text 页面
def input(): #把text页面与 input 函数绑定,每次打开text 页面,得到 input 函数的返回值
log = print_log()
return template('in.tpl', history = log)
@route('/text', method ='POST')
def do_input():
data = request.forms.get('indata')
new(data)
log = print_log()
return template('in.tpl', history = log)
@route('/log')
def history():
log = print_log()
# 使用 jinja2 的模板
# template = Template('{{history}}')
# return template.render( history = log )
# 使用 bottle 默认的模板
# return template('{{history}}', history = log)
return template('boot.tpl', history = log)
if __name__ == '__main__':
debug(True)
run(host='localhost', port=8080, reloader=True) | mit | Python |
07a04f63da897ae687fd90039d379482a13372e2 | Standardize module names on leading capital letters | ElementalAlchemist/txircd,Heufneutje/txircd | txircd/modules/rfc/response_error.py | txircd/modules/rfc/response_error.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ErrorResponse(ModuleData):
implements(IPlugin, IModuleData)
name = "ErrorResponse"
core = True
def actions(self):
return [ ("quit", 10, self.sendError) ]
def sendError(self, user, reason):
user.sendMessage("ERROR", ":Closing Link: {}@{} [{}]".format(user.ident, user.host, reason), to=None, prefix=None)
errorResponse = ErrorResponse() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ErrorResponse(ModuleData):
implements(IPlugin, IModuleData)
name = "errorResponse"
core = True
def actions(self):
return [ ("quit", 10, self.sendError) ]
def sendError(self, user, reason):
user.sendMessage("ERROR", ":Closing Link: {}@{} [{}]".format(user.ident, user.host, reason), to=None, prefix=None)
errorResponse = ErrorResponse() | bsd-3-clause | Python |
011a546db976f444e76dddc172b213b0a3d5c974 | Clean up the SampleFramework class | rmccue/depthmapper | depthmapper/SampleFramework.py | depthmapper/SampleFramework.py | """
SampleFramework.py - Application framework from the PyOgre demos
Requires PyOgre from http://python-ogre.org/
See README.md for installation requirements
This code is in the Public Domain
"""
import ogre.renderer.OGRE as ogre
# We don't use the verisons but it's nice to set them up...
ogre.OgreVersion = ogre.GetOgreVersion()
ogre.OgreVersionString = ogre.OgreVersion[0] + ogre.OgreVersion[1] \
+ ogre.OgreVersion[2]
ogre.PythonOgreVersion = ogre.GetPythonOgreVersion()
from ogre.renderer.OGRE.sf_OIS import *
import logging
logger = None
def setupLogging(logfilename="demo.log", logidentifier='PythonOgre.Demo'):
global logger
# set up logging to file
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M',
filename=logfilename,
filemode='w')
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
logger = logging.getLogger(logidentifier)
def info(msg):
if logger:
logger.info(msg)
def error(msg):
if logger:
logger.error(msg)
def debug(msg):
if logger:
logger.debug(msg)
def warning(msg):
if logger:
logger.warning(msg)
| """
SampleFramework.py - Application framework from the PyOgre demos
Requires PyOgre from http://python-ogre.org/
See README.md for installation requirements
This code is in the Public Domain
"""
import ogre.renderer.OGRE as ogre
# We don't use the verisons but it's nice to set them up...
ogre.OgreVersion = ogre.GetOgreVersion()
ogre.OgreVersionString = ogre.OgreVersion[0] + ogre.OgreVersion[1] +
ogre.OgreVersion[2]
ogre.PythonOgreVersion = ogre.GetPythonOgreVersion()
from ogre.renderer.OGRE.sf_OIS import *
import logging
logger=None
def setupLogging (logfilename="demo.log", logidentifier='PythonOgre.Demo'):
global logger
# set up logging to file
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M',
filename=logfilename,
filemode='w')
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
logger = logging.getLogger(logidentifier)
def info ( msg ):
if logger:
logger.info( msg )
def error ( msg ):
if logger:
logger.error( msg )
def debug ( msg ):
if logger:
logger.debug( msg )
def warning ( msg ):
if logger:
logger.warning( msg )
| isc | Python |
3041fb18421f7521d0c11405ff33ea375f45d5dc | Make modules uninstallable | OCA/l10n-switzerland,OCA/l10n-switzerland | l10n_ch_states/__manifest__.py | l10n_ch_states/__manifest__.py | # Copyright 2015 Mathias Neef copadoMEDIA UG
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Switzerland Country States',
'category': 'Localisation',
'summary': '',
'version': '11.0.1.0.0',
'author': 'copado MEDIA UG, Odoo Community Association (OCA)',
'website': 'http://www.copado.de',
'license': 'AGPL-3',
'depends': [
'base',
],
'data': ['data/res_country_states.xml'],
'demo': [],
'installable': False,
'application': False,
}
| # Copyright 2015 Mathias Neef copadoMEDIA UG
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Switzerland Country States',
'category': 'Localisation',
'summary': '',
'version': '11.0.1.0.0',
'author': 'copado MEDIA UG, Odoo Community Association (OCA)',
'website': 'http://www.copado.de',
'license': 'AGPL-3',
'depends': [
'base',
],
'data': ['data/res_country_states.xml'],
'demo': [],
'installable': True,
'application': False,
}
| agpl-3.0 | Python |
29358a1d0ccb4707b0b8f19e562ea7e199a68605 | Add OCA as author of OCA addons | factorlibre/l10n-spain,factorlibre/l10n-spain,factorlibre/l10n-spain | l10n_es_partner/__openerp__.py | l10n_es_partner/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2008 Spanish Localization Team
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com)
# Jordi Esteve <jesteve@zikzakmedia.com>
# Copyright (c) 2013 Acysos S.L. (http://acysos.com)
# Ignacio Ibeas <ignacio@acysos.com>
# Copyright (c) 2013 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# Copyright (c) 2015 Incaser (http://www.incaser.es)
# Sergio Teruel <sergio@incaser.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Adaptación de los clientes, proveedores y bancos para España",
"version": "1.3",
"author": "Spanish localization team,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/l10n-spain",
"category": "Localisation/Europe",
"license": "AGPL-3",
"depends": [
"base",
"base_iban",
"base_vat",
"l10n_es_toponyms",
],
"data": [
"views/l10n_es_partner_view.xml",
"wizard/l10n_es_partner_wizard.xml",
],
"test": [
"test/l10n_es_partner.yml",
],
"installable": True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2008 Spanish Localization Team
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com)
# Jordi Esteve <jesteve@zikzakmedia.com>
# Copyright (c) 2013 Acysos S.L. (http://acysos.com)
# Ignacio Ibeas <ignacio@acysos.com>
# Copyright (c) 2013 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# Copyright (c) 2015 Incaser (http://www.incaser.es)
# Sergio Teruel <sergio@incaser.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Adaptación de los clientes, proveedores y bancos para España",
"version": "1.3",
"author": "Spanish localization team",
"website": "https://github.com/OCA/l10n-spain",
"category": "Localisation/Europe",
"license": "AGPL-3",
"depends": [
"base",
"base_iban",
"base_vat",
"l10n_es_toponyms",
],
"data": [
"views/l10n_es_partner_view.xml",
"wizard/l10n_es_partner_wizard.xml",
],
"test": [
"test/l10n_es_partner.yml",
],
"installable": True,
}
| agpl-3.0 | Python |
cb7aeb60fcff7f8fa6ac9e12282bf7dcd71617d8 | Fix ceilometerclient mocks for 2.8.0 release | openstack/heat,noironetworks/heat,noironetworks/heat,openstack/heat | heat/tests/clients/test_ceilometer_client.py | heat/tests/clients/test_ceilometer_client.py | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometerclient.v2 import client as cc
from heat.tests import common
from heat.tests import utils
class CeilometerClientPluginTest(common.HeatTestCase):
def test_create(self):
self.patchobject(cc.Client, '_get_redirect_client')
context = utils.dummy_context()
plugin = context.clients.client_plugin('ceilometer')
client = plugin.client()
self.assertIsNotNone(client.alarms)
| #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometerclient.v2 import client as cc
from heat.tests import common
from heat.tests import utils
class CeilometerClientPluginTest(common.HeatTestCase):
def test_create(self):
self.patchobject(cc.Client, '_get_alarm_client')
context = utils.dummy_context()
plugin = context.clients.client_plugin('ceilometer')
client = plugin.client()
self.assertIsNotNone(client.alarms)
| apache-2.0 | Python |
3d92c98ad5f75d7d19972af96939b28e198499ea | remove whitespaces | rtavenar/tslearn | tslearn/docs/examples/plot_serialize_models.py | tslearn/docs/examples/plot_serialize_models.py | # -*- coding: utf-8 -*-
"""
Model Persistence
=================
Many tslearn models can be saved to disk and used for predictions
at a later time. This can be particularly useful when a model takes
a long time to train.
**Available formats:** hdf5, json, pickle
Save a model to disk::
model.to_<format>
Load a model from disk::
model.from_<format>
**Basic usage**
.. code-block:: python
# Instantiate a model
model = ModelClass(<hyper-parameters>)
# Train the model
model.fit(X_train)
# Save the model to disk
model.to_hdf5('./trained_model.hdf5')
# Load model from disk
model.from_hdf5('./trained_mode.hdf5')
# Make predictions
y = model.predict(X_test)
.. note::
For the following models the training data are saved to disk and
may result in a large model file if the trainig dataset is large:
``KNeighborsTimeSeries``, ``KNeighborsTimeSeriesClassifier``, and
``GlobalAlignmentKernelKMeans``
"""
# Example using KShape
import numpy
import matplotlib.pyplot as plt
from tslearn.clustering import KShape
from tslearn.datasets import CachedDatasets
from tslearn.preprocessing import TimeSeriesScalerMeanVariance
seed = 0
numpy.random.seed(seed)
X_train, y_train, X_test, y_test = CachedDatasets().load_dataset("Trace")
# Keep first 3 classes
X_train = X_train[y_train < 4]
numpy.random.shuffle(X_train)
# Keep only 50 time series
X_train = TimeSeriesScalerMeanVariance().fit_transform(X_train[:50])
sz = X_train.shape[1]
# Instantiate k-Shape model
ks = KShape(n_clusters=3, verbose=True, random_state=seed)
# Train
ks.fit(X_train)
# Save model
ks.to_hdf5('./ks_trained.hdf5')
# Load model
trained_ks = KShape.from_hdf5('./ks_trained.hdf5')
# Use loaded model to make predictions
y_pred = trained_ks.predict(X_train)
plt.figure()
for yi in range(3):
plt.subplot(3, 1, 1 + yi)
for xx in X_train[y_pred == yi]:
plt.plot(xx.ravel(), "k-", alpha=.2)
plt.plot(ks.cluster_centers_[yi].ravel(), "r-")
plt.xlim(0, sz)
plt.ylim(-4, 4)
plt.title("Cluster %d" % (yi + 1))
plt.tight_layout()
plt.show()
| # -*- coding: utf-8 -*-
"""
Model Persistence
=================
Many tslearn models can be saved to disk and used for predictions
at a later time. This can be particularly useful when a model takes
a long time to train.
**Available formats:** hdf5, json, pickle
Save a model to disk::
model.to_<format>
Load a model from disk::
model.from_<format>
**Basic usage**
.. code-block:: python
# Instantiate a model
model = ModelClass(<hyper-parameters>)
# Train the model
model.fit(X_train)
# Save the model to disk
model.to_hdf5('./trained_model.hdf5')
# Load model from disk
model.from_hdf5('./trained_mode.hdf5')
# Make predictions
y = model.predict(X_test)
.. note::
For the following models the training data are saved to disk and
may result in a large model file if the trainig dataset is large:
``KNeighborsTimeSeries``, ``KNeighborsTimeSeriesClassifier``, and
``GlobalAlignmentKernelKMeans``
"""
# Example using KShape
import numpy
import matplotlib.pyplot as plt
from tslearn.clustering import KShape
from tslearn.datasets import CachedDatasets
from tslearn.preprocessing import TimeSeriesScalerMeanVariance
seed = 0
numpy.random.seed(seed)
X_train, y_train, X_test, y_test = CachedDatasets().load_dataset("Trace")
# Keep first 3 classes
X_train = X_train[y_train < 4]
numpy.random.shuffle(X_train)
# Keep only 50 time series
X_train = TimeSeriesScalerMeanVariance().fit_transform(X_train[:50])
sz = X_train.shape[1]
# Instantiate k-Shape model
ks = KShape(n_clusters=3, verbose=True, random_state=seed)
# Train
ks.fit(X_train)
# Save model
ks.to_hdf5('./ks_trained.hdf5')
# Load model
trained_ks = KShape.from_hdf5('./ks_trained.hdf5')
# Use loaded model to make predictions
y_pred = trained_ks.predict(X_train)
plt.figure()
for yi in range(3):
plt.subplot(3, 1, 1 + yi)
for xx in X_train[y_pred == yi]:
plt.plot(xx.ravel(), "k-", alpha=.2)
plt.plot(ks.cluster_centers_[yi].ravel(), "r-")
plt.xlim(0, sz)
plt.ylim(-4, 4)
plt.title("Cluster %d" % (yi + 1))
plt.tight_layout()
plt.show()
| bsd-2-clause | Python |
365e51729e8d2caddc3116bc4c4d1c9f5f5d45c1 | Debug code | agendaodonto/server,agendaodonto/server | app/schedule/service/sms.py | app/schedule/service/sms.py | from datetime import datetime
from time import sleep
from django.conf import settings
from pyfcm import FCMNotification
class SMS:
def __init__(self):
self.client = FCMNotification(settings.FIREBASE_TOKEN)
def wait_for_status_change(self, schedule) -> bool:
start_time = datetime.now()
timeout = settings.SMS_TIMEOUT
previous_status = schedule.notification_status
status_changed = False
while not status_changed:
if schedule.notification_status != previous_status:
status_changed = True
print('FINALLYYY !! STATUS HAS BEEN CHANGED!')
if (datetime.now() - start_time).total_seconds() >= timeout:
schedule.notification_status = 3
raise SMSTimeoutError('Tempo excedido')
print('WAITING!!!')
sleep(1)
return True
def send_message(self, schedule_id):
from app.schedule.models import Schedule
schedule = Schedule.objects.get(pk=schedule_id)
self.client.single_device_data_message(schedule.dentist.device_token, data_message={
'sendTo': schedule.patient.phone,
'content': schedule.get_message(),
'scheduleId': schedule.id
})
print('AHHHH WORKING!!!')
return self.wait_for_status_change(schedule)
class FakeSMS:
def send_message(self, schedule):
return True
class SMSTimeoutError(BaseException):
pass
| from datetime import datetime
from time import sleep
from django.conf import settings
from pyfcm import FCMNotification
class SMS:
def __init__(self):
self.client = FCMNotification(settings.FIREBASE_TOKEN)
def wait_for_status_change(self, schedule) -> bool:
start_time = datetime.now()
timeout = settings.SMS_TIMEOUT
previous_status = schedule.notification_status
status_changed = False
while not status_changed:
if schedule.notification_status != previous_status:
status_changed = True
if (datetime.now() - start_time).total_seconds() >= timeout:
schedule.notification_status = 3
raise SMSTimeoutError('Tempo excedido')
sleep(1)
return True
def send_message(self, schedule_id):
from app.schedule.models import Schedule
schedule = Schedule.objects.get(pk=schedule_id)
self.client.single_device_data_message(schedule.dentist.device_token, data_message={
'sendTo': schedule.patient.phone,
'content': schedule.get_message(),
'scheduleId': schedule.id
})
return self.wait_for_status_change(schedule)
class FakeSMS:
def send_message(self, schedule):
return True
class SMSTimeoutError(BaseException):
pass
| agpl-3.0 | Python |
cc6c00656f8a7bd969c400904647fc9fc6125486 | add space | 0sw4l/villas-de-san-pablo,0sw4l/villas-de-san-pablo,0sw4l/villas-de-san-pablo,0sw4l/villas-de-san-pablo | apps/empleabilidad/forms.py | apps/empleabilidad/forms.py | from django import forms
from . import models
from apps.utils import forms as utils, constants
from django.forms import models as models_form
from apps.personas import models as persona_models
class VacanteForm(utils.BaseFormAllFields):
title = 'Vacante'
fecha = forms.DateField(input_formats=constants.INPUT_FORMATS)
class Meta(utils.BaseFormAllFields.Meta):
model = models.Vacante
def clean(self):
print(self.cleaned_data)
return self.cleaned_data
class VacantePersonaForm(utils.BaseFormAllFields):
title = 'Vacante Persona'
class Meta(utils.BaseFormAllFields.Meta):
model = models.VacantePersona
def get_vacante_persona_formset(form,
formset=models_form.BaseInlineFormSet,
**kwargs):
return models_form.inlineformset_factory(
persona_models.Persona,
models.VacantePersona,
form,
formset,
**kwargs
)
class FormacionTrabajoForm(utils.BaseFormAllFields):
title = 'Formacion para el trabajo'
fecha_creacion = forms.DateField(input_formats=constants.INPUT_FORMATS)
class Meta(utils.BaseFormAllFields.Meta):
model = models.FormacionTrabajo
class FormacionTrabajoPersonasForm(utils.BaseFormAllFields):
title = 'Formacion Trabajo Persona'
fecha_inscripcion = forms.DateField(input_formats=constants.INPUT_FORMATS)
fecha_proceso = forms.DateField(input_formats=constants.INPUT_FORMATS)
class Meta(utils.BaseFormAllFields.Meta):
model = models.FormacionTrabajoPersona
def get_formacion_trabajo_persona_formset(form,
formset=models_form.BaseInlineFormSet,
**kwargs):
return models_form.inlineformset_factory(
persona_models.Persona,
models.FormacionTrabajoPersona,
form,
formset,
**kwargs
)
| from django import forms
from . import models
from apps.utils import forms as utils, constants
from django.forms import models as models_form
from apps.personas import models as persona_models
class VacanteForm(utils.BaseFormAllFields):
title = 'Vacante'
fecha = forms.DateField(input_formats=constants.INPUT_FORMATS)
class Meta(utils.BaseFormAllFields.Meta):
model = models.Vacante
def clean(self):
print(self.cleaned_data)
return self.cleaned_data
class VacantePersonaForm(utils.BaseFormAllFields):
title = 'Vacante Persona'
class Meta(utils.BaseFormAllFields.Meta):
model = models.VacantePersona
def get_vacante_persona_formset(form,
formset=models_form.BaseInlineFormSet,
**kwargs):
return models_form.inlineformset_factory(
persona_models.Persona,
models.VacantePersona,
form,
formset,
**kwargs
)
class FormacionTrabajoForm(utils.BaseFormAllFields):
title = 'Formacion para el trabajo'
fecha_creacion = forms.DateField(input_formats=constants.INPUT_FORMATS)
class Meta(utils.BaseFormAllFields.Meta):
model = models.FormacionTrabajo
class FormacionTrabajoPersonasForm(utils.BaseFormAllFields):
title = 'Formacion Trabajo Persona'
fecha_inscripcion = forms.DateField(input_formats=constants.INPUT_FORMATS)
fecha_proceso = forms.DateField(input_formats=constants.INPUT_FORMATS)
class Meta(utils.BaseFormAllFields.Meta):
model = models.FormacionTrabajoPersona
def get_formacion_trabajo_persona_formset(form,
formset=models_form.BaseInlineFormSet,
**kwargs):
return models_form.inlineformset_factory(
persona_models.Persona,
models.FormacionTrabajoPersona,
form,
formset,
**kwargs
) | mit | Python |
4e379e5813f8686578397043554819fbb5dbb410 | add ALMA beta server | ceb8/astroquery,imbasimba/astroquery,imbasimba/astroquery,ceb8/astroquery | astroquery/alma/__init__.py | astroquery/alma/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
ALMA Archive service.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.alma`.
"""
timeout = _config.ConfigItem(60, "Timeout in seconds")
archive_url = _config.ConfigItem(['http://almascience.org',
'http://almascience.eso.org',
'http://almascience.nrao.edu',
'http://almascience.nao.ac.jp',
'http://beta.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/aq', # the beta server (for testing)
],
'The ALMA Archive mirror to use')
conf = Conf()
from .core import Alma, AlmaClass
__all__ = ['Alma', 'AlmaClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
ALMA Archive service.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.alma`.
"""
timeout = _config.ConfigItem(60, "Timeout in seconds")
archive_url = _config.ConfigItem(['http://almascience.org',
'http://almascience.eso.org',
'http://almascience.nrao.edu',
'http://almascience.nao.ac.jp'],
'The ALMA Archive mirror to use')
conf = Conf()
from .core import Alma, AlmaClass
__all__ = ['Alma', 'AlmaClass',
'Conf', 'conf',
]
| bsd-3-clause | Python |
937d0d731d03348696046a9783524ad5959dcb31 | Fix exception in doorbird logbook during startup (#74649) | mezz64/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant,mezz64/home-assistant | homeassistant/components/doorbird/logbook.py | homeassistant/components/doorbird/logbook.py | """Describe logbook events."""
from __future__ import annotations
from typing import Any
from homeassistant.components.logbook.const import (
LOGBOOK_ENTRY_ENTITY_ID,
LOGBOOK_ENTRY_MESSAGE,
LOGBOOK_ENTRY_NAME,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import callback
from .const import DOMAIN, DOOR_STATION, DOOR_STATION_EVENT_ENTITY_IDS
@callback
def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe a logbook event."""
doorbird_event = event.event_type.split("_", 1)[1]
return {
LOGBOOK_ENTRY_NAME: "Doorbird",
LOGBOOK_ENTRY_MESSAGE: f"Event {event.event_type} was fired",
LOGBOOK_ENTRY_ENTITY_ID: hass.data[DOMAIN][
DOOR_STATION_EVENT_ENTITY_IDS
].get(doorbird_event, event.data.get(ATTR_ENTITY_ID)),
}
domain_data: dict[str, Any] = hass.data[DOMAIN]
for data in domain_data.values():
if DOOR_STATION not in data:
# We need to skip door_station_event_entity_ids
continue
for event in data[DOOR_STATION].doorstation_events:
async_describe_event(
DOMAIN, f"{DOMAIN}_{event}", async_describe_logbook_event
)
| """Describe logbook events."""
from homeassistant.components.logbook.const import (
LOGBOOK_ENTRY_ENTITY_ID,
LOGBOOK_ENTRY_MESSAGE,
LOGBOOK_ENTRY_NAME,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import callback
from .const import DOMAIN, DOOR_STATION, DOOR_STATION_EVENT_ENTITY_IDS
@callback
def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe a logbook event."""
doorbird_event = event.event_type.split("_", 1)[1]
return {
LOGBOOK_ENTRY_NAME: "Doorbird",
LOGBOOK_ENTRY_MESSAGE: f"Event {event.event_type} was fired",
LOGBOOK_ENTRY_ENTITY_ID: hass.data[DOMAIN][
DOOR_STATION_EVENT_ENTITY_IDS
].get(doorbird_event, event.data.get(ATTR_ENTITY_ID)),
}
domain_data = hass.data[DOMAIN]
for config_entry_id in domain_data:
door_station = domain_data[config_entry_id][DOOR_STATION]
for event in door_station.doorstation_events:
async_describe_event(
DOMAIN, f"{DOMAIN}_{event}", async_describe_logbook_event
)
| apache-2.0 | Python |
e011e3b9b6a846f7e32fe978e5205554c64fc631 | Add baz() | moreati/coveralls-trial | coveralls_trial/coveralls_trial.py | coveralls_trial/coveralls_trial.py | # -*- coding: utf-8 -*-
def foo():
return True
def bar():
return False
def baz(x)
if x:
return x**2
elif x == 0:
return x + 17
else:
return x
| # -*- coding: utf-8 -*-
def foo():
return True
def bar():
return False
| bsd-3-clause | Python |
481142127970960cda4ae29f3a24713c2a46991e | Update __init__.py | rshipp/python-appassure | appassure/core/__init__.py | appassure/core/__init__.py | """This package contains the AppAssureAPI class and its subclasses,
which provide direct access to the AppAssure Core API's interfaces
and services, as described at http://docs.appassure.com/display/AA50D/.
"""
| """This package contains the AppAssureAPI class and its subclasses,
which provide direct access to the AppAssure Core API's interfaces
and services, as described at http://docs.appassure.com/display/AA50D/.
"""
| bsd-3-clause | Python |
60193bd891a819d519d181a6f31b263b01cf50a7 | revert to the correct imports | hackday-profilers/flocker-docker-plugin,hackday-profilers/flocker-docker-plugin,moypray/flocker-docker-plugin,wallnerryan/flocker-docker-plugin,mbrukman/flocker-docker-plugin,moypray/flocker-docker-plugin,mbrukman/flocker-docker-plugin,hackday-profilers/flocker-docker-plugin,mbrukman/flocker-docker-plugin,moypray/flocker-docker-plugin,wallnerryan/flocker-docker-plugin,wallnerryan/flocker-docker-plugin | powerstripflocker.tac | powerstripflocker.tac | # Copyright ClusterHQ Inc. See LICENSE file for details.
from twisted.web import server, resource
from twisted.application import service, internet
from powerstripflocker.adapter import HandshakeResource, AdapterResource
def getAdapter():
root = resource.Resource()
v1 = resource.Resource()
root.putChild("v1", v1)
volume = resource.Resource()
v1.putChild("volume", volume)
volume.putChild("volumes", AdapterResource())
v1.putChild("handshake", HandshakeResource())
site = server.Site(root)
return site
application = service.Application("Powerstrip Flocker Adapter")
adapterServer = internet.UNIXServer("/var/run/docker-plugin/plugin.sock", getAdapter())
adapterServer.setServiceParent(application)
| # Copyright ClusterHQ Inc. See LICENSE file for details.
from twisted.web import server, resource
from twisted.application import service, internet
from powerstripflocker.adapter import (HandshakeResource, CreateResource,
DestroyResource, MountResource, UnmountResource)
def getAdapter():
root = resource.Resource()
v1 = resource.Resource()
root.putChild("v1", v1)
volume = resource.Resource()
v1.putChild("volume", volume)
volume.putChild("volumes", AdapterResource())
v1.putChild("handshake", HandshakeResource())
site = server.Site(root)
return site
application = service.Application("Powerstrip Flocker Adapter")
adapterServer = internet.UNIXServer("/var/run/docker-plugin/plugin.sock", getAdapter())
adapterServer.setServiceParent(application)
| apache-2.0 | Python |
fd4c7640cbc9add7ca141e1df9e9aa02dc97c1b5 | Make password write only | LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha | application/auth/models.py | application/auth/models.py | from werkzeug.security import generate_password_hash
from werkzeug.security import check_password_hash
from application import db
class User(db.Model):
__tablename__ = 'users'
email = db.Column(db.String(255), primary_key=True)
_password = db.Column(db.String(255))
authenticated = db.Column(db.Boolean, default=False)
def __repr__(self):
return str({
'email': self.email
})
def is_authenticated(self):
return self.authenticated
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.email
@property
def password(self):
raise AttributeError("Password not readable")
@password.setter
def password(self, password):
self._password = generate_password_hash(password)
def check_password(self , password):
return check_password_hash(self._password, password)
| from werkzeug.security import generate_password_hash
from werkzeug.security import check_password_hash
from application import db
class User(db.Model):
__tablename__ = 'users'
email = db.Column(db.String(255), primary_key=True)
_password = db.Column(db.String(255))
authenticated = db.Column(db.Boolean, default=False)
def __repr__(self):
return str({
'email': self.email
})
def is_authenticated(self):
return self.authenticated
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.email
@property
def password(self):
return self._password
@password.setter
def password(self, password):
self._password = generate_password_hash(password)
def check_password(self , password):
return check_password_hash(self.password, password)
| mit | Python |
e6e1429b9e1c8b654f688b5ac2dd0681717d39ab | change SSH to HTTPS URIs and remove private repo from test | bethgelab/foolbox,bethgelab/foolbox | foolbox/tests/test_model_zoo.py | foolbox/tests/test_model_zoo.py | from foolbox import zoo
import numpy as np
import foolbox
import sys
import pytest
from foolbox.zoo.model_loader import ModelLoader
@pytest.fixture(autouse=True)
def unload_foolbox_model_module():
# reload foolbox_model from scratch for every run
# to ensure atomic tests without side effects
module_names = ['foolbox_model', 'model']
for module_name in module_names:
if module_name in sys.modules:
del sys.modules[module_name]
test_data = [
# private repo won't work on travis
# ('https://github.com/bethgelab/AnalysisBySynthesis.git', (1, 28, 28)),
('https://github.com/bethgelab/convex_adversarial.git', (1, 28, 28)),
('https://github.com/bethgelab/mnist_challenge.git', 784)
]
@pytest.mark.parametrize("url, dim", test_data)
def test_loading_model(url, dim):
# download model
model = zoo.get_model(url)
# create a dummy image
x = np.zeros(dim, dtype=np.float32)
x[:] = np.random.randn(*x.shape)
# run the model
logits = model.predictions(x)
probabilities = foolbox.utils.softmax(logits)
predicted_class = np.argmax(logits)
# sanity check
assert predicted_class >= 0
assert np.sum(probabilities) >= 0.9999
def test_non_default_module_throws_error():
with pytest.raises(RuntimeError):
ModelLoader.get(key='other')
| from foolbox import zoo
import numpy as np
import foolbox
import sys
import pytest
from foolbox.zoo.model_loader import ModelLoader
@pytest.fixture(autouse=True)
def unload_foolbox_model_module():
# reload foolbox_model from scratch for every run
# to ensure atomic tests without side effects
module_names = ['foolbox_model', 'model']
for module_name in module_names:
if module_name in sys.modules:
del sys.modules[module_name]
test_data = [
('git@github.com:bethgelab/analysis-by-synthesis-model.git', (1, 28, 28)),
('git@github.com:bethgelab/convex_adversarial.git', (1, 28, 28)),
('git@github.com:bethgelab/mnist_challenge.git', 784)
]
@pytest.mark.parametrize("url, dim", test_data)
def test_loading_model(url, dim):
# download model
model = zoo.get_model(url)
# create a dummy image
x = np.zeros(dim, dtype=np.float32)
x[:] = np.random.randn(*x.shape)
# run the model
logits = model.predictions(x)
probabilities = foolbox.utils.softmax(logits)
predicted_class = np.argmax(logits)
# sanity check
assert predicted_class >= 0
assert np.sum(probabilities) >= 0.9999
def test_non_default_module_throws_error():
with pytest.raises(RuntimeError):
ModelLoader.get(key='other')
| mit | Python |
ebc3990091c4d2290a01033761e484d788381309 | add filtering of datasets | almey/policycompass-services,policycompass/policycompass-services,policycompass/policycompass-services,almey/policycompass-services,mmilaprat/policycompass-services,mmilaprat/policycompass-services,almey/policycompass-services,mmilaprat/policycompass-services,policycompass/policycompass-services | apps/datasetmanager/api.py | apps/datasetmanager/api.py | __author__ = 'fki'
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.request import Request
from rest_framework import status
from rest_framework import generics
from .models import *
from .serializers import *
from .file_encoder import FileEncoder
class Base(APIView):
def get(self, request):
"""
:type request: Request
:param request:
:return:
"""
result = {
"Datasets": reverse('dataset-list', request=request),
"Converter": reverse('converter', request=request),
}
return Response(result)
class DatasetList(generics.ListCreateAPIView):
model = Dataset
serializer_class = BaseDatasetSerializer
paginate_by = 10
paginate_by_param = 'page_size'
def post(self, request, *args, **kwargs):
self.serializer_class = DetailDatasetSerializer
return super(DatasetList, self).post(request, args, kwargs)
def get_queryset(self):
queryset = Dataset.objects.all()
indicator_id = self.request.GET.get('indicator_id', '')
if indicator_id:
queryset = queryset.filter(indicator_id=indicator_id)
return queryset
class DatasetDetail(generics.RetrieveUpdateDestroyAPIView):
model = Dataset
serializer_class = DetailDatasetSerializer
class Converter(APIView):
"""
Serves the converter resource.
"""
def post(self, request, *args, **kwargs):
"""
Processes a POST request
"""
files = request.FILES
if 'file' in files:
# File has to be named file
file = files['file']
encoder = FileEncoder(file)
# Check if the file extension is supported
if not encoder.is_supported():
return Response({'error': 'File Extension is not supported'}, status=status.HTTP_400_BAD_REQUEST)
# Encode the file
try:
encoding = encoder.encode()
except:
return Response({'error': "Invalid File"}, status=status.HTTP_400_BAD_REQUEST)
# Build the result
result = {
'filename': file.name,
'filesize': file.size,
'result': encoding
}
return Response(result)
return Response({'error': "No Form field 'file'"}, status=status.HTTP_400_BAD_REQUEST) | __author__ = 'fki'
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.request import Request
from rest_framework import status
from rest_framework import generics
from .models import *
from .serializers import *
from .file_encoder import FileEncoder
class Base(APIView):
def get(self, request):
"""
:type request: Request
:param request:
:return:
"""
result = {
"Datasets": reverse('dataset-list', request=request),
"Converter": reverse('converter', request=request),
}
return Response(result)
class DatasetList(generics.ListCreateAPIView):
model = Dataset
serializer_class = BaseDatasetSerializer
paginate_by = 10
paginate_by_param = 'page_size'
def post(self, request, *args, **kwargs):
self.serializer_class = DetailDatasetSerializer
return super(DatasetList, self).post(request, args, kwargs)
class DatasetDetail(generics.RetrieveUpdateDestroyAPIView):
model = Dataset
serializer_class = DetailDatasetSerializer
class Converter(APIView):
"""
Serves the converter resource.
"""
def post(self, request, *args, **kwargs):
"""
Processes a POST request
"""
files = request.FILES
if 'file' in files:
# File has to be named file
file = files['file']
encoder = FileEncoder(file)
# Check if the file extension is supported
if not encoder.is_supported():
return Response({'error': 'File Extension is not supported'}, status=status.HTTP_400_BAD_REQUEST)
# Encode the file
try:
encoding = encoder.encode()
except:
return Response({'error': "Invalid File"}, status=status.HTTP_400_BAD_REQUEST)
# Build the result
result = {
'filename': file.name,
'filesize': file.size,
'result': encoding
}
return Response(result)
return Response({'error': "No Form field 'file'"}, status=status.HTTP_400_BAD_REQUEST) | agpl-3.0 | Python |
60c29aab1b0bd0ae237e023fe4587326f21daa47 | Improve code of 'import_test' | reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations | website/tests/test_import_results.py | website/tests/test_import_results.py | """"Tests in this module should be pass after data import and fail before"""
from database import bdb
from database import make_snv_key
from database import decode_csv
import app # this will take some time (stats initialization)
from models import Protein
def test_mappings():
"""This is a simple inclusion test for genome -> proteme mutation mappigns.
Knowing the data, we demand the items from the right side (of test data)
to be in the results of queries specified on the left side.
"""
test_data = (
# (chrom, dna_pos, dna_ref, dna_alt), (name, pos, ref, alt)
(('17', '7572934', 'G', 'A'), ('TP53', 353, 'S', 'L')),
(('17', '19282215', 't', 'a'), ('MAPK7', 1, 'M', 'K')),
(('21', '40547520', 'g', 'a'), ('PSMG1', 283, 'T', 'I')),
(('9', '125616157', 't', 'a'), ('RC3H2', 1064, 'Y', 'F')),
(('11', '120198175', 'g', 'a'), ('TMEM136', 31, 'V', 'M')),
(('10', '81838457', 't', 'a'), ('TMEM254', 1, 'M', 'K')),
(('13', '111267940', 't', 'a'), ('CARKD', 1, 'M', 'K')),
(('6', '30539266', 't', 'a'), ('ABCF1', 1, 'M', 'K')),
(('6', '36765430', 'g', 'a'), ('CPNE5', 140, 'L', 'F')),
(('12', '123464753', 't', 'a'), ('ARL6IP4', 1, 'M', 'K')),
)
for genomic_data, protein_data in test_data:
snv = make_snv_key(*genomic_data)
items = [
decode_csv(item)
for item in bdb[snv]
]
for item in items:
retrieved_data = (
Protein.query.get(item['protein_id']).gene.name,
item['pos'],
item['ref'],
item['alt']
)
if retrieved_data == protein_data:
break
else:
raise Exception(retrieved_data, protein_data)
| """"Tests in this module should be pass after data import and fail before"""
from database import bdb
from database import make_snv_key
from database import decode_csv
from models import Protein
def test_mappings():
"""This is a simple inclusion test for genome -> proteme mutation mappigns.
Knowing the data, we demand the items from the right side (of test data)
to be in the results of queries specified on the left side.
"""
test_data = (
# (chrom, dna_pos, dna_ref, dna_alt), (name, pos, ref, alt)
(('chr17', '7572934', 'G', 'A'), ('TP53', 353, 'S', 'L')),
(('chr17', '19282215', 't', 'a'), ('MAPK7', 1, 'M', 'K')),
(('chr21', '40547520', 'g', 'a'), ('PSMG1', 283, 'T', 'I')),
(('chr9', '125616157', 't', 'a'), ('RC3H2', 1064, 'Y' 'F')),
(('chr11', '120198175', 'g', 'a'), ('TMEM136', 31, 'V', 'M')),
(('chr10', '81838457', 't', 'a'), ('TMEM254', 1, 'M', 'K')),
(('chr13', '111267940', 't', 'a'), ('CARKD', 1, 'M', 'K')),
(('chr6', '30539266', 't', 'a'), ('ABCF1', 1, 'M', 'K')),
(('chr6', '36765430', 'g', 'a'), ('CPNE5', 140, 'L', 'F')),
(('chr12', '123464753', 't', 'a'), ('ARL6IP4', 1, 'M', 'K')),
)
for genomic_data, protein_data in test_data:
snv = make_snv_key(*genomic_data)
items = [
decode_csv(item)
for item in bdb[snv]
]
hit = False
for item in items:
if (
Protein.get(item['protein_id']).gene.name == protein_data[0] and
item['pos'] == protein_data[1] and
item['ref'] == protein_data[2] and
item['alt'] == protein_data[3]
):
hit = True
assert hit
| lgpl-2.1 | Python |
82ac173a782637ba89bda7b20a7132d54947cd10 | add birthday to json | rohitdatta/pepper,rohitdatta/pepper,rohitdatta/pepper | pepper/api/views.py | pepper/api/views.py | from pepper.app import DB
from pepper.users import User
from flask import url_for, jsonify, request, Response
import json, os, urllib
from pepper import settings
from pepper.utils import calculate_age
def schedule():
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
json_url = os.path.join(SITE_ROOT, "../static/api", "schedule.json")
data = json.load(open(json_url))
return Response(json.dumps(data), mimetype='application/json')
def schedule_day(day):
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
if day == '1':
json_url = os.path.join(SITE_ROOT, "../static/api", "schedule-1.json")
else:
json_url = os.path.join(SITE_ROOT, "../static/api", "schedule-2.json")
data = json.load(open(json_url))
return Response(json.dumps(data), mimetype='application/json')
def partner_list():
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
json_url = os.path.join(SITE_ROOT, "../static/api", "partners.json")
data = json.load(open(json_url))
return Response(json.dumps(data), mimetype='application/json')
def passbook():
email = request.get_json()['email']
user = User.query.filter_by(email=email).first()
if user is None or user.status != 'CONFIRMED':
data = {'success': False}
else:
data = {'success': True,
'email': user.email,
'name': '{0} {1}'.format(user.fname, user.lname),
'school': user.school_name}
return jsonify(data)
def check_in():
# Check if secret token matches
if request.method == 'GET':
email = urllib.unquote(request.args.get('email'))
volunteer_email = urllib.unquote(request.args.get('volunteer_email'))
else:
data = request.json
email = data['email']
volunteer_email = data['volunteer_email']
# if data['secret'] != settings.CHECK_IN_SECRET:
# message = 'Unauthorized'
# return jsonify(message=message), 401
# Get the user email and check them in
user = User.query.filter_by(email=email).first()
if user is not None:
message = 'Found user'
if request.method == 'POST':
# check the user in
if user.checked_in: # User is already checked in
message = 'Attendee is already checked in'
else:
if user.status == 'CONFIRMED':
user.checked_in = True
DB.session.add(user)
DB.session.commit()
message = 'Attendee successfully checked in'
else:
message = 'Attendee has not been confirmed to attend {}'.format(settings.HACKATHON_NAME)
# return back success to the check in app
return jsonify(name="{0} {1}".format(user.fname, user.lname), school=user.school_name, email=user.email, age=calculate_age(user.birthday), checked_in=user.checked_in, confirmed=user.status=='CONFIRMED', birthday=user.birthday)
else:
return jsonify(message='User does not exist'), 404 | from pepper.app import DB
from pepper.users import User
from flask import url_for, jsonify, request, Response
import json, os, urllib
from pepper import settings
from pepper.utils import calculate_age
def schedule():
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
json_url = os.path.join(SITE_ROOT, "../static/api", "schedule.json")
data = json.load(open(json_url))
return Response(json.dumps(data), mimetype='application/json')
def schedule_day(day):
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
if day == '1':
json_url = os.path.join(SITE_ROOT, "../static/api", "schedule-1.json")
else:
json_url = os.path.join(SITE_ROOT, "../static/api", "schedule-2.json")
data = json.load(open(json_url))
return Response(json.dumps(data), mimetype='application/json')
def partner_list():
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
json_url = os.path.join(SITE_ROOT, "../static/api", "partners.json")
data = json.load(open(json_url))
return Response(json.dumps(data), mimetype='application/json')
def passbook():
email = request.get_json()['email']
user = User.query.filter_by(email=email).first()
if user is None or user.status != 'CONFIRMED':
data = {'success': False}
else:
data = {'success': True,
'email': user.email,
'name': '{0} {1}'.format(user.fname, user.lname),
'school': user.school_name}
return jsonify(data)
def check_in():
# Check if secret token matches
if request.method == 'GET':
email = urllib.unquote(request.args.get('email'))
volunteer_email = urllib.unquote(request.args.get('volunteer_email'))
else:
data = request.json
email = data['email']
volunteer_email = data['volunteer_email']
# if data['secret'] != settings.CHECK_IN_SECRET:
# message = 'Unauthorized'
# return jsonify(message=message), 401
# Get the user email and check them in
user = User.query.filter_by(email=email).first()
if user is not None:
message = 'Found user'
if request.method == 'POST':
# check the user in
if user.checked_in: # User is already checked in
message = 'Attendee is already checked in'
else:
if user.status == 'CONFIRMED':
user.checked_in = True
DB.session.add(user)
DB.session.commit()
message = 'Attendee successfully checked in'
else:
message = 'Attendee has not been confirmed to attend {}'.format(settings.HACKATHON_NAME)
# return back success to the check in app
return jsonify(name="{0} {1}".format(user.fname, user.lname), school=user.school_name, email=user.email, age=calculate_age(user.birthday), checked_in=user.checked_in, confirmed=user.status=='CONFIRMED')
else:
return jsonify(message='User does not exist'), 404 | agpl-3.0 | Python |
2fe0353d0ad7f2afc87cd85c7dd1e1174112807d | Update cmd2 fix to still work with 0.6.7 | openstack/cliff,dtroyer/cliff,dtroyer/cliff,openstack/cliff | cliff/tests/test_interactive.py | cliff/tests/test_interactive.py | # -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cmd2
from cliff.interactive import InteractiveApp
class FakeApp(object):
NAME = 'Fake'
def make_interactive_app(*command_names):
fake_command_manager = [(x, None) for x in command_names]
return InteractiveApp(FakeApp, fake_command_manager,
stdin=None, stdout=None)
def _test_completenames(expecteds, prefix):
app = make_interactive_app('hips', 'hippo', 'nonmatching')
assert set(app.completenames(prefix)) == set(expecteds)
def test_cmd2_completenames():
# cmd2.Cmd define do_help method
_test_completenames(['help'], 'he')
def test_cliff_completenames():
_test_completenames(['hips', 'hippo'], 'hip')
def test_no_completenames():
_test_completenames([], 'taz')
def test_both_completenames():
# cmd2.Cmd define do_history method
# NOTE(dtroyer): Before release 0.7.0 do_hi was also defined so we need
# to account for that in the list of possible responses.
# Remove this check after cmd2 0.7.0 is the minimum
# requirement.
if hasattr(cmd2.Cmd, "do_hi"):
_test_completenames(['hi', 'history', 'hips', 'hippo'], 'hi')
else:
_test_completenames(['history', 'hips', 'hippo'], 'hi')
def _test_completedefault(expecteds, line, begidx):
command_names = set(['show file', 'show folder', 'show long', 'list all'])
app = make_interactive_app(*command_names)
observeds = app.completedefault(None, line, begidx, None)
assert set(observeds) == set(expecteds)
assert set([line[:begidx] + x for x in observeds]) <= command_names
def test_empty_text_completedefault():
# line = 'show ' + begidx = 5 implies text = ''
_test_completedefault(['file', 'folder', ' long'], 'show ', 5)
def test_nonempty_text_completedefault2():
# line = 'show f' + begidx = 6 implies text = 'f'
_test_completedefault(['file', 'folder'], 'show f', 5)
def test_long_completedefault():
_test_completedefault(['long'], 'show ', 6)
def test_no_completedefault():
_test_completedefault([], 'taz ', 4)
| # -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff.interactive import InteractiveApp
class FakeApp(object):
NAME = 'Fake'
def make_interactive_app(*command_names):
fake_command_manager = [(x, None) for x in command_names]
return InteractiveApp(FakeApp, fake_command_manager,
stdin=None, stdout=None)
def _test_completenames(expecteds, prefix):
app = make_interactive_app('hips', 'hippo', 'nonmatching')
assert set(app.completenames(prefix)) == set(expecteds)
def test_cmd2_completenames():
# cmd2.Cmd define do_help method
_test_completenames(['help'], 'he')
def test_cliff_completenames():
_test_completenames(['hips', 'hippo'], 'hip')
def test_no_completenames():
_test_completenames([], 'taz')
def test_both_completenames():
# cmd2.Cmd defines do_history method
_test_completenames(['history', 'hips', 'hippo'], 'hi')
def _test_completedefault(expecteds, line, begidx):
command_names = set(['show file', 'show folder', 'show long', 'list all'])
app = make_interactive_app(*command_names)
observeds = app.completedefault(None, line, begidx, None)
assert set(observeds) == set(expecteds)
assert set([line[:begidx] + x for x in observeds]) <= command_names
def test_empty_text_completedefault():
# line = 'show ' + begidx = 5 implies text = ''
_test_completedefault(['file', 'folder', ' long'], 'show ', 5)
def test_nonempty_text_completedefault2():
# line = 'show f' + begidx = 6 implies text = 'f'
_test_completedefault(['file', 'folder'], 'show f', 5)
def test_long_completedefault():
_test_completedefault(['long'], 'show ', 6)
def test_no_completedefault():
_test_completedefault([], 'taz ', 4)
| apache-2.0 | Python |
7e4d32fdbdadb8a331d13f28da1a003b2fe8acf6 | Fix keepass importer csv errors | scorphus/passpie,marcwebbie/passpie,eiginn/passpie,eiginn/passpie,marcwebbie/passpie,scorphus/passpie | tests/test_importers/test_keepass_importer.py | tests/test_importers/test_keepass_importer.py | import pytest
from passpie.importers.keepass_importer import KeepassImporter
def test_keepass_importer_returns_false_when_csv_files_hasnt_expected_headers(mocker, mock_open):
headers = reversed(['Group', 'Title', 'Username', 'Password', 'URL', 'Notes'])
mocker.patch('passpie.importers.keepass_importer.unicode_csv_reader',
return_value=iter([headers]))
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
result = KeepassImporter().match('filepath')
assert result is False
def test_keepass_importer_with_empty_reader_raises_value_error(mocker, mock_open):
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
mocker.patch('passpie.importers.keepass_importer.unicode_csv_reader',
return_value=iter([]))
importer = KeepassImporter()
with pytest.raises(ValueError):
importer.match('filepath')
with pytest.raises(ValueError):
importer.handle('filepath')
def test_keepass_importer_returns_true_when_csv_files_has_expected_headers(mocker, mock_open):
headers = ['Group', 'Title', 'Username', 'Password', 'URL', 'Notes']
mocker.patch('passpie.importers.keepass_importer.unicode_csv_reader',
return_value=iter([headers]))
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
result = KeepassImporter().match('filepath')
assert result is True
def test_keepass_importer_returns_expected_credentials_for_row(mocker, mock_open):
rows = [
['Group', 'Title', 'Username', 'Password', 'URL', 'Notes'],
['Root', 'Email', 'foo', 'password', 'example.org', ''],
['Root', 'Email', 'foo', 'password', 'example.com', 'Some comment'],
]
credential1 = {
'login': 'foo',
'name': 'example.org',
'comment': '',
'password': 'password'
}
credential2 = {
'login': 'foo',
'name': 'example.com',
'comment': 'Some comment',
'password': 'password'
}
mocker.patch('passpie.importers.keepass_importer.unicode_csv_reader',
return_value=iter(rows))
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
result = KeepassImporter().handle('filepath')
credentials = result
assert credentials
assert credential1 in credentials
assert credential2 in credentials
| import pytest
from passpie.importers.keepass_importer import KeepassImporter
def test_keepass_importer_returns_false_when_csv_files_hasnt_expected_headers(mocker, mock_open):
headers = reversed(['Group', 'Title', 'Username', 'Password', 'URL', 'Notes'])
mocker.patch('passpie.importers.keepass_importer.csv.reader',
return_value=iter([headers]))
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
result = KeepassImporter().match('filepath')
assert result is False
def test_keepass_importer_with_empty_reader_raises_value_error(mocker, mock_open):
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
mocker.patch('passpie.importers.keepass_importer.csv.reader', return_value=iter([]))
importer = KeepassImporter()
with pytest.raises(ValueError):
importer.match('filepath')
with pytest.raises(ValueError):
importer.handle('filepath')
def test_keepass_importer_returns_true_when_csv_files_has_expected_headers(mocker, mock_open):
headers = ['Group', 'Title', 'Username', 'Password', 'URL', 'Notes']
mocker.patch('passpie.importers.keepass_importer.csv.reader',
return_value=iter([headers]))
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
result = KeepassImporter().match('filepath')
assert result is True
def test_keepass_importer_returns_expected_credentials_for_row(mocker, mock_open):
rows = [
['Group', 'Title', 'Username', 'Password', 'URL', 'Notes'],
['Root', 'Email', 'foo', 'password', 'example.org', ''],
['Root', 'Email', 'foo', 'password', 'example.com', 'Some comment'],
]
credential1 = {
'login': 'foo',
'name': 'example.org',
'comment': '',
'password': 'password'
}
credential2 = {
'login': 'foo',
'name': 'example.com',
'comment': 'Some comment',
'password': 'password'
}
mocker.patch('passpie.importers.keepass_importer.csv.reader',
return_value=iter(rows))
mocker.patch('passpie.importers.keepass_importer.open', mock_open(), create=True)
result = KeepassImporter().handle('filepath')
credentials = result
assert credentials
assert credential1 in credentials
assert credential2 in credentials
| mit | Python |
37a74b2b4b19282d892820983885003d42095130 | disable algolia indexing | dbinetti/barberscore-django,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore | project/api/config.py | project/api/config.py |
# Standard Library
import os
# Django
from django.apps import AppConfig
class ApiConfig(AppConfig):
"""Sets the configuration for the api app."""
name = 'api'
def ready(self):
import algoliasearch_django as algoliasearch
import api.signals
from .indexes import AwardIndex
from .indexes import ChartIndex
from .indexes import ConventionIndex
# from .indexes import PersonIndex
from .indexes import GroupIndex
Award = self.get_model('award')
Chart = self.get_model('chart')
Convention = self.get_model('convention')
# Person = self.get_model('person')
Group = self.get_model('group')
algoliasearch.register(Award, AwardIndex)
algoliasearch.register(Chart, ChartIndex)
algoliasearch.register(Convention, ConventionIndex)
# algoliasearch.register(Person, PersonIndex)
algoliasearch.register(Group, GroupIndex)
return
|
# Standard Library
import os
# Django
from django.apps import AppConfig
class ApiConfig(AppConfig):
"""Sets the configuration for the api app."""
name = 'api'
def ready(self):
import algoliasearch_django as algoliasearch
import api.signals
from .indexes import AwardIndex
from .indexes import ChartIndex
from .indexes import ConventionIndex
from .indexes import PersonIndex
from .indexes import GroupIndex
Award = self.get_model('award')
Chart = self.get_model('chart')
Convention = self.get_model('convention')
Person = self.get_model('person')
Group = self.get_model('group')
algoliasearch.register(Award, AwardIndex)
algoliasearch.register(Chart, ChartIndex)
algoliasearch.register(Convention, ConventionIndex)
algoliasearch.register(Person, PersonIndex)
algoliasearch.register(Group, GroupIndex)
return
| bsd-2-clause | Python |
e5051e1d21a7dfecb5194d34ed0df8c3d56647e7 | change fallback canvas url to canvas.dev | penzance/ab-testing-tool,penzance/ab-testing-tool,penzance/ab-testing-tool,penzance/ab-testing-tool | ab_testing_tool/settings/local.py | ab_testing_tool/settings/local.py | from .base import *
from logging.config import dictConfig
ALLOWED_HOSTS = ['*']
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# For local development, allow setting a token in ENV_SETTINGS
COURSE_OAUTH_TOKEN = SECURE_SETTINGS.get("course_oauth_token")
INSTALLED_APPS += ('debug_toolbar', 'sslserver')
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
# For Django Debug Toolbar:
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
}
SELENIUM_CONFIG = {
'selenium_username': SECURE_SETTINGS.get('selenium_user'),
'selenium_password': SECURE_SETTINGS.get('selenium_password'),
'selenium_grid_url': SECURE_SETTINGS.get('selenium_grid_url'),
'base_url': 'https://canvas.dev.tlt.harvard.edu',
}
# Log to console instead of a file when running locally
LOGGING['handlers']['default'] = {
'level': logging.DEBUG,
'class': 'logging.StreamHandler',
'formatter': 'simple',
}
dictConfig(LOGGING)
| from .base import *
from logging.config import dictConfig
ALLOWED_HOSTS = ['*']
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# For local development, allow setting a token in ENV_SETTINGS
COURSE_OAUTH_TOKEN = SECURE_SETTINGS.get("course_oauth_token")
INSTALLED_APPS += ('debug_toolbar', 'sslserver')
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
# For Django Debug Toolbar:
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
}
SELENIUM_CONFIG = {
'selenium_username': SECURE_SETTINGS.get('selenium_user'),
'selenium_password': SECURE_SETTINGS.get('selenium_password'),
'selenium_grid_url': SECURE_SETTINGS.get('selenium_grid_url'),
'base_url': 'https://canvas.icommons.harvard.edu',
}
# Log to console instead of a file when running locally
LOGGING['handlers']['default'] = {
'level': logging.DEBUG,
'class': 'logging.StreamHandler',
'formatter': 'simple',
}
dictConfig(LOGGING)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.