commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
b45c0cc0e9f2964ad442115f7a83292fb83611ec
|
test/vim_autopep8.py
|
test/vim_autopep8.py
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
Put code in main function
|
Put code in main function
|
Python
|
mit
|
SG345/autopep8,MeteorAdminz/autopep8,Vauxoo/autopep8,hhatto/autopep8,Vauxoo/autopep8,MeteorAdminz/autopep8,SG345/autopep8,vauxoo-dev/autopep8,vauxoo-dev/autopep8,hhatto/autopep8
|
222935ffc347f9787f08b50cccb1981151db5cec
|
test_jeni_python3.py
|
test_jeni_python3.py
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
def test_annotate_without_dunder_annotations(self):
# Unclear when this would come up; testing it given Python 2 support.
class NoDunderAnnotations(object):
def __getattr__(self, name):
if name == '__annotations__':
raise AttributeError()
return super().__getattr__(name)
def __call__(self):
"unused"
fn = NoDunderAnnotations()
self.assertTrue(hasattr(fn, '__call__'))
self.assertFalse(hasattr(fn, '__annotations__'))
self.assertFalse(hasattr(fn, 'fake')) # coverage
with self.assertRaises(AttributeError):
jeni.annotate(fn)
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
Test for missing __annotations__ in Python 3.
|
Test for missing __annotations__ in Python 3.
|
Python
|
bsd-2-clause
|
groner/jeni-python,rduplain/jeni-python
|
1fc2e747f1c02d5b8559f03187464eecda008190
|
fernet_fields/test/testmigrate/migrations/0004_copy_values.py
|
fernet_fields/test/testmigrate/migrations/0004_copy_values.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
Fix test migration to actually save updates.
|
Fix test migration to actually save updates.
|
Python
|
bsd-3-clause
|
orcasgit/django-fernet-fields
|
e9941e34253768e33cbfa54ff2bb9cf2e8267e1d
|
workflow-diagnosetargets.py
|
workflow-diagnosetargets.py
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples, samples[sample]['bam'],
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples,
"{}.recalibrated.sorted.bam".format(sample),
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
Tweak to bam file name calling
|
Tweak to bam file name calling
|
Python
|
mit
|
dgaston/ddb-scripts,GastonLab/ddb-scripts,dgaston/ddb-ngsflow-scripts
|
dc6d9ec75ffb2ac776d10a924395d05284bc031e
|
tests/test_compat.py
|
tests/test_compat.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import unittest, which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
Remove unused import of compat unittest
|
Remove unused import of compat unittest
|
Python
|
bsd-3-clause
|
luzfcb/cookiecutter,jhermann/cookiecutter,christabor/cookiecutter,terryjbates/cookiecutter,audreyr/cookiecutter,agconti/cookiecutter,vincentbernat/cookiecutter,kkujawinski/cookiecutter,atlassian/cookiecutter,foodszhang/cookiecutter,lucius-feng/cookiecutter,nhomar/cookiecutter,benthomasson/cookiecutter,ionelmc/cookiecutter,cguardia/cookiecutter,sp1rs/cookiecutter,kkujawinski/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,tylerdave/cookiecutter,Vauxoo/cookiecutter,sp1rs/cookiecutter,ramiroluz/cookiecutter,michaeljoseph/cookiecutter,stevepiercy/cookiecutter,lgp171188/cookiecutter,dajose/cookiecutter,janusnic/cookiecutter,lgp171188/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,benthomasson/cookiecutter,hackebrot/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,stevepiercy/cookiecutter,janusnic/cookiecutter,nhomar/cookiecutter,moi65/cookiecutter,cichm/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,christabor/cookiecutter,pjbull/cookiecutter,agconti/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,venumech/cookiecutter,cichm/cookiecutter,Springerle/cookiecutter,drgarcia1986/cookiecutter,willingc/cookiecutter,vintasoftware/cookiecutter,takeflight/cookiecutter,tylerdave/cookiecutter,lucius-feng/cookiecutter,foodszhang/cookiecutter,jhermann/cookiecutter,moi65/cookiecutter,michaeljoseph/cookiecutter,Vauxoo/cookiecutter,vintasoftware/cookiecutter,ionelmc/cookiecutter,0k/cookiecutter,drgarcia1986/cookiecutter,willingc/cookiecutter,atlassian/cookiecutter,0k/cookiecutter,Springerle/cookiecutter
|
55af2016102ec16a4ec3878f45306e3ac4d520e6
|
qingcloud/cli/iaas_client/actions/instance/reset_instances.py
|
qingcloud/cli/iaas_client/actions/instance/reset_instances.py
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {'instances': instances}
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file> -m <login_mode> -p <login_passwd> -k <login_keypair>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
parser.add_argument('-l', '--login_mode', dest='login_mode',
action='store', type=str, default=None,
help='SSH login mode: keypair or passwd')
parser.add_argument('-p', '--login_passwd', dest='login_passwd',
action='store', type=str, default=None,
help='login_passwd, should specified when SSH login mode is "passwd".')
parser.add_argument('-k', '--login_keypair', dest='login_keypair',
action='store', type=str, default=None,
help='login_keypair, should specified when SSH login mode is "keypair".')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {
'instances': instances,
'login_mode': options.login_mode,
'login_passwd': options.login_passwd,
'login_keypair': options.login_keypair,
}
|
Add login mode to reset-instances
|
Add login mode to reset-instances
|
Python
|
apache-2.0
|
yunify/qingcloud-cli
|
270d06c880fe72987b82fe00f234852e8d49eca0
|
icekit/plugins/image_gallery/content_plugins.py
|
icekit/plugins/image_gallery/content_plugins.py
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
class FrontendMedia:
js = (
'plugins/image_gallery/init.js',
)
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
|
Remove invalid reference to unnecessary JS for image gallery plugin
|
Remove invalid reference to unnecessary JS for image gallery plugin
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
8f4d0247b56ae157e8486c37e38992015e55ac3e
|
skimage/io/_plugins/matplotlib_plugin.py
|
skimage/io/_plugins/matplotlib_plugin.py
|
import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
import matplotlib.pyplot as plt
def imshow(im, *args, **kwargs):
"""Show the input image and return the current axes.
Parameters
----------
im : array, shape (M, N[, 3])
The image to display.
*args, **kwargs : positional and keyword arguments
These are passed directly to `matplotlib.pyplot.imshow`.
Returns
-------
ax : `matplotlib.pyplot.Axes`
The axes showing the image.
"""
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
return plt.imshow(im, *args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
Add docstring to matplotlib imshow plugin
|
Add docstring to matplotlib imshow plugin
The image is now named as an argument, and the axes are returned, in
keeping with matplotlib convention.
|
Python
|
bsd-3-clause
|
emon10005/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,bennlich/scikit-image,chriscrosscutler/scikit-image,dpshelio/scikit-image,emon10005/scikit-image,ofgulban/scikit-image,newville/scikit-image,WarrenWeckesser/scikits-image,Britefury/scikit-image,juliusbierk/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,blink1073/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,bsipocz/scikit-image,youprofit/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,Midafi/scikit-image,pratapvardhan/scikit-image,jwiggins/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,warmspringwinds/scikit-image,blink1073/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,robintw/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,Hiyorimi/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,newville/scikit-image,robintw/scikit-image,youprofit/scikit-image,dpshelio/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,bsipocz/scikit-image,michaelpacer/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,WarrenWeckesser/scikits-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,Britefury/scikit-image
|
1c3d4488566576e3181f7acbf902f0adab3876dd
|
api/spawner/templates/constants.py
|
api/spawner/templates/constants.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
from polyaxon_schemas.polyaxonfile import constants
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(container_job_name, pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
Update naming for spawner jobs
|
Update naming for spawner jobs
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
3607309193c5d8b2b5ce0fd98d976b6e6aa49644
|
test/test_client.py
|
test/test_client.py
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
Add test for local visualization
|
Add test for local visualization
|
Python
|
mit
|
garretstuber/lightning-python,garretstuber/lightning-python,peterkshultz/lightning-python,lightning-viz/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python
|
427931c5c8847d01e4ce563a9c605a78eceb39f3
|
amplpy/amplpython/__init__.py
|
amplpy/amplpython/__init__.py
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system().startswith(('Windows', 'MSYS', 'CYGWIN', 'MINGW')):
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
Add basic support for MSYS, CYGWIN, and MINGW
|
Add basic support for MSYS, CYGWIN, and MINGW
|
Python
|
bsd-3-clause
|
ampl/amplpy,ampl/amplpy,ampl/amplpy
|
0673df239d14edb8d65c17eaa8291ac26fd0b976
|
test_skewstudent.py
|
test_skewstudent.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for ARG class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for SkewStudent class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
Fix title in the test
|
Fix title in the test
|
Python
|
mit
|
khrapovs/skewstudent
|
3d5de4b69be9d99fec4a8ffb46338f0684ffac26
|
api/base/waffle_decorators.py
|
api/base/waffle_decorators.py
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_switch(*args,**kwargs):
if waffle.switch_is_active(switch_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_switch
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_sample(*args,**kwargs):
if waffle.sample_is_active(sample_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_sample
return wrapper
|
Add switch and sample decorators.
|
Add switch and sample decorators.
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,laurenrevere/osf.io,adlius/osf.io,mfraezz/osf.io,binoculars/osf.io,pattisdr/osf.io,binoculars/osf.io,brianjgeiger/osf.io,felliott/osf.io,laurenrevere/osf.io,aaxelb/osf.io,cslzchen/osf.io,aaxelb/osf.io,caseyrollins/osf.io,pattisdr/osf.io,erinspace/osf.io,mfraezz/osf.io,sloria/osf.io,baylee-d/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,felliott/osf.io,chennan47/osf.io,chennan47/osf.io,pattisdr/osf.io,saradbowman/osf.io,aaxelb/osf.io,binoculars/osf.io,adlius/osf.io,icereval/osf.io,erinspace/osf.io,mattclark/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,icereval/osf.io,erinspace/osf.io,leb2dg/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,mfraezz/osf.io,baylee-d/osf.io,adlius/osf.io,leb2dg/osf.io,cslzchen/osf.io,saradbowman/osf.io,sloria/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,felliott/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,mattclark/osf.io,mattclark/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,felliott/osf.io,brianjgeiger/osf.io,adlius/osf.io
|
66e16d6e3d80ab81967232d5d154c64c8e277def
|
robotpy_ext/misc/periodic_filter.py
|
robotpy_ext/misc/periodic_filter.py
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
Allow user to select bypass level
|
Allow user to select bypass level
|
Python
|
bsd-3-clause
|
Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
|
56a89d57824d3bd25ac235a8e360d528edd9a7cf
|
test/factories/blogpost_factory.py
|
test/factories/blogpost_factory.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
    """factory_boy factory building ``Blogpost`` model instances for tests."""
    FACTORY_FOR = Blogpost  # legacy factory_boy target-model declaration
    id = factory.Sequence(lambda n: n)  # unique, sequential primary key
    title = u'Blogpost title'
    body = u'Blogpost body text'
    # The owning app is built lazily; app_id mirrors its primary key.
    app = factory.SubFactory('factories.AppFactory')
    app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
    # The author defaults to the app owner; owner may be None, in which
    # case user_id is stored as None (nullable author).
    owner = factory.SelfAttribute('app.owner')
    user_id = factory.LazyAttribute(
        lambda blogpost: blogpost.owner.id if blogpost.owner else None)
|
Fix for nullable author in blogpost factory
|
Fix for nullable author in blogpost factory
|
Python
|
agpl-3.0
|
OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,proyectos-analizo-info/pybossa-analizo-info,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,harihpr/tweetclickers,stefanhahmann/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,PyBossa/pybossa,Scifabric/pybossa,geotagx/pybossa,PyBossa/pybossa
|
4831c45b53d53d31a6514d5c3e2d0465283b4076
|
topological_sort.py
|
topological_sort.py
|
def topological_sort():
    """Topological sort; placeholder, not implemented yet."""
    return None
def main():
    """Driver; placeholder, not implemented yet."""
    return None
# Run the driver when executed as a script.
if __name__ == '__main__':
    main()
|
def topological_sort_recur():
    """Topological Sorting by Recursion."""
    return None
def topological_sort():
    """Topological Sorting for Directed Acyclic Graph (DAG)."""
    return None
def main():
    """Build a sample DAG as an adjacency mapping (no traversal yet)."""
    # DAG: each vertex maps to the vertices it points to. Note '0'/'1'
    # deliberately map to empty dicts, matching the original fixture.
    dag = {
        '0': {},
        '1': {},
        '2': {'3'},
        '3': {'1'},
        '4': {'0', '1'},
        '5': {'0', '2'}
    }
# Run the driver when executed as a script.
if __name__ == '__main__':
    main()
|
Add topological_sort_recur(), 2 functions' doc strings and DAG
|
Add topological_sort_recur(), 2 functions' doc strings and DAG
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
7c69d30de5aa58d330a183a0e5015e67c36ca7bc
|
spacy/tests/regression/test_issue4674.py
|
spacy/tests/regression/test_issue4674.py
|
# coding: utf-8
from __future__ import unicode_literals
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from spacy.tests.util import make_tempdir
def test_issue4674():
    """Setting entities with duplicate identifiers must not corrupt KB IO."""
    nlp = English()
    kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
    # Two entries share the identifier "Q1"; only one may survive.
    kb.set_entities(
        entity_list=["Q1", "Q1"],
        freq_list=[32, 111],
        vector_list=[[0.9, 1.1, 1.01], [1.8, 2.25, 2.01]],
    )
    assert kb.get_size_entities() == 1
    # Round-trip through disk and verify the deduplicated size persists.
    with make_tempdir() as tmp:
        kb_dir = ensure_path(tmp)
        if not kb_dir.exists():
            kb_dir.mkdir()
        kb_file = kb_dir / "kb"
        kb.dump(str(kb_file))
        reloaded = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
        reloaded.load_bulk(str(kb_file))
        assert reloaded.get_size_entities() == 1
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from ..tests.util import make_tempdir
def test_issue4674():
    """Setting entities with duplicate identifiers must not corrupt KB IO."""
    nlp = English()
    kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
    # Two entries share the identifier "Q1"; spaCy warns and keeps only one.
    with pytest.warns(UserWarning):
        kb.set_entities(
            entity_list=["Q1", "Q1"],
            freq_list=[32, 111],
            vector_list=[[0.9, 1.1, 1.01], [1.8, 2.25, 2.01]],
        )
    assert kb.get_size_entities() == 1
    # Round-trip through disk and verify the deduplicated size persists.
    with make_tempdir() as tmp:
        kb_dir = ensure_path(tmp)
        if not kb_dir.exists():
            kb_dir.mkdir()
        kb_file = kb_dir / "kb"
        kb.dump(str(kb_file))
        reloaded = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
        reloaded.load_bulk(str(kb_file))
        assert reloaded.get_size_entities() == 1
|
Tidy up and expect warning
|
Tidy up and expect warning
|
Python
|
mit
|
honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy
|
455cf39de018762d22b5d212f3a2c08491840bbf
|
tests/integration/cli/sync_test.py
|
tests/integration/cli/sync_test.py
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
    """Integration tests for the `dusty sync` CLI command."""

    def setUp(self):
        super(TestSyncCLI, self).setUp()
        busybox_single_app_bundle_fixture()
        self.run_command('bundles activate busyboxa')
        self.run_command('up')

    def tearDown(self):
        # Bug fix: the base-class tearDown used to run FIRST, tearing down
        # the harness before the cleanup commands that rely on it. Undo our
        # own setUp steps first, then defer to the base class last (mirror
        # image of setUp).
        self.run_command('bundles deactivate busyboxa')
        try:
            self.run_command('stop')
        except Exception:
            # Best effort: `stop` may fail if the app is already down.
            pass
        super(TestSyncCLI, self).tearDown()

    def test_sync_repo(self):
        # Remove the synced repo inside the container, re-sync, and verify
        # the file content round-trips.
        self.exec_in_container('busyboxa', 'rm -rf /repo')
        self.assertFileNotInContainer('busyboxa', '/repo/README.md')
        self.run_command('sync fake-repo')
        self.assertFileContentsInContainer('busyboxa',
                                           '/repo/README.md',
                                           '# fake-repo')
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
    """Integration tests for the `dusty sync` CLI command."""
    def setUp(self):
        super(TestSyncCLI, self).setUp()
        busybox_single_app_bundle_fixture()
        self.run_command('bundles activate busyboxa')
        self.run_command('up')
    def tearDown(self):
        # Undo our own setUp commands first; the base-class tearDown must
        # run LAST (mirror image of setUp) so the commands still execute
        # against a live harness.
        self.run_command('bundles deactivate busyboxa')
        try:
            self.run_command('stop')
        except Exception:
            # Best effort: `stop` may fail if the app is already down.
            pass
        super(TestSyncCLI, self).tearDown()
    def test_sync_repo(self):
        # Remove the synced repo inside the container, re-sync, and verify
        # the file content round-trips.
        self.exec_in_container('busyboxa', 'rm -rf /repo')
        self.assertFileNotInContainer('busyboxa', '/repo/README.md')
        self.run_command('sync fake-repo')
        self.assertFileContentsInContainer('busyboxa',
                                           '/repo/README.md',
                                           '# fake-repo')
|
Fix ordering problem in tearDown
|
Fix ordering problem in tearDown
|
Python
|
mit
|
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
|
6a330523ad683b7883cefa3878c7690fcb5dbd75
|
TalkingToYouBot.py
|
TalkingToYouBot.py
|
from telegram import Updater
import json
import os
def getToken():
    """Return the bot token, prompting for it and caching it on first run.

    The token is persisted as a single-element JSON list in ``token.json``
    in the current working directory.
    """
    # Bug fix: the original tested an undefined name `file_path`, raising
    # NameError on every call; name the path once and use it throughout.
    token_file = 'token.json'
    token = []
    if not os.path.exists(token_file):
        token.append(input('Insert Token here: '))
        with open(token_file, 'w') as f:
            json.dump(token, f)
    else:
        with open(token_file) as f:
            token = json.load(f)
    return token[0]
def main():
    """Entry point: load the token and echo it for debugging."""
    print(getToken())
# Run the bot when executed as a script.
if __name__ == '__main__':
    main()
|
from telegram import Updater
import json
import os
def getToken():
    """Return the bot token, prompting for it and caching it on first run.

    The token is persisted as a single-element JSON list in ``token.json``
    in the current working directory.
    """
    # Bug fix: the original tested an undefined name `file_path`, raising
    # NameError on every call; name the path once and use it throughout.
    token_file = 'token.json'
    token = []
    if not os.path.exists(token_file):
        token.append(input('Insert Token here: '))
        with open(token_file, 'w') as f:
            json.dump(token, f)
    else:
        with open(token_file) as f:
            token = json.load(f)
    return token[0]
def echo(bot, update):
    """Echo every received Telegram message back to its sender.

    Registered as a message handler; replies in the originating chat.
    """
    bot.sendMessage(chat_id=update.message.chat_id, text=update.message.text)
def main():
    """Start the bot: wire up the echo handler and poll for updates."""
    bot_token = getToken()
    print("Starting Bot...")
    # Build the updater around our token and register the echo handler so
    # every plain message is answered.
    updater = Updater(token=bot_token)
    updater.dispatcher.addTelegramMessageHandler(echo)
    # Poll Telegram for updates and block until interrupted.
    updater.start_polling()
    updater.idle()
# Run the bot when executed as a script.
if __name__ == '__main__':
    main()
|
Add simple Echo function and Bot initialisation
|
Add simple Echo function and Bot initialisation
|
Python
|
mit
|
h4llow3En/IAmTalkingToYouBot
|
86a7b0e989e983063a1ff5afd098600bf34da401
|
ixwsauth_server/api.py
|
ixwsauth_server/api.py
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
    """
    Authenticate the API request by checking the application key.
    """
    def is_authenticated(self, request, **kwargs):
        """
        A request is authenticated iff a consumer was attached to it.
        """
        return getattr(request, 'consumer', None) is not None
class ApplicationAuthorization(Authorization):
    """
    Authorize the API request by checking the application key.
    """
    #
    # pylint:disable=W0613,W0622,R0201
    # Redefining built-in 'object'
    # Unused argument 'object'
    # Method could be a function
    #
    # Part of Tastypie API - cannot change any of the above
    #
    def is_authorized(self, request, object=None):
        """
        A request is authorized iff a consumer was attached to it.
        """
        return getattr(request, 'consumer', None) is not None
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
    """
    Authenticate the API request by checking the application key.
    """
    def is_authenticated(self, request, **kwargs):
        """
        Check that the request is signed by the application.
        """
        consumer = getattr(request, 'consumer', None)
        return consumer is not None
    def get_identifier(self, request):
        """
        Return a combination of the consumer, the IP address and the host
        """
        # NOTE(review): assumes is_authenticated() ran first and attached a
        # consumer; if request.consumer is absent, consumer.key() raises
        # AttributeError -- confirm callers guarantee this ordering.
        consumer = getattr(request, 'consumer', None)
        return '%s_%s' % (
            consumer.key(),
            super(ApplicationAuthentication, self).get_identifier(request))
class ApplicationAuthorization(Authorization):
    """
    Authorize the API request by checking the application key.
    """
    #
    # pylint:disable=W0613,W0622,R0201
    # Redefining built-in 'object'
    # Unused argument 'object'
    # Method could be a function
    #
    # Part of Tastypie API - cannot change any of the above
    #
    def is_authorized(self, request, object=None):
        """
        A request is authorized iff a consumer was attached to it.
        """
        return getattr(request, 'consumer', None) is not None
|
Add the consumer key to the identifier
|
Add the consumer key to the identifier
Used for rate limiting by API key.
Refs #17338
|
Python
|
mit
|
infoxchange/ixwsauth
|
07d62f1e9525719be48d862a86f3623368c02d9d
|
kuryr/lib/constants.py
|
kuryr/lib/constants.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Neutron port status values.
PORT_STATUS_ACTIVE = 'ACTIVE'
PORT_STATUS_DOWN = 'DOWN'
# device_owner value stamped on Neutron ports created for containers.
DEVICE_OWNER = 'kuryr:container'
# Maximum length used for generated NIC names.
NIC_NAME_LEN = 14
# Name prefixes for the host / container ends of a veth pair.
VETH_PREFIX = 'tap'
CONTAINER_VETH_PREFIX = 't_c'
# For VLAN type segmentation
MIN_VLAN_TAG = 1
MAX_VLAN_TAG = 4094
# Sub-commands handed to the binding driver.
BINDING_SUBCOMMAND = 'bind'
DEFAULT_NETWORK_MTU = 1500
FALLBACK_VIF_TYPE = 'unbound'
UNBINDING_SUBCOMMAND = 'unbind'
# Neutron port-binding extension attribute keys.
VIF_DETAILS_KEY = 'binding:vif_details'
VIF_TYPE_KEY = 'binding:vif_type'
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Neutron port status values.
PORT_STATUS_ACTIVE = 'ACTIVE'
PORT_STATUS_DOWN = 'DOWN'
# device_owner value stamped on Neutron ports created for containers
# (compute:* namespace for better Neutron compliance).
DEVICE_OWNER = 'compute:kuryr'
# Maximum length used for generated NIC names.
NIC_NAME_LEN = 14
# Name prefixes for the host / container ends of a veth pair.
VETH_PREFIX = 'tap'
CONTAINER_VETH_PREFIX = 't_c'
# For VLAN type segmentation
MIN_VLAN_TAG = 1
MAX_VLAN_TAG = 4094
# Sub-commands handed to the binding driver.
BINDING_SUBCOMMAND = 'bind'
DEFAULT_NETWORK_MTU = 1500
FALLBACK_VIF_TYPE = 'unbound'
UNBINDING_SUBCOMMAND = 'unbind'
# Neutron port-binding extension attribute keys.
VIF_DETAILS_KEY = 'binding:vif_details'
VIF_TYPE_KEY = 'binding:vif_type'
|
Change DEVICE_OWNER to make it more Neutron compliant
|
Change DEVICE_OWNER to make it more Neutron compliant
Change-Id: Id7a2973928c6df9e134e7b91000e90f244066703
|
Python
|
apache-2.0
|
openstack/kuryr,openstack/kuryr
|
eb63b0979763375522bc71ce2f06fb625151ea08
|
MoMMI/Modules/wyci.py
|
MoMMI/Modules/wyci.py
|
import random
import re
from typing import Match
from discord import Message
from MoMMI import master, always_command, MChannel
@always_command("wyci")
async def wyci(channel: MChannel, _match: Match, message: Message) -> None:
    """Reply "When You Code It." to messages ending in "when" (rarely "Never.")."""
    if re.search(r"\S\s+when[\s*?.!)]*$", message.content, re.IGNORECASE) is None:
        return
    reply = "When You Code It." if random.random() > 0.001 else "Never."
    await channel.send(reply)
|
import random
import re
from typing import Match
from discord import Message
from MoMMI import master, always_command, MChannel
@always_command("wyci")
async def wyci(channel: MChannel, _match: Match, message: Message) -> None:
    """Reply "When You Code It." to messages ending in "when", unless the
    server config disables the feature."""
    if not channel.server_config("wyci.enabled", True):
        return
    if re.search(r"\S\s+when[\s*?.!)]*$", message.content, re.IGNORECASE) is None:
        return
    reply = "When You Code It." if random.random() > 0.001 else "Never."
    await channel.send(reply)
|
Add config to disable WYCI.
|
Add config to disable WYCI.
|
Python
|
mit
|
PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI
|
da9058064e2a94f717abe2f97af80d2daa4fa292
|
likert_field/models.py
|
likert_field/models.py
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
import likert_field.forms as forms
class LikertField(models.IntegerField):
    """A Likert item; persisted as a plain integer column."""
    description = _('Likert item field')

    def __init__(self, *args, **kwargs):
        # Unanswered items are legal, so the column is nullable unless the
        # caller explicitly says otherwise.
        kwargs.setdefault('null', True)
        super(LikertField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Likert scores are non-negative; callers may override anything.
        options = {
            'min_value': 0,
            'form_class': forms.LikertField,
        }
        options.update(kwargs)
        return super(LikertField, self).formfield(**options)
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from six import string_types
from django.db import models
from django.utils.translation import ugettext_lazy as _
import likert_field.forms as forms
class LikertField(models.IntegerField):
    """A Likert item; persisted as a plain integer column."""
    description = _('Likert item field')

    def __init__(self, *args, **kwargs):
        # Unanswered items are legal, so the column is nullable unless the
        # caller explicitly says otherwise.
        kwargs.setdefault('null', True)
        super(LikertField, self).__init__(*args, **kwargs)

    def get_prep_value(self, value):
        """Convert a raw score to its database representation.

        The widget submits scores as strings (e.g. '2'); unanswered items
        arrive as empty strings and are stored as NULL.
        """
        if value is None:
            return None
        if isinstance(value, string_types) and not value:
            return None
        return int(value)

    def formfield(self, **kwargs):
        # Likert scores are non-negative; callers may override anything.
        options = {
            'min_value': 0,
            'form_class': forms.LikertField,
        }
        options.update(kwargs)
        return super(LikertField, self).formfield(**options)
|
Handle empty strings from unanswered items
|
Handle empty strings from unanswered items
|
Python
|
bsd-3-clause
|
kelvinwong-ca/django-likert-field,kelvinwong-ca/django-likert-field
|
22855458c7c683353f2ed7b577289b63da8bc9c6
|
src/scikit-cycling/skcycling/utils/io_fit.py
|
src/scikit-cycling/skcycling/utils/io_fit.py
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
    """ Load the power records of a FIT activity into a numpy array.

    Parameters
    ----------
    filename: str,
        Path to the FIT file.

    Returns
    -------
    numpy.ndarray
        One power value (float) per record in the activity.

    Raises
    ------
    ValueError
        If ``filename`` lacks the ``.fit`` extension, or a record carries
        no power value.
    """
    # Check that the filename has the good extension
    if not filename.endswith('.fit'):
        raise ValueError('The file does not have the right extension. Expected *.fit.')

    # Create an object to open the activity
    activity = FitFile(filename)
    activity.parse()

    # Only the 'record' messages carry per-sample power data.
    records = list(activity.get_messages(name='record'))

    power_rec = np.zeros((len(records), ))
    for idx_rec, rec in enumerate(records):
        p = rec.get_value('power')
        if p is None:
            # Guard clause (fixed grammar of the original message).
            raise ValueError('There are records without power values. '
                             'Check what is happening.')
        power_rec[idx_rec] = float(p)

    return power_rec
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
    """ Load the power records of a FIT activity into a numpy array.

    Records without a power value (e.g. the power meter dropped out during
    the ride) are stored as 0 W so downstream computation is unaffected.

    Parameters
    ----------
    filename: str,
        Path to the FIT file.

    Returns
    -------
    numpy.ndarray
        One power value (float) per record in the activity.

    Raises
    ------
    ValueError
        If ``filename`` lacks the ``.fit`` extension.
    """
    # Check that the filename has the good extension
    if not filename.endswith('.fit'):
        raise ValueError('The file does not have the right extension. Expected *.fit.')

    # Create an object to open the activity
    activity = FitFile(filename)
    activity.parse()

    # Only the 'record' messages carry per-sample power data.
    records = list(activity.get_messages(name='record'))

    # np.zeros already encodes the "missing power -> 0 W" fallback, so only
    # present values need to be written.
    power_rec = np.zeros((len(records), ))
    for idx_rec, rec in enumerate(records):
        p = rec.get_value('power')
        if p is not None:
            power_rec[idx_rec] = float(p)

    return power_rec
|
Solve the issue of the power meter getting disconnected during the ride
|
Solve the issue of the power meter getting disconnected during the ride
|
Python
|
mit
|
glemaitre/power-profile,glemaitre/power-profile,clemaitre58/power-profile,clemaitre58/power-profile
|
844e3635aeb0144f7e4cc0d9de3bfc219312bbe5
|
ocradmin/plugins/views.py
|
ocradmin/plugins/views.py
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
    """
    Query plugin info. Returns the available OCR engines for the
    requested stages as JSON.
    """
    stages = request.GET.getlist("stage")
    payload = ModuleManager.get_json(*stages)
    return HttpResponse(payload, mimetype="application/json")
def runscript(request):
    """
    Execute a script (sent as JSON) on the interactive queue and return
    the Celery task handle as JSON.
    """
    evalnode = request.POST.get("node", "")
    jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
    script = simplejson.loads(jsondata)
    # `async` became a reserved keyword in Python 3.7; the local is renamed
    # so the module stays importable on modern interpreters.
    async_result = OcrTask.run_celery_task("run.script", evalnode, script,
            untracked=True, asyncronous=True, queue="interactive")
    out = dict(task_id=async_result.task_id, status=async_result.status,
            results=async_result.result)
    return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
    """
    Query plugin info. Returns the available OCR engines for the
    requested stages as JSON.
    """
    stages = request.GET.getlist("stage")
    payload = ModuleManager.get_json(*stages)
    return HttpResponse(payload, mimetype="application/json")
def runscript(request):
    """
    Execute a script (sent as JSON) on the interactive queue and return
    the Celery task handle (including the evaluated node) as JSON.
    """
    evalnode = request.POST.get("node", "")
    jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
    script = simplejson.loads(jsondata)
    # `async` became a reserved keyword in Python 3.7; the local is renamed
    # so the module stays importable on modern interpreters.
    async_result = OcrTask.run_celery_task("run.script", evalnode, script,
            untracked=True, asyncronous=True, queue="interactive")
    out = dict(
        node=evalnode,
        task_id=async_result.task_id,
        status=async_result.status,
        results=async_result.result
    )
    return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
Include the eval'd node type in the async return
|
Include the eval'd node type in the async return
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
b052c2fd93cd578723c93dbe7357f1f3c82273eb
|
src/poliastro/patched_conics.py
|
src/poliastro/patched_conics.py
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
    """Approximated radius of the Laplace Sphere of Influence (SOI) for a body.

    Parameters
    ----------
    body : `~poliastro.bodies.Body`
        Astronomical body which the SOI's radius is computed for
    a : float or None, optional
        Semimajor Axis of the body's orbit

    Returns
    -------
    astropy.units.quantity.Quantity
        Approximated radius of the Sphere of Influence (SOI) [m]

    Raises
    ------
    RuntimeError
        If the semimajor axis cannot be derived from the built-in
        ephemerides (e.g. Moon and Pluto need the JPL ephemeris).
    """
    # Compute semimajor axis at epoch J2000 for the body if it was not
    # introduced by the user
    if a is None:
        try:
            ss = Orbit.from_body_ephem(body, J2000)
            a = ss.a
        except KeyError:
            # Was a print + silent fall-through returning None; raise so
            # callers cannot continue with a missing radius.
            raise RuntimeError(
                "To compute the semimajor axis for Moon and Pluto use the "
                "JPL ephemeris:\n"
                ">>> from astropy.coordinates import solar_system_ephemeris\n"
                '>>> solar_system_ephemeris.set("jpl")')
    # Previously unreachable when `a` was supplied by the caller (the
    # function silently returned None); compute the radius in all cases.
    r_SOI = a * (body.k / body.parent.k)**(2 / 5)
    return r_SOI.decompose()
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
    """Approximated radius of the Laplace Sphere of Influence (SOI) for a body.

    Parameters
    ----------
    body : `~poliastro.bodies.Body`
        Astronomical body which the SOI's radius is computed for
    a : float or None, optional
        Semimajor Axis of the body's orbit

    Returns
    -------
    astropy.units.quantity.Quantity
        Approximated radius of the Sphere of Influence (SOI) [m]

    Raises
    ------
    RuntimeError
        If the semimajor axis cannot be derived from the built-in
        ephemerides (e.g. Moon and Pluto need the JPL ephemeris).
    """
    # Compute semimajor axis at epoch J2000 for the body if it was not
    # introduced by the user
    if a is None:
        try:
            ss = Orbit.from_body_ephem(body, J2000)
            a = ss.a
        except KeyError:
            raise RuntimeError(
                """To compute the semimajor axis for Moon and Pluto use the JPL ephemeris:
>>> from astropy.coordinates import solar_system_ephemeris
>>> solar_system_ephemeris.set("jpl")""")
    # Bug fix: the computation used to sit inside the `if a is None` branch,
    # so a caller-supplied `a` silently yielded None; compute in all cases.
    r_SOI = a * (body.k / body.parent.k)**(2 / 5)
    return r_SOI.decompose()
|
Replace prints by an exception
|
Replace prints by an exception
|
Python
|
mit
|
anhiga/poliastro,Juanlu001/poliastro,anhiga/poliastro,poliastro/poliastro,newlawrence/poliastro,newlawrence/poliastro,Juanlu001/poliastro,newlawrence/poliastro,anhiga/poliastro,Juanlu001/poliastro
|
d156beeaf0638e585c616d697e1ecd76a98d8a3f
|
axelrod/tests/test_reflex.py
|
axelrod/tests/test_reflex.py
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
    """Test suite for the Reflex strategy."""
    def test_initial_nice_strategy(self):
        """ First response should always be cooperation. """
        reflex = axelrod.Reflex()
        opponent = axelrod.Player()
        self.assertEqual(reflex.strategy(opponent), 'C')
    def test_representation(self):
        """ How do we appear? """
        self.assertEqual(str(axelrod.Reflex()), "Reflex")
    def test_reset_method(self):
        """ Does self.reset() reset the self? """
        reflex = axelrod.Reflex()
        reflex.history = ['C', 'D', 'C', 'C']
        reflex.reset()
        self.assertEqual(reflex.history, [])
        self.assertEqual(reflex.response, 'C')
    def test_stochastic(self):
        """ We are not stochastic. """
        self.assertFalse(axelrod.Reflex().stochastic)
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
    """Test suite for the Reflex strategy (new-style TestPlayer format)."""
    name = "Reflex"
    player = axelrod.Reflex
    stochastic = False
    def test_strategy(self):
        """ First response should always be cooperation. """
        reflex = axelrod.Reflex()
        opponent = axelrod.Player()
        self.assertEqual(reflex.strategy(opponent), 'C')
    def test_reset_method(self):
        """ Does self.reset() reset the self? """
        reflex = axelrod.Reflex()
        reflex.history = ['C', 'D', 'C', 'C']
        reflex.reset()
        self.assertEqual(reflex.history, [])
        self.assertEqual(reflex.response, 'C')
|
Simplify tests to new format.
|
Simplify tests to new format.
|
Python
|
mit
|
marcharper/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod,ranjinidas/Axelrod
|
9f208fd476c8864a1b4c294b80d5d8191c400fb5
|
admin_sso/admin.py
|
admin_sso/admin.py
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
    """Admin for SSO assignments, exposing the OAuth start/end endpoints."""
    list_display = ('__unicode__', 'username', 'username_mode', 'domain',
                    'user', 'weight')
    list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')

    def get_urls(self):
        from admin_sso.views import start, end
        opts = self.model._meta
        info = (opts.app_label, opts.model_name)
        extra = [
            url(r'^start/$', start, name='%s_%s_start' % info),
            url(r'^end/$', end, name='%s_%s_end' % info),
        ]
        return extra + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
# Optionally swap in the SSO login template carrying the SSO button.
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
    admin.site.login_template = 'admin_sso/login.html'
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
    """Admin for SSO assignments, exposing the OAuth start/end endpoints."""
    list_display = ('__unicode__', 'username', 'username_mode', 'domain',
                    'user', 'weight')
    list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
    # Raw-id widget keeps the change list usable on sites with many users.
    raw_id_fields = ('user',)

    def get_urls(self):
        from admin_sso.views import start, end
        opts = self.model._meta
        info = (opts.app_label, opts.model_name)
        extra = [
            url(r'^start/$', start, name='%s_%s_start' % info),
            url(r'^end/$', end, name='%s_%s_end' % info),
        ]
        return extra + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
# Optionally swap in the SSO login template carrying the SSO button.
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
    admin.site.login_template = 'admin_sso/login.html'
|
Add user to raw_id_fields, drastically improves UX on sites with many users
|
Add user to raw_id_fields, drastically improves UX on sites with many users
|
Python
|
bsd-3-clause
|
matthiask/django-admin-sso,diegobz/django-admin-sso,diegobz/django-admin-sso,matthiask/django-admin-sso
|
e255b92589000c2d485d35f9008b78e0313b4374
|
pystache/template_spec.py
|
pystache/template_spec.py
|
# coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "Spec" in TemplateSpec
stands for template information that is "special" or "specified".
"""
# TODO: finish the class docstring.
class TemplateSpec(object):
    """
    A mixin or interface for specifying custom template information.

    The "spec" in TemplateSpec can be taken to mean that the template
    information is either "specified" or "special."

    Subclass this only when customized template loading is needed. Each
    attribute overrides one piece of template information for the view; a
    None value means "use the default behavior" for that value. All
    attributes are initialized to None.

    Attributes:

      template: the template as a string.

      template_rel_path: the path to the template file, relative to the
        directory containing the module defining the class.

      template_rel_directory: the directory containing the template file,
        relative to the directory containing the module defining the class.

      template_name: the name of the template.

      template_extension: the template file extension. Defaults to
        "mustache". Pass False for no extension (i.e. extensionless
        template files).

      template_encoding: the encoding used by the template.

    """
    template = None
    template_rel_path = None
    template_rel_directory = None
    template_name = None
    template_extension = None
    template_encoding = None
|
# coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "spec" in TemplateSpec
stands for "special" or "specified" template information.
"""
class TemplateSpec(object):
    """
    A mixin or interface for specifying custom template information.

    The "spec" in TemplateSpec can be taken to mean that the template
    information is either "specified" or "special."

    Subclass this only when customized template loading is needed. Each
    attribute overrides one piece of template information for the view; a
    None value means "use the default behavior" for that value. All
    attributes are initialized to None.

    Attributes:

      template: the template as a string.

      template_encoding: the encoding used by the template.

      template_extension: the template file extension. Defaults to
        "mustache". Pass False for no extension (i.e. extensionless
        template files).

      template_name: the name of the template.

      template_rel_directory: the directory containing the template file,
        relative to the directory containing the module defining the class.

      template_rel_path: the path to the template file, relative to the
        directory containing the module defining the class.

    """
    template = None
    template_encoding = None
    template_extension = None
    template_name = None
    template_rel_directory = None
    template_rel_path = None
|
Reorder TemplateSpec attributes and add to docstring.
|
Reorder TemplateSpec attributes and add to docstring.
|
Python
|
mit
|
nitish116/pystache,rismalrv/pystache,charbeljc/pystache,rismalrv/pystache,harsh00008/pystache,arlenesr28/pystache,defunkt/pystache,beni55/pystache,nitish116/pystache,nitish116/pystache,rismalrv/pystache,jrnold/pystache,jrnold/pystache,harsh00008/pystache,harsh00008/pystache,charbeljc/pystache,arlenesr28/pystache,beni55/pystache,arlenesr28/pystache
|
a3ca99ab519401df8f2418ce877065dc3aa63146
|
app/parsers/models.py
|
app/parsers/models.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from typing import List
from rpe.resources import Resource
from pydantic import BaseModel, Field
# Parser-supplied metadata is arbitrary, but some fields are required
# currently just `src`
class MessageMetadata(BaseModel):
    # Identifier of the parser/source that produced the message (required).
    src: str
    class Config:
        # Parsers may attach arbitrary extra metadata fields.
        extra = 'allow'
class EnforcerControlData(BaseModel):
    # Whether the enforcer should act on this message at all.
    enforce: bool = True
    # Presumably allows deferring enforcement rather than acting
    # immediately -- confirm against the enforcer implementation.
    delay_enforcement: bool = True
    class Config:
        # Unknown control fields are rejected outright.
        extra = 'forbid'
class ParsedMessage(BaseModel):
    metadata: MessageMetadata
    resources: List[Resource]
    control_data: EnforcerControlData = EnforcerControlData()
    # Creation time, seconds since the epoch.
    # NOTE(review): time.time() returns a float; pydantic coerces it to
    # int -- confirm truncation to whole seconds is intended.
    timestamp: int = Field(default_factory=time.time)
    class Config:
        # Resource is not a pydantic model; allow it as a field type.
        arbitrary_types_allowed = True
        extra = 'forbid'
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from typing import List
from rpe.resources import Resource
from pydantic import BaseModel, Field
# Parser-supplied metadata is arbitrary, but some fields are required
# currently just `src`
class MessageMetadata(BaseModel):
    # Identifier of the parser/source that produced the message (required).
    src: str
    class Config:
        # Parsers may attach arbitrary extra metadata fields.
        extra = 'allow'
class EnforcerControlData(BaseModel):
    # Whether the enforcer should act on this message at all.
    enforce: bool = True
    # Presumably allows deferring enforcement rather than acting
    # immediately -- confirm against the enforcer implementation.
    delay_enforcement: bool = True
    class Config:
        # Unknown control fields are rejected outright.
        extra = 'forbid'
class ParsedMessage(BaseModel):
    metadata: MessageMetadata
    resources: List[Resource]
    control_data: EnforcerControlData = EnforcerControlData()
    # Creation time, seconds since the epoch.
    # NOTE(review): time.time() returns a float; pydantic coerces it to
    # int -- confirm truncation to whole seconds is intended.
    timestamp: int = Field(default_factory=time.time)
    class Config:
        # Resource is not a pydantic model; allow it as a field type.
        arbitrary_types_allowed = True
        extra = 'forbid'
    @property
    def age(self):
        # Whole seconds elapsed since the message was created.
        return int(time.time()) - self.timestamp
|
Add message_age property to ParsedMessage
|
Add message_age property to ParsedMessage
|
Python
|
apache-2.0
|
forseti-security/real-time-enforcer
|
9cf29c769e3902c44914d3e216ae9457aa7e5fef
|
api/api/config_settings/redis_settings.py
|
api/api/config_settings/redis_settings.py
|
import redis
from api.utils import config
class RedisPools(object):
EXPERIMENTS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_EXPERIMENTS_STATUS_URL'))
JOBS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOBS_STATUS_URL'))
JOB_CONTAINERS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOB_CONTAINERS_URL'))
|
import redis
from api.utils import config
class RedisPools(object):
EXPERIMENTS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_EXPERIMENTS_STATUS_URL'))
JOBS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOBS_STATUS_URL'))
JOB_CONTAINERS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOB_CONTAINERS_URL'))
TO_STREAM = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_TO_STREAM_URL'))
|
Add to stream redis db
|
Add to stream redis db
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
66b49f913513545e5ae0484963412e965c8f9aa1
|
saleor/dashboard/category/forms.py
|
saleor/dashboard/category/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
if self.instance in parent.get_ancestors():
raise forms.ValidationError(_('A category may not be made a child of any of its descendants.'))
return parent
|
Check if new parent is not a descendant of current category
|
Check if new parent is not a descendant of current category
|
Python
|
bsd-3-clause
|
rchav/vinerack,HyperManTT/ECommerceSaleor,rchav/vinerack,taedori81/saleor,jreigel/saleor,tfroehlich82/saleor,avorio/saleor,paweltin/saleor,laosunhust/saleor,UITools/saleor,spartonia/saleor,arth-co/saleor,spartonia/saleor,taedori81/saleor,arth-co/saleor,rchav/vinerack,tfroehlich82/saleor,avorio/saleor,josesanch/saleor,UITools/saleor,arth-co/saleor,jreigel/saleor,maferelo/saleor,UITools/saleor,tfroehlich82/saleor,car3oon/saleor,itbabu/saleor,Drekscott/Motlaesaleor,dashmug/saleor,car3oon/saleor,paweltin/saleor,KenMutemi/saleor,rodrigozn/CW-Shop,laosunhust/saleor,jreigel/saleor,paweltin/saleor,taedori81/saleor,mociepka/saleor,itbabu/saleor,avorio/saleor,Drekscott/Motlaesaleor,josesanch/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,mociepka/saleor,dashmug/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,UITools/saleor,arth-co/saleor,laosunhust/saleor,KenMutemi/saleor,itbabu/saleor,spartonia/saleor,paweltin/saleor,josesanch/saleor,mociepka/saleor,spartonia/saleor,rodrigozn/CW-Shop,dashmug/saleor,rodrigozn/CW-Shop,maferelo/saleor,taedori81/saleor,maferelo/saleor,KenMutemi/saleor,avorio/saleor,car3oon/saleor,laosunhust/saleor
|
c7d9287b770a0033cb54f9c1f9ac5f8beb25d528
|
scripts/cronRefreshEdxQualtrics.py
|
scripts/cronRefreshEdxQualtrics.py
|
from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
Revert "Added script for cron job to load surveys to database."
|
Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.
|
Python
|
bsd-3-clause
|
paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation
|
d5049edc8567cebf936bb07847906c5400f9a6d9
|
ceph_deploy/tests/unit/hosts/test_suse.py
|
ceph_deploy/tests/unit/hosts/test_suse.py
|
from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
|
from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
class TestSuseMapComponents(object):
def test_valid(self):
pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
assert 'ceph' in pkgs
assert 'ceph-common' in pkgs
assert 'ceph-radosgw' in pkgs
assert 'ceph-osd' not in pkgs
def test_invalid(self):
pkgs = map_components(['not-provided', 'ceph-mon'])
assert 'not-provided' not in pkgs
assert 'ceph' in pkgs
|
Add tests for component to SUSE package mapping
|
Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de>
|
Python
|
mit
|
zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,ceph/ceph-deploy,ghxandsky/ceph-deploy,zhouyuan/ceph-deploy,imzhulei/ceph-deploy,SUSE/ceph-deploy,Vicente-Cheng/ceph-deploy,ceph/ceph-deploy,branto1/ceph-deploy,trhoden/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,ghxandsky/ceph-deploy,SUSE/ceph-deploy,branto1/ceph-deploy,codenrhoden/ceph-deploy,isyippee/ceph-deploy,isyippee/ceph-deploy,Vicente-Cheng/ceph-deploy,shenhequnying/ceph-deploy,osynge/ceph-deploy,imzhulei/ceph-deploy,codenrhoden/ceph-deploy
|
d8f33c46b6462788ef6e38dc5aefcdda2144eb66
|
camoco/__init__.py
|
camoco/__init__.py
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
Camoco.create()
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
Fix initial create for camoco class
|
Fix initial create for camoco class
|
Python
|
mit
|
schae234/Camoco,schae234/Camoco
|
a1bcb99691f5a0238f6a34a5579df3e89e8d6823
|
child_sync_gp/model/project_compassion.py
|
child_sync_gp/model/project_compassion.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
Fix bug in write project.
|
Fix bug in write project.
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
|
4ad92bfcbfd2145b008cd18e934ebd6dc3be53e9
|
pytest/test_prefork.py
|
pytest/test_prefork.py
|
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
|
import os
import shutil
import os.path
import tempfile
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
def test_WriteAndFlushFile():
"""
Make sure we can write to and read from a file.
"""
try:
# Create a directory. Make sure to remove it at the end.
dirname = tempfile.mkdtemp()
filename = 'filename.txt'
text1 = 'The quick brown fox\n'
text2 = 'The lazy dog'
full_path = os.path.join(dirname, filename)
# Open a file and write using both changed methods
f = prefork.WriteAndFlushFile(full_path, 'w')
f.write(text1)
f.writelines(text2)
f.close()
# Read everything back
f = open(full_path, 'r')
data = f.readlines()
f.close()
assert data[0] == text1
assert data[1] == text2
finally:
# Always remove it
shutil.rmtree(dirname)
|
Add a test for the file object
|
Add a test for the file object
|
Python
|
bsd-3-clause
|
markrwilliams/tectonic
|
cb8bf92ab2f71767de8b471992d79131e4dde9a1
|
quicksort/quicksort.py
|
quicksort/quicksort.py
|
def sort(arr):
return arr;
if __name__ == '__main__':
unsorted = list(reversed(range(1000)));
print sort(unsorted);
|
def sort(arr, length):
if length == 1:
return
return (arr, length)
if __name__ == '__main__':
unsorted = list(reversed(range(1000)))
initial_len = len(unsorted)
print sort(unsorted, initial_len)
|
Add length parameter to sort and remove semicolons
|
Add length parameter to sort and remove semicolons
The sort function requires a length parameter, so the function
declaration and the initial call were modified to reflect that.
A length of just 1 element represents the base case of the
recursion, so the function simply returns in this case.
Also I forgot how python works for a minute there and included a
bunch of unecessary semicolons. Those are gone now
|
Python
|
mit
|
timpel/stanford-algs,timpel/stanford-algs
|
9256844b08edaff1b9755a6ffc25acc0df76934d
|
MoodJournal/entries/serializers.py
|
MoodJournal/entries/serializers.py
|
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='category-detail'
)
# TODO Should I just either not include it, or go with the user name?
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = UserDefinedCategory
fields = ('url', 'user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.ModelSerializer):
class Meta:
model = UserDefinedCategory
fields = ('user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
Revert "beginning hyperlink model serialization"
|
Revert "beginning hyperlink model serialization"
This reverts commit 6d41c54397512da69604f7e730757f4aff96374f.
|
Python
|
mit
|
swpease/MoodJournal,swpease/MoodJournal,swpease/MoodJournal
|
5a12f027079d109228456c6f3e4912317721246a
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
)
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
|
Declare that cyrtranslit supports Python 3.7
|
Declare that cyrtranslit supports Python 3.7
|
Python
|
mit
|
opendatakosovo/cyrillic-transliteration
|
aea05ee76193ac0abe2f6673910917bf13a3b339
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='simplecrypto',
version=open('CHANGES.txt').read().split()[0],
author='Lucas Boppre Niehues',
author_email='lucasboppre@gmail.com',
packages=['simplecrypto'],
url='http://pypi.python.org/pypi/simplecrypto/',
license='LICENSE.txt',
description='simplecrypto',
long_description=open('README.md').read(),
install_requires=[
'PyCrypto',
],
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
],
)
|
from distutils.core import setup
setup(
name='simplecrypto',
version=open('CHANGES.txt').read().split()[0],
author='Lucas Boppre Niehues',
author_email='lucasboppre@gmail.com',
packages=['simplecrypto'],
url='https://github.com/boppreh/simplecrypto',
license='LICENSE.txt',
description='simplecrypto',
long_description=open('README.md').read(),
install_requires=[
'PyCrypto',
],
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
],
)
|
Change homepage to github URL
|
Change homepage to github URL
|
Python
|
mit
|
boppreh/simplecrypto
|
5bb90727efb62525995caad3b52fd588d8b08298
|
pregnancy/urls.py
|
pregnancy/urls.py
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import contractions.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pregnancy.views.home', name='home'),
# url(r'^pregnancy/', include('pregnancy.foo.urls')),
url(r'^$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
Update url to point / to the contractions app
|
Update url to point / to the contractions app
|
Python
|
bsd-2-clause
|
dreinhold/pregnancy,dreinhold/pregnancy,dreinhold/pregnancy
|
9fa76b8e9d7fb9309a49d46b9bbd43e9b65418ad
|
pytest_cookies.py
|
pytest_cookies.py
|
# -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('cookies')
group.addoption(
'--foo',
action='store',
dest='dest_foo',
default=2015,
help='Set the value for the fixture "bar".'
)
parser.addini('HELLO', 'Dummy pytest.ini setting')
@pytest.fixture
def bar(request):
return request.config.option.dest_foo
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.main import cookiecutter
class Cookies(object):
"""Class to provide convenient access to the cookiecutter API."""
error = None
project = None
def __init__(self, template, output_dir):
self._template = template
self._output_dir = output_dir
def bake(self, extra_context=None):
try:
project_dir = cookiecutter(
self._template,
no_input=True,
extra_context=extra_context,
output_dir=self._output_dir
)
except Exception as e:
self.error = e
else:
self.project = project_dir
@pytest.fixture
def cookies(request, tmpdir):
output_dir = request.config.option.output_dir
if not output_dir:
output_dir = str(tmpdir.mkdir('cookies_output'))
_cookies = Cookies('.', output_dir)
return _cookies
def pytest_addoption(parser):
group = parser.getgroup('cookies')
group.addoption(
'--output-dir',
action='store',
dest='output_dir',
help='Set the output directory for Cookiecutter'
)
parser.addini('HELLO', 'Dummy pytest.ini setting')
|
Implement cookies fixture along with Helper class
|
Implement cookies fixture along with Helper class
|
Python
|
mit
|
hackebrot/pytest-cookies
|
eec004dd34ffc977e29481c94345e20cae867238
|
views.py
|
views.py
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views', 'models'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
Expand pre loading on warmup
|
Expand pre loading on warmup
|
Python
|
bsd-3-clause
|
adieu/djangoappengine
|
ba1bfc262e023a01d6e201d48d234640a443ed96
|
raven/__init__.py
|
raven/__init__.py
|
"""
raven
~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from base import *
from conf import *
|
"""
raven
~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from raven.base import *
from raven.conf import *
|
Use absolute imports, not relative ones.
|
Use absolute imports, not relative ones.
|
Python
|
bsd-3-clause
|
hzy/raven-python,akheron/raven-python,akalipetis/raven-python,nikolas/raven-python,arthurlogilab/raven-python,inspirehep/raven-python,recht/raven-python,akheron/raven-python,arthurlogilab/raven-python,arthurlogilab/raven-python,lepture/raven-python,percipient/raven-python,collective/mr.poe,Goldmund-Wyldebeast-Wunderliebe/raven-python,someonehan/raven-python,recht/raven-python,inspirehep/raven-python,jbarbuto/raven-python,johansteffner/raven-python,recht/raven-python,icereval/raven-python,lepture/raven-python,smarkets/raven-python,hzy/raven-python,arthurlogilab/raven-python,jmp0xf/raven-python,ronaldevers/raven-python,inspirehep/raven-python,ewdurbin/raven-python,jbarbuto/raven-python,nikolas/raven-python,jmagnusson/raven-python,akheron/raven-python,nikolas/raven-python,Photonomie/raven-python,dbravender/raven-python,akalipetis/raven-python,getsentry/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,smarkets/raven-python,ronaldevers/raven-python,danriti/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,percipient/raven-python,jmagnusson/raven-python,someonehan/raven-python,jmp0xf/raven-python,lepture/raven-python,danriti/raven-python,smarkets/raven-python,smarkets/raven-python,danriti/raven-python,someonehan/raven-python,getsentry/raven-python,jmp0xf/raven-python,hzy/raven-python,icereval/raven-python,getsentry/raven-python,nikolas/raven-python,percipient/raven-python,inspirehep/raven-python,jbarbuto/raven-python,jmagnusson/raven-python,akalipetis/raven-python,dbravender/raven-python,Photonomie/raven-python,dbravender/raven-python,ewdurbin/raven-python,ronaldevers/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jbarbuto/raven-python,johansteffner/raven-python,johansteffner/raven-python,icereval/raven-python,Photonomie/raven-python,icereval/raven-python,ewdurbin/raven-python
|
ee0c852d494a0952d51b7f5ddde77ec2b46deca3
|
lambdas/update_ecs_service_size.py
|
lambdas/update_ecs_service_size.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Change the size of an ECS service.
This is used to schedule our service applications: by setting the desired
size to 0/greater-than-0, Amazon will do the work of spinning up or scaling
down the tasks.
The script is triggered by notifications to an SNS topic, in which the
message should be a JSON string that includes "cluster", "service" and
"desired_count" as attributes.
"""
import json
import boto3
def change_desired_count(cluster, service, desired_count):
"""
Given an ECS cluster, service name and desired instance count, change
the instance count on AWS.
"""
ecs = boto3.client('ecs')
resp = ecs.update_service(
cluster=cluster,
service=service,
desiredCount=desired_count
)
print('ECS response: %r' % resp)
assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
def main(event, _):
print('Received event: %r' % event)
message = event['Message']
message_data = json.loads(message)
change_desired_count(
cluster=message_data['cluster'],
service=message_data['service'],
desired_count=message_data['desired_count']
)
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Change the size of an ECS service.
This is used to schedule our service applications: by setting the desired
size to 0/greater-than-0, Amazon will do the work of spinning up or scaling
down the tasks.
The script is triggered by notifications to an SNS topic, in which the
message should be a JSON string that includes "cluster", "service" and
"desired_count" as attributes.
"""
import json
import boto3
def change_desired_count(cluster, service, desired_count):
"""
Given an ECS cluster, service name and desired instance count, change
the instance count on AWS.
"""
ecs = boto3.client('ecs')
resp = ecs.update_service(
cluster=cluster,
service=service,
desiredCount=desired_count
)
print('ECS response: %r' % resp)
assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
def main(event, _):
print('Received event: %r' % event)
message = event['Records'][0]['Sns']['Message']
message_data = json.loads(message)
change_desired_count(
cluster=message_data['cluster'],
service=message_data['service'],
desired_count=message_data['desired_count']
)
|
Fix the Update ECS Service size Lambda
|
Fix the Update ECS Service size Lambda
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
dfe84075109620481cac493c1d0dba69d9ca19df
|
vesper/tests/test_case_mixin.py
|
vesper/tests/test_case_mixin.py
|
"""
Unit test test case mixin class.
This mixin class is intended for use with a subclass of either
`unittest.TestCase` or `django.test.TestCase`. It includes several
convenience `_assert...` methods.
"""
import vesper.util.numpy_utils as numpy_utils
class TestCaseMixin:
def assert_raises(self, exception_class, function, *args, **kwargs):
self.assertRaises(exception_class, function, *args, **kwargs)
try:
function(*args, **kwargs)
except exception_class as e:
pass
# print(str(e))
def assert_arrays_equal(self, x, y):
self.assertTrue(numpy_utils.arrays_equal(x, y))
def assert_arrays_close(self, x, y):
self.assertTrue(numpy_utils.arrays_close(x, y))
|
"""
Unit test test case mixin class.
This mixin class is intended for use with a subclass of either
`unittest.TestCase` or `django.test.TestCase`. It includes several
convenience `_assert...` methods.
"""
import vesper.util.numpy_utils as numpy_utils
# When True, print the message of each expected exception instead of
# swallowing it silently.
SHOW_EXCEPTION_MESSAGES = False


class TestCaseMixin:
    """Convenience assertion mixin for unittest/Django test case classes."""

    def assert_raises(self, exception_class, function, *args, **kwargs):
        """Assert that calling `function(*args, **kwargs)` raises `exception_class`."""
        raised = False
        try:
            function(*args, **kwargs)
        except exception_class as err:
            raised = True
            if SHOW_EXCEPTION_MESSAGES:
                print(str(err))
        if not raised:
            raise AssertionError(
                f'{exception_class.__name__} not raised by '
                f'{function.__name__}')

    async def assert_raises_async(
            self, exception_class, function, *args, **kwargs):
        """Async variant of `assert_raises` for coroutine functions."""
        raised = False
        try:
            await function(*args, **kwargs)
        except exception_class as err:
            raised = True
            if SHOW_EXCEPTION_MESSAGES:
                print(str(err))
        if not raised:
            raise AssertionError(
                f'{exception_class.__name__} not raised by '
                f'{function.__name__}')

    def assert_arrays_equal(self, x, y):
        """Assert that two arrays are element-wise equal."""
        self.assertTrue(numpy_utils.arrays_equal(x, y))

    def assert_arrays_close(self, x, y):
        """Assert that two arrays are element-wise close."""
        self.assertTrue(numpy_utils.arrays_close(x, y))
|
Add method for testing async function errors.
|
Add method for testing async function errors.
|
Python
|
mit
|
HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper
|
33ba6400768b759180d7602c14e6f947d1c8e771
|
djangosaml2/templatetags/idplist.py
|
djangosaml2/templatetags/idplist.py
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
    """Template node exposing the available IdPs as a context variable.

    Fix: the SAML configuration is now loaded lazily at render time
    instead of at node construction, so importing/compiling the tag no
    longer crashes when the configuration is not ready yet, and the node
    holds no shared mutable state (more re-entrant).
    """

    def __init__(self, variable_name):
        # Name of the context variable to populate at render time.
        self.variable_name = variable_name

    def render(self, context):
        # Load the config as late as possible.
        conf = config_settings_loader()
        context[self.variable_name] = conf.get_available_idps()
        return ''
@register.tag
def idplist(parser, token):
    """Compile the ``{% idplist as varname %}`` template tag.

    Stores the list of available identity providers in the template
    context under ``varname``.
    """
    try:
        tag_name, as_part, variable = token.split_contents()
    except ValueError:
        raise template.TemplateSyntaxError(
            '%r tag requires two arguments' % token.contents.split()[0])
    if not as_part == 'as':
        raise template.TemplateSyntaxError(
            '%r tag first argument must be the literal "as"' % tag_name)
    return IdPListNode(variable)
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
    """Template node that injects the available IdPs into the context."""

    def __init__(self, variable_name):
        # Name of the context variable to populate at render time.
        self.variable_name = variable_name

    def render(self, context):
        # Config is loaded lazily here (not in __init__) so the tag can be
        # compiled before the SAML configuration is ready.
        conf = config_settings_loader()
        context[self.variable_name] = conf.get_available_idps()
        return ''
@register.tag
def idplist(parser, token):
    """Compile the ``{% idplist as varname %}`` template tag.

    Usage: ``{% idplist as idps %}`` stores the available identity
    providers in the context variable ``idps``.
    """
    try:
        tag_name, keyword, var_name = token.split_contents()
    except ValueError:
        raise template.TemplateSyntaxError(
            '%r tag requires two arguments' % token.contents.split()[0])
    if keyword != 'as':
        raise template.TemplateSyntaxError(
            '%r tag first argument must be the literal "as"' % tag_name)
    return IdPListNode(var_name)
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. This also makes the code more re-entrant.
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. This also makes the code more re-entrant.
|
Python
|
apache-2.0
|
WiserTogether/djangosaml2,sdelements/djangosaml2,kradalby/djangosaml2,kradalby/djangosaml2,WiserTogether/djangosaml2
|
d83ed858dab0991e4829a7f249260ae1f1140b41
|
rave/main.py
|
rave/main.py
|
import rave.events
import rave.modularity
import rave.backends
import rave.resources
import rave.rendering
def init_game(game):
    """Emit the 'game.init' event, then load modules and select backends
    inside the game's environment context."""
    rave.events.emit('game.init', game)
    with game.env:
        rave.modularity.load_all()
        rave.backends.select_all()
def run_game(game):
    """Run the main loop for `game` until a 'game.stop' event is caught."""
    running = True
    # Stop the event loop when a stop event was caught.
    def stop(event):
        nonlocal running
        running = False
    game.events.hook('game.stop', stop)
    rave.events.emit('game.start', game)
    with game.env:
        # Typical handle events -> update game state -> render loop.
        while running:
            with game.active_lock:
                # Suspend main loop while lock is active: useful for when the OS requests an application suspend.
                pass
            rave.backends.handle_events(game)
            # Render audio/video only when the corresponding backend exists.
            if game.mixer:
                game.mixer.render(None)
            if game.window:
                game.window.render(None)
|
import rave.events
import rave.modularity
import rave.backends
import rave.resources
import rave.rendering
def init_game(game):
    """Load modules in the engine context, emit 'game.init', then select
    backends inside the game's environment context."""
    rave.modularity.load_all()
    rave.events.emit('game.init', game)
    with game.env:
        rave.backends.select_all()
def run_game(game):
    """Run the main loop for `game` until a 'game.stop' event is caught."""
    running = True
    # Stop the event loop when a stop event was caught.
    def stop(event):
        nonlocal running
        running = False
    game.events.hook('game.stop', stop)
    rave.events.emit('game.start', game)
    with game.env:
        # Typical handle events -> update game state -> render loop.
        while running:
            with game.active_lock:
                # Suspend main loop while lock is active: useful for when the OS requests an application suspend.
                pass
            rave.backends.handle_events(game)
            # Render audio/video only when the corresponding backend exists.
            if game.mixer:
                game.mixer.render(None)
            if game.window:
                game.window.render(None)
|
Load modules in engine context.
|
core: Load modules in engine context.
|
Python
|
bsd-2-clause
|
rave-engine/rave
|
a37ef5af5a28207d21b11f08990e233a34afa768
|
acme/utils/loggers/__init__.py
|
acme/utils/loggers/__init__.py
|
# python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Acme loggers."""
from acme.utils.loggers.aggregators import Dispatcher
from acme.utils.loggers.asynchronous import AsyncLogger
from acme.utils.loggers.base import Logger
from acme.utils.loggers.base import to_numpy
from acme.utils.loggers.csv import CSVLogger
from acme.utils.loggers.filters import NoneFilter
from acme.utils.loggers.filters import TimeFilter
from acme.utils.loggers.default import make_default_logger # pylint: disable=g-bad-import-order
from acme.utils.loggers.terminal import TerminalLogger
|
# python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Acme loggers."""
from acme.utils.loggers.aggregators import Dispatcher
from acme.utils.loggers.asynchronous import AsyncLogger
from acme.utils.loggers.base import Logger
from acme.utils.loggers.base import LoggingData
from acme.utils.loggers.base import to_numpy
from acme.utils.loggers.csv import CSVLogger
from acme.utils.loggers.filters import NoneFilter
from acme.utils.loggers.filters import TimeFilter
from acme.utils.loggers.default import make_default_logger # pylint: disable=g-bad-import-order
from acme.utils.loggers.terminal import TerminalLogger
|
Add LoggingData annotation to Logger base import so users can type-annotate Logger subclasses properly.
|
Add LoggingData annotation to Logger base import so users can type-annotate Logger subclasses properly.
PiperOrigin-RevId: 315308368
Change-Id: I608c9f6f5f4b9edbbf504ec321fc4c8e90ed8193
|
Python
|
apache-2.0
|
deepmind/acme,deepmind/acme
|
7014bfb976524e95b6e13eb44cf62401568bff1a
|
examples/web_demo/exifutil.py
|
examples/web_demo/exifutil.py
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
    """Load an image, apply its EXIF orientation, and return an RGB array.

    Returns an H x W x 3 float32 array scaled to [0, 1].

    Fix: grayscale images previously came back as a 2-D array and RGBA
    images with 4 channels, breaking consumers that assume RGB; a single
    channel is now tiled to three, and an alpha channel is dropped.
    """
    im = Image.open(im_path)
    if hasattr(im, '_getexif'):
        exif = im._getexif()
        # EXIF tag 274 is the orientation flag.
        if exif is not None and 274 in exif:
            orientation = exif[274]
            im = apply_orientation(im, orientation)
    img = np.asarray(im).astype(np.float32) / 255.
    if img.ndim == 2:
        # Grayscale: replicate the single channel into RGB.
        img = img[:, :, np.newaxis]
        img = np.tile(img, (1, 1, 3))
    elif img.shape[2] == 4:
        # RGBA: drop the alpha channel.
        img = img[:, :, :3]
    return img
def apply_orientation(im, orientation):
    """Apply the PIL transpose sequence for an EXIF orientation code.

    Unknown orientation codes leave the image untouched.
    """
    for transpose_method in ORIENTATIONS.get(orientation, ()):
        im = im.transpose(transpose_method)
    return im
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
    """Load an image, apply its EXIF orientation, and return an
    H x W x 3 float32 RGB array scaled to [0, 1]."""
    im = Image.open(im_path)
    if hasattr(im, '_getexif'):
        exif = im._getexif()
        # EXIF tag 274 is the orientation flag.
        if exif is not None and 274 in exif:
            orientation = exif[274]
            im = apply_orientation(im, orientation)
    img = np.asarray(im).astype(np.float32) / 255.
    if img.ndim == 2:
        # Grayscale: replicate the single channel into RGB.
        img = img[:, :, np.newaxis]
        img = np.tile(img, (1, 1, 3))
    elif img.shape[2] == 4:
        # RGBA: drop the alpha channel.
        img = img[:, :, :3]
    return img
def apply_orientation(im, orientation):
    """Apply the PIL transpose sequence for an EXIF orientation code.

    Unknown orientation codes leave the image untouched.
    """
    if orientation in ORIENTATIONS:
        for method in ORIENTATIONS[orientation]:
            im = im.transpose(method)
    return im
|
FIX web_demo upload was not processing grayscale correctly
|
FIX web_demo upload was not processing grayscale correctly
|
Python
|
agpl-3.0
|
tackgeun/caffe,CZCV/s-dilation-caffe,longjon/caffe,gnina/gnina,CZCV/s-dilation-caffe,tackgeun/caffe,gnina/gnina,gnina/gnina,gogartom/caffe-textmaps,CZCV/s-dilation-caffe,gogartom/caffe-textmaps,wangg12/caffe,tackgeun/caffe,wangg12/caffe,gnina/gnina,gnina/gnina,gogartom/caffe-textmaps,CZCV/s-dilation-caffe,longjon/caffe,wangg12/caffe,longjon/caffe,longjon/caffe,gogartom/caffe-textmaps,wangg12/caffe,gnina/gnina,tackgeun/caffe
|
527c414da01dd40425086253dec2007c54e30675
|
send_reminders.py
|
send_reminders.py
|
from twilio.rest import TwilioRestClient
import project.utils.reminders
# NOTE(review): hard-coded Twilio credentials committed to source control —
# these should be revoked and loaded from environment variables or a
# secrets store instead of being embedded here.
ACCOUNT_SID = "AC6a9746370384b26236aae71013aa35b2"
AUTH_TOKEN = "38b0bcc37788e553978c840929d54aa2"
def send_reminder(text, phone):
    """Send `text` as an SMS to `phone` via the Twilio REST API."""
    client = TwilioRestClient(ACCOUNT_SID, AUTH_TOKEN)
    client.messages.create(to=phone, from_="+15713646776", body=text)
def send_all_reminders():
    """Fetch every reminder that is currently due and send each one."""
    for reminder in project.utils.reminders.get_needed_reminders():
        send_reminder(reminder.text, reminder.phone)
send_all_reminders()
|
from twilio.rest import TwilioRestClient
import project.utils.reminders
ACCOUNT_SID = "ayylmao"
AUTH_TOKEN = "ayylmao"
def send_reminder(text, phone):
    """Send `text` as an SMS to `phone` via the Twilio REST API."""
    client = TwilioRestClient(ACCOUNT_SID, AUTH_TOKEN)
    client.messages.create(to=phone, from_="+15172194225", body=text)
def send_all_reminders():
    """Fetch every reminder that is currently due and send each one."""
    x = project.utils.reminders.get_needed_reminders()
    for i in x:
        send_reminder(i.text, i.phone)
send_all_reminders()
|
Update API keys and phone number
|
Update API keys and phone number
|
Python
|
apache-2.0
|
tjcsl/mhacksiv
|
e7865a22eb2e7433f3c36cd571aae3ac65436423
|
signage/models.py
|
signage/models.py
|
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
    """An image slide shown on every display sharing at least one tag.

    `TimeFramedModel` contributes the time window during which the slide
    is active.
    """
    name = models.CharField(
        max_length=255,
    )
    description = models.TextField(
        blank=True,
    )
    image = models.ImageField(
        upload_to='slides/',
    )
    # How long the slide stays on screen (presumably seconds — TODO confirm
    # against the player code).
    duration = models.PositiveIntegerField(
        default=7,
    )
    # Ordering weight for the slide.
    weight = models.SmallIntegerField(
        default=0,
    )
    tags = TaggableManager()
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        return reverse('signage:slide_update', args=[self.pk])
    def get_displays(self):
        # Displays sharing at least one tag name with this slide.
        return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
    """A display that shows every slide sharing at least one tag with it."""
    name = models.CharField(
        max_length=255,
    )
    description = models.TextField(
        blank=True,
    )
    tags = TaggableManager()
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        return reverse('signage:display_update', args=[self.pk])
    def get_slides(self):
        # Slides sharing at least one tag name with this display.
        return Slide.objects.filter(tags__name__in=self.tags.names()).distinct()
|
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeFramedModel
from taggit.managers import TaggableManager
@python_2_unicode_compatible
class Slide(TimeFramedModel):
    """An image slide shown on every display sharing at least one tag.

    `TimeFramedModel` contributes the time window during which the slide
    is active.
    """
    name = models.CharField(
        max_length=255,
    )
    description = models.TextField(
        blank=True,
    )
    image = models.ImageField(
        upload_to='slides/',
    )
    # How long the slide stays on screen (presumably seconds — TODO confirm
    # against the player code).
    duration = models.PositiveIntegerField(
        default=7,
    )
    # Ordering weight for the slide.
    weight = models.SmallIntegerField(
        default=0,
    )
    tags = TaggableManager()
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        return reverse('signage:slide_update', args=[self.pk])
    def get_displays(self):
        # Displays sharing at least one tag name with this slide.
        return Display.objects.filter(tags__name__in=self.tags.names()).distinct()
@python_2_unicode_compatible
class Display(models.Model):
    """A display that shows every slide sharing at least one tag with it."""
    name = models.CharField(
        max_length=255,
    )
    description = models.TextField(
        blank=True,
    )
    tags = TaggableManager()
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        return reverse('signage:display_update', args=[self.pk])
    def get_slides(self):
        # Slides sharing at least one tag name with this display,
        # ordered ascending by their `weight` field.
        return Slide.objects.filter(tags__name__in=self.tags.names()).order_by('weight').distinct()
|
Order displayed slides by weight
|
Order displayed slides by weight
|
Python
|
bsd-3-clause
|
jbittel/django-signage,jbittel/django-signage,jbittel/django-signage
|
cf52a7c83e1479a99e95ab2125958a67febfccf5
|
dataviews/__init__.py
|
dataviews/__init__.py
|
import sys, os
# Add param submodule to sys.path
cwd = os.path.abspath(os.path.split(__file__)[0])
sys.path.insert(0, os.path.join(cwd, '..', 'param'))
from .views import * # pyflakes:ignore (API import)
from .dataviews import * # pyflakes:ignore (API import)
from .sheetviews import * # pyflakes:ignore (API import)
from .ndmapping import * # pyflakes:ignore (API import)
def public(obj):
    """Return True if `obj` is a class derived from one of the exported
    base classes (NdMapping, View or Dimension)."""
    if not isinstance(obj, type):
        return False
    return issubclass(obj, (NdMapping, View, Dimension))
_public = list(set([_k for _k, _v in locals().items() if public(_v)]))
__all__ = _public + ["boundingregion", "ipython", "plots", "sheetcoords" ]
|
import sys, os
# Add param submodule to sys.path
cwd = os.path.abspath(os.path.split(__file__)[0])
sys.path.insert(0, os.path.join(cwd, '..', 'param'))
import param
__version__ = param.Version(release=(0,7), fpath=__file__)
from .views import * # pyflakes:ignore (API import)
from .dataviews import * # pyflakes:ignore (API import)
from .sheetviews import * # pyflakes:ignore (API import)
from .ndmapping import * # pyflakes:ignore (API import)
def public(obj):
    # A name is public if it is a class deriving from one of the core
    # base classes exported by the submodule imports above.
    if not isinstance(obj, type): return False
    baseclasses = [NdMapping, View, Dimension]
    return any([issubclass(obj, bc) for bc in baseclasses])
_public = list(set([_k for _k, _v in locals().items() if public(_v)]))
__all__ = _public + ["boundingregion", "ipython", "plots", "sheetcoords" ]
|
Set __version__ using param.Version (commit tagged as 'v0.7')
|
Set __version__ using param.Version (commit tagged as 'v0.7')
|
Python
|
bsd-3-clause
|
mjabri/holoviews,basnijholt/holoviews,ioam/holoviews,mjabri/holoviews,ioam/holoviews,vascotenner/holoviews,vascotenner/holoviews,ioam/holoviews,basnijholt/holoviews,basnijholt/holoviews,vascotenner/holoviews,mjabri/holoviews
|
0236ad9090f7b218fc7515fdc8d919b2fc048a72
|
simple_counter.py
|
simple_counter.py
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A module implementing a simple sharded counter."""
import random
from google.appengine.ext import ndb
NUM_SHARDS = 20
class SimpleCounterShard(ndb.Model):
    """Shards for the counter.

    Each entity holds a partial tally; the real total is the sum of
    `count` over all shards.
    """
    count = ndb.IntegerProperty(required=True, default=0)
def get_count():
    """Retrieve the value for a given sharded counter.

    The total is distributed over all shard entities, so sum them up.
    """
    return sum(shard.count for shard in SimpleCounterShard.query())
@ndb.transactional
def increment():
    """Increment the value for a given sharded counter.

    A random shard is chosen so concurrent increments spread writes
    across NUM_SHARDS entities instead of contending on one.
    """
    shard_index = random.randint(0, NUM_SHARDS - 1)
    counter = SimpleCounterShard.get_by_id(shard_index)
    if counter is None:
        # First write to this shard: create it lazily.
        counter = SimpleCounterShard(id=shard_index)
    counter.count += 1
    counter.put()
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A module implementing a simple sharded counter."""
import random
from google.appengine.ext import ndb
NUM_SHARDS = 20
class SimpleCounterShard(ndb.Model):
    """Shards for the counter.

    Each entity holds a partial tally; the real total is the sum of
    `count` over all shards.
    """
    count = ndb.IntegerProperty(required=True, default=0)
def get_count():
    """Retrieve the value for a given sharded counter."""
    total = 0
    # The total is distributed over all shard entities; sum them up.
    for counter in SimpleCounterShard.query():
        total += counter.count
    return total
@ndb.transactional
def increment():
    """Increment the value for a given sharded counter.

    A random shard is chosen so concurrent increments spread writes
    across NUM_SHARDS entities instead of contending on one.
    """
    shard_index = random.randint(0, NUM_SHARDS - 1)
    counter = SimpleCounterShard.get_by_id(shard_index)
    if counter is None:
        # First write to this shard: create it lazily.
        counter = SimpleCounterShard(id=shard_index)
    counter.count += 1
    counter.put()
|
Indent only (PEP8) commit of simple counter.
|
Indent only (PEP8) commit of simple counter.
|
Python
|
apache-2.0
|
GoogleCloudPlatform/appengine-sharded-counters-python
|
da93d78d141e0e07581b2a00cd6a4fb4058dcf56
|
scikits/learn/setup.py
|
scikits/learn/setup.py
|
def configuration(parent_package='',top_path=None):
    """Return the numpy.distutils configuration for the `learn` package."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('learn',parent_package,top_path)
    config.add_subpackage('datasets')
    config.add_subpackage('common')
    config.add_subpackage('machine')
    config.add_subpackage('utils')
    return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
def configuration(parent_package='',top_path=None):
    """Return the numpy.distutils configuration for the `learn` package."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('learn',parent_package,top_path)
    config.add_subpackage('datasets')
    config.add_subpackage('machine')
    config.add_subpackage('utils')
    return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Remove references to deleted submodule common/
|
Remove references to deleted submodule common/
From: Fabian Pedregosa <fabian.pedregosa@inria.fr>
git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@384 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
|
Python
|
bsd-3-clause
|
jayflo/scikit-learn,toastedcornflakes/scikit-learn,Aasmi/scikit-learn,kjung/scikit-learn,macks22/scikit-learn,trungnt13/scikit-learn,sgenoud/scikit-learn,ldirer/scikit-learn,aetilley/scikit-learn,elkingtonmcb/scikit-learn,IshankGulati/scikit-learn,zhenv5/scikit-learn,fzalkow/scikit-learn,petosegan/scikit-learn,mojoboss/scikit-learn,belltailjp/scikit-learn,jseabold/scikit-learn,terkkila/scikit-learn,BiaDarkia/scikit-learn,ominux/scikit-learn,fyffyt/scikit-learn,glouppe/scikit-learn,MohammedWasim/scikit-learn,aewhatley/scikit-learn,cauchycui/scikit-learn,fyffyt/scikit-learn,MatthieuBizien/scikit-learn,altairpearl/scikit-learn,zuku1985/scikit-learn,Djabbz/scikit-learn,shusenl/scikit-learn,bnaul/scikit-learn,Windy-Ground/scikit-learn,liberatorqjw/scikit-learn,ltiao/scikit-learn,yanlend/scikit-learn,Nyker510/scikit-learn,trankmichael/scikit-learn,sanketloke/scikit-learn,DonBeo/scikit-learn,akionakamura/scikit-learn,xubenben/scikit-learn,0x0all/scikit-learn,mlyundin/scikit-learn,zaxtax/scikit-learn,icdishb/scikit-learn,wazeerzulfikar/scikit-learn,rajat1994/scikit-learn,sonnyhu/scikit-learn,espg/scikit-learn,abimannans/scikit-learn,jmetzen/scikit-learn,vshtanko/scikit-learn,ningchi/scikit-learn,0x0all/scikit-learn,Garrett-R/scikit-learn,fzalkow/scikit-learn,saiwing-yeung/scikit-learn,ky822/scikit-learn,imaculate/scikit-learn,TomDLT/scikit-learn,jjx02230808/project0223,marcocaccin/scikit-learn,schets/scikit-learn,LiaoPan/scikit-learn,gotomypc/scikit-learn,stylianos-kampakis/scikit-learn,voxlol/scikit-learn,michigraber/scikit-learn,aewhatley/scikit-learn,hlin117/scikit-learn,ZENGXH/scikit-learn,ogrisel/scikit-learn,frank-tancf/scikit-learn,rishikksh20/scikit-learn,luo66/scikit-learn,ldirer/scikit-learn,mikebenfield/scikit-learn,hainm/scikit-learn,procoder317/scikit-learn,OshynSong/scikit-learn,huzq/scikit-learn,0x0all/scikit-learn,jpautom/scikit-learn,alvarofierroclavero/scikit-learn,sgenoud/scikit-learn,nikitasingh981/scikit-learn,JsNoNo/scikit-learn,rrohan/scikit-learn,jor
ge2703/scikit-learn,JPFrancoia/scikit-learn,betatim/scikit-learn,henrykironde/scikit-learn,AlexanderFabisch/scikit-learn,rahul-c1/scikit-learn,pianomania/scikit-learn,costypetrisor/scikit-learn,ssaeger/scikit-learn,ssaeger/scikit-learn,PrashntS/scikit-learn,samzhang111/scikit-learn,eg-zhang/scikit-learn,JosmanPS/scikit-learn,russel1237/scikit-learn,krez13/scikit-learn,wzbozon/scikit-learn,nrhine1/scikit-learn,Fireblend/scikit-learn,q1ang/scikit-learn,massmutual/scikit-learn,yask123/scikit-learn,nelson-liu/scikit-learn,nvoron23/scikit-learn,adamgreenhall/scikit-learn,nvoron23/scikit-learn,voxlol/scikit-learn,bthirion/scikit-learn,aabadie/scikit-learn,shahankhatch/scikit-learn,yonglehou/scikit-learn,MechCoder/scikit-learn,OshynSong/scikit-learn,cdegroc/scikit-learn,pompiduskus/scikit-learn,lucidfrontier45/scikit-learn,yunfeilu/scikit-learn,mfjb/scikit-learn,arjoly/scikit-learn,amueller/scikit-learn,etkirsch/scikit-learn,zorroblue/scikit-learn,jorge2703/scikit-learn,waterponey/scikit-learn,gclenaghan/scikit-learn,potash/scikit-learn,devanshdalal/scikit-learn,ilo10/scikit-learn,JeanKossaifi/scikit-learn,vivekmishra1991/scikit-learn,hugobowne/scikit-learn,anurag313/scikit-learn,pythonvietnam/scikit-learn,phdowling/scikit-learn,andaag/scikit-learn,AlexanderFabisch/scikit-learn,HolgerPeters/scikit-learn,procoder317/scikit-learn,MatthieuBizien/scikit-learn,hdmetor/scikit-learn,ClimbsRocks/scikit-learn,TomDLT/scikit-learn,yask123/scikit-learn,devanshdalal/scikit-learn,IssamLaradji/scikit-learn,AlexanderFabisch/scikit-learn,saiwing-yeung/scikit-learn,tawsifkhan/scikit-learn,qifeigit/scikit-learn,fzalkow/scikit-learn,olologin/scikit-learn,btabibian/scikit-learn,cdegroc/scikit-learn,qifeigit/scikit-learn,sinhrks/scikit-learn,amueller/scikit-learn,Lawrence-Liu/scikit-learn,rohanp/scikit-learn,yask123/scikit-learn,xubenben/scikit-learn,arabenjamin/scikit-learn,vivekmishra1991/scikit-learn,abhishekkrthakur/scikit-learn,joshloyal/scikit-learn,pompiduskus/scikit-learn,Aasmi/scikit-l
earn,glouppe/scikit-learn,mjudsp/Tsallis,raghavrv/scikit-learn,ElDeveloper/scikit-learn,walterreade/scikit-learn,abhishekgahlot/scikit-learn,luo66/scikit-learn,hsiaoyi0504/scikit-learn,rahuldhote/scikit-learn,rvraghav93/scikit-learn,mattgiguere/scikit-learn,pv/scikit-learn,depet/scikit-learn,sergeyf/scikit-learn,jayflo/scikit-learn,ky822/scikit-learn,andrewnc/scikit-learn,imaculate/scikit-learn,jpautom/scikit-learn,cl4rke/scikit-learn,jkarnows/scikit-learn,0asa/scikit-learn,ilyes14/scikit-learn,tosolveit/scikit-learn,ChanderG/scikit-learn,cdegroc/scikit-learn,jorik041/scikit-learn,B3AU/waveTree,CforED/Machine-Learning,marcocaccin/scikit-learn,thientu/scikit-learn,yonglehou/scikit-learn,JosmanPS/scikit-learn,jaidevd/scikit-learn,russel1237/scikit-learn,vermouthmjl/scikit-learn,bhargav/scikit-learn,dingocuster/scikit-learn,lin-credible/scikit-learn,PrashntS/scikit-learn,cauchycui/scikit-learn,ivannz/scikit-learn,vortex-ape/scikit-learn,IshankGulati/scikit-learn,rrohan/scikit-learn,betatim/scikit-learn,ephes/scikit-learn,bikong2/scikit-learn,mrshu/scikit-learn,f3r/scikit-learn,RachitKansal/scikit-learn,BiaDarkia/scikit-learn,ndingwall/scikit-learn,huobaowangxi/scikit-learn,DSLituiev/scikit-learn,fbagirov/scikit-learn,simon-pepin/scikit-learn,abhishekgahlot/scikit-learn,zhenv5/scikit-learn,vybstat/scikit-learn,RachitKansal/scikit-learn,iismd17/scikit-learn,loli/sklearn-ensembletrees,michigraber/scikit-learn,Jimmy-Morzaria/scikit-learn,huobaowangxi/scikit-learn,q1ang/scikit-learn,murali-munna/scikit-learn,OshynSong/scikit-learn,IndraVikas/scikit-learn,bikong2/scikit-learn,ankurankan/scikit-learn,JsNoNo/scikit-learn,MartinDelzant/scikit-learn,kylerbrown/scikit-learn,466152112/scikit-learn,IshankGulati/scikit-learn,nmayorov/scikit-learn,UNR-AERIAL/scikit-learn,mfjb/scikit-learn,rexshihaoren/scikit-learn,espg/scikit-learn,adamgreenhall/scikit-learn,joernhees/scikit-learn,carrillo/scikit-learn,abimannans/scikit-learn,xavierwu/scikit-learn,davidgbe/scikit-learn,bhargav/scikit
-learn,ashhher3/scikit-learn,ominux/scikit-learn,mattilyra/scikit-learn,cainiaocome/scikit-learn,plissonf/scikit-learn,depet/scikit-learn,hdmetor/scikit-learn,hdmetor/scikit-learn,Nyker510/scikit-learn,pompiduskus/scikit-learn,eickenberg/scikit-learn,xuewei4d/scikit-learn,hrjn/scikit-learn,ningchi/scikit-learn,ishanic/scikit-learn,tmhm/scikit-learn,walterreade/scikit-learn,waterponey/scikit-learn,murali-munna/scikit-learn,vivekmishra1991/scikit-learn,JeanKossaifi/scikit-learn,luo66/scikit-learn,ycaihua/scikit-learn,nomadcube/scikit-learn,mwv/scikit-learn,xzh86/scikit-learn,ogrisel/scikit-learn,petosegan/scikit-learn,mblondel/scikit-learn,xiaoxiamii/scikit-learn,kaichogami/scikit-learn,NelisVerhoef/scikit-learn,hugobowne/scikit-learn,wlamond/scikit-learn,dsquareindia/scikit-learn,rahul-c1/scikit-learn,ZENGXH/scikit-learn,mblondel/scikit-learn,YinongLong/scikit-learn,lucidfrontier45/scikit-learn,huzq/scikit-learn,DonBeo/scikit-learn,hitszxp/scikit-learn,hlin117/scikit-learn,jzt5132/scikit-learn,bhargav/scikit-learn,jzt5132/scikit-learn,khkaminska/scikit-learn,tmhm/scikit-learn,equialgo/scikit-learn,kevin-intel/scikit-learn,dsquareindia/scikit-learn,nhejazi/scikit-learn,pratapvardhan/scikit-learn,kagayakidan/scikit-learn,kashif/scikit-learn,beepee14/scikit-learn,yunfeilu/scikit-learn,vortex-ape/scikit-learn,Vimos/scikit-learn,aflaxman/scikit-learn,schets/scikit-learn,dsullivan7/scikit-learn,btabibian/scikit-learn,LiaoPan/scikit-learn,deepesch/scikit-learn,clemkoa/scikit-learn,rohanp/scikit-learn,pv/scikit-learn,billy-inn/scikit-learn,466152112/scikit-learn,equialgo/scikit-learn,kagayakidan/scikit-learn,abimannans/scikit-learn,hdmetor/scikit-learn,siutanwong/scikit-learn,rahul-c1/scikit-learn,huobaowangxi/scikit-learn,B3AU/waveTree,murali-munna/scikit-learn,vibhorag/scikit-learn,dhruv13J/scikit-learn,UNR-AERIAL/scikit-learn,pnedunuri/scikit-learn,kmike/scikit-learn,fbagirov/scikit-learn,manashmndl/scikit-learn,betatim/scikit-learn,fabioticconi/scikit-learn,mojoboss/scik
it-learn,shangwuhencc/scikit-learn,khkaminska/scikit-learn,yask123/scikit-learn,rohanp/scikit-learn,vinayak-mehta/scikit-learn,aminert/scikit-learn,djgagne/scikit-learn,heli522/scikit-learn,hlin117/scikit-learn,f3r/scikit-learn,cwu2011/scikit-learn,mjudsp/Tsallis,lenovor/scikit-learn,cybernet14/scikit-learn,ChanChiChoi/scikit-learn,thilbern/scikit-learn,themrmax/scikit-learn,elkingtonmcb/scikit-learn,fabianp/scikit-learn,IndraVikas/scikit-learn,mojoboss/scikit-learn,mlyundin/scikit-learn,nhejazi/scikit-learn,xzh86/scikit-learn,anntzer/scikit-learn,mhue/scikit-learn,lucidfrontier45/scikit-learn,ZenDevelopmentSystems/scikit-learn,zorroblue/scikit-learn,Titan-C/scikit-learn,justincassidy/scikit-learn,shikhardb/scikit-learn,fabioticconi/scikit-learn,smartscheduling/scikit-learn-categorical-tree,carrillo/scikit-learn,cwu2011/scikit-learn,AnasGhrab/scikit-learn,IssamLaradji/scikit-learn,loli/semisupervisedforests,hsiaoyi0504/scikit-learn,vinayak-mehta/scikit-learn,bthirion/scikit-learn,AIML/scikit-learn,murali-munna/scikit-learn,cdegroc/scikit-learn,mhdella/scikit-learn,liangz0707/scikit-learn,terkkila/scikit-learn,Garrett-R/scikit-learn,bigdataelephants/scikit-learn,ndingwall/scikit-learn,mattilyra/scikit-learn,arahuja/scikit-learn,mehdidc/scikit-learn,madjelan/scikit-learn,loli/semisupervisedforests,samzhang111/scikit-learn,mfjb/scikit-learn,rvraghav93/scikit-learn,arabenjamin/scikit-learn,frank-tancf/scikit-learn,jmschrei/scikit-learn,rsivapr/scikit-learn,samuel1208/scikit-learn,henrykironde/scikit-learn,zuku1985/scikit-learn,jakobworldpeace/scikit-learn,jblackburne/scikit-learn,rvraghav93/scikit-learn,maheshakya/scikit-learn,ankurankan/scikit-learn,fengzhyuan/scikit-learn,AlexandreAbraham/scikit-learn,herilalaina/scikit-learn,hainm/scikit-learn,aabadie/scikit-learn,icdishb/scikit-learn,theoryno3/scikit-learn,jpautom/scikit-learn,andrewnc/scikit-learn,cainiaocome/scikit-learn,fabianp/scikit-learn,xubenben/scikit-learn,kylerbrown/scikit-learn,chrsrds/scikit-learn,treyca
usey/scikit-learn,bigdataelephants/scikit-learn,djgagne/scikit-learn,nesterione/scikit-learn,untom/scikit-learn,elkingtonmcb/scikit-learn,shahankhatch/scikit-learn,russel1237/scikit-learn,AlexandreAbraham/scikit-learn,zaxtax/scikit-learn,mjudsp/Tsallis,anurag313/scikit-learn,walterreade/scikit-learn,pythonvietnam/scikit-learn,lbishal/scikit-learn,Sentient07/scikit-learn,jjx02230808/project0223,RomainBrault/scikit-learn,tdhopper/scikit-learn,ldirer/scikit-learn,BiaDarkia/scikit-learn,Achuth17/scikit-learn,jakirkham/scikit-learn,Sentient07/scikit-learn,untom/scikit-learn,bnaul/scikit-learn,glemaitre/scikit-learn,victorbergelin/scikit-learn,massmutual/scikit-learn,shikhardb/scikit-learn,B3AU/waveTree,nrhine1/scikit-learn,fyffyt/scikit-learn,fredhusser/scikit-learn,NunoEdgarGub1/scikit-learn,TomDLT/scikit-learn,maheshakya/scikit-learn,AlexRobson/scikit-learn,shahankhatch/scikit-learn,Nyker510/scikit-learn,ngoix/OCRF,PatrickChrist/scikit-learn,pypot/scikit-learn,kmike/scikit-learn,scikit-learn/scikit-learn,MartinDelzant/scikit-learn,UNR-AERIAL/scikit-learn,nmayorov/scikit-learn,dsullivan7/scikit-learn,ahoyosid/scikit-learn,trungnt13/scikit-learn,anurag313/scikit-learn,arjoly/scikit-learn,rishikksh20/scikit-learn,lbishal/scikit-learn,Adai0808/scikit-learn,larsmans/scikit-learn,q1ang/scikit-learn,MartinSavc/scikit-learn,ltiao/scikit-learn,0asa/scikit-learn,zorojean/scikit-learn,robin-lai/scikit-learn,alexeyum/scikit-learn,IshankGulati/scikit-learn,scikit-learn/scikit-learn,akionakamura/scikit-learn,pkruskal/scikit-learn,dsullivan7/scikit-learn,clemkoa/scikit-learn,simon-pepin/scikit-learn,evgchz/scikit-learn,Aasmi/scikit-learn,robin-lai/scikit-learn,clemkoa/scikit-learn,LohithBlaze/scikit-learn,kevin-intel/scikit-learn,chrsrds/scikit-learn,xzh86/scikit-learn,kjung/scikit-learn,wazeerzulfikar/scikit-learn,macks22/scikit-learn,cwu2011/scikit-learn,pnedunuri/scikit-learn,dhruv13J/scikit-learn,dingocuster/scikit-learn,h2educ/scikit-learn,MohammedWasim/scikit-learn,ogrisel/scik
it-learn,etkirsch/scikit-learn,kashif/scikit-learn,Nyker510/scikit-learn,Barmaley-exe/scikit-learn,ClimbsRocks/scikit-learn,harshaneelhg/scikit-learn,eg-zhang/scikit-learn,zorojean/scikit-learn,dhruv13J/scikit-learn,anirudhjayaraman/scikit-learn,herilalaina/scikit-learn,pkruskal/scikit-learn,Obus/scikit-learn,lin-credible/scikit-learn,ycaihua/scikit-learn,potash/scikit-learn,robbymeals/scikit-learn,vshtanko/scikit-learn,AnasGhrab/scikit-learn,olologin/scikit-learn,LohithBlaze/scikit-learn,terkkila/scikit-learn,kagayakidan/scikit-learn,maheshakya/scikit-learn,themrmax/scikit-learn,shangwuhencc/scikit-learn,elkingtonmcb/scikit-learn,jorik041/scikit-learn,marcocaccin/scikit-learn,idlead/scikit-learn,justincassidy/scikit-learn,pnedunuri/scikit-learn,ashhher3/scikit-learn,Titan-C/scikit-learn,lbishal/scikit-learn,liberatorqjw/scikit-learn,rrohan/scikit-learn,ChanderG/scikit-learn,ilyes14/scikit-learn,poryfly/scikit-learn,fbagirov/scikit-learn,raghavrv/scikit-learn,Djabbz/scikit-learn,nelson-liu/scikit-learn,zuku1985/scikit-learn,jereze/scikit-learn,ChanderG/scikit-learn,jayflo/scikit-learn,mjgrav2001/scikit-learn,AIML/scikit-learn,yunfeilu/scikit-learn,Djabbz/scikit-learn,robin-lai/scikit-learn,smartscheduling/scikit-learn-categorical-tree,raghavrv/scikit-learn,florian-f/sklearn,CVML/scikit-learn,mattilyra/scikit-learn,liyu1990/sklearn,massmutual/scikit-learn,anntzer/scikit-learn,zhenv5/scikit-learn,chrisburr/scikit-learn,procoder317/scikit-learn,mayblue9/scikit-learn,CforED/Machine-Learning,tosolveit/scikit-learn,fredhusser/scikit-learn,heli522/scikit-learn,q1ang/scikit-learn,khkaminska/scikit-learn,AIML/scikit-learn,h2educ/scikit-learn,TomDLT/scikit-learn,DSLituiev/scikit-learn,jmschrei/scikit-learn,untom/scikit-learn,mxjl620/scikit-learn,pianomania/scikit-learn,yyjiang/scikit-learn,yyjiang/scikit-learn,mwv/scikit-learn,mattilyra/scikit-learn,theoryno3/scikit-learn,nomadcube/scikit-learn,vermouthmjl/scikit-learn,Obus/scikit-learn,JosmanPS/scikit-learn,DonBeo/scikit-lea
rn,f3r/scikit-learn,toastedcornflakes/scikit-learn,Obus/scikit-learn,nvoron23/scikit-learn,vshtanko/scikit-learn,florian-f/sklearn,beepee14/scikit-learn,fabioticconi/scikit-learn,joshloyal/scikit-learn,wlamond/scikit-learn,wanggang3333/scikit-learn,sarahgrogan/scikit-learn,alexsavio/scikit-learn,loli/sklearn-ensembletrees,PatrickOReilly/scikit-learn,aabadie/scikit-learn,qifeigit/scikit-learn,xuewei4d/scikit-learn,eickenberg/scikit-learn,zorojean/scikit-learn,jaidevd/scikit-learn,Adai0808/scikit-learn,CforED/Machine-Learning,manashmndl/scikit-learn,schets/scikit-learn,tomlof/scikit-learn,poryfly/scikit-learn,scikit-learn/scikit-learn,kmike/scikit-learn,LohithBlaze/scikit-learn,ephes/scikit-learn,herilalaina/scikit-learn,rohanp/scikit-learn,wlamond/scikit-learn,tomlof/scikit-learn,ClimbsRocks/scikit-learn,ngoix/OCRF,mhue/scikit-learn,ltiao/scikit-learn,appapantula/scikit-learn,krez13/scikit-learn,AnasGhrab/scikit-learn,PrashntS/scikit-learn,eickenberg/scikit-learn,vortex-ape/scikit-learn,macks22/scikit-learn,r-mart/scikit-learn,hsiaoyi0504/scikit-learn,wanggang3333/scikit-learn,dsullivan7/scikit-learn,Akshay0724/scikit-learn,huzq/scikit-learn,xyguo/scikit-learn,poryfly/scikit-learn,Akshay0724/scikit-learn,shusenl/scikit-learn,altairpearl/scikit-learn,AlexRobson/scikit-learn,sanketloke/scikit-learn,ivannz/scikit-learn,jjx02230808/project0223,xavierwu/scikit-learn,alexeyum/scikit-learn,0x0all/scikit-learn,mfjb/scikit-learn,lin-credible/scikit-learn,ahoyosid/scikit-learn,lbishal/scikit-learn,ZENGXH/scikit-learn,tosolveit/scikit-learn,hitszxp/scikit-learn,fzalkow/scikit-learn,jblackburne/scikit-learn,treycausey/scikit-learn,justincassidy/scikit-learn,rvraghav93/scikit-learn,adamgreenhall/scikit-learn,quheng/scikit-learn,xwolf12/scikit-learn,jmschrei/scikit-learn,ngoix/OCRF,PatrickOReilly/scikit-learn,carrillo/scikit-learn,mhdella/scikit-learn,glennq/scikit-learn,mblondel/scikit-learn,cybernet14/scikit-learn,NunoEdgarGub1/scikit-learn,billy-inn/scikit-learn,belltailjp/scik
it-learn,krez13/scikit-learn,jlegendary/scikit-learn,jakirkham/scikit-learn,abhishekgahlot/scikit-learn,liyu1990/sklearn,andaag/scikit-learn,cainiaocome/scikit-learn,tomlof/scikit-learn,jorge2703/scikit-learn,rsivapr/scikit-learn,jlegendary/scikit-learn,f3r/scikit-learn,ivannz/scikit-learn,vybstat/scikit-learn,tdhopper/scikit-learn,OshynSong/scikit-learn,Lawrence-Liu/scikit-learn,aewhatley/scikit-learn,zorroblue/scikit-learn,aabadie/scikit-learn,ngoix/OCRF,florian-f/sklearn,sinhrks/scikit-learn,rexshihaoren/scikit-learn,shyamalschandra/scikit-learn,JsNoNo/scikit-learn,jmetzen/scikit-learn,trungnt13/scikit-learn,HolgerPeters/scikit-learn,kmike/scikit-learn,luo66/scikit-learn,nvoron23/scikit-learn,yanlend/scikit-learn,olologin/scikit-learn,Obus/scikit-learn,xavierwu/scikit-learn,jereze/scikit-learn,jseabold/scikit-learn,nmayorov/scikit-learn,dingocuster/scikit-learn,LiaoPan/scikit-learn,hsuantien/scikit-learn,loli/sklearn-ensembletrees,manhhomienbienthuy/scikit-learn,mwv/scikit-learn,mrshu/scikit-learn,saiwing-yeung/scikit-learn,shenzebang/scikit-learn,chrsrds/scikit-learn,voxlol/scikit-learn,belltailjp/scikit-learn,pkruskal/scikit-learn,sanketloke/scikit-learn,Achuth17/scikit-learn,rahul-c1/scikit-learn,michigraber/scikit-learn,massmutual/scikit-learn,0asa/scikit-learn,jpautom/scikit-learn,sarahgrogan/scikit-learn,depet/scikit-learn,ishanic/scikit-learn,nikitasingh981/scikit-learn,mhue/scikit-learn,walterreade/scikit-learn,costypetrisor/scikit-learn,jmschrei/scikit-learn,rajat1994/scikit-learn,glennq/scikit-learn,joshloyal/scikit-learn,MartinDelzant/scikit-learn,samzhang111/scikit-learn,arjoly/scikit-learn,samuel1208/scikit-learn,xuewei4d/scikit-learn,phdowling/scikit-learn,ky822/scikit-learn,xyguo/scikit-learn,terkkila/scikit-learn,Jimmy-Morzaria/scikit-learn,zihua/scikit-learn,RachitKansal/scikit-learn,abhishekgahlot/scikit-learn,untom/scikit-learn,carrillo/scikit-learn,liberatorqjw/scikit-learn,giorgiop/scikit-learn,ogrisel/scikit-learn,siutanwong/scikit-learn,pli
ssonf/scikit-learn,tmhm/scikit-learn,PrashntS/scikit-learn,NunoEdgarGub1/scikit-learn,shenzebang/scikit-learn,Titan-C/scikit-learn,shenzebang/scikit-learn,beepee14/scikit-learn,r-mart/scikit-learn,billy-inn/scikit-learn,ycaihua/scikit-learn,kashif/scikit-learn,manashmndl/scikit-learn,spallavolu/scikit-learn,alvarofierroclavero/scikit-learn,Barmaley-exe/scikit-learn,procoder317/scikit-learn,meduz/scikit-learn,kevin-intel/scikit-learn,jseabold/scikit-learn,AlexRobson/scikit-learn,jjx02230808/project0223,depet/scikit-learn,fengzhyuan/scikit-learn,ChanChiChoi/scikit-learn,nhejazi/scikit-learn,RayMick/scikit-learn,meduz/scikit-learn,potash/scikit-learn,henridwyer/scikit-learn,shyamalschandra/scikit-learn,beepee14/scikit-learn,alexsavio/scikit-learn,dingocuster/scikit-learn,ndingwall/scikit-learn,RomainBrault/scikit-learn,lazywei/scikit-learn,plissonf/scikit-learn,Fireblend/scikit-learn,deepesch/scikit-learn,rahuldhote/scikit-learn,RayMick/scikit-learn,vigilv/scikit-learn,YinongLong/scikit-learn,ashhher3/scikit-learn,iismd17/scikit-learn,henridwyer/scikit-learn,equialgo/scikit-learn,jakobworldpeace/scikit-learn,rishikksh20/scikit-learn,ilyes14/scikit-learn,maheshakya/scikit-learn,larsmans/scikit-learn,jakobworldpeace/scikit-learn,Fireblend/scikit-learn,qifeigit/scikit-learn,lucidfrontier45/scikit-learn,hainm/scikit-learn,nesterione/scikit-learn,fredhusser/scikit-learn,moutai/scikit-learn,ycaihua/scikit-learn,trankmichael/scikit-learn,lazywei/scikit-learn,hugobowne/scikit-learn,fbagirov/scikit-learn,466152112/scikit-learn,vigilv/scikit-learn,aetilley/scikit-learn,hsiaoyi0504/scikit-learn,krez13/scikit-learn,roxyboy/scikit-learn,ZenDevelopmentSystems/scikit-learn,rahuldhote/scikit-learn,petosegan/scikit-learn,bthirion/scikit-learn,rrohan/scikit-learn,IndraVikas/scikit-learn,anirudhjayaraman/scikit-learn,IssamLaradji/scikit-learn,altairpearl/scikit-learn,mattgiguere/scikit-learn,giorgiop/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mayblue9/scikit-learn,BiaDark
ia/scikit-learn,davidgbe/scikit-learn,xiaoxiamii/scikit-learn,mayblue9/scikit-learn,mugizico/scikit-learn,IndraVikas/scikit-learn,bigdataelephants/scikit-learn,simon-pepin/scikit-learn,mlyundin/scikit-learn,Srisai85/scikit-learn,mjudsp/Tsallis,Myasuka/scikit-learn,jaidevd/scikit-learn,mhdella/scikit-learn,eg-zhang/scikit-learn,pypot/scikit-learn,ngoix/OCRF,Barmaley-exe/scikit-learn,AlexanderFabisch/scikit-learn,victorbergelin/scikit-learn,harshaneelhg/scikit-learn,mugizico/scikit-learn,Sentient07/scikit-learn,shikhardb/scikit-learn,liberatorqjw/scikit-learn,RomainBrault/scikit-learn,sonnyhu/scikit-learn,michigraber/scikit-learn,theoryno3/scikit-learn,dhruv13J/scikit-learn,lenovor/scikit-learn,pratapvardhan/scikit-learn,manhhomienbienthuy/scikit-learn,aewhatley/scikit-learn,mojoboss/scikit-learn,icdishb/scikit-learn,wlamond/scikit-learn,jlegendary/scikit-learn,andrewnc/scikit-learn,AIML/scikit-learn,MatthieuBizien/scikit-learn,lesteve/scikit-learn,rexshihaoren/scikit-learn,larsmans/scikit-learn,giorgiop/scikit-learn,ilo10/scikit-learn,PatrickChrist/scikit-learn,MartinDelzant/scikit-learn,hrjn/scikit-learn,xyguo/scikit-learn,jakirkham/scikit-learn,loli/semisupervisedforests,pianomania/scikit-learn,belltailjp/scikit-learn,yanlend/scikit-learn,thilbern/scikit-learn,henrykironde/scikit-learn,shangwuhencc/scikit-learn,siutanwong/scikit-learn,nikitasingh981/scikit-learn,betatim/scikit-learn,aminert/scikit-learn,espg/scikit-learn,devanshdalal/scikit-learn,Srisai85/scikit-learn,ZenDevelopmentSystems/scikit-learn,LohithBlaze/scikit-learn,treycausey/scikit-learn,liangz0707/scikit-learn,Adai0808/scikit-learn,Sentient07/scikit-learn,Myasuka/scikit-learn,3manuek/scikit-learn,glemaitre/scikit-learn,Barmaley-exe/scikit-learn,ltiao/scikit-learn,gclenaghan/scikit-learn,mehdidc/scikit-learn,cwu2011/scikit-learn,bthirion/scikit-learn,vinayak-mehta/scikit-learn,eg-zhang/scikit-learn,fengzhyuan/scikit-learn,NelisVerhoef/scikit-learn,ssaeger/scikit-learn,sarahgrogan/scikit-learn,abimannan
s/scikit-learn,quheng/scikit-learn,rajat1994/scikit-learn,stylianos-kampakis/scikit-learn,ngoix/OCRF,mayblue9/scikit-learn,andaag/scikit-learn,ephes/scikit-learn,jakirkham/scikit-learn,pv/scikit-learn,spallavolu/scikit-learn,ephes/scikit-learn,wzbozon/scikit-learn,larsmans/scikit-learn,nelson-liu/scikit-learn,JeanKossaifi/scikit-learn,Djabbz/scikit-learn,henridwyer/scikit-learn,xwolf12/scikit-learn,h2educ/scikit-learn,phdowling/scikit-learn,mjgrav2001/scikit-learn,RomainBrault/scikit-learn,mxjl620/scikit-learn,mhue/scikit-learn,mblondel/scikit-learn,zihua/scikit-learn,treycausey/scikit-learn,sgenoud/scikit-learn,pythonvietnam/scikit-learn,andrewnc/scikit-learn,shikhardb/scikit-learn,anntzer/scikit-learn,marcocaccin/scikit-learn,wzbozon/scikit-learn,mjgrav2001/scikit-learn,thilbern/scikit-learn,cainiaocome/scikit-learn,robbymeals/scikit-learn,iismd17/scikit-learn,ElDeveloper/scikit-learn,jkarnows/scikit-learn,alvarofierroclavero/scikit-learn,ChanChiChoi/scikit-learn,poryfly/scikit-learn,vermouthmjl/scikit-learn,jkarnows/scikit-learn,nomadcube/scikit-learn,jayflo/scikit-learn,hainm/scikit-learn,ChanChiChoi/scikit-learn,heli522/scikit-learn,mrshu/scikit-learn,quheng/scikit-learn,xubenben/scikit-learn,simon-pepin/scikit-learn,ankurankan/scikit-learn,arabenjamin/scikit-learn,kjung/scikit-learn,davidgbe/scikit-learn,icdishb/scikit-learn,MatthieuBizien/scikit-learn,hrjn/scikit-learn,anirudhjayaraman/scikit-learn,lenovor/scikit-learn,Windy-Ground/scikit-learn,arahuja/scikit-learn,sumspr/scikit-learn,thilbern/scikit-learn,hsuantien/scikit-learn,zhenv5/scikit-learn,ahoyosid/scikit-learn,thientu/scikit-learn,Vimos/scikit-learn,mlyundin/scikit-learn,alvarofierroclavero/scikit-learn,jkarnows/scikit-learn,dsquareindia/scikit-learn,Myasuka/scikit-learn,vinayak-mehta/scikit-learn,trankmichael/scikit-learn,liyu1990/sklearn,smartscheduling/scikit-learn-categorical-tree,justincassidy/scikit-learn,466152112/scikit-learn,meduz/scikit-learn,alexsavio/scikit-learn,voxlol/scikit-learn,ilo1
0/scikit-learn,ndingwall/scikit-learn,jm-begon/scikit-learn,appapantula/scikit-learn,nesterione/scikit-learn,B3AU/waveTree,MohammedWasim/scikit-learn,roxyboy/scikit-learn,robin-lai/scikit-learn,zorojean/scikit-learn,thientu/scikit-learn,vshtanko/scikit-learn,jakobworldpeace/scikit-learn,tmhm/scikit-learn,imaculate/scikit-learn,Akshay0724/scikit-learn,MechCoder/scikit-learn,AlexRobson/scikit-learn,B3AU/waveTree,3manuek/scikit-learn,JsNoNo/scikit-learn,jblackburne/scikit-learn,wazeerzulfikar/scikit-learn,costypetrisor/scikit-learn,DonBeo/scikit-learn,Jimmy-Morzaria/scikit-learn,tawsifkhan/scikit-learn,aminert/scikit-learn,kylerbrown/scikit-learn,Achuth17/scikit-learn,ankurankan/scikit-learn,LiaoPan/scikit-learn,kmike/scikit-learn,btabibian/scikit-learn,vibhorag/scikit-learn,schets/scikit-learn,kaichogami/scikit-learn,pianomania/scikit-learn,nesterione/scikit-learn,YinongLong/scikit-learn,Vimos/scikit-learn,h2educ/scikit-learn,nelson-liu/scikit-learn,Fireblend/scikit-learn,tawsifkhan/scikit-learn,giorgiop/scikit-learn,mhdella/scikit-learn,florian-f/sklearn,glouppe/scikit-learn,ilyes14/scikit-learn,samuel1208/scikit-learn,kagayakidan/scikit-learn,abhishekkrthakur/scikit-learn,loli/sklearn-ensembletrees,tdhopper/scikit-learn,vigilv/scikit-learn,mjudsp/Tsallis,samuel1208/scikit-learn,glemaitre/scikit-learn,vigilv/scikit-learn,shahankhatch/scikit-learn,sumspr/scikit-learn,vermouthmjl/scikit-learn,ominux/scikit-learn,gclenaghan/scikit-learn,wanggang3333/scikit-learn,vybstat/scikit-learn,yonglehou/scikit-learn,themrmax/scikit-learn,roxyboy/scikit-learn,gotomypc/scikit-learn,Garrett-R/scikit-learn,xuewei4d/scikit-learn,harshaneelhg/scikit-learn,iismd17/scikit-learn,liangz0707/scikit-learn,Clyde-fare/scikit-learn,arahuja/scikit-learn,joshloyal/scikit-learn,pypot/scikit-learn,arahuja/scikit-learn,mattgiguere/scikit-learn,chrisburr/scikit-learn,hlin117/scikit-learn,PatrickChrist/scikit-learn,MechCoder/scikit-learn,yunfeilu/scikit-learn,ChanderG/scikit-learn,bigdataelephants/scik
it-learn,waterponey/scikit-learn,JPFrancoia/scikit-learn,amueller/scikit-learn,jereze/scikit-learn,0x0all/scikit-learn,mikebenfield/scikit-learn,Achuth17/scikit-learn,zuku1985/scikit-learn,RayMick/scikit-learn,lazywei/scikit-learn,sanketloke/scikit-learn,3manuek/scikit-learn,sinhrks/scikit-learn,sergeyf/scikit-learn,plissonf/scikit-learn,YinongLong/scikit-learn,sonnyhu/scikit-learn,chrisburr/scikit-learn,larsmans/scikit-learn,3manuek/scikit-learn,pv/scikit-learn,yyjiang/scikit-learn,NelisVerhoef/scikit-learn,russel1237/scikit-learn,nrhine1/scikit-learn,fyffyt/scikit-learn,rexshihaoren/scikit-learn,moutai/scikit-learn,evgchz/scikit-learn,manhhomienbienthuy/scikit-learn,Clyde-fare/scikit-learn,lucidfrontier45/scikit-learn,shusenl/scikit-learn,andaag/scikit-learn,manhhomienbienthuy/scikit-learn,arabenjamin/scikit-learn,cauchycui/scikit-learn,CVML/scikit-learn,espg/scikit-learn,eickenberg/scikit-learn,zaxtax/scikit-learn,scikit-learn/scikit-learn,ningchi/scikit-learn,xiaoxiamii/scikit-learn,ominux/scikit-learn,bikong2/scikit-learn,lenovor/scikit-learn,NunoEdgarGub1/scikit-learn,shusenl/scikit-learn,ankurankan/scikit-learn,yyjiang/scikit-learn,jorge2703/scikit-learn,jmetzen/scikit-learn,mattilyra/scikit-learn,MartinSavc/scikit-learn,lesteve/scikit-learn,kaichogami/scikit-learn,cl4rke/scikit-learn,sarahgrogan/scikit-learn,vibhorag/scikit-learn,kevin-intel/scikit-learn,btabibian/scikit-learn,Titan-C/scikit-learn,ElDeveloper/scikit-learn,hsuantien/scikit-learn,arjoly/scikit-learn,bikong2/scikit-learn,RPGOne/scikit-learn,wzbozon/scikit-learn,hsuantien/scikit-learn,NelisVerhoef/scikit-learn,yonglehou/scikit-learn,UNR-AERIAL/scikit-learn,devanshdalal/scikit-learn,Windy-Ground/scikit-learn,hugobowne/scikit-learn,jzt5132/scikit-learn,aetilley/scikit-learn,stylianos-kampakis/scikit-learn,mrshu/scikit-learn,sergeyf/scikit-learn,fabianp/scikit-learn,AlexandreAbraham/scikit-learn,hitszxp/scikit-learn,HolgerPeters/scikit-learn,Aasmi/scikit-learn,CforED/Machine-Learning,etkirsch/sciki
t-learn,aetilley/scikit-learn,hitszxp/scikit-learn,RPGOne/scikit-learn,adamgreenhall/scikit-learn,spallavolu/scikit-learn,theoryno3/scikit-learn,chrsrds/scikit-learn,ZENGXH/scikit-learn,vivekmishra1991/scikit-learn,amueller/scikit-learn,fabioticconi/scikit-learn,AlexandreAbraham/scikit-learn,idlead/scikit-learn,Jimmy-Morzaria/scikit-learn,altairpearl/scikit-learn,robbymeals/scikit-learn,spallavolu/scikit-learn,sinhrks/scikit-learn,ivannz/scikit-learn,nikitasingh981/scikit-learn,xwolf12/scikit-learn,fredhusser/scikit-learn,mikebenfield/scikit-learn,alexsavio/scikit-learn,shyamalschandra/scikit-learn,ssaeger/scikit-learn,r-mart/scikit-learn,shyamalschandra/scikit-learn,khkaminska/scikit-learn,jorik041/scikit-learn,loli/sklearn-ensembletrees,trankmichael/scikit-learn,kashif/scikit-learn,deepesch/scikit-learn,huzq/scikit-learn,mattgiguere/scikit-learn,toastedcornflakes/scikit-learn,bhargav/scikit-learn,MohammedWasim/scikit-learn,jseabold/scikit-learn,fabianp/scikit-learn,xzh86/scikit-learn,cybernet14/scikit-learn,costypetrisor/scikit-learn,mikebenfield/scikit-learn,macks22/scikit-learn,loli/semisupervisedforests,phdowling/scikit-learn,anntzer/scikit-learn,maheshakya/scikit-learn,saiwing-yeung/scikit-learn,ldirer/scikit-learn,lesteve/scikit-learn,ElDeveloper/scikit-learn,deepesch/scikit-learn,RPGOne/scikit-learn,jzt5132/scikit-learn,ky822/scikit-learn,xyguo/scikit-learn,vibhorag/scikit-learn,gclenaghan/scikit-learn,nmayorov/scikit-learn,Windy-Ground/scikit-learn,treycausey/scikit-learn,herilalaina/scikit-learn,manashmndl/scikit-learn,Adai0808/scikit-learn,heli522/scikit-learn,zihua/scikit-learn,jmetzen/scikit-learn,shangwuhencc/scikit-learn,glouppe/scikit-learn,mwv/scikit-learn,xavierwu/scikit-learn,rishikksh20/scikit-learn,IssamLaradji/scikit-learn,jm-begon/scikit-learn,DSLituiev/scikit-learn,victorbergelin/scikit-learn,sonnyhu/scikit-learn,pypot/scikit-learn,akionakamura/scikit-learn,ashhher3/scikit-learn,bnaul/scikit-learn,PatrickChrist/scikit-learn,MechCoder/scikit-l
earn,lazywei/scikit-learn,robbymeals/scikit-learn,hrjn/scikit-learn,ningchi/scikit-learn,RayMick/scikit-learn,zorroblue/scikit-learn,dsquareindia/scikit-learn,tdhopper/scikit-learn,Srisai85/scikit-learn,madjelan/scikit-learn,JPFrancoia/scikit-learn,glennq/scikit-learn,raghavrv/scikit-learn,alexeyum/scikit-learn,sgenoud/scikit-learn,evgchz/scikit-learn,pkruskal/scikit-learn,CVML/scikit-learn,joernhees/scikit-learn,toastedcornflakes/scikit-learn,vortex-ape/scikit-learn,henrykironde/scikit-learn,mehdidc/scikit-learn,chrisburr/scikit-learn,etkirsch/scikit-learn,ClimbsRocks/scikit-learn,wanggang3333/scikit-learn,idlead/scikit-learn,yanlend/scikit-learn,xiaoxiamii/scikit-learn,waterponey/scikit-learn,aflaxman/scikit-learn,tosolveit/scikit-learn,kjung/scikit-learn,ishanic/scikit-learn,appapantula/scikit-learn,sumspr/scikit-learn,Lawrence-Liu/scikit-learn,olologin/scikit-learn,liyu1990/sklearn,aminert/scikit-learn,madjelan/scikit-learn,akionakamura/scikit-learn,jm-begon/scikit-learn,JeanKossaifi/scikit-learn,Myasuka/scikit-learn,nhejazi/scikit-learn,rsivapr/scikit-learn,anurag313/scikit-learn,djgagne/scikit-learn,wazeerzulfikar/scikit-learn,kaichogami/scikit-learn,tomlof/scikit-learn,evgchz/scikit-learn,joernhees/scikit-learn,petosegan/scikit-learn,mugizico/scikit-learn,lesteve/scikit-learn,fengzhyuan/scikit-learn,quheng/scikit-learn,tawsifkhan/scikit-learn,MartinSavc/scikit-learn,stylianos-kampakis/scikit-learn,glennq/scikit-learn,mugizico/scikit-learn,glemaitre/scikit-learn,florian-f/sklearn,cl4rke/scikit-learn,evgchz/scikit-learn,cl4rke/scikit-learn,davidgbe/scikit-learn,frank-tancf/scikit-learn,mxjl620/scikit-learn,PatrickOReilly/scikit-learn,anirudhjayaraman/scikit-learn,Garrett-R/scikit-learn,jaidevd/scikit-learn,mjgrav2001/scikit-learn,DSLituiev/scikit-learn,victorbergelin/scikit-learn,trungnt13/scikit-learn,0asa/scikit-learn,vybstat/scikit-learn,rajat1994/scikit-learn,rahuldhote/scikit-learn,pratapvardhan/scikit-learn,henridwyer/scikit-learn,appapantula/scikit-learn
,gotomypc/scikit-learn,meduz/scikit-learn,mehdidc/scikit-learn,jlegendary/scikit-learn,moutai/scikit-learn,lin-credible/scikit-learn,ycaihua/scikit-learn,equialgo/scikit-learn,Garrett-R/scikit-learn,pratapvardhan/scikit-learn,HolgerPeters/scikit-learn,jereze/scikit-learn,siutanwong/scikit-learn,alexeyum/scikit-learn,roxyboy/scikit-learn,Srisai85/scikit-learn,ilo10/scikit-learn,thientu/scikit-learn,djgagne/scikit-learn,billy-inn/scikit-learn,zihua/scikit-learn,clemkoa/scikit-learn,0asa/scikit-learn,JosmanPS/scikit-learn,xwolf12/scikit-learn,rsivapr/scikit-learn,liangz0707/scikit-learn,nrhine1/scikit-learn,MartinSavc/scikit-learn,joernhees/scikit-learn,jorik041/scikit-learn,hitszxp/scikit-learn,nomadcube/scikit-learn,jblackburne/scikit-learn,sgenoud/scikit-learn,sergeyf/scikit-learn,sumspr/scikit-learn,AnasGhrab/scikit-learn,Lawrence-Liu/scikit-learn,jm-begon/scikit-learn,RPGOne/scikit-learn,themrmax/scikit-learn,aflaxman/scikit-learn,depet/scikit-learn,pnedunuri/scikit-learn,eickenberg/scikit-learn,PatrickOReilly/scikit-learn,bnaul/scikit-learn,abhishekkrthakur/scikit-learn,RachitKansal/scikit-learn,cauchycui/scikit-learn,harshaneelhg/scikit-learn,aflaxman/scikit-learn,ZenDevelopmentSystems/scikit-learn,Vimos/scikit-learn,JPFrancoia/scikit-learn,ishanic/scikit-learn,samzhang111/scikit-learn,zaxtax/scikit-learn,abhishekkrthakur/scikit-learn,mrshu/scikit-learn,imaculate/scikit-learn,huobaowangxi/scikit-learn,potash/scikit-learn,CVML/scikit-learn,pythonvietnam/scikit-learn,Akshay0724/scikit-learn,abhishekgahlot/scikit-learn,ahoyosid/scikit-learn,pompiduskus/scikit-learn,r-mart/scikit-learn,shenzebang/scikit-learn,cybernet14/scikit-learn,frank-tancf/scikit-learn,gotomypc/scikit-learn,Clyde-fare/scikit-learn,kylerbrown/scikit-learn,rsivapr/scikit-learn,Clyde-fare/scikit-learn,mxjl620/scikit-learn,madjelan/scikit-learn,idlead/scikit-learn,moutai/scikit-learn
|
1ef1d7a973ce44943fc59315d1f962ed59f06e33
|
seacucumber/backend.py
|
seacucumber/backend.py
|
"""
This module contains the SESBackend class, which is what you'll want to set in
your settings.py::
EMAIL_BACKEND = 'seacucumber.backend.SESBackend'
"""
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
message.message().as_string(),
)
num_sent += 1
return num_sent
|
"""
This module contains the SESBackend class, which is what you'll want to set in
your settings.py::
EMAIL_BACKEND = 'seacucumber.backend.SESBackend'
"""
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
message.message().as_string().decode('utf8'),
)
num_sent += 1
return num_sent
|
Patch to send mails with UTF8 encoding
|
Patch to send mails with UTF8 encoding
Just a temp fix
|
Python
|
mit
|
makielab/sea-cucumber,duointeractive/sea-cucumber
|
a9b56fe98a0df71881c41a2524bdb5abc4b0de50
|
services/imu-logger.py
|
services/imu-logger.py
|
#!/usr/bin/env python3
from sense_hat import SenseHat
from pymongo import MongoClient
import time
DELAY = 1 # in seconds
sense = SenseHat()
client = MongoClient("mongodb://10.0.1.25:27017")
db = client.g2x
while True:
orientation = sense.get_orientation_degrees()
print(orientation)
acceleration = sense.get_accelerometer()
compass = sense.get_compass()
temperature_from_humidity = sense.get_temperature()
temperature_from_pressure = sense.get_temperature_from_pressure()
db.gyroscope.insert_one({
"pitch": orientation["pitch"],
"roll": orientation["roll"],
"yaw": orientation["yaw"]
})
db.accelerometer.insert_one({
"pitch": acceleration["pitch"],
"roll": acceleration["roll"],
"yaw": acceleration["yaw"]
})
db.compass.insert_one({"angle": compass})
db.temperature.insert_one({
"from_humidity": temperature_from_humidity,
"from_pressure": temperature_from_pressure
})
time.sleep(DELAY)
|
#!/usr/bin/env python3
from sense_hat import SenseHat
from pymongo import MongoClient
from datetime import datetime
sense = SenseHat()
client = MongoClient("mongodb://10.0.1.25:27017")
db = client.g2x
last_time = datetime.utcnow()
sample_count = 0
while True:
current_time = datetime.utcnow()
elapsed_time = current_time - last_time
orientation = sense.get_orientation()
gyroscope = sense.get_gyroscope()
acceleration = sense.get_accelerometer()
compass = sense.get_compass()
temperature_from_humidity = sense.get_temperature()
temperature_from_pressure = sense.get_temperature_from_pressure()
sample_count += 1
if elapsed_time.seconds >= 1:
last_time = current_time
print("sample per second =", sample_count)
print("orientation =", orientation)
print("gyroscope =", gyroscope)
print("acceleration =", acceleration)
print("compass =", compass)
print("temperature_from_humidity =", temperature_from_humidity)
print("temperature_from_pressure =", temperature_from_pressure)
sample_count = 0
db.orientation.insert_one({
"pitch": orientation["pitch"],
"roll": orientation["roll"],
"yaw": orientation["yaw"]
})
db.gyroscope.insert_one({
"pitch": gyroscope["pitch"],
"roll": gyroscope["roll"],
"yaw": gyroscope["yaw"]
})
db.accelerometer.insert_one({
"pitch": acceleration["pitch"],
"roll": acceleration["roll"],
"yaw": acceleration["yaw"]
})
db.compass.insert_one({"angle": compass})
db.temperature.insert_one({
"from_humidity": temperature_from_humidity,
"from_pressure": temperature_from_pressure
})
|
Read samples faster but log only once a second
|
Read samples faster but log only once a second
|
Python
|
bsd-3-clause
|
gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2
|
b7b1ae11378b37350a3fcd9d989be58f655ec986
|
calexicon/helpers.py
|
calexicon/helpers.py
|
from datetime import date as vanilla_date
def ordinal(n):
suffix = "th"
if n % 10 == 1:
suffix = "st"
if n % 10 == 2:
suffix = "nd"
if n % 10 == 3:
suffix = "rd"
if 10 < n % 100 < 20:
suffix = "th"
return "%d%s" % (n, suffix)
def month_string(n):
d = vanilla_date(1995, n, 1)
return d.strftime("%B")
|
from datetime import date as vanilla_date
def ordinal(n):
suffix = "th"
if n % 10 in [1, 2, 3]:
suffix = [None, 'st', 'nd', 'rd'][n % 10]
if 10 < n % 100 < 20:
suffix = "th"
return "%d%s" % (n, suffix)
def month_string(n):
d = vanilla_date(1995, n, 1)
return d.strftime("%B")
|
Make this part of the function simpler.
|
Make this part of the function simpler.
|
Python
|
apache-2.0
|
jwg4/qual,jwg4/calexicon
|
4c987cd45080cb6a1a449fa708a567c40ba8c94f
|
examples/pax_mininet_node.py
|
examples/pax_mininet_node.py
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise, even with the above iptables rules, the OS
# will still forward packets that have a different IP on the other interfaces, which
# is not the behaviour we want from an ideal node that only processes packets through Pax.
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
Add comment explaining why we disable ip_forward
|
Add comment explaining why we disable ip_forward
|
Python
|
apache-2.0
|
niksu/pax,TMVector/pax,niksu/pax,niksu/pax,TMVector/pax
|
3252a1e0f5b2991179d3fabe66f34a19f7cd85c9
|
src/DecodeTest.py
|
src/DecodeTest.py
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost", "msg":"hello queue a"})
msg = "SEND\naccept-version:1.2\nhost:localhost\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_connect_missing_req_header(self):
msg = "CONNECT\nhost:localhost\n\n\x00"
self.assertRaises(Exception, self.decoder.decode(msg))
def test_decoder_decode_send(self):
testFrame = Frames.SEND(**{"destination":"/queue/a", "msg":"hello queue a"})
msg = "SEND\ndestination:/queue/a\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send_missing_req_header(self):
msg = "SEND\n\nhello queue a\x00"
self.assertRaises(Exception, self.decoder.decode(msg))
if __name__ == '__main__':
unittest.main()
|
Send and Connect frame tests
|
Send and Connect frame tests
|
Python
|
mit
|
phan91/STOMP_agilis
|
fb0b129216bd98a90cdee623157df5c7e4a742fb
|
blinkenlights/blinkenlights.py
|
blinkenlights/blinkenlights.py
|
#!/usr/bin/python3
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
loop.close()
|
#!/usr/bin/python3
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
os.remove('/tmp/coord.socket')
loop.close()
|
Clean up socket file on exiting
|
Clean up socket file on exiting
Change-Id: I34391c64408b5a35386913bd7be01d81feed61b6
|
Python
|
mit
|
fayoh/KSP-Control
|
3ccaf18243232d756ed139d9f84a6b3903af15f7
|
exploratory_analysis/author_scan.py
|
exploratory_analysis/author_scan.py
|
import os
from utils import Reader
import code
import sys
author_dict = dict()
def extract_authors(tweets):
# code.interact(local=dict(globals(), **locals()))
for t in tweets:
if t.is_post():
actor = t.actor()
create_key(actor['id'])
increment_author(actor, t.is_post())
elif t.is_share():
original_tweet = t.data['object']
actor = original_tweet['actor']
create_key(actor['id'])
increment_author(actor, t.is_post())
else:
print 'Neither post nor share:', t.id()
def increment_author(actor, is_post):
dict_value = author_dict[actor['id']]
dict_value[0] = actor['link']
dict_value[1] = actor['preferredUsername']
dict_value[2] = actor['displayName']
if is_post:
dict_value[3] += 1
else:
dict_value[4] += 1
def create_key(actor_id):
if actor_id not in author_dict.keys():
# link, username, display_name, post, post that gotten shared
default_value = ['', '', '', 0, 0]
author_dict[actor_id] = default_value
def print_all():
for k in author_dict.keys():
value = author_dict[k]
print '"{}","{}","{}","{}",{},{}'.format(k, value[0], value[1], value[2], value[3], value[4])
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
for f in files:
extract_authors(Reader.read_file(f))
print_all()
# code.interact(local=dict(globals(), **locals()))
|
import os
from utils import Reader
import code
import sys
def extract_authors(tweets):
for t in tweets:
if t.is_post():
actor = t.actor()
print '"{}","{}","{}","{}",{},{}'.format(actor['id'],
actor['link'],
actor['preferredUsername'],
actor['displayName'], 1, 0)
elif t.is_share():
original_tweet = t.data['object']
actor = original_tweet['actor']
print '"{}","{}","{}","{}",{},{}'.format(actor['id'],
actor['link'],
actor['preferredUsername'],
actor['displayName'], 0, 1)
else:
print 'Neither post nor share:', t.id()
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
for f in files:
extract_authors(Reader.read_file(f))
# code.interact(local=dict(globals(), **locals()))
|
Print everything out in csv and use tableau to do calculation
|
Print everything out in csv and use tableau to do calculation
|
Python
|
apache-2.0
|
chuajiesheng/twitter-sentiment-analysis
|
9d651a1cdb92d7d8ba039fce97a11de085b54990
|
polymorphic/formsets/utils.py
|
polymorphic/formsets/utils.py
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
dest += media
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
Fix the add_media() hack for Django 2.0
|
Fix the add_media() hack for Django 2.0
|
Python
|
bsd-3-clause
|
chrisglass/django_polymorphic,chrisglass/django_polymorphic
|
94e3572a4049b0eb0ff0d762a3bce5248a5bd507
|
src/sas/sasgui/perspectives/file_converter/file_converter.py
|
src/sas/sasgui/perspectives/file_converter/file_converter.py
|
"""
File Converter Plugin
"""
import logging
from sas.sasgui.guiframe.plugin_base import PluginBase
from sas.sasgui.perspectives.file_converter.converter_panel import ConverterWindow
logger = logging.getLogger(__name__)
class Plugin(PluginBase):
"""
This class defines the interface for a Plugin class
for File Converter perspective
"""
def __init__(self):
PluginBase.__init__(self, name="File Converter")
logger.info("File Converter plug-in started")
self._sub_menu = "Tool"
self.converter_frame = None
def get_tools(self):
"""
Returns a set of menu entries
"""
help_txt = "Convert single column ASCII data to CanSAS format"
return [("File Converter", help_txt, self.on_file_converter)]
def on_file_converter(self, event):
if self.converter_frame is None:
frame = ConverterWindow(parent=self.parent, base=self.parent,
manager=self)
self.put_icon(frame)
self.converter_frame = frame
else:
self.converter_frame.Show(False)
self.converter_frame.Show(True)
def put_icon(self, frame):
"""
Put icon in the frame title bar
"""
if hasattr(frame, "IsIconized"):
if not frame.IsIconized():
try:
icon = self.parent.GetIcon()
frame.SetIcon(icon)
except:
pass
|
"""
File Converter Plugin
"""
import logging
from sas.sasgui.guiframe.plugin_base import PluginBase
from sas.sasgui.perspectives.file_converter.converter_panel import ConverterWindow
logger = logging.getLogger(__name__)
class Plugin(PluginBase):
"""
This class defines the interface for a Plugin class
for File Converter perspective
"""
def __init__(self):
PluginBase.__init__(self, name="File Converter")
logger.info("File Converter plug-in started")
self._sub_menu = "Tool"
self.converter_frame = None
def get_tools(self):
"""
Returns a set of menu entries
"""
help_txt = "Convert ASCII or BSL/OTOKO data to CanSAS or NXcanSAS formats"
return [("File Converter", help_txt, self.on_file_converter)]
def on_file_converter(self, event):
if self.converter_frame is None:
frame = ConverterWindow(parent=self.parent, base=self.parent,
manager=self)
self.put_icon(frame)
self.converter_frame = frame
else:
self.converter_frame.Show(False)
self.converter_frame.Show(True)
def put_icon(self, frame):
"""
Put icon in the frame title bar
"""
if hasattr(frame, "IsIconized"):
if not frame.IsIconized():
try:
icon = self.parent.GetIcon()
frame.SetIcon(icon)
except:
pass
|
Update file converter tooltip in tools menu
|
Update file converter tooltip in tools menu
|
Python
|
bsd-3-clause
|
SasView/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview
|
4712e870bec7c678f88af3d7b54fcf7c8b040795
|
salt/modules/http.py
|
salt/modules/http.py
|
# -*- coding: utf-8 -*-
'''
Module for making various web calls. Primarily designed for webhooks and the
like, but also useful for basic http testing.
'''
from __future__ import absolute_import
# Import salt libs
import salt.utils.http
def query(url, **kwargs):
'''
Query a resource, and decode the return data
CLI Example:
.. code-block:: bash
salt '*' http.query http://somelink.com/
salt '*' http.query http://somelink.com/ method=POST \
params='key1=val1&key2=val2'
salt '*' http.query http://somelink.com/ method=POST \
data='<xml>somecontent</xml>'
'''
return salt.utils.http.query(url=url, opts=__opts__, **kwargs)
|
# -*- coding: utf-8 -*-
'''
Module for making various web calls. Primarily designed for webhooks and the
like, but also useful for basic http testing.
'''
from __future__ import absolute_import
# Import salt libs
import salt.utils.http
def query(url, **kwargs):
'''
Query a resource, and decode the return data
CLI Example:
.. code-block:: bash
salt '*' http.query http://somelink.com/
salt '*' http.query http://somelink.com/ method=POST \
params='key1=val1&key2=val2'
salt '*' http.query http://somelink.com/ method=POST \
data='<xml>somecontent</xml>'
'''
return salt.utils.http.query(url=url, opts=__opts__, **kwargs)
def update_ca_bundle(target=None, source=None, merge_files=None):
'''
Update the local CA bundle file from a URL
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle
salt '*' http.update_ca_bundle target=/path/to/cacerts.pem
salt '*' http.update_ca_bundle source=https://example.com/cacerts.pem
If the ``target`` is not specified, it will be pulled from the ``ca_cert``
configuration variable available to the minion. If it cannot be found there,
it will be placed at ``<<FILE_ROOTS>>/cacerts.pem``.
If the ``source`` is not specified, it will be pulled from the
``ca_cert_url`` configuration variable available to the minion. If it cannot
be found, it will be downloaded from the cURL website, using an http (not
https) URL. USING THE DEFAULT URL SHOULD BE AVOIDED!
``merge_files`` may also be specified, which includes a string or list of
strings representing a file or files to be appended to the end of the CA
bundle, once it is downloaded.
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle merge_files=/path/to/mycert.pem
'''
if target is None:
target = __salt__['config.get']('ca_bundle', None)
if source is None:
source = __salt__['config.get']('ca_bundle_url', None)
return salt.utils.http.update_ca_bundle(
target, source, __opts__, merge_files
)
|
Allow execution module to update_ca_bundle
|
Allow execution module to update_ca_bundle
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
25e5b38b09a21cd6e6fbf4ba141bc35bb34cb77e
|
Core/views.py
|
Core/views.py
|
from django.shortcuts import render
# Create your views here.
|
from django.http import HttpResponse, HttpResponseNotFound, Http404
from django.shortcuts import render, redirect
from django.middleware.csrf import get_token
from models import *
class view():
request = ''
template = ''
isSecuredArea = True
isUserAuthenticated = False
#Normal Overridable methods
@abstractmethod
def getView():
pass
@abstractmethod
def getTemplate():
pass
def isPageSecured(): #Override to unsecure page
self.isSecuredArea = True
#Request Life Cycle Methods
def handleRequest(request):
self.setUpView(request)
if securityFails():
return self.handleAuthenticationFailue()
self.getTemplate()
content = getView()
return returnView(content)
def setUpView(request):
self.request = request
self.isSecuredArea = isPageSecured()
self.isUserAuthenticated = request.user.is_authenticated()
def returnView(parameters={}):
if self.template != '':
return render(self.request, self.template, {'csrfmiddlewaretoken':get_token(request), 'room':room, 'links': links})
else :
raise Http404
#Security Methods
def securityFails():
if not self.isUserAuthenticated and self.isSecuredArea:
return True
else:
return False
def handleAuthenticationFailue():
return redirect('/Login?next=%s' % request.path)
#Get Room Method
def getCurrentRoom():
roomString = self.request.GET.get('room', 'All')
if roomString != 'All':
return Rooms.object.filter(id=roomString)
else return null;
#Sidebar Methods
def getSideBar():
currentRoom = getCurrentRoom()
links = [{'title': 'All Rooms', 'address': '?', 'active': getSideBarActiveState(null, currentRoom)}]
for room in Rooms.objects.all():
address = '?room=' + room.Name
sidebarItem = {'title': room.Name.replace("_", " ") , 'address': address , 'active':getSideBarActiveState(room, currentRoom)}
links.append(sidebarItem)
return links
def getSideBarActiveState(sidebarItem, currentPage):
if sidebarItem == currentPage:
return 'active'
else:
return ''
|
Add core view hangler class
|
Add core view hangler class
|
Python
|
mit
|
Tomcuzz/OctaHomeAutomation,Tomcuzz/OctaHomeAutomation,Tomcuzz/OctaHomeAutomation,Tomcuzz/OctaHomeAutomation
|
9f9357bc46f813cd8a26a5f14bba5364aa4a4c10
|
rx/core/operators/contains.py
|
rx/core/operators/contains.py
|
from typing import Callable, Optional, TypeVar
from rx import operators as ops
from rx.core import Observable, pipe, typing
from rx.internal.basic import default_comparer
_T = TypeVar("_T")
def contains_(
value: _T, comparer: Optional[typing.Comparer[_T]] = None
) -> Callable[[Observable[_T]], Observable[bool]]:
comparer_ = comparer or default_comparer
filtering = ops.filter(lambda v: comparer_(v, value))
something = ops.some()
return pipe(filtering, something)
__all__ = ["contains_"]
|
from typing import Callable, Optional, TypeVar
from rx import operators as ops
from rx.core import Observable, pipe, typing
from rx.internal.basic import default_comparer
_T = TypeVar("_T")
def contains_(
value: _T, comparer: Optional[typing.Comparer[_T]] = None
) -> Callable[[Observable[_T]], Observable[bool]]:
comparer_ = comparer or default_comparer
def predicate(v: _T) -> bool:
return comparer_(v, value)
filtering = ops.filter(predicate)
something = ops.some()
return pipe(filtering, something)
__all__ = ["contains_"]
|
Use typed function instead of lambda
|
Use typed function instead of lambda
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY
|
86c106fc95946e4558fabfae57bbd039b248a70c
|
mindbender/maya/plugins/validate_single_shape.py
|
mindbender/maya/plugins/validate_single_shape.py
|
import pyblish.api
class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
"""One mesh per transform"""
label = "Validate Single Shape"
order = pyblish.api.ValidatorOrder
hosts = ["maya"]
active = False
optional = True
families = [
"mindbender.model",
"mindbender.lookdev"
]
def process(self, instance):
from maya import cmds
has_multiple_shapes = list()
for node in instance:
children = cmds.listRelatives(node, allDescendents=True) or list()
shapes = cmds.listRelatives(node, shapes=True) or list()
# Ensure there is only one child; there could be many,
# including other transform nodes.
has_single_shape = len(children) == 1
# Ensure the one child is a shape
has_single_child = len(shapes) == 1
# Ensure the one child is of type "mesh"
has_single_mesh = cmds.nodeType(shapes[0]) == "mesh"
if not all([has_single_child,
has_single_shape,
has_single_mesh]):
has_multiple_shapes.append(node)
assert not has_multiple_shapes, (
"\"%s\" has transforms with multiple shapes: %s" % (
instance, ", ".join(
"\"" + member + "\"" for member in has_multiple_shapes))
)
|
import pyblish.api
class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
"""Transforms with a mesh must ever only contain a single mesh
This ensures models only contain a single shape node.
"""
label = "Validate Single Shape"
order = pyblish.api.ValidatorOrder
hosts = ["maya"]
families = [
"mindbender.model",
]
def process(self, instance):
from maya import cmds
has_multiple_shapes = list()
# Consider entire hierarchy of nodes included in an Instance
hierarchy = cmds.listRelatives(instance, allDescendents=True)
# Consider only nodes of type="mesh"
meshes = cmds.ls(hierarchy, type="mesh", long=True)
transforms = cmds.listRelatives(meshes, parent=True)
for transform in set(transforms):
shapes = cmds.listRelatives(transform, shapes=True) or list()
# Ensure the one child is a shape
has_single_shape = len(shapes) == 1
self.log.info("has single shape: %s" % has_single_shape)
# Ensure the one shape is of type "mesh"
has_single_mesh = (
has_single_shape and
cmds.nodeType(shapes[0]) == "mesh"
)
self.log.info("has single mesh: %s" % has_single_mesh)
if not all([has_single_shape, has_single_mesh]):
has_multiple_shapes.append(transform)
assert not has_multiple_shapes, (
"\"%s\" has transforms with multiple shapes: %s" % (
instance, ", ".join(
"\"" + member + "\"" for member in has_multiple_shapes))
)
|
Repair validate single shape validator
|
Repair validate single shape validator
|
Python
|
mit
|
mindbender-studio/core,MoonShineVFX/core,getavalon/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core
|
2e0585a59e7c3c60b8bf7e0a8d5e377b7f2f9cd5
|
grammar/entities/adjectives/deff.py
|
grammar/entities/adjectives/deff.py
|
from pyparsing import *
from ...constants.math.deff import NUM, FULLNUM
from ...constants.zones.deff import TOP, BOTTOM
from ...constants.verbs.deff import *
from ...mana.deff import color
from ...types.deff import nontype, supertype
from ...functions.deff import delimitedListAnd, delimitedListOr
from decl import *
topnum << (TOP|BOTTOM) + (NUM|FULLNUM)
attacking << ATTACK
blocking << BLOCK
tapped << TAP
untapped << UNTAP
enchanted << ENCHANT
equipped << EQUIP
exiled << EXILE
sacrificed << SACRIFICE
haunted << HAUNT
adjective << (
color
| nontype
| supertype
| topnum
| attacking
| blocking
| tapped
| untapped
| enchanted
| equipped
| exiled
| sacrificed
| haunted
)
andadjectives << delimitedListAnd(adjective)
oradjectives << delimitedListOr(adjective)
adjectives << OneOrMore(andadjectives ^ oradjectives)
|
from pyparsing import *
from ...constants.math.deff import NUM, FULLNUM
from ...constants.zones.deff import TOP, BOTTOM
from ...constants.verbs.deff import *
from ...mana.deff import color
from ...types.deff import nontype, supertype
from ...functions.deff import delimitedListAnd, delimitedListOr
from decl import *
topnum << (TOP|BOTTOM) + (NUM|FULLNUM)
attacking << ATTACK
blocking << BLOCK
tapped << TAP
untapped << UNTAP
enchanted << ENCHANT
equipped << EQUIP
exiled << EXILE
sacrificed << SACRIFICE
haunted << HAUNT
adjective << (
color
| nontype
| supertype
| topnum
| attacking
| blocking
| tapped
| untapped
| enchanted
| equipped
| exiled
| sacrificed
| haunted
)
# 'and' captures both 'legendary creature' (juxtaposed) and 'black and red' (joined)
# 'or' will capture explicit disjunctions 'black or red'
# but since it will come after the ^, not juxtapositions (taken by 'and')
# so the 'one or more' allows 'legendary black or red'
# to be correctly interpreted as (A and (B or C))
# it's non-intuitive, but it works
# at the same time, it forces us to use ^ instead of |
# or "target artifact, enchantment or land"
# becomes ((A and B) or C)
andadjectives << delimitedListAnd(adjective)
oradjectives << delimitedListOr(adjective)
adjectives << OneOrMore(andadjectives ^ oradjectives)
|
Add commentary explaining and/or lists
|
Add commentary explaining and/or lists
|
Python
|
mit
|
jrgdiz/cardwalker,jrgdiz/cardwalker
|
400027592a131872da5754306ee5e0ec2eba61cf
|
tests/test_err.py
|
tests/test_err.py
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg, = exc_info.value.args
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
"""RasterioIOError is raised when a disk file can't be opened.
Newlines are removed from GDAL error messages."""
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg, = exc_info.value.args
assert "\n" not in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
Check msg in a way that passes for all GDAL versions
|
Check msg in a way that passes for all GDAL versions
|
Python
|
bsd-3-clause
|
kapadia/rasterio,brendan-ward/rasterio,kapadia/rasterio,kapadia/rasterio,brendan-ward/rasterio,brendan-ward/rasterio
|
2717a35a78f5982f96d57e258dfedd308cb6ffa8
|
hoomd/typeparam.py
|
hoomd/typeparam.py
|
from hoomd.parameterdicts import AttachedTypeParameterDict
class TypeParameter:
def __init__(self, name, type_kind, param_dict):
self.name = name
self.type_kind = type_kind
self.param_dict = param_dict
def __getitem__(self, key):
return self.param_dict[key]
def __setitem__(self, key, value):
self.param_dict[key] = value
@property
def default(self):
return self.param_dict.default
@default.setter
def default(self, value):
self.param_dict.default = value
def attach(self, cpp_obj, sim):
self.param_dict = AttachedTypeParameterDict(cpp_obj,
self.name,
self.type_kind,
self.param_dict,
sim)
return self
def detach(self):
self.param_dict = self.param_dict.to_dettached()
return self
def to_dict(self):
return self.param_dict.to_dict()
|
from hoomd.parameterdicts import AttachedTypeParameterDict
class TypeParameter:
    """Named per-type parameter whose storage is delegated to ``param_dict``."""
    def __init__(self, name, type_kind, param_dict):
        self.name = name
        self.type_kind = type_kind
        self.param_dict = param_dict
    def __getitem__(self, key):
        # Per-type lookup delegates to the underlying parameter dict.
        return self.param_dict[key]
    def __setitem__(self, key, value):
        self.param_dict[key] = value
    @property
    def default(self):
        # Default applied to types without an explicit entry.
        return self.param_dict.default
    @default.setter
    def default(self, value):
        self.param_dict.default = value
    def attach(self, cpp_obj, sim):
        # Swap in a dict implementation bound to the C++ object/simulation.
        self.param_dict = AttachedTypeParameterDict(cpp_obj,
                                                    self.name,
                                                    self.type_kind,
                                                    self.param_dict,
                                                    sim)
        return self
    def detach(self):
        # Restore the detached dict representation.
        self.param_dict = self.param_dict.to_dettached()
        return self
    def to_dict(self):
        return self.param_dict.to_dict()
    def keys(self):
        # Iterate over the type keys known to the underlying dict.
        yield from self.param_dict.keys()
|
Add keys iterator for ``TypeParameter``
|
Add keys iterator for ``TypeParameter``
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
d2250ac74b0797d1662c054d2357573578caa251
|
core/tasks.py
|
core/tasks.py
|
import os
import gzip
import urllib.request
from celery import shared_task
from django.core.mail import EmailMessage
from celery.task import periodic_task
from celery.schedules import crontab
@shared_task(name='deliver_email')
def deliver_email(subject=None, body=None, recipients=None):
    """Send ``body`` as an individual email to each address in ``recipients``.

    Each recipient gets a separate message so addresses are not disclosed
    to one another. A falsy/empty ``recipients`` is a silent no-op.
    """
    # Commented-out debug prints removed; use logging if tracing is needed.
    if recipients:
        for recipient in recipients:
            email = EmailMessage(subject, body, to=[recipient])
            email.send()
@periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7'))
def update_geolocation(self):
    """Refresh the bundled MaxMind GeoLite2 city database (runs monthly).

    Downloads the gzipped database next to this module, decompresses it to
    'GeoLite2-City.mmdb', then deletes the compressed download.
    """
    # Establish desired paths and directories
    current_directory = os.path.dirname(__file__)
    compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz')
    uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb')
    # Pull down current database file
    url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz"
    urllib.request.urlretrieve(url, compressed_filepath)
    # Context managers close both handles even when the copy raises
    # (the previous explicit close() calls leaked on exceptions).
    with gzip.open(compressed_filepath, "rb") as zipped:
        with open(uncompressed_filepath, "wb") as uncompressed:
            uncompressed.write(zipped.read())
    # Remove zipped file
    os.remove(compressed_filepath)
|
import os
import gzip
import urllib.request
from celery import shared_task
from django.core.mail import EmailMessage
from celery.task import periodic_task
from celery.schedules import crontab
@shared_task(name='deliver_email')
def deliver_email(subject=None, body=None, recipients=None):
    """Send ``body`` individually to each address in ``recipients``.

    One message per recipient avoids disclosing addresses; a falsy
    ``recipients`` is a no-op.
    """
    if recipients:
        for recipient in recipients:
            email = EmailMessage(subject, body, to=[recipient])
            email.send()
@periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7'))
def update_geolocation(self):
    """Refresh the bundled MaxMind GeoLite2 city database (runs monthly)."""
    # Establish desired paths and directories
    current_directory = os.path.dirname(__file__)
    compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz')
    uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb')
    # Pull down current database file
    url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz"
    urllib.request.urlretrieve(url, compressed_filepath)
    # Context managers close both handles even when the copy raises
    # (the previous explicit close() calls leaked on exceptions).
    with gzip.open(compressed_filepath, "rb") as zipped:
        with open(uncompressed_filepath, "wb") as uncompressed:
            uncompressed.write(zipped.read())
    # Remove zipped file
    os.remove(compressed_filepath)
|
Clean up code and remove print statements
|
Clean up code and remove print statements
|
Python
|
mit
|
LindaTNguyen/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID
|
96aa6271a4dab8c4e222c4161ab9ad06472b4f19
|
orges/test/integration/test_main.py
|
orges/test/integration/test_main.py
|
from __future__ import division, print_function, with_statement
from nose.tools import eq_
from orges.main import optimize
from orges.optimizer.gridsearch import GridSearchOptimizer
from orges.test.util.one_param_sleep_and_negate_f import f
def test_optimize_running_too_long_aborts():
    # A 1-second timeout against a sleeping objective forces an early abort.
    optimizer = GridSearchOptimizer()
    val = optimize(f, timeout=1, optimizer=optimizer)
    # f(a=0) is 0, f(a=1) is -1. Because of the timeout we never see a=1,
    # hence we expect the minimum found before the timeout to be 0.
    eq_(str(val), "(a=0,)")
if __name__ == '__main__':
import nose
nose.runmodule()
|
from __future__ import division, print_function, with_statement
from nose.tools import eq_
from orges.main import optimize
from orges.optimizer.gridsearch import GridSearchOptimizer
from orges.test.util.one_param_sleep_and_negate_f import f
def test_optimize_running_too_long_aborts():
    # A 1-second timeout against a sleeping objective forces an early abort.
    optimizer = GridSearchOptimizer()
    result = optimize(f, timeout=1, optimizer=optimizer)
    # f(a=0) is 0, f(a=1) is -1. Because of the timeout we never see a=1,
    # hence we expect the minimum found before the timeout to be 0.
    eq_(result[0].value, 0)
if __name__ == '__main__':
import nose
nose.runmodule()
|
Fix test for optimize method
|
Fix test for optimize method
|
Python
|
bsd-3-clause
|
cigroup-ol/metaopt,cigroup-ol/metaopt,cigroup-ol/metaopt
|
7f83888c957b892e6cc9d2e92f49a2737a9eabfe
|
logstash_handler/__init__.py
|
logstash_handler/__init__.py
|
from logging.handlers import SocketHandler
import ssl
class LogstashHandler(SocketHandler):
    """
    Sends output to an optionally encrypted streaming logstash TCP listener.
    """
    def __init__(self, host, port, keyfile=None, certfile=None, ssl=True):
        # keyfile/certfile supply client-side TLS credentials when ssl=True.
        SocketHandler.__init__(self, host, port)
        self.keyfile = keyfile
        self.certfile = certfile
        self.ssl = ssl
    def makeSocket(self, timeout=1):
        # Wrap the plain socket from SocketHandler in TLS when requested.
        # NOTE(review): ssl.wrap_socket is deprecated and removed in Python
        # 3.12 — SSLContext.wrap_socket is the modern replacement.
        s = SocketHandler.makeSocket(self, timeout)
        if self.ssl:
            return ssl.wrap_socket(s, keyfile=self.keyfile, certfile=self.certfile)
        return s
    def makePickle(self, record):
        """
        Just format the record according to the formatter. A new line is appended to
        support streaming listeners.
        """
        return self.format(record) + "\n"
|
from logging.handlers import SocketHandler
import ssl
class LogstashHandler(SocketHandler):
    """
    Sends output to an optionally encrypted streaming logstash TCP listener.
    """
    def __init__(self, host, port, keyfile=None, certfile=None, ca_certs=None, ssl=True):
        # keyfile/certfile: client-side TLS credentials. ca_certs: CA bundle
        # forwarded to wrap_socket. NOTE(review): without cert_reqs=
        # CERT_REQUIRED the server certificate is not actually verified —
        # confirm whether verification was intended.
        SocketHandler.__init__(self, host, port)
        self.keyfile = keyfile
        self.certfile = certfile
        self.ca_certs = ca_certs
        self.ssl = ssl
    def makeSocket(self, timeout=1):
        # Wrap the plain socket in TLS when requested.
        s = SocketHandler.makeSocket(self, timeout)
        if self.ssl:
            return ssl.wrap_socket(s, keyfile=self.keyfile, certfile=self.certfile, ca_certs=self.ca_certs)
        return s
    def makePickle(self, record):
        """
        Just format the record according to the formatter. A new line is appended to
        support streaming listeners.
        """
        return self.format(record) + "\n"
|
Add support for CA certificates
|
Add support for CA certificates
better SSL support
|
Python
|
mit
|
klynch/python-logstash-handler
|
09fa1e01c6de9dffc99c7726607d64c843b564ba
|
osgtest/tests/test_53_gums.py
|
osgtest/tests/test_53_gums.py
|
import os
import pwd
import unittest
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.tomcat as tomcat
import osgtest.library.osgunittest as osgunittest
class TestGUMS(osgunittest.OSGTestCase):
    def test_01_map_user(self):
        # Skip unless the GUMS service package is installed.
        core.skip_ok_unless_installed('gums-service')
        # NOTE(review): host_dn is computed but unused in this variant.
        host_dn, _ = core.certificate_info(core.config['certs.hostcert'])
        # Resolve the test user's certificate from their ~/.globus directory.
        pwd_entry = pwd.getpwnam(core.options.username)
        cert_path = os.path.join(pwd_entry.pw_dir, '.globus', 'usercert.pem')
        user_dn, _ = core.certificate_info(cert_path)
        command = ('gums-host', 'mapUser', user_dn)
        core.check_system(command, 'Map GUMS user')
|
import os
import pwd
import unittest
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.tomcat as tomcat
import osgtest.library.osgunittest as osgunittest
class TestGUMS(osgunittest.OSGTestCase):
    def test_01_map_user(self):
        # Skip unless the GUMS service package is installed.
        core.skip_ok_unless_installed('gums-service')
        # The host certificate DN is passed via '--serv' below.
        host_dn, _ = core.certificate_info(core.config['certs.hostcert'])
        # Resolve the test user's certificate from their ~/.globus directory.
        pwd_entry = pwd.getpwnam(core.options.username)
        cert_path = os.path.join(pwd_entry.pw_dir, '.globus', 'usercert.pem')
        user_dn, _ = core.certificate_info(cert_path)
        command = ('gums', 'mapUser', '--serv', host_dn, user_dn)
        core.check_system(command, 'Map GUMS user')
|
Revert accidental gums test change from previous commit.
|
Revert accidental gums test change from previous commit.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17355 4e558342-562e-0410-864c-e07659590f8c
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
256a86b9cfbf2f78fc913b87997dd89673d177c5
|
custom/icds_reports/migrations/0070_ccsrecordmonthly_closed.py
|
custom/icds_reports/migrations/0070_ccsrecordmonthly_closed.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-11 14:35
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations, models
from corehq.sql_db.operations import RawSQLMigration
from custom.icds_reports.utils.migrations import get_view_migrations
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates'))
class Migration(migrations.Migration):
    # NOTE(review): AddField targets 'CcsRecordMonthlyView' with a field
    # named 'open_in_month' although the migration filename says
    # 'ccsrecordmonthly_closed' — confirm against the model definitions.
    dependencies = [
        ('icds_reports', '0069_valid_visits'),
    ]
    operations = [
        migrations.AddField(
            model_name='CcsRecordMonthlyView',
            name='open_in_month',
            field=models.SmallIntegerField(blank=True, null=True),
        ),
    ]
    # Also rebuild the SQL views after the field change.
    operations.extend(get_view_migrations())
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-11 14:35
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations, models
from corehq.sql_db.operations import RawSQLMigration
from custom.icds_reports.utils.migrations import get_view_migrations
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates'))
class Migration(migrations.Migration):
    # No schema changes here; only the SQL view rebuilds below are applied.
    dependencies = [
        ('icds_reports', '0069_valid_visits'),
    ]
    operations = [
    ]
    operations.extend(get_view_migrations())
|
Remove adding field to View model
|
Remove adding field to View model
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
49c00236569d48f651bd8f2226907d5c784cbe77
|
json262/json262.py
|
json262/json262.py
|
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
"""
Serialize data to/from JSON
Inspired by https://github.com/django/django/blob/master/django/core/serializers/json.py
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import decimal
import json
class JSON262Encoder(json.JSONEncoder):
    """
    JSON encoder aiming to be fully compliant with ECMA-262.
    """

    def default(self, o):
        # See "Date Time String Format" in the ECMA-262 specification:
        # millisecond precision, and a '+00:00' offset written as 'Z'.
        if isinstance(o, datetime.datetime):
            text = o.isoformat()
            if o.microsecond:
                # Trim microseconds down to milliseconds, keeping any offset.
                text = text[:23] + text[26:]
            return text[:-6] + 'Z' if text.endswith('+00:00') else text
        if isinstance(o, datetime.date):
            return o.isoformat()
        if isinstance(o, datetime.time):
            text = o.isoformat()
            if o.microsecond:
                # Truncate to milliseconds (this also drops any UTC offset).
                text = text[:12]
            return text[:-6] + 'Z' if text.endswith('+00:00') else text
        if isinstance(o, decimal.Decimal):
            # Serialize decimals as strings to avoid binary-float rounding.
            return str(o)
        return super(JSON262Encoder, self).default(o)
|
Bring in encoder from webhooks.
|
Bring in encoder from webhooks.
|
Python
|
bsd-3-clause
|
audreyr/standardjson,audreyr/standardjson
|
94b716142a575e73d906f332fda84d68b549d5cd
|
trove/tests/unittests/util/util.py
|
trove/tests/unittests/util/util.py
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
DB_SETUP = None
def init_db():
    """Create and configure the test database exactly once per process.

    NOTE(review): the check-then-set on DB_SETUP is not thread-safe.
    """
    global DB_SETUP
    if DB_SETUP:
        return
    # Imports are deferred so importing this module has no side effects.
    from trove.common import cfg
    from trove.db import get_db_api
    from trove.db.sqlalchemy import session
    CONF = cfg.CONF
    db_api = get_db_api()
    db_api.db_sync(CONF)
    session.configure_db(CONF)
    DB_SETUP = True
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from trove.common import cfg
from trove.db import get_db_api
from trove.db.sqlalchemy import session
CONF = cfg.CONF
DB_SETUP = None
LOCK = threading.Lock()
def init_db():
    """Create and configure the test database once, safely across threads."""
    # The lock prevents concurrent test runners from performing setup twice.
    with LOCK:
        global DB_SETUP
        if not DB_SETUP:
            db_api = get_db_api()
            db_api.db_sync(CONF)
            session.configure_db(CONF)
            DB_SETUP = True
|
Fix concurrency issue with Python 3.4 test
|
Fix concurrency issue with Python 3.4 test
We have been seeing failures in parallel Py34
tests caused by the test database being set up more
than once.
The existing mechanism is not thread-safe.
Add a lock around the database setup to ensure
the it is ever executed by only one thread.
Partially implements: blueprint trove-python3
Change-Id: I68aba50d60b912384080911a6f78283f027c4ee3
|
Python
|
apache-2.0
|
zhangg/trove,zhangg/trove,hplustree/trove,openstack/trove,openstack/trove,hplustree/trove
|
a44c71cf25672606bd866014982b18836acc46ef
|
string/reverse.py
|
string/reverse.py
|
# Reverse each word in a sentence
def reverse_sentence(string, separator):
    """Print the words of ``string`` in reverse order, joined by ``separator``.

    NOTE(review): despite the header comment, this reverses word *order*,
    not the letters within each word. Uses the Python 2 print statement.
    """
    # string_list = string.split()
    # flipped_list = string_list[::-1]
    flipped_list = (string.split())[::-1] # split string into list and then reverse order of elements in list
    output = separator.join(flipped_list)
    print output
|
# Reverse each word in a sentence
def reverse_sentence(string):
    """Print ``string`` with each word's letters reversed, word order kept.

    Uses the Python 2 print statement.
    """
    string_list = string.split() # split string by word into list
    output = ' '.join([word[::-1] for word in string_list]) # reverse each element/word in list and consolidate into single string
    print output
# test cases
test = "Hey dude!"
reverse_sentence(test)
test2 = "dude"
reverse_sentence(test2)
|
Debug method and add test cases
|
Debug method and add test cases
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
d029c67f59ce65f9ad651b2e261e7f29ef8c2ca2
|
sync_scheduler.py
|
sync_scheduler.py
|
from tapiriik.database import db
from tapiriik.messagequeue import mq
import kombu
from datetime import datetime
import time
channel = mq.channel()
# Direct exchange used to route per-user sync jobs to workers.
exchange = kombu.Exchange("tapiriik-users", type="direct")(channel)
exchange.declare()
producer = kombu.Producer(channel, exchange)
# Poll loop: every second, queue all users whose synchronization is due.
while True:
    queueing_at = datetime.utcnow()
    users = db.users.find(
        {
            "NextSynchronization": {"$lte": datetime.utcnow()}
        },
        {
            "_id": True,
            "SynchronizationHostRestriction": True
        }
    ).sort("NextSynchronization")
    scheduled_ids = set()
    for user in users:
        # Route to a pinned host when set, else the default (empty) key.
        producer.publish(str(user["_id"]), routing_key=user["SynchronizationHostRestriction"] if "SynchronizationHostRestriction" in user and user["SynchronizationHostRestriction"] else "")
        scheduled_ids.add(user["_id"])
    print("Scheduled %d users at %s" % (len(scheduled_ids), datetime.utcnow()))
    # Stamp queued users and clear the schedule so they are not re-queued.
    db.users.update({"_id": {"$in": list(scheduled_ids)}}, {"$set": {"QueuedAt": queueing_at}, "$unset": {"NextSynchronization": True}}, multi=True)
    time.sleep(1)
|
from tapiriik.database import db
from tapiriik.messagequeue import mq
from tapiriik.sync import Sync
import kombu
from datetime import datetime
import time
# Declare the exchange/queues the workers consume from before publishing.
Sync.InitializeWorkerBindings()
producer = kombu.Producer(Sync._channel, Sync._exchange)
# Poll loop: every second, queue all users whose synchronization is due.
while True:
    queueing_at = datetime.utcnow()
    users = db.users.find(
        {
            "NextSynchronization": {"$lte": datetime.utcnow()}
        },
        {
            "_id": True,
            "SynchronizationHostRestriction": True
        }
    ).sort("NextSynchronization")
    scheduled_ids = set()
    for user in users:
        # Route to a pinned host when set, else the default (empty) key.
        producer.publish(str(user["_id"]), routing_key=user["SynchronizationHostRestriction"] if "SynchronizationHostRestriction" in user and user["SynchronizationHostRestriction"] else "")
        scheduled_ids.add(user["_id"])
    print("Scheduled %d users at %s" % (len(scheduled_ids), datetime.utcnow()))
    # Stamp queued users and clear the schedule so they are not re-queued.
    db.users.update({"_id": {"$in": list(scheduled_ids)}}, {"$set": {"QueuedAt": queueing_at}, "$unset": {"NextSynchronization": True}}, multi=True)
    time.sleep(1)
|
Declare relevant queues in sync scheduler
|
Declare relevant queues in sync scheduler
|
Python
|
apache-2.0
|
campbellr/tapiriik,niosus/tapiriik,dlenski/tapiriik,niosus/tapiriik,cmgrote/tapiriik,gavioto/tapiriik,cpfair/tapiriik,cpfair/tapiriik,abhijit86k/tapiriik,cheatos101/tapiriik,abhijit86k/tapiriik,niosus/tapiriik,cmgrote/tapiriik,dmschreiber/tapiriik,brunoflores/tapiriik,marxin/tapiriik,campbellr/tapiriik,cgourlay/tapiriik,abs0/tapiriik,brunoflores/tapiriik,campbellr/tapiriik,mjnbike/tapiriik,cgourlay/tapiriik,cheatos101/tapiriik,cmgrote/tapiriik,brunoflores/tapiriik,dlenski/tapiriik,cheatos101/tapiriik,mjnbike/tapiriik,mduggan/tapiriik,campbellr/tapiriik,cgourlay/tapiriik,cmgrote/tapiriik,brunoflores/tapiriik,dmschreiber/tapiriik,niosus/tapiriik,cgourlay/tapiriik,olamy/tapiriik,dlenski/tapiriik,mduggan/tapiriik,mjnbike/tapiriik,abhijit86k/tapiriik,olamy/tapiriik,olamy/tapiriik,gavioto/tapiriik,gavioto/tapiriik,cpfair/tapiriik,marxin/tapiriik,cpfair/tapiriik,abs0/tapiriik,abhijit86k/tapiriik,cheatos101/tapiriik,olamy/tapiriik,mduggan/tapiriik,dmschreiber/tapiriik,abs0/tapiriik,dlenski/tapiriik,gavioto/tapiriik,mduggan/tapiriik,mjnbike/tapiriik,marxin/tapiriik,marxin/tapiriik,abs0/tapiriik,dmschreiber/tapiriik
|
8322c776fe989d65f83beaefff5089716d0286e7
|
test/test_pydh.py
|
test/test_pydh.py
|
import pyDH
def test_pydh_keygen():
    # Diffie-Hellman round trip: two parties must derive identical shared
    # secrets from each other's public keys.
    d1 = pyDH.DiffieHellman()
    d2 = pyDH.DiffieHellman()
    d1_pubkey = d1.gen_public_key()
    d2_pubkey = d2.gen_public_key()
    d1_sharedkey = d1.gen_shared_key(d2_pubkey)
    d2_sharedkey = d2.gen_shared_key(d1_pubkey)
    assert d1_sharedkey == d2_sharedkey
|
import sys
sys.path.append('.')
import pyDH
def test_pydh_keygen():
    # Diffie-Hellman round trip: two parties must derive identical shared
    # secrets from each other's public keys.
    d1 = pyDH.DiffieHellman()
    d2 = pyDH.DiffieHellman()
    d1_pubkey = d1.gen_public_key()
    d2_pubkey = d2.gen_public_key()
    d1_sharedkey = d1.gen_shared_key(d2_pubkey)
    d2_sharedkey = d2.gen_shared_key(d1_pubkey)
    assert d1_sharedkey == d2_sharedkey
|
Add current dir to Python path
|
Add current dir to Python path
|
Python
|
apache-2.0
|
amiralis/pyDH
|
4e309e7f70760e400dc7150b34e7f86c4c5643b4
|
golddust/packages.py
|
golddust/packages.py
|
# Copyright 2015-2017 John "LuaMilkshake" Marion
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GoldDust Packages Classes/Utilities
"""
class Package:
    """A package managed by GoldDust"""

    def __init__(self):
        self.name = ""
        self.version = ""

    @property
    def tarball(self):
        """Tarball file name, i.e. ``<name>-<version>.tar.bz2``."""
        return "{0}-{1}.tar.bz2".format(self.name, self.version)

    @property
    def sig_file(self):
        """Detached signature file name derived from :attr:`tarball`."""
        return self.tarball + ".sig"
class InstallScript:
    """Package pre/post install action script.
    """

    def pre_install(self):
        """Called before any files are installed.
        """
        # Default implementation is a no-op; packages override as needed.
        return None

    def post_install(self):
        """Called after files are installed.
        """
        return None
|
# Copyright 2015-2017 John "LuaMilkshake" Marion
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GoldDust Packages Classes/Utilities
"""
class Package:
    """A package managed by GoldDust"""
    def __init__(self):
        # Package identity; both default to empty strings until assigned.
        self.name = ""
        self.version = ""
    @property
    def tarball(self):
        """The tarball file name for this package."""
        return "{}-{}.tar.bz2".format(self.name, self.version)
    @property
    def sig_file(self):
        """The detached signature file name for this package."""
        return "{}.sig".format(self.tarball)
class InstallScript:
    """Package pre/post install action script.
    These functions are used to perform extra work beyond extracting
    files.
    Note that JAR modification should only be done using the `munge_jar`
    function. This lets GoldDust know that you're modifying the JAR so it
    can properly handle other JAR mod packages as well.
    """
    def pre_install(self):
        """Called before any files are installed.
        """
        # No-op by default; packages override as needed.
        pass
    def munge_jar(self, jar):
        """Modify the Minecraft JAR file.
        """
        pass
    def post_install(self):
        """Called after files are installed.
        """
        pass
|
Add munge_jar stub for InstallScript
|
Add munge_jar stub for InstallScript
|
Python
|
apache-2.0
|
Packeteers/GoldDust
|
4fb3ff629f88935a6dcd905f9268eb953b6ad7fb
|
src/syft/grid/client/request_api/group_api.py
|
src/syft/grid/client/request_api/group_api.py
|
# stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.group_messages import CreateGroupMessage
from ...messages.group_messages import DeleteGroupMessage
from ...messages.group_messages import GetGroupMessage
from ...messages.group_messages import GetGroupsMessage
from ...messages.group_messages import UpdateGroupMessage
from .request_api import GridRequestAPI
class GroupRequestAPI(GridRequestAPI):
    # Passed to GridRequestAPI as the key under which responses carry the
    # group payload.
    response_key = "group"
    def __init__(self, send):
        # Wire the group CRUD message types into the generic request API.
        super().__init__(
            create_msg=CreateGroupMessage,
            get_msg=GetGroupMessage,
            get_all_msg=GetGroupsMessage,
            update_msg=UpdateGroupMessage,
            delete_msg=DeleteGroupMessage,
            send=send,
            response_key=GroupRequestAPI.response_key,
        )
    def __getitem__(self, key):
        # Dict-style access: api[group_id] -> group payload.
        return self.get(group_id=key)
    def __delitem__(self, key):
        # Dict-style deletion: del api[group_id].
        self.delete(group_id=key)
|
# stdlib
from typing import Any
from typing import Callable
# syft relative
from ...messages.group_messages import CreateGroupMessage
from ...messages.group_messages import DeleteGroupMessage
from ...messages.group_messages import GetGroupMessage
from ...messages.group_messages import GetGroupsMessage
from ...messages.group_messages import UpdateGroupMessage
from .request_api import GridRequestAPI
class GroupRequestAPI(GridRequestAPI):
    # Passed to GridRequestAPI as the key under which responses carry the
    # group payload.
    response_key = "group"
    def __init__(self, send: Callable):
        # Wire the group CRUD message types into the generic request API.
        super().__init__(
            create_msg=CreateGroupMessage,
            get_msg=GetGroupMessage,
            get_all_msg=GetGroupsMessage,
            update_msg=UpdateGroupMessage,
            delete_msg=DeleteGroupMessage,
            send=send,
            response_key=GroupRequestAPI.response_key,
        )
    def __getitem__(self, key: int) -> Any:
        # Dict-style access: api[group_id] -> group payload.
        return self.get(group_id=key)
    def __delitem__(self, key: int) -> Any:
        # Dict-style deletion: del api[group_id].
        self.delete(group_id=key)
|
Update Group API - ADD type hints - Remove unused imports
|
Update Group API
- ADD type hints
- Remove unused imports
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
599760942e556c5d23deb0904beafcdf11235595
|
stoneridge_reporter.py
|
stoneridge_reporter.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import glob
import os
import requests
import stoneridge
class StoneRidgeReporter(object):
    """Uploads queued result JSON files to the report server.

    Each '*.json' file under the configured server directory is POSTed to
    the report URL and deleted only after a successful upload, so failed
    uploads are retried on the next run.
    """

    def __init__(self):
        self.rootdir = stoneridge.get_config('server', 'directory')
        self.pattern = os.path.join(self.rootdir, '*.json')
        self.url = stoneridge.get_config('report', 'url')

    def run(self):
        """POST every queued file; unlink each one only after success."""
        files = glob.glob(self.pattern)
        for fpath in files:
            # BUG FIX: basename was taken from the (not yet bound) file
            # object 'f' instead of the path string.
            fname = os.path.basename(fpath)
            unlink_ok = False
            # 'open' instead of the Python-2-only 'file' builtin.
            with open(fpath, 'rb') as f:
                try:
                    requests.post(self.url, files={fname: f})
                    unlink_ok = True
                except Exception:
                    # Best effort: keep the file for a later retry.
                    pass
            if unlink_ok:
                os.unlink(fpath)
@stoneridge.main
def main():
    """Parse --config, point stoneridge at it, and run the reporter once."""
    # BUG FIX: argparse was referenced without ever being imported at
    # module level; a function-local import keeps this fix self-contained.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', dest='config', required=True)
    args = parser.parse_args()
    stoneridge._conffile = args.config
    reporter = StoneRidgeReporter()
    reporter.run()
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import glob
import os
import requests
import stoneridge
class StoneRidgeReporter(object):
    """Uploads queued result JSON files to the report (graph) server."""
    def __init__(self):
        # Directory of per-run *.json result files, plus the POST target URL.
        self.rootdir = stoneridge.get_config('server', 'directory')
        self.pattern = os.path.join(self.rootdir, '*.json')
        self.url = stoneridge.get_config('report', 'url')
    def run(self):
        """POST each queued JSON file; delete it only after a successful upload."""
        files = glob.glob(self.pattern)
        for fpath in files:
            # NOTE(review): fname is unused since switching to form posts.
            fname = os.path.basename(fpath)
            unlink_ok = False
            # Python 2 idioms ('file' builtin, comma except clause) — this
            # module targets Python 2.
            with file(fpath, 'rb') as f:
                try:
                    # The server expects a form-style 'data=<json>' body.
                    post_data = 'data=%s' % (f.read(),)
                    r = requests.post(self.url, data=post_data)
                    unlink_ok = True
                except Exception, e:
                    # Best effort: keep the file for a later retry.
                    pass
            if unlink_ok:
                os.unlink(fpath)
@stoneridge.main
def main():
    """Parse --config, point stoneridge at it, and run the reporter once."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', dest='config', required=True)
    args = parser.parse_args()
    # Make the chosen config file visible to the stoneridge helpers.
    stoneridge._conffile = args.config
    reporter = StoneRidgeReporter()
    reporter.run()
|
Make reporter succeed in talking to the graph server
|
Make reporter succeed in talking to the graph server
|
Python
|
mpl-2.0
|
mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge
|
101e50f1e668169836a5f253c938420f3675fb16
|
jesusmtnez/python/kata/game.py
|
jesusmtnez/python/kata/game.py
|
class Game():
    """Minimal bowling game: score is simply the sum of all pins rolled."""

    def __init__(self):
        self._score = 0

    def roll(self, pins):
        """Record a roll that knocked down ``pins`` pins."""
        self._score = self._score + pins

    def score(self):
        """Total pins knocked down so far."""
        return self._score
|
class Game():
    """Bowling scorer with spare handling (strikes not yet supported)."""

    def __init__(self):
        # 21 slots: ten frames of two rolls plus the tenth-frame bonus roll.
        self._rolls = [0] * 21
        self._current_roll = 0

    def roll(self, pins):
        """Record the number of pins knocked down by the next roll."""
        self._rolls[self._current_roll] += pins
        self._current_roll += 1

    def score(self):
        """Total score; a spare earns a bonus of the following roll."""
        total = 0
        for first in range(0, 20, 2):
            if self._is_spare(first):
                total += 10 + self._rolls[first + 2]
            else:
                total += self._frame_score(first)
        return total

    def _is_spare(self, frame):
        # A frame is a spare when its two rolls knock down all ten pins.
        return self._rolls[frame] + self._rolls[frame + 1] == 10

    def _frame_score(self, frame):
        return self._rolls[frame] + self._rolls[frame + 1]
|
Add 'spare' support in when calculating scores
|
[Python] Add 'spare' support in when calculating scores
|
Python
|
mit
|
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
|
2060cf215d851f86ae8c2766b4a2985c9a37cfae
|
temba/flows/migrations/0056_indexes_update.py
|
temba/flows/migrations/0056_indexes_update.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
INDEX_SQL = """
CREATE INDEX flows_flowrun_org_modified_id
ON flows_flowrun (org_id, modified_on DESC, id DESC);
DROP INDEX IF EXISTS flows_flowrun_org_id_modified_on;
CREATE INDEX flows_flowrun_org_responded_modified_id
ON flows_flowrun (org_id, responded, modified_on DESC, id DESC);
DROP INDEX IF EXISTS flows_flowrun_org_id_modified_on_responded;
CREATE INDEX flows_flowrun_flow_modified_id
ON flows_flowrun (flow_id, modified_on DESC, id DESC);
DROP INDEX IF EXISTS flows_flowrun_flow_id_modified_on;
CREATE INDEX flows_flowrun_flow_responded_modified_id
ON flows_flowrun (flow_id, responded, modified_on DESC, id DESC);
DROP INDEX IF EXISTS flows_flowrun_flow_id_modified_on_responded;
"""
class Migration(migrations.Migration):
    # INDEX_SQL above swaps the (org|flow, modified_on) run indexes for
    # ones that additionally order by id DESC, dropping the old names.
    dependencies = [
        ('flows', '0055_populate_step_broadcasts'),
    ]
    operations = [
        migrations.RunSQL(INDEX_SQL)
    ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
INDEX_SQL = """
CREATE INDEX flows_flowrun_org_modified_id
ON flows_flowrun (org_id, modified_on DESC, id DESC);
DROP INDEX IF EXISTS flows_flowrun_org_id_modified_on;
CREATE INDEX flows_flowrun_org_modified_id_where_responded
ON flows_flowrun (org_id, modified_on DESC, id DESC)
WHERE responded = TRUE;
DROP INDEX IF EXISTS flows_flowrun_org_id_modified_on_responded;
CREATE INDEX flows_flowrun_flow_modified_id
ON flows_flowrun (flow_id, modified_on DESC, id DESC);
DROP INDEX IF EXISTS flows_flowrun_flow_id_modified_on;
CREATE INDEX flows_flowrun_flow_modified_id_where_responded
ON flows_flowrun (flow_id, modified_on DESC, id DESC)
WHERE responded = TRUE;
DROP INDEX IF EXISTS flows_flowrun_flow_id_modified_on_responded;
"""
class Migration(migrations.Migration):
    # INDEX_SQL above replaces the responded-column composite indexes with
    # partial indexes restricted to rows WHERE responded = TRUE.
    dependencies = [
        ('flows', '0055_populate_step_broadcasts'),
    ]
    operations = [
        migrations.RunSQL(INDEX_SQL)
    ]
|
Revert "index on flow run responded field as well"
|
Revert "index on flow run responded field as well"
This reverts commit cbbac0f0f23f6e0ad3ce15a784aad30a82a2fe5a.
|
Python
|
agpl-3.0
|
ewheeler/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,ewheeler/rapidpro
|
1f5d52f18df2fba70b53acd681ebb381f532adff
|
tests/conftest.py
|
tests/conftest.py
|
""" Fixtures in this file are available to all files automatically, no
importing required. Only put general purpose fixtures here!
"""
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
    # Load the test configuration once per session; autouse applies it to
    # every test without being requested explicitly.
    from inbox.server.config import load_config, config
    load_config(filename=TEST_CONFIG)
    return config
# Session scope means the log directory is removed once, at the very end of
# the whole test run (see the finalizer below).
@pytest.fixture(scope='session')
def log(request, config):
    """ Returns root server logger. For others loggers, use this fixture
    for setup but then call inbox.server.log.get_logger().
    """
    from inbox.server.log import configure_general_logging
    def remove_logs():
        rmtree(config['LOGDIR'], ignore_errors=True)
    request.addfinalizer(remove_logs)
    return configure_general_logging()
|
""" Fixtures in this file are available to all files automatically, no
importing required. Only put general purpose fixtures here!
"""
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
    # Load the test configuration once per session; autouse applies it to
    # every test without being requested explicitly.
    from inbox.server.config import load_config, config
    load_config(filename=TEST_CONFIG)
    return config
@pytest.fixture(scope='session')
def log(request, config):
    """ Returns root server logger. For others loggers, use this fixture
    for setup but then call inbox.server.log.get_logger().
    Testing log directory is removed at the end of the test run!
    """
    from inbox.server.log import configure_general_logging
    # Finalizer: wipe LOGDIR once the whole session is finished.
    def remove_logs():
        rmtree(config['LOGDIR'], ignore_errors=True)
    request.addfinalizer(remove_logs)
    return configure_general_logging()
|
Document expected behaviour instead of leaving XXX comment
|
Document expected behaviour instead of leaving XXX comment
|
Python
|
agpl-3.0
|
wakermahmud/sync-engine,ErinCall/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,nylas/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,rmasters/inbox,rmasters/inbox,Eagles2F/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,wakermahmud/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,nylas/sync-engine,rmasters/inbox,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,rmasters/inbox
|
f17611b39c9cc3ec6815093db2eb85cb6b30b5ba
|
lwr/lwr_client/transport/standard.py
|
lwr/lwr_client/transport/standard.py
|
"""
LWR HTTP Client layer based on Python Standard Library (urllib2)
"""
from __future__ import with_statement
from os.path import getsize
import mmap
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
try:
from urllib2 import Request
except ImportError:
from urllib.request import Request
class Urllib2Transport(object):
def _url_open(self, request, data):
return urlopen(request, data)
def execute(self, url, data=None, input_path=None, output_path=None):
request = Request(url=url, data=data)
input = None
try:
if input_path:
input = open(input_path, 'rb')
if getsize(input_path):
input = open(input_path, 'rb')
data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
else:
data = b""
response = self._url_open(request, data)
finally:
if input:
input.close()
if output_path:
with open(output_path, 'wb') as output:
while True:
buffer = response.read(1024)
if not buffer:
break
output.write(buffer)
return response
else:
return response.read()
|
"""
LWR HTTP Client layer based on Python Standard Library (urllib2)
"""
from __future__ import with_statement
from os.path import getsize
import mmap
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
try:
from urllib2 import Request
except ImportError:
from urllib.request import Request
class Urllib2Transport(object):
def _url_open(self, request, data):
return urlopen(request, data)
def execute(self, url, data=None, input_path=None, output_path=None):
request = Request(url=url, data=data)
input = None
try:
if input_path:
if getsize(input_path):
input = open(input_path, 'rb')
data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
else:
data = b""
response = self._url_open(request, data)
finally:
if input:
input.close()
if output_path:
with open(output_path, 'wb') as output:
while True:
buffer = response.read(1024)
if not buffer:
break
output.write(buffer)
return response
else:
return response.read()
|
Fix small bug introduced in 0b8e5d428e60.
|
Fix small bug introduced in 0b8e5d428e60.
Opening file twice.
|
Python
|
apache-2.0
|
jmchilton/pulsar,natefoo/pulsar,ssorgatem/pulsar,jmchilton/lwr,galaxyproject/pulsar,jmchilton/pulsar,ssorgatem/pulsar,galaxyproject/pulsar,natefoo/pulsar,jmchilton/lwr
|
0858cd463d4e6179e3bf4abbfa94cc54fb0600db
|
test/integration/test_node_propagation.py
|
test/integration/test_node_propagation.py
|
class TestPropagation(object):
def test_node_propagation(self):
"""
Tests that check node propagation
1) Spin up four servers.
2) Make the first one send a sync request to all three others.
3) Count the numbers of requests made.
4) Check databases to see that they all know each other.
"""
pass
|
from kitten.server import KittenServer
from gevent.pool import Group
from mock import MagicMock
class TestPropagation(object):
def setup_method(self, method):
self.servers = Group()
for port in range(4):
ns = MagicMock()
ns.port = 9812 + port
server = KittenServer(ns)
self.servers.spawn(server.listen_forever)
def test_node_propagation(self):
"""
Tests that check node propagation
1) Spin up four servers.
2) Make the first one send a sync request to all three others.
3) Count the numbers of requests made.
4) Check databases to see that they all know each other.
"""
pass
|
Add setup to first integration test
|
Add setup to first integration test
|
Python
|
mit
|
thiderman/network-kitten
|
4c655c31bf9625fe426c8b481afba41fe328494d
|
metaci/api/renderers/csv_renderer.py
|
metaci/api/renderers/csv_renderer.py
|
# I started here: https://www.django-rest-framework.org/api-guide/renderers/#example
from rest_framework import renderers
import unicodecsv as csv
import io
import logging
logger = logging.getLogger(__name__)
class SimpleCSVRenderer(renderers.BaseRenderer):
"""Renders simple 1-level-deep data as csv"""
media_type = "text/plain" # should we use text/csv instead?
format = "csv"
def render(self, data, media_type=None, renderer_context={}):
if "results" not in data:
logger.warning(f"no results in data: {str(data)}")
# Is this the right thing to do?
detail = data.get("detail", "unexpected error")
return detail
table_data = self.to_table(data["results"])
csv_buffer = io.BytesIO()
writer = csv.writer(csv_buffer)
for row in table_data:
writer.writerow(row)
return csv_buffer.getvalue()
def to_table(self, data, fields=None):
"""Generator to stream the data as a series of rows"""
if data:
if fields is None:
fields = data[0].keys()
yield fields
for item in data:
row = [item.get(key, None) for key in fields]
yield row
|
# I started here: https://www.django-rest-framework.org/api-guide/renderers/#example
import csv
import io
import logging
from rest_framework import renderers
logger = logging.getLogger(__name__)
class SimpleCSVRenderer(renderers.BaseRenderer):
"""Renders simple 1-level-deep data as csv"""
media_type = "text/plain" # should we use text/csv instead?
format = "csv"
def render(self, data, media_type=None, renderer_context={}):
if "results" not in data:
logger.warning(f"no results in data: {str(data)}")
# Is this the right thing to do?
detail = data.get("detail", "unexpected error")
return detail
table_data = self.to_table(data["results"])
csv_buffer = io.StringIO()
writer = csv.writer(csv_buffer)
for row in table_data:
writer.writerow(row)
return csv_buffer.getvalue().encode("utf-8")
def to_table(self, data, fields=None):
"""Generator to stream the data as a series of rows"""
if data:
if fields is None:
fields = data[0].keys()
yield fields
for item in data:
row = [item.get(key, None) for key in fields]
yield row
|
Remove dependency on unicodecsv module
|
Remove dependency on unicodecsv module
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
f1f18b6b996d2bcf108bf7b594d0fdf4dab23057
|
timpani/themes.py
|
timpani/themes.py
|
import os
import os.path
from . import database
THEME_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../themes"))
def getCurrentTheme():
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == "theme"))
if query.count() > 0:
themeName = query.first().value
themes = os.listdir(THEME_PATH)
folderName = None
try:
folderName = next(theme for theme in themes if theme.lower() == themeName.lower())
except StopIteration:
return None
themeFile = open(
os.path.join(THEME_PATH, folderName, "theme.css"), "r")
theme = themeFile.read()
themeFile.close()
templateFile = open(
os.path.join(THEME_PATH, folderName, "template.html"), "r")
template = templatefile.read()
templateFile.close()
return {"template": template, "theme": theme}
def getAvailableThemes():
files = os.listdir(THEME_PATH)
for item in files:
path = os.path.join(THEME_PATH, item)
if not os.path.isdir(path):
files.remove(item)
return files
|
import os
import os.path
from . import database
THEME_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../themes"))
def getCurrentTheme():
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == "theme"))
if query.count() > 0:
themeName = query.first().value
themes = os.listdir(THEME_PATH)
folderName = None
try:
folderName = next(theme for theme in themes if theme.lower() == themeName.lower())
except StopIteration:
return None
themePath = os.path.join(THEME_PATH, folderName, "theme.css")
theme = "" #No CSS
if os.path.isfile(themePath):
themeFile = open(themePath, "r")
theme = themeFile.read()
themeFile.close()
templatePath = os.path.join(THEME_PATH, folderName, "template.html")
template = None #If this is None, the default template can be used.
if os.path.isfile(templatePath):
templateFile = open(templatePath, "r")
template = templatefile.read()
templateFile.close()
return {"template": template, "theme": theme}
def getAvailableThemes():
files = os.listdir(THEME_PATH)
for item in files:
path = os.path.join(THEME_PATH, item)
if not os.path.isdir(path):
files.remove(item)
return files
|
Add cases for either CSS or template not existing
|
Add cases for either CSS or template not existing
|
Python
|
mit
|
ollien/Timpani,ollien/Timpani,ollien/Timpani
|
f5aa51a57e3d161c12d8b8390e6e6aab7609b459
|
readthedocs/projects/feeds.py
|
readthedocs/projects/feeds.py
|
from django.contrib.syndication.views import Feed
from django.db.models import Max
from projects.models import Project
class LatestProjectsFeed(Feed):
title = "Recently updated documentation"
link = "http://readthedocs.org"
description = "Recently updated documentation on Read the Docs"
def items(self):
return Project.objects.filter(builds__isnull=False).annotate(max_date=Max('builds__date')).order_by('-max_date')[:10]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_latest_build()
class NewProjectsFeed(Feed):
title = "Newest documentation"
link = "http://readthedocs.org"
description = "Recently created documentation on Read the Docs"
def items(self):
return Project.objects.all().order_by('-pk')[:10]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_latest_build()
|
from django.contrib.syndication.views import Feed
from django.db.models import Max
from projects.models import Project
class LatestProjectsFeed(Feed):
title = "Recently updated documentation"
link = "http://readthedocs.org"
description = "Recently updated documentation on Read the Docs"
def items(self):
return Project.objects.order_by('-modified_date')[:10]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_latest_build()
class NewProjectsFeed(Feed):
title = "Newest documentation"
link = "http://readthedocs.org"
description = "Recently created documentation on Read the Docs"
def items(self):
return Project.objects.all().order_by('-pk')[:10]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_latest_build()
|
Make the RSS feed not slow.
|
Make the RSS feed not slow.
|
Python
|
mit
|
VishvajitP/readthedocs.org,soulshake/readthedocs.org,agjohnson/readthedocs.org,attakei/readthedocs-oauth,nikolas/readthedocs.org,pombredanne/readthedocs.org,michaelmcandrew/readthedocs.org,laplaceliu/readthedocs.org,takluyver/readthedocs.org,atsuyim/readthedocs.org,mrshoki/readthedocs.org,d0ugal/readthedocs.org,asampat3090/readthedocs.org,wanghaven/readthedocs.org,techtonik/readthedocs.org,sunnyzwh/readthedocs.org,SteveViss/readthedocs.org,gjtorikian/readthedocs.org,sils1297/readthedocs.org,KamranMackey/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,raven47git/readthedocs.org,jerel/readthedocs.org,dirn/readthedocs.org,agjohnson/readthedocs.org,GovReady/readthedocs.org,raven47git/readthedocs.org,attakei/readthedocs-oauth,hach-que/readthedocs.org,tddv/readthedocs.org,titiushko/readthedocs.org,agjohnson/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,jerel/readthedocs.org,techtonik/readthedocs.org,stevepiercy/readthedocs.org,takluyver/readthedocs.org,GovReady/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,kenwang76/readthedocs.org,johncosta/private-readthedocs.org,Carreau/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,kdkeyser/readthedocs.org,royalwang/readthedocs.org,emawind84/readthedocs.org,cgourlay/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,singingwolfboy/readthedocs.org,KamranMackey/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,tddv/readthedocs.org,kenwang76/readthedocs.org,espdev/readthedocs.org,atsuyim/readthedocs.org,Carreau/readthedocs.org,singingwolfboy/readthedocs.org,takluyver/readthedocs.org,Tazer/readthedocs.org,nikolas/readthedocs.org,royalwang/readthedocs.org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,kdkeyser/readthedocs.org,nyergler/pythonslides,gjtorikian/readthedocs.org,sid-kap/readthedocs.org,clarkperkins/readthedocs.org,istresear
ch/readthedocs.org,raven47git/readthedocs.org,johncosta/private-readthedocs.org,johncosta/private-readthedocs.org,LukasBoersma/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,dirn/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,VishvajitP/readthedocs.org,agjohnson/readthedocs.org,nyergler/pythonslides,rtfd/readthedocs.org,sils1297/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,sid-kap/readthedocs.org,gjtorikian/readthedocs.org,gjtorikian/readthedocs.org,LukasBoersma/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,mhils/readthedocs.org,stevepiercy/readthedocs.org,KamranMackey/readthedocs.org,VishvajitP/readthedocs.org,sid-kap/readthedocs.org,rtfd/readthedocs.org,Carreau/readthedocs.org,kenwang76/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,mhils/readthedocs.org,LukasBoersma/readthedocs.org,ojii/readthedocs.org,nikolas/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,atsuyim/readthedocs.org,nyergler/pythonslides,espdev/readthedocs.org,royalwang/readthedocs.org,attakei/readthedocs-oauth,pombredanne/readthedocs.org,kenshinthebattosai/readthedocs.org,singingwolfboy/readthedocs.org,sils1297/readthedocs.org,SteveViss/readthedocs.org,dirn/readthedocs.org,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,kenshinthebattosai/readthedocs.org,Tazer/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,titiushko/readthedocs.org,CedarLogic/readthedocs.org,LukasBoersma/readthedocs.org,SteveViss/readthedocs.org,asampat3090/readthedocs.org,SteveViss/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,mhils/readthedocs.org,tddv/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintar
o/readthedocs.org,istresearch/readthedocs.org,mrshoki/readthedocs.org,nyergler/pythonslides,ojii/readthedocs.org,safwanrahman/readthedocs.org,kdkeyser/readthedocs.org,wijerasa/readthedocs.org,davidfischer/readthedocs.org,wanghaven/readthedocs.org,cgourlay/readthedocs.org,sunnyzwh/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,asampat3090/readthedocs.org,wijerasa/readthedocs.org,wanghaven/readthedocs.org,GovReady/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,dirn/readthedocs.org,Tazer/readthedocs.org,asampat3090/readthedocs.org,ojii/readthedocs.org,rtfd/readthedocs.org,mrshoki/readthedocs.org,mrshoki/readthedocs.org,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,Carreau/readthedocs.org,hach-que/readthedocs.org,emawind84/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,kenwang76/readthedocs.org,VishvajitP/readthedocs.org,mhils/readthedocs.org,sid-kap/readthedocs.org,nikolas/readthedocs.org,GovReady/readthedocs.org,fujita-shintaro/readthedocs.org,laplaceliu/readthedocs.org,davidfischer/readthedocs.org,michaelmcandrew/readthedocs.org,wanghaven/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,sils1297/readthedocs.org,takluyver/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,ojii/readthedocs.org
|
29316060fb422a881833e411350e0149575bf1c4
|
update-database/stackdoc/namespaces/python.py
|
update-database/stackdoc/namespaces/python.py
|
import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://docs.python.org/" in body:
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://docs.python.org/(?:release/)?(?:dev/)?(?:[0-9](?:\.[0-9]/)+)?(?:py3k/)?library/([.a-z0-9]+)(?:-examples)?\.html", url)
if m:
ids.append(m.group(1))
return ids
def get_tags():
return [
"python"
]
|
import re
import urllib
############### Functions called by stackdoc
def get_version():
return 2
def get_ids(title, body, tags):
ids = []
if "http://docs.python.org/" in body or "http://www.python.org/doc/" in body:
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
docsm = re.match("http://docs.python.org/(?:release/)?(?:dev/)?(?:[0-9](?:\.[0-9]/)+)?(?:py3k/)?library/([.a-z0-9]+)(?:-examples)?\.html", url)
if docsm:
ids.append(docsm.group(1))
olddocsm = re.match("http://www.python.org/doc/(?:[0-9](?:\.[0-9]/)+)/lib/module-([.a-z0-9]+)\.html", url)
if olddocsm:
ids.append(olddocsm.group(1))
return ids
def get_tags():
return [
"python"
]
|
Support old style Python doc links.
|
Support old style Python doc links.
|
Python
|
bsd-3-clause
|
alnorth/stackdoc,alnorth/stackdoc,alnorth/stackdoc
|
f19d4eaec9681192eb761758b1506638b78a5e15
|
tests/__init__.py
|
tests/__init__.py
|
import inspect
import os
# Get testdata absolute path.
abs_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
path = abs_path + "/testdata"
|
import inspect
import os
# Get testdata absolute path.
abs_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
#path = abs_path + "/testdata"
path = "./testdata"
|
Change the testdata path to relative path.
|
Change the testdata path to relative path.
|
Python
|
mit
|
PytLab/VASPy,PytLab/VASPy
|
4ce674ea3a672c2819112b5237319000e33f22c5
|
marten/__init__.py
|
marten/__init__.py
|
"""Stupid simple Python configuration environments"""
from __future__ import absolute_import
import os as _os
__version__ = '0.6.0'
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
from .util import get_config_from_env as _get_config
except ImportError:
config = None
else:
config = _get_config()
|
"""Stupid simple Python configuration environments"""
from __future__ import absolute_import
from marten import loaded_configs
import os as _os
__version__ = '0.6.1'
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
from .util import get_config_from_env as _get_config
except ImportError:
config = None
else:
config = _get_config()
|
Add explicit import for loaded_configs namespace to fix RuntimeWarning
|
Add explicit import for loaded_configs namespace to fix RuntimeWarning
|
Python
|
mit
|
nick-allen/marten
|
0dac29f30853498f6e9d82c8b791ced5ec21667c
|
models/00_settings.py
|
models/00_settings.py
|
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config use to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
if os.environ['POSTGRES_ENABLED'] == 'True':
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config use to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
if app_config.has_key('postgres'):
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
Check configuration file rather than env variable
|
Check configuration file rather than env variable
|
Python
|
apache-2.0
|
wefner/w2pfooty,wefner/w2pfooty,wefner/w2pfooty
|
6c0c05c523043abd4fb35ee53daf1a216346a94d
|
tests/runtests.py
|
tests/runtests.py
|
#!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
|
#!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
|
Add support for a dir of client tests
|
Add support for a dir of client tests
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
6c2d73b0d387eb49e38b0432318733b56d2deb96
|
tests/settings.py
|
tests/settings.py
|
SECRET_KEY = 'not-anymore'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
INSTALLED_APPS = [
'tests',
]
|
SECRET_KEY = 'not-anymore'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
INSTALLED_APPS = [
'tests',
]
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
|
Add support for Django 4.0.
|
Add support for Django 4.0.
|
Python
|
mit
|
gintas/django-picklefield
|
8d7862a7045fbb52ce3a2499766ffa1ffef284af
|
tests/settings.py
|
tests/settings.py
|
"""
Settings for tests.
"""
from moztrap.settings.default import *
DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage"
ALLOW_ANONYMOUS_ACCESS = False
SITE_URL = "http://localhost:80"
USE_BROWSERID = True
|
"""
Settings for tests.
"""
from moztrap.settings.default import *
DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage"
ALLOW_ANONYMOUS_ACCESS = False
SITE_URL = "http://localhost:80"
USE_BROWSERID = True
PASSWORD_HASHERS = ['django.contrib.auth.hashers.UnsaltedMD5PasswordHasher']
|
Use faster password hashing in tests.
|
Use faster password hashing in tests.
|
Python
|
bsd-2-clause
|
mccarrmb/moztrap,bobsilverberg/moztrap,mozilla/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,mozilla/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,mozilla/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mozilla/moztrap,mozilla/moztrap
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.