commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
36bde060bbdb4cf9d0396719b8b82952a73bf2b5
|
bucky/collector.py
|
bucky/collector.py
|
import time
import multiprocessing
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
class StatsCollector(multiprocessing.Process):
def __init__(self, queue):
super(StatsCollector, self).__init__()
self.queue = queue
def close(self):
pass
def run(self):
setproctitle("bucky: %s" % self.__class__.__name__)
err = 0
while True:
start_timestamp = time.time()
if not self.collect():
err = min(err + 1, 2)
else:
err = 0
stop_timestamp = time.time()
sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp)
if sleep_time > 0.1:
time.sleep(sleep_time)
def collect(self):
raise NotImplementedError()
def add_stat(self, name, value, timestamp, **metadata):
if metadata:
if self.metadata:
metadata.update(self.metadata)
else:
metadata = self.metadata
if metadata:
self.queue.put((None, name, value, timestamp, metadata))
else:
self.queue.put((None, name, value, timestamp))
def merge_dicts(self, *dicts):
ret = {}
for d in dicts:
if d:
ret.update(d)
return ret
|
import time
import multiprocessing
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
class StatsCollector(multiprocessing.Process):
def __init__(self, queue):
super(StatsCollector, self).__init__()
self.queue = queue
def close(self):
pass
def run(self):
setproctitle("bucky: %s" % self.__class__.__name__)
interval = self.interval
while True:
start_timestamp = time.time()
interval = self.interval if self.collect() else interval+interval
stop_timestamp = time.time()
interval = min(interval, 300)
interval = interval - (stop_timestamp - start_timestamp)
if interval > 0.1:
time.sleep(interval)
def collect(self):
raise NotImplementedError()
def add_stat(self, name, value, timestamp, **metadata):
if metadata:
if self.metadata:
metadata.update(self.metadata)
else:
metadata = self.metadata
if metadata:
self.queue.put((None, name, value, timestamp, metadata))
else:
self.queue.put((None, name, value, timestamp))
def merge_dicts(self, *dicts):
ret = {}
for d in dicts:
if d:
ret.update(d)
return ret
|
Change the back-off algo for failures
|
Change the back-off algo for failures
|
Python
|
apache-2.0
|
jsiembida/bucky3
|
9f005120c6d408e8cf3097dd74d5dada24305c88
|
src/jsonlogger.py
|
src/jsonlogger.py
|
import logging
import json
import re
from datetime import datetime
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
mappings = {
'asctime': create_timestamp,
'message': lambda r: r.msg,
}
formatters = self.parse()
log_record = {}
for formatter in formatters:
try:
log_record[formatter] = mappings[formatter](record)
except KeyError:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
def create_timestamp(record):
"""Creates a human readable timestamp for a log records created date"""
timestamp = datetime.fromtimestamp(record.created)
return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),
|
import logging
import json
import re
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
formatters = self.parse()
record.message = record.getMessage()
# only format time if needed
if "asctime" in formatters:
record.asctime = self.formatTime(record, self.datefmt)
log_record = {}
for formatter in formatters:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
|
Use the same logic to format message and asctime than the standard library.
|
Use the same logic to format message and asctime than the standard library.
This way we producte better message text on some circumstances when not logging
a string and use the date formater from the base class that uses the date format
configured from a file or a dict.
|
Python
|
bsd-2-clause
|
madzak/python-json-logger,bbc/python-json-logger
|
937fd7c07dfe98a086a9af07f0f7b316a6f2f6d8
|
invoke/main.py
|
invoke/main.py
|
"""
Invoke's own 'binary' entrypoint.
Dogfoods the `program` module.
"""
from ._version import __version__
from .program import Program
program = Program(name="Invoke", binary='inv[oke]', version=__version__)
|
"""
Invoke's own 'binary' entrypoint.
Dogfoods the `program` module.
"""
from . import __version__, Program
program = Program(
name="Invoke",
binary='inv[oke]',
version=__version__,
)
|
Clean up binstub a bit
|
Clean up binstub a bit
|
Python
|
bsd-2-clause
|
frol/invoke,frol/invoke,pyinvoke/invoke,mkusz/invoke,mattrobenolt/invoke,pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,mattrobenolt/invoke,pfmoore/invoke
|
0b1587a484bd63632dbddfe5f0a4fe3c898e4fb0
|
awacs/dynamodb.py
|
awacs/dynamodb.py
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action
service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'
BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action
from aws import ARN as BASE_ARN
service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'
class ARN(BASE_ARN):
def __init__(self, region, account, table=None, index=None):
sup = super(ARN, self)
resource = '*'
if table:
resource = 'table/' + table
if index:
resource += '/index/' + index
sup.__init__(prefix, region=region, account=account, resource=resource)
BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
|
Add logic for DynamoDB ARNs
|
Add logic for DynamoDB ARNs
See:
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/UsingIAMWithDDB.html
I also decided not to name the ARN object 'DynamoDB_ARN' or anything
like that, and instead went with just 'ARN' since the class is already
stored in the dynamodb module. Kind of waffling on whether or not that
was the right thing to do, since it's not how this is handled elsewhere,
but it seems like it makes sense. If you're going to deal with multiple
ARNs, say in SDB & Dynamo, then it seems like you should be doing:
from awacs.sdb import ARN as SDB_ARN
from awacs.dynamodb import ARN as DynamoDB_ARN
Let me know what you guys think about that.
|
Python
|
bsd-2-clause
|
craigbruce/awacs,cloudtools/awacs
|
f996755665c9e55af5139a473b859aa0eb507515
|
back2back/wsgi.py
|
back2back/wsgi.py
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling
application = Cling(MediaCling(get_wsgi_application()))
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
Remove MediaCling as there isn't any.
|
Remove MediaCling as there isn't any.
|
Python
|
bsd-2-clause
|
mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back
|
3e98ed8801d380b6ab40156b1f20a1f9fe23a755
|
books/views.py
|
books/views.py
|
from rest_framework import viewsets
from books.models import BookPage
from books.serializers import BookPageSerializer
class BookPageViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows BookPages to be viewed or edited.
"""
queryset = BookPage.objects.all()
serializer_class = BookPageSerializer
|
from rest_framework import viewsets
from books.models import BookPage
from books.serializers import BookPageSerializer
class BookPageViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows BookPages to be viewed or edited.
"""
queryset = BookPage.objects.order_by('page_number')
serializer_class = BookPageSerializer
|
Order book pages by page number.
|
Order book pages by page number.
|
Python
|
mit
|
Pepedou/Famas
|
fe7ab3060c43d509f995cc64998139a623b21a4a
|
bot/cogs/owner.py
|
bot/cogs/owner.py
|
import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
def setup(bot):
bot.add_cog(Owner(bot))
|
import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
await ctx.message.add_reaction(self.bot.emoji_rustok)
def setup(bot):
bot.add_cog(Owner(bot))
|
Add OK reaction to reload command
|
Add OK reaction to reload command
|
Python
|
mit
|
ivandardi/RustbotPython,ivandardi/RustbotPython
|
a703bed82bb2cfcf8b18b5e651bd2e992a590696
|
numpy/_array_api/_types.py
|
numpy/_array_api/_types.py
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
Array = ndarray
Device = TypeVar('device')
Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Any
Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]]]
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Use better type definitions for the array API custom types
|
Use better type definitions for the array API custom types
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
f26a59aae33fd1afef919427e0c36e744cb904fc
|
test/test_normalizedString.py
|
test/test_normalizedString.py
|
from rdflib import *
import unittest
class test_normalisedString(unittest.TestCase):
def test1(self):
lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
lit = Literal("\two\nw", datatype=XSD.string)
self.assertEqual(lit == lit2, False)
def test2(self):
lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
self.assertFalse(Literal.eq(st,lit))
def test3(self):
lit=Literal("hey\nthere", datatype=XSD.normalizedString).n3()
print(lit)
self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")
if __name__ == "__main__":
unittest.main()
|
from rdflib import Literal
from rdflib.namespace import XSD
import unittest
class test_normalisedString(unittest.TestCase):
def test1(self):
lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
lit = Literal("\two\nw", datatype=XSD.string)
self.assertEqual(lit == lit2, False)
def test2(self):
lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
self.assertFalse(Literal.eq(st,lit))
def test3(self):
lit = Literal("hey\nthere", datatype=XSD.normalizedString).n3()
self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")
def test4(self):
lit = Literal("hey\nthere\ta tab\rcarriage return", datatype=XSD.normalizedString)
expected = Literal("""hey there a tab carriage return""", datatype=XSD.string)
self.assertEqual(str(lit), str(expected))
if __name__ == "__main__":
unittest.main()
|
Add a new test to test all chars that are getting replaced
|
Add a new test to test all chars that are getting replaced
|
Python
|
bsd-3-clause
|
RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib
|
543fc894120db6e8d854e746d631c87cc53f622b
|
website/noveltorpedo/tests.py
|
website/noveltorpedo/tests.py
|
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
import unittest
from django.utils import timezone
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
author = Author()
author.name = "Jack Frost"
author.save()
story = Story()
story.title = "The Big One"
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = "Chapter One"
segment.contents = "This is how it all went down..."
segment.save()
|
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
# Create a new story in the database.
author = Author()
author.name = 'Jack Frost'
author.save()
story = Story()
story.title = 'The Big One'
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = 'Chapter Three'
segment.contents = 'This is how it all went down...'
segment.save()
# Index the new story.
call_command('update_index')
# Query via author name.
response = client.get('/', {'q': 'Jack Frost'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via story name.
response = client.get('/', {'q': 'The Big One'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via segment contents.
response = client.get('/', {'q': 'Chapter Three'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
|
Rebuild index and test variety of queries
|
Rebuild index and test variety of queries
|
Python
|
mit
|
NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo
|
80524970b9e802787918af9ce6d25110be825df4
|
moderngl/__init__.py
|
moderngl/__init__.py
|
'''
ModernGL: PyOpenGL alternative
'''
from .error import *
from .buffer import *
from .compute_shader import *
from .conditional_render import *
from .context import *
from .framebuffer import *
from .program import *
from .program_members import *
from .query import *
from .renderbuffer import *
from .scope import *
from .texture import *
from .texture_3d import *
from .texture_array import *
from .texture_cube import *
from .vertex_array import *
from .sampler import *
__version__ = '5.6.1'
|
'''
ModernGL: High performance rendering for Python 3
'''
from .error import *
from .buffer import *
from .compute_shader import *
from .conditional_render import *
from .context import *
from .framebuffer import *
from .program import *
from .program_members import *
from .query import *
from .renderbuffer import *
from .scope import *
from .texture import *
from .texture_3d import *
from .texture_array import *
from .texture_cube import *
from .vertex_array import *
from .sampler import *
__version__ = '5.6.1'
|
Update module level description of moderngl
|
Update module level description of moderngl
|
Python
|
mit
|
cprogrammer1994/ModernGL,cprogrammer1994/ModernGL,cprogrammer1994/ModernGL
|
cb557823258fe61c2e86db30a7bfe8d0de120f15
|
tests/conftest.py
|
tests/conftest.py
|
import betamax
import os
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'once'
if os.environ.get('TRAVIS_GH3'):
record_mode = 'never'
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'xxxxxxxxxxx')
)
|
import betamax
import os
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'once'
if os.environ.get('TRAVIS_GH3'):
record_mode = 'never'
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'x' * 20)
)
|
Update the default value for the placeholder
|
Update the default value for the placeholder
If I decide to start matching on headers this will be necessary
|
Python
|
bsd-3-clause
|
krxsky/github3.py,ueg1990/github3.py,agamdua/github3.py,sigmavirus24/github3.py,wbrefvem/github3.py,h4ck3rm1k3/github3.py,balloob/github3.py,degustaf/github3.py,christophelec/github3.py,jim-minter/github3.py,itsmemattchung/github3.py,icio/github3.py
|
e4d5fa8c70dd283d4511f155da5be5835b1836f7
|
tests/unit/test_validate.py
|
tests/unit/test_validate.py
|
import pytest
import mock
import synapseclient
from genie import validate
center = "SAGE"
syn = mock.create_autospec(synapseclient.Synapse)
@pytest.fixture(params=[
# tuple with (input, expectedOutput)
(["data_CNA_SAGE.txt"], "cna"),
(["data_clinical_supp_SAGE.txt"], "clinical"),
(["data_clinical_supp_sample_SAGE.txt",
"data_clinical_supp_patient_SAGE.txt"], "clinical")])
def filename_fileformat_map(request):
return request.param
def test_perfect_get_filetype(filename_fileformat_map):
(filepath_list, fileformat) = filename_fileformat_map
assert validate.determine_filetype(
syn, filepath_list, center) == fileformat
# def test_wrongfilename_get_filetype():
# assert input_to_database.get_filetype(syn, ['wrong.txt'], center) is None
|
import pytest
import mock
import synapseclient
import pytest
from genie import validate
center = "SAGE"
syn = mock.create_autospec(synapseclient.Synapse)
@pytest.fixture(params=[
# tuple with (input, expectedOutput)
(["data_CNA_SAGE.txt"], "cna"),
(["data_clinical_supp_SAGE.txt"], "clinical"),
(["data_clinical_supp_sample_SAGE.txt",
"data_clinical_supp_patient_SAGE.txt"], "clinical")])
def filename_fileformat_map(request):
return request.param
def test_perfect_get_filetype(filename_fileformat_map):
(filepath_list, fileformat) = filename_fileformat_map
assert validate.determine_filetype(
syn, filepath_list, center) == fileformat
def test_wrongfilename_get_filetype():
with pytest.raises(
ValueError,
match="Your filename is incorrect! "
"Please change your filename before you run "
"the validator or specify --filetype if you are "
"running the validator locally"):
validate.determine_filetype(syn, ['wrong.txt'], center)
|
Add in unit tests for validate.py
|
Add in unit tests for validate.py
|
Python
|
mit
|
thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie
|
96365d3467e1b0a9520eaff8086224d2d181b03b
|
mopidy/mixers/osa.py
|
mopidy/mixers/osa.py
|
from subprocess import Popen, PIPE
from mopidy.mixers import BaseMixer
class OsaMixer(BaseMixer):
def _get_volume(self):
try:
return int(Popen(
['osascript', '-e', 'output volume of (get volume settings)'],
stdout=PIPE).communicate()[0])
except ValueError:
return None
def _set_volume(self, volume):
Popen(['osascript', '-e', 'set volume output volume %d' % volume])
|
from subprocess import Popen, PIPE
import time
from mopidy.mixers import BaseMixer
CACHE_TTL = 30
class OsaMixer(BaseMixer):
_cache = None
_last_update = None
def _valid_cache(self):
return (self._cache is not None
and self._last_update is not None
and (int(time.time() - self._last_update) < CACHE_TTL))
def _get_volume(self):
if not self._valid_cache():
try:
self._cache = int(Popen(
['osascript', '-e',
'output volume of (get volume settings)'],
stdout=PIPE).communicate()[0])
except ValueError:
self._cache = None
self._last_update = int(time.time())
return self._cache
def _set_volume(self, volume):
Popen(['osascript', '-e', 'set volume output volume %d' % volume])
self._cache = volume
self._last_update = int(time.time())
|
Add caching of OsaMixer volume
|
Add caching of OsaMixer volume
If volume is just managed through Mopidy it is always correct. If another
application changes the volume, Mopidy will be correct within 30 seconds.
|
Python
|
apache-2.0
|
tkem/mopidy,quartz55/mopidy,tkem/mopidy,pacificIT/mopidy,bacontext/mopidy,mokieyue/mopidy,vrs01/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,priestd09/mopidy,bencevans/mopidy,swak/mopidy,jmarsik/mopidy,jodal/mopidy,ali/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,diandiankan/mopidy,adamcik/mopidy,SuperStarPL/mopidy,abarisain/mopidy,ZenithDK/mopidy,ali/mopidy,liamw9534/mopidy,dbrgn/mopidy,adamcik/mopidy,SuperStarPL/mopidy,bencevans/mopidy,quartz55/mopidy,mopidy/mopidy,quartz55/mopidy,ali/mopidy,bencevans/mopidy,quartz55/mopidy,swak/mopidy,SuperStarPL/mopidy,hkariti/mopidy,pacificIT/mopidy,mokieyue/mopidy,jmarsik/mopidy,diandiankan/mopidy,jodal/mopidy,kingosticks/mopidy,jcass77/mopidy,glogiotatidis/mopidy,bacontext/mopidy,priestd09/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,mokieyue/mopidy,diandiankan/mopidy,dbrgn/mopidy,jmarsik/mopidy,kingosticks/mopidy,glogiotatidis/mopidy,mopidy/mopidy,vrs01/mopidy,tkem/mopidy,hkariti/mopidy,woutervanwijk/mopidy,swak/mopidy,ZenithDK/mopidy,jmarsik/mopidy,vrs01/mopidy,rawdlite/mopidy,liamw9534/mopidy,ali/mopidy,pacificIT/mopidy,mopidy/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,swak/mopidy,priestd09/mopidy,hkariti/mopidy,rawdlite/mopidy,mokieyue/mopidy,kingosticks/mopidy,vrs01/mopidy,bencevans/mopidy,bacontext/mopidy,diandiankan/mopidy,bacontext/mopidy,pacificIT/mopidy,tkem/mopidy,dbrgn/mopidy,abarisain/mopidy,jcass77/mopidy,hkariti/mopidy,rawdlite/mopidy,woutervanwijk/mopidy,jcass77/mopidy
|
0180aead701820d2de140791c3e271b4b8a7d231
|
tests/__init__.py
|
tests/__init__.py
|
import os
def fixture_response(path):
return open(os.path.join(
os.path.dirname(__file__),
'fixtures',
path)).read()
|
import os
def fixture_response(path):
with open(os.path.join(os.path.dirname(__file__),
'fixtures',
path)) as fixture:
return fixture.read()
|
Fix file handlers being left open for fixtures
|
Fix file handlers being left open for fixtures
|
Python
|
mit
|
accepton/accepton-python
|
d07bf029b7ba9b5ef1f494d119a2eca004c1818a
|
tests/basics/list_slice_3arg.py
|
tests/basics/list_slice_3arg.py
|
x = list(range(10))
print(x[::-1])
print(x[::2])
print(x[::-2])
|
x = list(range(10))
print(x[::-1])
print(x[::2])
print(x[::-2])
x = list(range(9))
print(x[::-1])
print(x[::2])
print(x[::-2])
|
Add small testcase for 3-arg slices.
|
tests: Add small testcase for 3-arg slices.
|
Python
|
mit
|
neilh10/micropython,danicampora/micropython,tuc-osg/micropython,noahchense/micropython,ahotam/micropython,alex-march/micropython,SungEun-Steve-Kim/test-mp,suda/micropython,SungEun-Steve-Kim/test-mp,noahwilliamsson/micropython,neilh10/micropython,aethaniel/micropython,noahwilliamsson/micropython,chrisdearman/micropython,redbear/micropython,AriZuu/micropython,praemdonck/micropython,ceramos/micropython,firstval/micropython,rubencabrera/micropython,selste/micropython,pozetroninc/micropython,galenhz/micropython,omtinez/micropython,dmazzella/micropython,turbinenreiter/micropython,vriera/micropython,toolmacher/micropython,kostyll/micropython,hiway/micropython,SungEun-Steve-Kim/test-mp,ernesto-g/micropython,xyb/micropython,ernesto-g/micropython,dxxb/micropython,kostyll/micropython,vitiral/micropython,PappaPeppar/micropython,dmazzella/micropython,TDAbboud/micropython,matthewelse/micropython,lbattraw/micropython,xyb/micropython,stonegithubs/micropython,orionrobots/micropython,kerneltask/micropython,ChuckM/micropython,selste/micropython,omtinez/micropython,rubencabrera/micropython,xuxiaoxin/micropython,alex-march/micropython,xhat/micropython,jlillest/micropython,kostyll/micropython,cloudformdesign/micropython,infinnovation/micropython,blazewicz/micropython,deshipu/micropython,hosaka/micropython,feilongfl/micropython,henriknelson/micropython,adafruit/micropython,Peetz0r/micropython-esp32,mgyenik/micropython,hiway/micropython,Vogtinator/micropython,alex-robbins/micropython,mianos/micropython,martinribelotta/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,pramasoul/micropython,HenrikSolver/micropython,skybird6672/micropython,suda/micropython,kostyll/micropython,pfalcon/micropython,puuu/micropython,tralamazza/micropython,blazewicz/micropython,ruffy91/micropython,Timmenem/micropython,heisewangluo/micropython,Timmenem/micropython,xuxiaoxin/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,oopy/micropython,puuu/micropython,adafruit/circuitpython,tdautc19841202
/micropython,torwag/micropython,paul-xxx/micropython,KISSMonX/micropython,suda/micropython,PappaPeppar/micropython,skybird6672/micropython,orionrobots/micropython,dxxb/micropython,skybird6672/micropython,lbattraw/micropython,alex-robbins/micropython,xuxiaoxin/micropython,drrk/micropython,cloudformdesign/micropython,slzatz/micropython,ruffy91/micropython,danicampora/micropython,heisewangluo/micropython,SungEun-Steve-Kim/test-mp,emfcamp/micropython,ericsnowcurrently/micropython,hosaka/micropython,ahotam/micropython,MrSurly/micropython-esp32,misterdanb/micropython,xuxiaoxin/micropython,lowRISC/micropython,xyb/micropython,deshipu/micropython,cwyark/micropython,jimkmc/micropython,trezor/micropython,supergis/micropython,kostyll/micropython,deshipu/micropython,jmarcelino/pycom-micropython,mgyenik/micropython,AriZuu/micropython,praemdonck/micropython,ganshun666/micropython,rubencabrera/micropython,trezor/micropython,vitiral/micropython,danicampora/micropython,EcmaXp/micropython,ceramos/micropython,TDAbboud/micropython,micropython/micropython-esp32,orionrobots/micropython,lbattraw/micropython,supergis/micropython,galenhz/micropython,redbear/micropython,toolmacher/micropython,ceramos/micropython,cnoviello/micropython,paul-xxx/micropython,dhylands/micropython,EcmaXp/micropython,tralamazza/micropython,Vogtinator/micropython,rubencabrera/micropython,noahwilliamsson/micropython,bvernoux/micropython,hosaka/micropython,mhoffma/micropython,selste/micropython,heisewangluo/micropython,xhat/micropython,warner83/micropython,methoxid/micropystat,vitiral/micropython,supergis/micropython,praemdonck/micropython,utopiaprince/micropython,noahchense/micropython,tdautc19841202/micropython,oopy/micropython,pozetroninc/micropython,torwag/micropython,deshipu/micropython,HenrikSolver/micropython,feilongfl/micropython,ganshun666/micropython,swegener/micropython,torwag/micropython,aethaniel/micropython,EcmaXp/micropython,oopy/micropython,swegener/micropython,xhat/micropython,tdautc19841202/micropytho
n,heisewangluo/micropython,tdautc19841202/micropython,deshipu/micropython,ryannathans/micropython,paul-xxx/micropython,danicampora/micropython,toolmacher/micropython,ryannathans/micropython,blazewicz/micropython,galenhz/micropython,xhat/micropython,hosaka/micropython,noahwilliamsson/micropython,mgyenik/micropython,toolmacher/micropython,mpalomer/micropython,xyb/micropython,ChuckM/micropython,Timmenem/micropython,supergis/micropython,ernesto-g/micropython,misterdanb/micropython,MrSurly/micropython-esp32,ryannathans/micropython,swegener/micropython,KISSMonX/micropython,vriera/micropython,alex-robbins/micropython,matthewelse/micropython,danicampora/micropython,mgyenik/micropython,KISSMonX/micropython,suda/micropython,tuc-osg/micropython,warner83/micropython,blazewicz/micropython,slzatz/micropython,mhoffma/micropython,AriZuu/micropython,dxxb/micropython,Vogtinator/micropython,drrk/micropython,tuc-osg/micropython,tuc-osg/micropython,cnoviello/micropython,tobbad/micropython,jimkmc/micropython,blmorris/micropython,alex-march/micropython,adamkh/micropython,heisewangluo/micropython,adamkh/micropython,cloudformdesign/micropython,pramasoul/micropython,firstval/micropython,stonegithubs/micropython,torwag/micropython,ChuckM/micropython,Peetz0r/micropython-esp32,ganshun666/micropython,MrSurly/micropython,AriZuu/micropython,methoxid/micropystat,swegener/micropython,adafruit/circuitpython,skybird6672/micropython,blazewicz/micropython,ceramos/micropython,Timmenem/micropython,neilh10/micropython,mhoffma/micropython,paul-xxx/micropython,emfcamp/micropython,EcmaXp/micropython,neilh10/micropython,lbattraw/micropython,Peetz0r/micropython-esp32,infinnovation/micropython,galenhz/micropython,kerneltask/micropython,cnoviello/micropython,feilongfl/micropython,toolmacher/micropython,emfcamp/micropython,EcmaXp/micropython,praemdonck/micropython,alex-robbins/micropython,matthewelse/micropython,utopiaprince/micropython,vriera/micropython,adafruit/micropython,micropython/micropython-esp32,blmorris
/micropython,stonegithubs/micropython,ericsnowcurrently/micropython,lowRISC/micropython,emfcamp/micropython,tdautc19841202/micropython,dhylands/micropython,bvernoux/micropython,dinau/micropython,oopy/micropython,PappaPeppar/micropython,MrSurly/micropython,alex-march/micropython,warner83/micropython,aethaniel/micropython,TDAbboud/micropython,Timmenem/micropython,aethaniel/micropython,SungEun-Steve-Kim/test-mp,dxxb/micropython,mianos/micropython,ernesto-g/micropython,jlillest/micropython,trezor/micropython,tobbad/micropython,redbear/micropython,cnoviello/micropython,xuxiaoxin/micropython,HenrikSolver/micropython,redbear/micropython,omtinez/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,ganshun666/micropython,blmorris/micropython,dinau/micropython,emfcamp/micropython,cnoviello/micropython,ryannathans/micropython,kerneltask/micropython,redbear/micropython,infinnovation/micropython,adafruit/micropython,henriknelson/micropython,ericsnowcurrently/micropython,paul-xxx/micropython,kerneltask/micropython,misterdanb/micropython,jlillest/micropython,pramasoul/micropython,vriera/micropython,noahwilliamsson/micropython,ceramos/micropython,dinau/micropython,dmazzella/micropython,swegener/micropython,ernesto-g/micropython,mgyenik/micropython,mpalomer/micropython,ahotam/micropython,skybird6672/micropython,noahchense/micropython,ahotam/micropython,pfalcon/micropython,mhoffma/micropython,blmorris/micropython,xyb/micropython,micropython/micropython-esp32,dinau/micropython,noahchense/micropython,lbattraw/micropython,puuu/micropython,jmarcelino/pycom-micropython,misterdanb/micropython,turbinenreiter/micropython,matthewelse/micropython,martinribelotta/micropython,tobbad/micropython,warner83/micropython,adamkh/micropython,tobbad/micropython,alex-march/micropython,bvernoux/micropython,slzatz/micropython,ruffy91/micropython,adafruit/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,praemdonck/micropython,mianos/micropython,mpalomer/micropython,MrSurly/microp
ython-esp32,HenrikSolver/micropython,feilongfl/micropython,turbinenreiter/micropython,torwag/micropython,jlillest/micropython,drrk/micropython,henriknelson/micropython,alex-robbins/micropython,firstval/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,cwyark/micropython,puuu/micropython,orionrobots/micropython,pramasoul/micropython,martinribelotta/micropython,feilongfl/micropython,adafruit/circuitpython,TDAbboud/micropython,mhoffma/micropython,hosaka/micropython,MrSurly/micropython-esp32,TDAbboud/micropython,puuu/micropython,firstval/micropython,misterdanb/micropython,Peetz0r/micropython-esp32,utopiaprince/micropython,pozetroninc/micropython,lowRISC/micropython,infinnovation/micropython,mianos/micropython,trezor/micropython,drrk/micropython,dinau/micropython,neilh10/micropython,adafruit/circuitpython,PappaPeppar/micropython,micropython/micropython-esp32,HenrikSolver/micropython,adafruit/circuitpython,mianos/micropython,methoxid/micropystat,adafruit/micropython,jimkmc/micropython,chrisdearman/micropython,Vogtinator/micropython,cwyark/micropython,ericsnowcurrently/micropython,utopiaprince/micropython,chrisdearman/micropython,MrSurly/micropython,slzatz/micropython,henriknelson/micropython,aethaniel/micropython,blmorris/micropython,MrSurly/micropython,matthewelse/micropython,cwyark/micropython,dhylands/micropython,kerneltask/micropython,vitiral/micropython,selste/micropython,ahotam/micropython,vitiral/micropython,suda/micropython,orionrobots/micropython,ChuckM/micropython,dxxb/micropython,cloudformdesign/micropython,mpalomer/micropython,adamkh/micropython,adafruit/circuitpython,chrisdearman/micropython,supergis/micropython,jlillest/micropython,stonegithubs/micropython,selste/micropython,trezor/micropython,ruffy91/micropython,jimkmc/micropython,xhat/micropython,mpalomer/micropython,pfalcon/micropython,cwyark/micropython,tobbad/micropython,micropython/micropython-esp32,MrSurly/micropython,omtinez/micropython,pozetroninc/micropython,ruffy91/micropython,infinno
vation/micropython,SHA2017-badge/micropython-esp32,omtinez/micropython,dhylands/micropython,oopy/micropython,adamkh/micropython,martinribelotta/micropython,ChuckM/micropython,bvernoux/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,turbinenreiter/micropython,matthewelse/micropython,KISSMonX/micropython,methoxid/micropystat,dhylands/micropython,dmazzella/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,firstval/micropython,hiway/micropython,ryannathans/micropython,lowRISC/micropython,tralamazza/micropython,lowRISC/micropython,vriera/micropython,turbinenreiter/micropython,warner83/micropython,utopiaprince/micropython,pozetroninc/micropython,drrk/micropython,bvernoux/micropython,martinribelotta/micropython,ganshun666/micropython,rubencabrera/micropython,MrSurly/micropython-esp32,galenhz/micropython,noahchense/micropython,cloudformdesign/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,KISSMonX/micropython,methoxid/micropystat,pramasoul/micropython,Vogtinator/micropython,hiway/micropython,ericsnowcurrently/micropython,slzatz/micropython,stonegithubs/micropython,tralamazza/micropython,jimkmc/micropython
|
2a43183f5d2c14bacb92fe563d3c2ddf61b116da
|
tests/testMain.py
|
tests/testMain.py
|
import os
import unittest
import numpy
import arcpy
from utils import *
# import our constants;
# configure test data
# XXX: use .ini files for these instead? used in other 'important' unit tests
from config import *
# import our local directory so we can use the internal modules
import_paths = ['../Install/toolbox', '../Install']
addLocalPaths(import_paths)
class TestBpiScript(unittest.TestCase):
from scripts import bpi
def testBpiImport(self, method=bpi):
self.assertRaises(ValueError, method.main(), None)
def testBpiRun(self):
pass
class TestStandardizeBpiGridsScript(unittest.TestCase):
from scripts import standardize_bpi_grids
def testStdImport(self, method=standardize_bpi_grids):
pass
def testStdRun(self):
pass
class TestBtmDocument(unittest.TestCase):
# XXX this won't automatically get the right thing... how can we fix it?
import utils
def testXMLDocumentExists(self):
self.assertTrue(os.path.exists(xml_doc))
if __name__ == '__main__':
unittest.main()
|
import os
import unittest
import numpy
import arcpy
from utils import *
# import our constants;
# configure test data
# XXX: use .ini files for these instead? used in other 'important' unit tests
from config import *
# import our local directory so we can use the internal modules
import_paths = ['../Install/toolbox', '../Install']
addLocalPaths(import_paths)
class TestBpiScript(unittest.TestCase):
from scripts import bpi
def testBpiImport(self, method=bpi):
self.assertRaises(ValueError, method.main(), None)
def testBpiRun(self):
pass
class TestStandardizeBpiGridsScript(unittest.TestCase):
from scripts import standardize_bpi_grids
def testStdImport(self, method=standardize_bpi_grids):
pass
def testStdRun(self):
pass
class TestBtmDocument(unittest.TestCase):
# XXX this won't automatically get the right thing... how can we fix it?
import utils
def testXmlDocumentExists(self):
self.assertTrue(os.path.exists(xml_doc))
def testCsvDocumentExists(self):
self.assertTrue(os.path.exists(csv_doc))
if __name__ == '__main__':
unittest.main()
|
Make naming consistent with our standard (camelcase always, even with acronymn)
|
Make naming consistent with our standard (camelcase always, even with acronymn)
|
Python
|
mpl-2.0
|
EsriOceans/btm
|
457d8002a3758cc8f28ba195a21afc4e0d33965a
|
tests/vec_test.py
|
tests/vec_test.py
|
"""Tests for vectors."""
from sympy import sympify
from drudge import Vec
def test_vecs_has_basic_properties():
"""Tests the basic properties of vector instances."""
base = Vec('v')
v_ab = Vec('v', indices=['a', 'b'])
v_ab_1 = base['a', 'b']
v_ab_2 = (base['a'])['b']
indices_ref = (sympify('a'), sympify('b'))
hash_ref = hash(v_ab)
str_ref = 'v[a, b]'
repr_ref = "Vec('v', (a, b))"
for i in [v_ab, v_ab_1, v_ab_2]:
assert i.base == base.base
assert i.indices == indices_ref
assert hash(i) == hash_ref
assert i == v_ab
assert str(i) == str_ref
assert repr(i) == repr_ref
|
"""Tests for vectors."""
from sympy import sympify
from drudge import Vec
def test_vecs_has_basic_properties():
"""Tests the basic properties of vector instances."""
base = Vec('v')
v_ab = Vec('v', indices=['a', 'b'])
v_ab_1 = base['a', 'b']
v_ab_2 = (base['a'])['b']
indices_ref = (sympify('a'), sympify('b'))
hash_ref = hash(v_ab)
str_ref = 'v[a, b]'
repr_ref = "Vec('v', (a, b))"
for i in [v_ab, v_ab_1, v_ab_2]:
assert i.label == base.label
assert i.base == base
assert i.indices == indices_ref
assert hash(i) == hash_ref
assert i == v_ab
assert str(i) == str_ref
assert repr(i) == repr_ref
|
Update tests for vectors for the new protocol
|
Update tests for vectors for the new protocol
Now the tests for vectors are updated for the new non backward
compatible change for the concepts of label and base.
|
Python
|
mit
|
tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge
|
df00a5319028e53826c1a4fd29ed39bb671b4911
|
tutorials/urls.py
|
tutorials/urls.py
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view()),
]
|
from django.conf.urls import include, url
from markdownx import urls as markdownx
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view()),
# this not working correctly - some error in gatherTutorials
url(r'/add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'^markdownx/', include(markdownx)),
]
|
Add markdownx url, Add add-tutrorial url
|
Add markdownx url, Add add-tutrorial url
|
Python
|
agpl-3.0
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
3bce013c51c454721de3a868ea6d8e8c6d335112
|
cycli/neo4j.py
|
cycli/neo4j.py
|
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.host = host
self.port = port
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
|
Remove host and port attributes from Neo4j
|
Remove host and port attributes from Neo4j
|
Python
|
mit
|
nicolewhite/cycli,nicolewhite/cycli
|
70c9deb44cbbce13fbe094640786398cb4683b08
|
ldap_sync/tasks.py
|
ldap_sync/tasks.py
|
from django.core.management import call_command
from celery import task
@task
def syncldap():
"""
Call the appropriate management command to synchronize the LDAP users
with the local database.
"""
call_command('syncldap')
|
from django.core.management import call_command
from celery import shared_task
@shared_task
def syncldap():
"""
Call the appropriate management command to synchronize the LDAP users
with the local database.
"""
call_command('syncldap')
|
Change Celery task to shared task
|
Change Celery task to shared task
|
Python
|
bsd-3-clause
|
alexsilva/django-ldap-sync,jbittel/django-ldap-sync,PGower/django-ldap3-sync,alexsilva/django-ldap-sync
|
026fade3f064f0185fa3a6f2075d43353e041970
|
whois-scraper.py
|
whois-scraper.py
|
from lxml import html
from PIL import Image
import requests
def enlarge_image(image_file):
image = Image.open(image_file)
enlarged_size = map(lambda x: x*2, image.size)
enlarged_image = image.resize(enlarged_size)
return enlarged_image
def extract_text(image_file):
image = enlarge_image(image_file)
# Use Tesseract to extract text from the enlarged image. Then Return it.
domain = 'speedtest.net'
page = requests.get('http://www.whois.com/whois/{}'.format(domain))
tree = html.fromstring(page.content)
|
from lxml import html
from PIL import Image
import requests
import urllib.request
def enlarge_image(image_file):
image = Image.open(image_file)
enlarged_size = map(lambda x: x*2, image.size)
enlarged_image = image.resize(enlarged_size)
return enlarged_image
def extract_text(image_file):
image = enlarge_image(image_file)
# Use Tesseract to extract text from the enlarged image. Then Return it.
def fix_emails(whois_data, image_urls):
count = 0
for index, item in enumerate(whois_data):
if item.startswith('@'):
with urllib.request.urlopen(image_urls[count]) as response:
email_username = extract_text(image_urls[count])
whois_data[index-1:index+1] = [whois_data[index-1] + email_username + whois_data[index]]
count += 1
return whois_data
def scrape_whois(domain):
domain = 'speedtest.net'
page = requests.get('http://www.whois.com/whois/{}'.format(domain))
tree = html.fromstring(page.content)
registrar_data = tree.xpath('//*[@id="registrarData"]/text()')
registrar_images = list(map(lambda x: 'http://www.whois.com' + x, tree.xpath('//*[@id="registrarData"]/img/@src')))
registry_data = tree.xpath('//*[@id="registryData"]/text()')
registry_images = list(map(lambda x: 'http://www.whois.com' + x, tree.xpath('//*[@id="registryData"]/img/@src')))
|
Add functions to scrape whois data and fix the e-mails in it
|
Add functions to scrape whois data and fix the e-mails in it
- Add function scrape_whois which scrapes the raw whois information for a given domain from http://www.whois.com/whois.
- Add function fix_emails. http://www.whois.com hides the username-part of the contact e-mails from the whois info by displaying it as an image. This function fixes that using the extract_text function.
|
Python
|
mit
|
SkullTech/whois-scraper
|
b89f6981d4f55790aa919f36e02a6312bd5f1583
|
tests/__init__.py
|
tests/__init__.py
|
import unittest
import sys
from six import PY3
if PY3:
from urllib.parse import urlsplit, parse_qsl
else:
from urlparse import urlsplit, parse_qsl
import werkzeug as wz
from flask import Flask, url_for, render_template_string
from flask.ext.images import Images, ImageSize, resized_img_src
import flask
flask_version = tuple(map(int, flask.__version__.split('.')))
class TestCase(unittest.TestCase):
def setUp(self):
self.app = self.create_app()
self.app_ctx = self.app.app_context()
self.app_ctx.push()
self.req_ctx = self.app.test_request_context('http://localhost:8000/')
self.req_ctx.push()
self.client = self.app.test_client()
def create_app(self):
app = Flask(__name__)
app.config['TESTING'] = True
app.config['SERVER_NAME'] = 'localhost'
app.config['SECRET_KEY'] = 'secret secret'
app.config['IMAGES_PATH'] = ['assets']
self.images = Images(app)
return app
def assert200(self, res):
self.assertEqual(res.status_code, 200)
|
import unittest
import sys
from six import PY3
if PY3:
from urllib.parse import urlsplit, parse_qsl
else:
from urlparse import urlsplit, parse_qsl
import werkzeug as wz
from flask import Flask, url_for, render_template_string
import flask
from flask_images import Images, ImageSize, resized_img_src
flask_version = tuple(map(int, flask.__version__.split('.')))
class TestCase(unittest.TestCase):
def setUp(self):
self.app = self.create_app()
self.app_ctx = self.app.app_context()
self.app_ctx.push()
self.req_ctx = self.app.test_request_context('http://localhost:8000/')
self.req_ctx.push()
self.client = self.app.test_client()
def create_app(self):
app = Flask(__name__)
app.config['TESTING'] = True
app.config['SERVER_NAME'] = 'localhost'
app.config['SECRET_KEY'] = 'secret secret'
app.config['IMAGES_PATH'] = ['assets']
self.images = Images(app)
return app
def assert200(self, res):
self.assertEqual(res.status_code, 200)
|
Stop using `flask.ext.*` in tests.
|
Stop using `flask.ext.*` in tests.
|
Python
|
bsd-3-clause
|
mikeboers/Flask-Images
|
211972701d8dbd39e42ec5a8d10b9c56be858d3e
|
tests/conftest.py
|
tests/conftest.py
|
import string
import pytest
@pytest.fixture
def identity_fixures():
l = []
for i, c in enumerate(string.ascii_uppercase):
l.append(dict(
name='identity_{0}'.format(i),
access_key_id='someaccesskey_{0}'.format(c),
secret_access_key='notasecret_{0}_{1}'.format(i, c),
))
return l
@pytest.fixture
def identity_store(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
def fin():
identity_store.identities.clear()
identity_store.save_to_config()
return identity_store
@pytest.fixture
def identity_store_with_data(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
for data in identity_fixures():
identity_store.add_identity(data)
def fin():
identity_store.identities.clear()
identity_store.save_to_config()
return identity_store
|
import string
import pytest
@pytest.fixture
def identity_fixures():
l = []
for i, c in enumerate(string.ascii_uppercase):
l.append(dict(
name='identity_{0}'.format(i),
access_key_id='someaccesskey_{0}'.format(c),
secret_access_key='notasecret_{0}_{1}'.format(i, c),
))
return l
@pytest.fixture
def identity_store(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
return identity_store
@pytest.fixture
def identity_store_with_data(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
for data in identity_fixures():
identity_store.add_identity(data)
return identity_store
|
Remove fixture teardown since nothing should be saved (tmpdir)
|
Remove fixture teardown since nothing should be saved (tmpdir)
|
Python
|
mit
|
nocarryr/AWS-Identity-Manager
|
debdc71a1c22412c46d8bf74315a5467c1e228ee
|
magnum/tests/unit/common/test_exception.py
|
magnum/tests/unit/common/test_exception.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
from magnum.common import exception
from magnum.i18n import _
from magnum.tests import base
class TestMagnumException(exception.MagnumException):
message = _("templated %(name)s")
class TestException(base.BaseTestCase):
def raise_(self, ex):
raise ex
def test_message_is_templated(self):
ex = TestMagnumException(name="NAME")
self.assertEqual("templated NAME", ex.message)
def test_custom_message_is_templated(self):
ex = TestMagnumException(_("custom templated %(name)s"), name="NAME")
self.assertEqual("custom templated NAME", ex.message)
def test_all_exceptions(self):
for name, obj in inspect.getmembers(exception):
if inspect.isclass(obj) and issubclass(obj, Exception):
self.assertRaises(obj, self.raise_, obj())
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
from magnum.common import exception
from magnum.i18n import _
from magnum.tests import base
class TestMagnumException(exception.MagnumException):
message = _("templated %(name)s")
class TestException(base.BaseTestCase):
def raise_(self, ex):
raise ex
def test_message_is_templated(self):
ex = TestMagnumException(name="NAME")
self.assertEqual("templated NAME", str(ex))
def test_custom_message_is_templated(self):
ex = TestMagnumException(_("custom templated %(name)s"), name="NAME")
self.assertEqual("custom templated NAME", str(ex))
def test_all_exceptions(self):
for name, obj in inspect.getmembers(exception):
if inspect.isclass(obj) and issubclass(obj, Exception):
self.assertRaises(obj, self.raise_, obj())
|
Stop using deprecated 'message' attribute in Exception
|
Stop using deprecated 'message' attribute in Exception
The 'message' attribute has been deprecated and removed
from Python3.
For more details, please check:
https://www.python.org/dev/peps/pep-0352/
Change-Id: Id952e4f59a911df7ccc1d64e7a8a2d5e9ee353dd
|
Python
|
apache-2.0
|
ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum,openstack/magnum
|
021cf436c23c5c705d0e3c5b6383e25811ade669
|
webmaster_verification/views.py
|
webmaster_verification/views.py
|
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class GoogleVerificationView(VerificationView):
template_name = 'google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationView):
template_name = 'bing_verify_template.xml'
provider = 'bing'
|
import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
|
Use the new template path
|
Use the new template path
|
Python
|
bsd-3-clause
|
nkuttler/django-webmaster-verification,nkuttler/django-webmaster-verification
|
4d1b96792f73777adaa0a79341901ca82f57839b
|
use/functional.py
|
use/functional.py
|
def pipe(*functions):
def closure(x):
for fn in functions:
if not out:
out = fn(x)
else:
out = fn(out)
return out
return closure
|
import collections
import functools
def pipe(*functions):
def closure(x):
for fn in functions:
if not out:
out = fn(x)
else:
out = fn(out)
return out
return closure
class memoize(object):
'''Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
'''
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
else:
value = self.func(*args)
self.cache[args] = value
return value
def __repr__(self):
'''Return the function's docstring.'''
return self.func.__doc__
def __get__(self, obj, objtype):
'''Support instance methods.'''
return functools.partial(self.__call__, obj)
|
Add a simple memoize function
|
Add a simple memoize function
|
Python
|
mit
|
log0ymxm/corgi
|
3f2b4236bdb5199d4830a893c7b511f7875dc501
|
plata/utils.py
|
plata/utils.py
|
from decimal import Decimal
import simplejson
from django.core.serializers.json import DjangoJSONEncoder
try:
simplejson.dumps([42], use_decimal=True)
except TypeError:
raise Exception('simplejson>=2.1 with support for use_decimal required.')
class JSONFieldDescriptor(object):
def __init__(self, field):
self.field = field
def __get__(self, obj, objtype):
cache_field = '_cached_jsonfield_%s' % self.field
if not hasattr(obj, cache_field):
try:
setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field),
use_decimal=True))
except (TypeError, ValueError):
setattr(obj, cache_field, {})
return getattr(obj, cache_field)
def __set__(self, obj, value):
setattr(obj, '_cached_jsonfield_%s' % self.field, value)
setattr(obj, self.field, simplejson.dumps(value, use_decimal=True,
cls=DjangoJSONEncoder))
|
from decimal import Decimal
import simplejson
from django.core.serializers.json import DjangoJSONEncoder
try:
simplejson.dumps([42], use_decimal=True)
except TypeError:
raise Exception('simplejson>=2.1 with support for use_decimal required.')
class CallbackOnUpdateDict(dict):
"""Dict which executes a callback on every update"""
def __init__(self, *args, **kwargs):
self.callback = kwargs.pop('callback')
super(CallbackOnUpdateDict, self).__init__(*args, **kwargs)
def __setitem__(self, key, value):
ret = super(CallbackOnUpdateDict, self).__setitem__(key, value)
self.callback(self)
return ret
def update(self, d):
ret = super(CallbackOnUpdateDict, self).update(d)
self.callback(self)
return ret
class JSONFieldDescriptor(object):
def __init__(self, field):
self.field = field
def __get__(self, obj, objtype):
cache_field = '_cached_jsonfield_%s' % self.field
if not hasattr(obj, cache_field):
try:
value = simplejson.loads(getattr(obj, self.field), use_decimal=True)
except (TypeError, ValueError):
value = {}
self.__set__(obj, value)
return getattr(obj, cache_field)
def __set__(self, obj, value):
if not isinstance(value, CallbackOnUpdateDict):
value = CallbackOnUpdateDict(value,
# Update cached and serialized value on every write to the data dict
callback=lambda d: self.__set__(obj, d))
setattr(obj, '_cached_jsonfield_%s' % self.field, value)
setattr(obj, self.field, simplejson.dumps(value, use_decimal=True,
cls=DjangoJSONEncoder))
|
Make working with JSONDataDescriptor easier
|
Make working with JSONDataDescriptor easier
|
Python
|
bsd-3-clause
|
allink/plata,armicron/plata,armicron/plata,armicron/plata,stefanklug/plata
|
131f266e73139f1148ee3e9fcce8db40842afb88
|
sale_channel/models/account.py
|
sale_channel/models/account.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Sales Channels
# Copyright (C) 2016 June
# 1200 Web Development
# http://1200wd.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _, exceptions
class AccountTax(models.Model):
_inherit = 'account.tax'
@api.model
def _get_sales_channel_domain(self):
ids = self.env.ref('res_partner_category.sales_channel').ids
return [('category_id', 'in', ids)]
sales_channel_id = fields.Many2one('res.partner', string="Sales channel",
ondelete='set null', domain=_get_sales_channel_domain)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Sales Channels
# Copyright (C) 2016 June
# 1200 Web Development
# http://1200wd.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _, exceptions
class AccountTax(models.Model):
_inherit = 'account.tax'
@api.model
def _get_sales_channel_domain(self):
ids = self.env.ref('res_partner_category.sales_channel').ids
return [('category_id', 'in', ids)]
sales_channel_id = fields.Many2one('res.partner', string="Sales channel",
ondelete='set null', domain=_get_sales_channel_domain)
_sql_constraints = [
('name_company_uniq', 'unique(name, company_id, sales_channel_id)', 'Tax Name must be unique per company and sales channel!'),
]
|
Add constraint, tax name must be unique for each company and sales channel
|
[IMP] Add constraint, tax name must be unique for each company and sales channel
|
Python
|
agpl-3.0
|
1200wd/1200wd_addons,1200wd/1200wd_addons
|
999d243fbc9908255ae292186bf8b17eb67e42e8
|
planner/forms.py
|
planner/forms.py
|
from django import forms
class LoginForm(forms.Form):
email = forms.EmailField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
|
from django.contrib.auth.forms import AuthenticationForm
from django import forms
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
|
Fix LoginForm to be conformant to builtin AuthenticationForm
|
Fix LoginForm to be conformant to builtin AuthenticationForm
|
Python
|
mit
|
livingsilver94/getaride,livingsilver94/getaride,livingsilver94/getaride
|
e1240aa33b286ba52507128458fc6d6b3b68dfb3
|
statsmodels/stats/multicomp.py
|
statsmodels/stats/multicomp.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012
Author: Josef Perktold
"""
from statsmodels.sandbox.stats.multicomp import MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012
Author: Josef Perktold
"""
from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
Put back an import that my IDE incorrectly flagged as unused
|
Put back an import that my IDE incorrectly flagged as unused
|
Python
|
bsd-3-clause
|
gef756/statsmodels,detrout/debian-statsmodels,detrout/debian-statsmodels,bzero/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,rgommers/statsmodels,hlin117/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,hainm/statsmodels,musically-ut/statsmodels,gef756/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,adammenges/statsmodels,waynenilsen/statsmodels,bzero/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,huongttlan/statsmodels,alekz112/statsmodels,adammenges/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,yl565/statsmodels,phobson/statsmodels,alekz112/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,saketkc/statsmodels,hainm/statsmodels,hlin117/statsmodels,kiyoto/statsmodels,YihaoLu/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,gef756/statsmodels,phobson/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,astocko/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,bzero/statsmodels,jseabold/statsmodels,bert9bert/statsmodels,hlin117/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,wkfwkf/statsmodels,ChadFulton/statsmodels,detrout/debian-statsmodels,hainm/statsmodels,wwf5067/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,bert9bert/statsmodels,alekz112/statsmodels,nguyentu1602/statsmodels,hainm/statsmodels,bert9bert/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,bzero/statsmodels,kiyoto/statsmodels,wwf5067/statsmodels,nvoron23/statsmodels,statsmodels/statsmod
els,YihaoLu/statsmodels,bashtage/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,nvoron23/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,Averroes/statsmodels,astocko/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,bzero/statsmodels,wzbozon/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,hlin117/statsmodels,alekz112/statsmodels,astocko/statsmodels,musically-ut/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,yl565/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,astocko/statsmodels,bashtage/statsmodels,phobson/statsmodels,Averroes/statsmodels,huongttlan/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,wkfwkf/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,gef756/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,bsipocz/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,detrout/debian-statsmodels,yl565/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels
|
eb785ce7485c438cfcaf6bb48d8cf8a840970bd4
|
src/tenyksddate/main.py
|
src/tenyksddate/main.py
|
import datetime
from tenyksservice import TenyksService, run_service
from ddate.base import DDate
class DiscordianDate(TenyksService):
direct_only = True
irc_message_filters = {
'date': [r'^(?i)(ddate|discordian) (?P<month>(.*)) (?P<day>(.*)) (?P<year>(.*))'],
'today': [r'^(?i)(ddate|discordian)']
}
def __init__(self, *args, **kwargs):
super(DiscordianDate, self).__init__(*args, **kwargs)
def handle_today(self, data, match):
self.send(str(DDate()), data)
def handle_date(self, data, match):
year = int(match.groupdict()['year'])
month = int(match.groupdict()['month'])
day = int(match.groupdict()['day'])
self.send(str(DDate(datetime.date(year=year, month=month, day=day))), data)
def main():
run_service(DiscordianDate)
if __name__ == '__main__':
main()
|
import datetime
from tenyksservice import TenyksService, run_service
from ddate.base import DDate
class DiscordianDate(TenyksService):
direct_only = True
irc_message_filters = {
'date': [r'^(?i)(ddate|discordian) (?P<month>(.*)) (?P<day>(.*)) (?P<year>(.*))'],
'today': [r'^(?i)(ddate|discordian)']
}
def __init__(self, *args, **kwargs):
super(DiscordianDate, self).__init__(*args, **kwargs)
def handle_date(self, data, match):
year = int(match.groupdict()['year'])
month = int(match.groupdict()['month'])
day = int(match.groupdict()['day'])
self.send(str(DDate(datetime.date(year=year, month=month, day=day))), data)
def handle_today(self, data, match):
self.send(str(DDate()), data)
def main():
run_service(DiscordianDate)
if __name__ == '__main__':
main()
|
Change method order to match filters
|
Change method order to match filters
|
Python
|
mit
|
kyleterry/tenyks-contrib,cblgh/tenyks-contrib,colby/tenyks-contrib
|
68046b638b5d2a9d9a0c9c588a6c2b833442e01b
|
plinth/modules/ikiwiki/forms.py
|
plinth/modules/ikiwiki/forms.py
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Forms for configuring ikiwiki
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
class IkiwikiCreateForm(forms.Form):
"""Form to create a wiki or blog."""
site_type = forms.ChoiceField(
label=_('Type'),
choices=[('wiki', 'Wiki'), ('blog', 'Blog')])
name = forms.CharField(label=_('Name'))
admin_name = forms.CharField(label=_('Admin Account Name'))
admin_password = forms.CharField(
label=_('Admin Account Password'),
widget=forms.PasswordInput())
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Forms for configuring ikiwiki
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.core.validators import RegexValidator
class IkiwikiCreateForm(forms.Form):
"""Form to create a wiki or blog."""
site_type = forms.ChoiceField(
label=_('Type'),
choices=[('wiki', 'Wiki'), ('blog', 'Blog')])
name = forms.CharField(label=_('Name'),
validators=[RegexValidator(regex='^[a-zA-Z0-9]+$')])
admin_name = forms.CharField(label=_('Admin Account Name'))
admin_password = forms.CharField(
label=_('Admin Account Password'),
widget=forms.PasswordInput())
|
Allow only alphanumerics in wiki/blog name
|
ikiwiki: Allow only alphanumerics in wiki/blog name
|
Python
|
agpl-3.0
|
harry-7/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,vignanl/Plinth,kkampardi/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,vignanl/Plinth,vignanl/Plinth,kkampardi/Plinth,vignanl/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,harry-7/Plinth,kkampardi/Plinth
|
2aed2eb4a1db5fba9d161a679c147f2260fb0780
|
msg/serializers.py
|
msg/serializers.py
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ('url', 'name')
from msg.models import Msg
class MsgSerializer( serializers.ModelSerializer ):
class Meta:
model = Msg
fields = ('frame_id','timestamp','source','channel','signature','body')
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UnixEpochDateField(serializers.DateTimeField):
def to_native(self, value):
""" Return epoch time for a datetime object or ``None``"""
import time
try:
return int(time.mktime(value.timetuple()))
except (AttributeError, TypeError):
return None
def from_native(self, value):
import datetime
return datetime.datetime.fromtimestamp(int(value))
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ('url', 'name')
from msg.models import Msg
class MsgSerializer( serializers.ModelSerializer ):
epoch = = UnixEpochDateField(source='timestamp')
class Meta:
model = Msg
fields = ('frame_id','timestamp','source','channel','signature','body','epoch')
|
Add epoch conversion to timestamp
|
Add epoch conversion to timestamp
|
Python
|
mit
|
orisi/fastcast
|
65fcfbfae9ef1a68d324aea932f983f7edd00cdf
|
mopidy/__init__.py
|
mopidy/__init__.py
|
import logging
from mopidy import settings as raw_settings
logger = logging.getLogger('mopidy')
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
def get_class(name):
module_name = name[:name.rindex('.')]
class_name = name[name.rindex('.') + 1:]
logger.info('Loading: %s from %s', class_name, module_name)
module = __import__(module_name, globals(), locals(), [class_name], -1)
class_object = getattr(module, class_name)
return class_object
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
import logging
from multiprocessing.reduction import reduce_connection
import pickle
from mopidy import settings as raw_settings
logger = logging.getLogger('mopidy')
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
def get_class(name):
module_name = name[:name.rindex('.')]
class_name = name[name.rindex('.') + 1:]
logger.info('Loading: %s from %s', class_name, module_name)
module = __import__(module_name, globals(), locals(), [class_name], -1)
class_object = getattr(module, class_name)
return class_object
def pickle_connection(connection):
return pickle.dumps(reduce_connection(connection))
def unpickle_connection(pickled_connection):
# From http://stackoverflow.com/questions/1446004
unpickled = pickle.loads(pickled_connection)
func = unpickled[0]
args = unpickled[1]
return func(*args)
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
Add util functions for pickling and unpickling multiprocessing.Connection
|
Add util functions for pickling and unpickling multiprocessing.Connection
|
Python
|
apache-2.0
|
SuperStarPL/mopidy,pacificIT/mopidy,swak/mopidy,hkariti/mopidy,dbrgn/mopidy,jmarsik/mopidy,diandiankan/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,quartz55/mopidy,ali/mopidy,pacificIT/mopidy,adamcik/mopidy,rawdlite/mopidy,swak/mopidy,dbrgn/mopidy,jodal/mopidy,hkariti/mopidy,priestd09/mopidy,dbrgn/mopidy,jmarsik/mopidy,quartz55/mopidy,liamw9534/mopidy,mokieyue/mopidy,mokieyue/mopidy,tkem/mopidy,jcass77/mopidy,woutervanwijk/mopidy,dbrgn/mopidy,ali/mopidy,SuperStarPL/mopidy,woutervanwijk/mopidy,ali/mopidy,swak/mopidy,diandiankan/mopidy,ZenithDK/mopidy,abarisain/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,quartz55/mopidy,bacontext/mopidy,hkariti/mopidy,vrs01/mopidy,ZenithDK/mopidy,vrs01/mopidy,glogiotatidis/mopidy,abarisain/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,vrs01/mopidy,adamcik/mopidy,jcass77/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,rawdlite/mopidy,tkem/mopidy,jcass77/mopidy,jmarsik/mopidy,mopidy/mopidy,priestd09/mopidy,ZenithDK/mopidy,ali/mopidy,kingosticks/mopidy,tkem/mopidy,bencevans/mopidy,priestd09/mopidy,swak/mopidy,mopidy/mopidy,bacontext/mopidy,bencevans/mopidy,diandiankan/mopidy,mopidy/mopidy,pacificIT/mopidy,liamw9534/mopidy,SuperStarPL/mopidy,tkem/mopidy,vrs01/mopidy,bacontext/mopidy,bencevans/mopidy,quartz55/mopidy,bencevans/mopidy,diandiankan/mopidy,bacontext/mopidy,mokieyue/mopidy,hkariti/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,ZenithDK/mopidy,jodal/mopidy,kingosticks/mopidy
|
a8bd6e86583b72211f028ecb51df2ee27550b258
|
submit.py
|
submit.py
|
import json
import requests
import argparse
parser = argparse.ArgumentParser(
description="Upload submission from submit.cancergenetrust.org")
parser.add_argument('file', nargs='?', default="submission.json",
help="Path to json file to submit")
args = parser.parse_args()
with open(args.file) as f:
submission = json.loads(f.read())
submission["clinical"]["CGT Public ID"] = submission["patientId"]
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
files=[("files[]",
("foundationone.json",
json.dumps(submission["genomic"], sort_keys=True)))],
data=submission["clinical"])
print(r.text)
assert(r.status_code == requests.codes.ok)
|
import json
import requests
import argparse
parser = argparse.ArgumentParser(
description="Upload submission from submit.cancergenetrust.org")
parser.add_argument('file', nargs='?', default="submission.json",
help="Path to json file to submit")
args = parser.parse_args()
with open(args.file) as f:
submission = json.loads(f.read())
submission["clinical"]["CGT Public ID"] = submission["patientId"]
if submission["genomic"]:
print("Submitting clinical and genomic data")
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
files=[("files[]",
("foundationone.json",
json.dumps(submission["genomic"], sort_keys=True)))],
data=submission["clinical"])
else:
print("No genomic data, submitting only clinical")
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
data=submission["clinical"])
print(r.text)
assert(r.status_code == requests.codes.ok)
|
Handle only clinical, no genomic, submission
|
Handle only clinical, no genomic, submission
|
Python
|
apache-2.0
|
ga4gh/CGT,ga4gh/CGT,ga4gh/CGT
|
81904effd492e2b2cea64dc98b29033261ae8b62
|
tests/generator_test.py
|
tests/generator_test.py
|
from fixture import GeneratorTest
from google.appengine.ext import testbed, ndb
class GeneratorTest(GeneratorTest):
def testLotsaModelsGenerated(self):
for klass in self.klasses:
k = klass._get_kind()
assert ndb.Model._lookup_model(k) == klass, klass
|
from fixture import GeneratorTest
from google.appengine.ext import testbed, ndb
class GeneratorTest(GeneratorTest):
def testLotsaModelsGenerated(self):
for klass in self.klasses:
k = klass._get_kind()
assert ndb.Model._lookup_model(k) == klass, klass
assert len(self.klasses) > 100
|
Check that we are creating Test Classes
|
Check that we are creating Test Classes
|
Python
|
mit
|
talkiq/gaend,samedhi/gaend,talkiq/gaend,samedhi/gaend
|
bc36a19d3bb1c07cbe2a44de88f227ef71c50b8c
|
notebooks/utils.py
|
notebooks/utils.py
|
def print_generated_sequence(g, num, *, sep=", "):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `g`.
"""
elems = [str(next(g)) for _ in range(num)]
sep_initial = "\n" if sep == "\n" else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
|
def print_generated_sequence(g, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `g`.
"""
if seed:
g.reset(seed)
elems = [str(next(g)) for _ in range(num)]
sep_initial = "\n" if sep == "\n" else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
|
Allow passing seed directly to helper function
|
Allow passing seed directly to helper function
|
Python
|
mit
|
maxalbert/tohu
|
44223235e5b8b0c49df564ae190927905de1f9a4
|
plenario/worker.py
|
plenario/worker.py
|
from datetime import datetime
from flask import Flask
import plenario.tasks as tasks
def create_worker():
app = Flask(__name__)
app.config.from_object('plenario.settings')
app.url_map.strict_slashes = False
@app.route('/update/weather', methods=['POST'])
def weather():
return tasks.update_weather.delay().id
@app.route('/update/often', methods=['POST'])
def metar():
return tasks.update_metar.delay().id
@app.route('/update/<frequency>', methods=['POST'])
def update(frequency):
return tasks.frequency_update.delay(frequency).id
@app.route('/archive', methods=['POST'])
def archive():
return tasks.archive.delay(datetime.now()).id
@app.route('/resolve', methods=['POST'])
def resolve():
return tasks.resolve.delay().id
@app.route('/health', methods=['GET', 'POST'])
def check_health():
return tasks.health.delay().id
return app
|
import os
from datetime import datetime
from flask import Flask
import plenario.tasks as tasks
def create_worker():
app = Flask(__name__)
app.config.from_object('plenario.settings')
app.url_map.strict_slashes = False
@app.route('/update/weather', methods=['POST'])
def weather():
return tasks.update_weather.delay().id
@app.route('/update/often', methods=['POST'])
def metar():
return tasks.update_metar.delay().id
@app.route('/update/<frequency>', methods=['POST'])
def update(frequency):
return tasks.frequency_update.delay(frequency).id
@app.route('/archive', methods=['POST'])
def archive():
return tasks.archive.delay(datetime.now()).id
@app.route('/resolve', methods=['POST'])
def resolve():
if not os.environ.get('PRIVATE'):
return 'hullo'
return tasks.resolve.delay().id
@app.route('/health', methods=['GET', 'POST'])
def check_health():
return tasks.health.delay().id
return app
|
Add temporary check to block production resolve
|
Add temporary check to block production resolve
|
Python
|
mit
|
UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario
|
2ec93f385e9eea63d42e17a2a777b459edf93816
|
tools/debug_adapter.py
|
tools/debug_adapter.py
|
#!/usr/bin/python
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server(multiple=False)
|
#!/usr/bin/python
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
|
Update code for changed function.
|
Update code for changed function.
|
Python
|
mit
|
vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb
|
143b74a2c6f99d2d92ac85310351327ffb630c1e
|
uscampgrounds/admin.py
|
uscampgrounds/admin.py
|
from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
admin.site.register(Campground, CampgroundAdmin)
|
from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
search_fields = ('name',)
admin.site.register(Campground, CampgroundAdmin)
|
Allow searching campgrounds by name for convenience.
|
Allow searching campgrounds by name for convenience.
|
Python
|
bsd-3-clause
|
adamfast/geodjango-uscampgrounds,adamfast/geodjango-uscampgrounds
|
d9024e4db0489b141fec9b96913c94a5d583f086
|
backend/scripts/mktemplate.py
|
backend/scripts/mktemplate.py
|
#!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
|
#!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
print "Loading template file: %s" % (options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
|
Update script to show which file it is loading.
|
Update script to show which file it is loading.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
8e8d80e744c99ab1c5552057899bf5470d751a29
|
linked_list.py
|
linked_list.py
|
#!/usr/bin/env python
from __future__ import print_function
class Node(object):
def __init__(self, value):
self._val = value
self._next = None
@property
def next(self):
return self._next
@next.setter
def next(self, value):
self._next = value
@property
def val(self):
return self._val
class LinkedList(object):
def __init__(self):
self._head = None
self.second = None
def insert(self, val):
self.second, self._head = self._head, Node(val)
self._head.next = self.second
def pop(self):
self._head = self._head.next
@property
def head(self):
return self._head
l = LinkedList()
l.insert('Nick')
l.insert('Constantine')
l.insert('Mark')
print(l.head.val)
print(l.head.next.val)
l.pop()
print(l.head.val)
print(l.head.next.val)
|
#!/usr/bin/env python
from __future__ import print_function
class Node(object):
def __init__(self, value):
self._val = value
self._next = None
@property
def next(self):
return self._next
@next.setter
def next(self, value):
self._next = value
@property
def val(self):
return self._val
class LinkedList(object):
def __init__(self):
self._head = None
self.second = None
def insert(self, val):
self.second, self._head = self._head, Node(val)
self._head.next = self.second
def pop(self):
self._head = self._head.next
def size(self):
if not self._head:
return 0
else:
i = 0
z = 1
try:
a = self._head.next
except AttributeError:
return i
while z != 0:
try:
a = a.next
except AttributeError:
z = 0
i += 1
return i
@property
def head(self):
return self._head
l = LinkedList()
l.insert('Nick')
print(l.size())
|
Add semi-working size() function v1
|
Nick: Add semi-working size() function v1
|
Python
|
mit
|
constanthatz/data-structures
|
64ae41be94374b0dae33d37ea1e2f20b233dd809
|
moocng/peerreview/managers.py
|
moocng/peerreview/managers.py
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by(
'kq__unit__order', 'kq__order')
|
Sort by kq too when returning peer review assignments
|
Sort by kq too when returning peer review assignments
|
Python
|
apache-2.0
|
OpenMOOC/moocng,OpenMOOC/moocng,GeographicaGS/moocng,GeographicaGS/moocng,GeographicaGS/moocng,GeographicaGS/moocng
|
3dd5cd27963a0cfeb446a36fcd50c05e7c715eb3
|
cyder/api/v1/endpoints/api.py
|
cyder/api/v1/endpoints/api.py
|
from django.utils.decorators import classonlymethod
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, viewsets
NestedAVFields = ['id', 'attribute', 'value']
class CommonAPISerializer(serializers.ModelSerializer):
pass
class CommonAPINestedAVSerializer(serializers.ModelSerializer):
attribute = serializers.SlugRelatedField(slug_field='name')
class CommonAPIMeta:
pass
class CommonAPIViewSet(viewsets.ModelViewSet):
def __init__(self, *args, **kwargs):
self.queryset = self.model.objects.all()
super(CommonAPIViewSet, self).__init__(*args, **kwargs)
#@classonlymethod
#@csrf_exempt
#def as_view(cls, *args, **kwargs):
# super(CommonAPIViewSet, cls).as_view(*args, **kwargs)
|
from rest_framework import serializers, viewsets
NestedAVFields = ['id', 'attribute', 'value']
class CommonAPISerializer(serializers.ModelSerializer):
pass
class CommonAPINestedAVSerializer(serializers.ModelSerializer):
attribute = serializers.SlugRelatedField(slug_field='name')
class CommonAPIMeta:
pass
class CommonAPIViewSet(viewsets.ModelViewSet):
def __init__(self, *args, **kwargs):
self.queryset = self.model.objects.all()
super(CommonAPIViewSet, self).__init__(*args, **kwargs)
|
Fix earlier folly (commented and useless code)
|
Fix earlier folly (commented and useless code)
|
Python
|
bsd-3-clause
|
akeym/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,murrown/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,OSU-Net/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder,zeeman/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,akeym/cyder,zeeman/cyder
|
fd5cad381e8b821bfabbefc9deb4b8a4531844f6
|
rnacentral_pipeline/rnacentral/notify/slack.py
|
rnacentral_pipeline/rnacentral/notify/slack.py
|
"""
Send a notification to slack.
NB: The webhook should be configured in the nextflow profile
"""
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
raise SystemExit("SLACK_WEBHOOK environment variable not defined")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
|
"""
Send a notification to slack.
NB: The webhook should be configured in the nextflow profile
"""
import os
import requests
def send_notification(title, message, plain=False):
    """
    Send a notification to the configured slack webhook.

    The webhook URL is taken from the SLACK_WEBHOOK environment variable,
    falling back to ``rnacentral_pipeline.secrets.SLACK_WEBHOOK`` (a
    secrets file symlinked into the package) when the variable is unset.
    Raises SystemExit when no webhook can be found or the POST fails.
    """
    SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
    if SLACK_WEBHOOK is None:
        try:
            from rnacentral_pipeline.secrets import SLACK_WEBHOOK
        except ImportError:
            # Only a failed import means "no secrets file".  A bare
            # `except:` here would also swallow KeyboardInterrupt and
            # SystemExit, masking unrelated failures.
            raise SystemExit("SLACK_WEBHOOK environment variable not defined, and couldn't find a secrets file")
    if plain:
        # Plain mode: single "title: message" line, no block formatting.
        slack_json = {
            "text" : title + ': ' + message
        }
    else:
        # Rich mode: title as top-level text, message as a markdown block.
        slack_json = {
            "text" : title,
            "blocks" : [
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": message
                    },
                },
            ]
        }
    try:
        response = requests.post(SLACK_WEBHOOK,
                                json=slack_json,
                                headers={'Content-Type':'application/json'}
                                )
        # Surface HTTP error statuses as exceptions.
        response.raise_for_status()
    except Exception as request_exception:
        raise SystemExit from request_exception
|
Add a secrets file in rnac notify
|
Add a secrets file in rnac notify
Nextflow doesn't propagate environment variables from the profile into
the event handler closures. This is the simplest workaround for that.
secrets.py should be on the cluster and symlinked into
rnacentral_pipeline
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
5df350254e966007f80f7a14fde29a8c93316bb3
|
tests/rules/test_git_push.py
|
tests/rules/test_git_push.py
|
import pytest
from thefuck.rules.git_push import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''fatal: The current branch master has no upstream branch.
To push the current branch and set the remote as upstream, use
git push --set-upstream origin master
'''
def test_match(stderr):
assert match(Command('git push', stderr=stderr))
assert match(Command('git push master', stderr=stderr))
assert not match(Command('git push master'))
assert not match(Command('ls', stderr=stderr))
def test_get_new_command(stderr):
assert get_new_command(Command('git push', stderr=stderr))\
== "git push --set-upstream origin master"
|
import pytest
from thefuck.rules.git_push import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''fatal: The current branch master has no upstream branch.
To push the current branch and set the remote as upstream, use
git push --set-upstream origin master
'''
def test_match(stderr):
assert match(Command('git push', stderr=stderr))
assert match(Command('git push master', stderr=stderr))
assert not match(Command('git push master'))
assert not match(Command('ls', stderr=stderr))
def test_get_new_command(stderr):
assert get_new_command(Command('git push', stderr=stderr))\
== "git push --set-upstream origin master"
assert get_new_command(Command('git push --quiet', stderr=stderr))\
== "git push --set-upstream origin master --quiet"
|
Check arguments are preserved in git_push
|
Check arguments are preserved in git_push
|
Python
|
mit
|
scorphus/thefuck,mlk/thefuck,Clpsplug/thefuck,SimenB/thefuck,nvbn/thefuck,Clpsplug/thefuck,SimenB/thefuck,mlk/thefuck,nvbn/thefuck,scorphus/thefuck
|
c09a8ce5bb47db4ea4381925ec07199415ae5c39
|
spacy/tests/integration/test_load_languages.py
|
spacy/tests/integration/test_load_languages.py
|
# encoding: utf8
from __future__ import unicode_literals
from ...fr import French
def test_load_french():
nlp = French()
doc = nlp(u'Parlez-vous français?')
|
# encoding: utf8
from __future__ import unicode_literals
from ...fr import French
def test_load_french():
    """Smoke-test the French tokenizer on a simple question."""
    nlp = French()
    doc = nlp(u'Parlez-vous français?')
    assert doc[0].text == u'Parlez'
    assert doc[1].text == u'-'
    # The token present in the input is 'vous'; the previous expected
    # value 'vouz' was a typo and could never match.
    assert doc[2].text == u'vous'
    assert doc[3].text == u'français'
    assert doc[4].text == u'?'
|
Add test for french tokenizer
|
Add test for french tokenizer
|
Python
|
mit
|
raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,raphael0202/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,banglakit/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,banglakit/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,banglakit/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy
|
bd89dc8f6812ff824417875c9375499f331bf5e4
|
scripts/maf_limit_to_species.py
|
scripts/maf_limit_to_species.py
|
#!/usr/bin/env python2.3
"""
Read a maf file from stdin and write out a new maf with only blocks having all
of the required in species, after dropping any other species and removing
columns containing only gaps.
usage: %prog species,species2,... < maf
"""
import psyco_full
import bx.align.maf
import copy
import sys
from itertools import *
def main():
species = sys.argv[1].split( ',' )
maf_reader = bx.align.maf.Reader( sys.stdin )
maf_writer = bx.align.maf.Writer( sys.stdout )
for m in maf_reader:
new_components = []
for comp in m.components:
if comp.src.split( '.' )[0] in species:
new_components.append( comp )
m.components = new_components
if len( m.components ) > 1:
maf_writer.write( m )
maf_reader.close()
maf_writer.close()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python2.3
"""
Read a maf file from stdin and write out a new maf with only blocks having all
of the required in species, after dropping any other species and removing
columns containing only gaps.
usage: %prog species,species2,... < maf
"""
import psyco_full
import bx.align.maf
import copy
import sys
from itertools import *
def main():
    # Comma-separated list of species to keep, e.g. "hg18,mm8".
    species = sys.argv[1].split( ',' )
    maf_reader = bx.align.maf.Reader( sys.stdin )
    maf_writer = bx.align.maf.Writer( sys.stdout )
    for m in maf_reader:
        # Keep only components whose source prefix (before the first '.')
        # names one of the requested species.
        new_components = []
        for comp in m.components:
            if comp.src.split( '.' )[0] in species:
                new_components.append( comp )
        m.components = new_components
        # Dropping rows can leave columns that are all gaps; strip them.
        m.remove_all_gap_columns()
        # A block with fewer than two rows is no longer an alignment.
        if len( m.components ) > 1:
            maf_writer.write( m )
    maf_reader.close()
    maf_writer.close()
if __name__ == "__main__":
    main()
|
Remove all-gap columns after removing rows of the alignment
|
Remove all-gap columns after removing rows of the alignment
|
Python
|
mit
|
uhjish/bx-python,uhjish/bx-python,uhjish/bx-python
|
b718c1d817e767c336654001f3aaea5d7327625a
|
wsgi_intercept/requests_intercept.py
|
wsgi_intercept/requests_intercept.py
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
import sys
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
    # Intercepting HTTP connection used in place of urllib3's connection
    # class.  Python 3's http.client.HTTPConnection no longer accepts the
    # `strict` keyword, but urllib3 may still pass it -- drop it first.
    def __init__(self, *args, **kwargs):
        if 'strict' in kwargs and sys.version_info > (3, 0):
            kwargs.pop('strict')
        # Initialise both bases explicitly (no cooperative super() chain
        # across these mixed hierarchies).
        WSGI_HTTPConnection.__init__(self, *args, **kwargs)
        HTTPConnection.__init__(self, *args, **kwargs)
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
    # HTTPS counterpart of HTTP_WSGIInterceptor; same `strict` workaround
    # for Python 3, same explicit two-base initialisation.
    def __init__(self, *args, **kwargs):
        if 'strict' in kwargs and sys.version_info > (3, 0):
            kwargs.pop('strict')
        WSGI_HTTPSConnection.__init__(self, *args, **kwargs)
        HTTPSConnection.__init__(self, *args, **kwargs)
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
Deal with request's urllib3 being annoying about 'strict'
|
Deal with request's urllib3 being annoying about 'strict'
These changes are required to get tests to pass in python3.4 (and
presumably others).
This is entirely code from @sashahart, who had done the work earlier
to deal with with some Debian related issues uncovered by @thomasgoirand.
These changes will probably mean the debian packages will need to be
updated when the next version is released.
|
Python
|
mit
|
sileht/python3-wsgi-intercept,cdent/wsgi-intercept
|
2843052a222541e3b7ce45fa633f5df61b10a809
|
test/oracle.py
|
test/oracle.py
|
import qnd
import tensorflow as tf
def model_fn(x, y):
return (y,
0.0,
tf.contrib.framework.get_or_create_global_step().assign_add())
def input_fn(q):
shape = (100,)
return tf.zeros(shape, tf.float32), tf.ones(shape, tf.int32)
train_and_evaluate = qnd.def_train_and_evaluate()
def main():
train_and_evaluate(model_fn, input_fn)
if __name__ == "__main__":
main()
|
import qnd
import tensorflow as tf
def model_fn(x, y):
return (y,
0.0,
tf.contrib.framework.get_or_create_global_step().assign_add())
def input_fn(q):
shape = (100,)
return tf.zeros(shape, tf.float32), tf.ones(shape, tf.int32)
train_and_evaluate = qnd.def_train_and_evaluate(distributed=True)
def main():
train_and_evaluate(model_fn, input_fn)
if __name__ == "__main__":
main()
|
Use distributed flag for xfail test
|
Use distributed flag for xfail test
|
Python
|
unlicense
|
raviqqe/tensorflow-qnd,raviqqe/tensorflow-qnd
|
bf7b8df92fb1cc16fccefe201eefc0ed853eac5d
|
server/api/serializers/rides.py
|
server/api/serializers/rides.py
|
import requests
from django.conf import settings
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
from .riders import RiderSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
chapter = ChapterSerializer()
riders = RiderSerializer(source='registered_riders', many=True, read_only=True)
class Meta:
model = Ride
fields = ('id', 'name', 'slug', 'strapline', 'description_html', 'start_location', 'end_location', 'start_date', 'end_date',
'chapter', 'rider_capacity', 'riders', 'spaces_left', 'price', 'full_cost', 'currency', 'is_over',
'fundraising_total', 'fundraising_target')
class RideRiderSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True, default=serializers.CurrentUserDefault())
class Meta:
model = RideRiders
fields = ('id', 'ride', 'user', 'signup_date', 'signup_expires', 'status', 'paid', 'expired', 'payload')
validators = [
UniqueTogetherValidator(
queryset=RideRiders.objects.all(),
fields=('user', 'ride'),
message='You have already registered for this ride.'
)
]
|
import requests
from django.conf import settings
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
from .riders import RiderSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
chapter = ChapterSerializer()
riders = RiderSerializer(source='registered_riders', many=True, read_only=True)
class Meta:
model = Ride
fields = ('id', 'name', 'slug', 'strapline', 'description_html', 'start_location', 'end_location', 'start_date', 'end_date',
'chapter', 'rider_capacity', 'riders', 'spaces_left', 'price', 'full_cost', 'currency', 'is_over',
'fundraising_total', 'fundraising_target')
class RideRiderSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True, default=serializers.CurrentUserDefault())
signup_date = serializers.DateTimeField(required=False)
class Meta:
model = RideRiders
fields = ('id', 'ride', 'user', 'signup_date', 'signup_expires', 'status', 'paid', 'expired', 'payload')
validators = [
UniqueTogetherValidator(
queryset=RideRiders.objects.all(),
fields=('user', 'ride'),
message='You have already registered for this ride.'
)
]
|
Make sure that the registration serialiser doesn't require the signup date.
|
Make sure that the registration serialiser doesn't require the signup date.
Signed-off-by: Michael Willmott <4063ad43ea4e0ae77bf35022808393a246bdfa61@gmail.com>
|
Python
|
mit
|
Techbikers/techbikers,Techbikers/techbikers,mwillmott/techbikers,mwillmott/techbikers,mwillmott/techbikers,Techbikers/techbikers,mwillmott/techbikers,Techbikers/techbikers
|
788dd6f62899fb16aa983c17bc1a5e6eea5317b0
|
FunctionHandler.py
|
FunctionHandler.py
|
import os, sys
from glob import glob
import GlobalVars
def LoadFunction(path, loadAs=''):
loadType = 'l'
name = path
src = __import__('Functions.' + name, globals(), locals(), [])
if loadAs != '':
name = loadAs
if name in GlobalVars.functions:
loadType = 'rel'
del sys.modules['Functions.'+name]
for f in glob ('Functions/%s.pyc' % name):
os.remove(f)
reload(src)
components = name.split('.')
for comp in components[:1]:
src = getattr(src, comp)
ModuleName = str(src).split("from")[0].strip("(").rstrip(" ")
if loadType != 'rel':
print ModuleName + " loaded."
func = src.Instantiate()
GlobalVars.functions.update({name:func})
return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'Functions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
if item[:-3] not in GlobalVars.nonDefaultModules:
LoadFunction(item[:-3])
except Exception, x:
print x.args
|
import os, sys
from glob import glob
import GlobalVars
def LoadFunction(path, loadAs=''):
loadType = 'l'
name = path
src = __import__('Functions.' + name, globals(), locals(), [])
if loadAs != '':
name = loadAs
if name in GlobalVars.functions:
loadType = 'rel'
del sys.modules['Functions.'+name]
for f in glob ('Functions/%s.pyc' % name):
os.remove(f)
reload(src)
components = name.split('.')
for comp in components[:1]:
src = getattr(src, comp)
ModuleName = str(src).split("from")[0].strip("(").rstrip(" ")
if loadType != 'rel':
print ModuleName + " loaded."
else:
print ModuleName + " reloaded."
func = src.Instantiate()
GlobalVars.functions.update({name:func})
return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'Functions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
if item[:-3] not in GlobalVars.nonDefaultModules:
LoadFunction(item[:-3])
except Exception, x:
print x.args
|
Clean up debug printing further
|
Clean up debug printing further
|
Python
|
mit
|
HubbeKing/Hubbot_Twisted
|
0bb777c0c77e5b7cac8d48f79f78d3a7cf944943
|
backend/uclapi/uclapi/utils.py
|
backend/uclapi/uclapi/utils.py
|
def strtobool(x):
    """Return True iff *x* (case-insensitively) spells a truthy value."""
    truthy_values = ("true", "yes", "1", "y")
    lowered = x.lower()
    return lowered in truthy_values
|
def strtobool(x):
    """Interpret *x* as a boolean.

    Returns True for (case-insensitive) "true", "yes", "1" or "y";
    False for anything else, including non-string values such as None.
    """
    try:
        return x.lower() in ("true", "yes", "1", "y")
    except AttributeError:
        # x is not a string (no .lower()) -- treat as false rather than
        # crash.  The original also had `except NameError` with a missing
        # colon (a SyntaxError); that clause was unreachable anyway since
        # the parameter x is always bound.
        return False
|
Add some failsafes to strtobool
|
Add some failsafes to strtobool
|
Python
|
mit
|
uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi
|
1f914a04adb4ad7d39ca7104e2ea36acc76b18bd
|
pvextractor/tests/test_gui.py
|
pvextractor/tests/test_gui.py
|
import numpy as np
from numpy.testing import assert_allclose
import pytest
from astropy.io import fits
from ..pvextractor import extract_pv_slice
from ..geometry.path import Path
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
import matplotlib as mpl
if mpl.__version__[0] == '2':
MPLOK = False
else:
MPLOK = True
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100,200,220,330,340]
y = [100,200,300,420,430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i],y[i])
pv.fig.canvas.button_press_event(x[i],y[i],1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310,420)
pv.fig.canvas.button_press_event(410,420,1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5,2)
|
import pytest
from distutils.version import LooseVersion
import matplotlib as mpl
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
if LooseVersion(mpl.__version__) < LooseVersion('2'):
MPLOK = True
else:
MPLOK = False
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100, 200, 220, 330, 340]
y = [100, 200, 300, 420, 430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i], y[i])
pv.fig.canvas.button_press_event(x[i], y[i], 1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310, 420)
pv.fig.canvas.button_press_event(410, 420, 1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5, 2)
|
Use LooseVersion to compare version numbers
|
Use LooseVersion to compare version numbers
|
Python
|
bsd-3-clause
|
radio-astro-tools/pvextractor,keflavich/pvextractor
|
0cc4e839d5d7725aba289047cefe77cd89d24593
|
auth_mac/models.py
|
auth_mac/models.py
|
from django.db import models
from django.contrib.auth.models import User
import datetime
def default_expiry_time():
return datetime.datetime.now() + datetime.timedelta(days=1)
def random_string():
return User.objects.make_random_password(16)
class Credentials(models.Model):
"Keeps track of issued MAC credentials"
user = models.ForeignKey(User)
expiry = models.DateTimeField("Expires On", default=default_expiry_time)
identifier = models.CharField("MAC Key Identifier", max_length=16, default=random_string)
key = models.CharField("MAC Key", max_length=16, default=random_string)
def __unicode__(self):
return u"{0}:{1}".format(self.identifier, self.key)
class Nonce(models.Model):
"""Keeps track of any NONCE combinations that we have used"""
nonce = models.CharField("NONCE", max_length=16, null=True, blank=True)
timestamp = models.DateTimeField("Timestamp", auto_now_add=True)
credentials = models.ForeignKey(Credentials)
|
from django.db import models
from django.contrib.auth.models import User
import datetime
def default_expiry_time():
return datetime.datetime.now() + datetime.timedelta(days=1)
def random_string():
return User.objects.make_random_password(16)
class Credentials(models.Model):
    "Keeps track of issued MAC credentials"
    user = models.ForeignKey(User)
    expiry = models.DateTimeField("Expires On", default=default_expiry_time)
    identifier = models.CharField("MAC Key Identifier", max_length=16, default=random_string)
    key = models.CharField("MAC Key", max_length=16, default=random_string)
    def __unicode__(self):
        return u"{0}:{1}".format(self.identifier, self.key)
    @property
    def expired(self):
        """Return True if the credentials' expiry time has passed."""
        # The comparison already yields a bool; no if/return-True/False
        # dance needed.
        # NOTE(review): naive datetime.now() -- assumes timezone support
        # is off (USE_TZ=False); confirm against project settings.
        return self.expiry < datetime.datetime.now()
class Nonce(models.Model):
"""Keeps track of any NONCE combinations that we have used"""
nonce = models.CharField("NONCE", max_length=16, null=True, blank=True)
timestamp = models.DateTimeField("Timestamp", auto_now_add=True)
credentials = models.ForeignKey(Credentials)
|
Add a model property to tell if credentials have expired
|
Add a model property to tell if credentials have expired
|
Python
|
mit
|
ndevenish/auth_mac
|
87c861f6ed0e73e21983edc3add35954b9f0def5
|
apps/configuration/fields.py
|
apps/configuration/fields.py
|
import unicodedata
from django.forms import fields
class XMLCompatCharField(fields.CharField):
"""
Strip 'control characters', as XML 1.0 does not allow them and the API may
return data in XML.
"""
def to_python(self, value):
value = super().to_python(value=value)
return self.remove_control_characters(value)
@staticmethod
def remove_control_characters(str):
return "".join(ch for ch in str if unicodedata.category(ch)[0] != "C")
|
import unicodedata
from django.forms import fields
class XMLCompatCharField(fields.CharField):
    """
    Strip 'control characters', as XML 1.0 does not allow them and the API may
    return data in XML.
    """

    def to_python(self, value):
        value = super().to_python(value=value)
        return self.remove_control_characters(value)

    @staticmethod
    def remove_control_characters(value):
        """Drop Unicode control characters, keeping XML-legal linebreaks.

        Renamed the parameter from ``input``, which shadowed the builtin.
        """
        # \n and \r have Unicode category "Cc" but are valid in XML 1.0,
        # so they are explicitly allowed through.
        valid_chars = ['\n', '\r']
        return "".join(ch for ch in value if
                       unicodedata.category(ch)[0] != "C" or ch in valid_chars)
|
Allow linebreaks textareas (should be valid in XML)
|
Allow linebreaks textareas (should be valid in XML)
|
Python
|
apache-2.0
|
CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat
|
b61679efce39841120fcdb921acefbc729f4c4fd
|
tests/test_kmeans.py
|
tests/test_kmeans.py
|
import numpy as np
import milk.unsupervised
def test_kmeans():
features = np.r_[np.random.rand(20,3)-.5,.5+np.random.rand(20,3)]
centroids, _ = milk.unsupervised.kmeans(features,2)
positions = [0]*20 + [1]*20
correct = (centroids == positions).sum()
assert correct >= 38 or correct <= 2
|
import numpy as np
import milk.unsupervised
def test_kmeans():
np.random.seed(132)
features = np.r_[np.random.rand(20,3)-.5,.5+np.random.rand(20,3)]
centroids, _ = milk.unsupervised.kmeans(features,2)
positions = [0]*20 + [1]*20
correct = (centroids == positions).sum()
assert correct >= 38 or correct <= 2
def test_kmeans_centroids():
    # Fixed seed keeps the random features (and thus kmeans) deterministic.
    np.random.seed(132)
    features = np.random.rand(201,30)
    for k in [2,3,5,10]:
        indices,centroids = milk.unsupervised.kmeans(features, k)
        # Each returned centroid must equal the mean of the points
        # assigned to it -- the defining invariant of a kmeans solution.
        # NOTE: xrange makes this Python 2 only.
        for i in xrange(k):
            assert np.allclose(centroids[i], features[indices == i].mean(0))
|
Make sure results make sense
|
Make sure results make sense
|
Python
|
mit
|
luispedro/milk,pombredanne/milk,luispedro/milk,pombredanne/milk,luispedro/milk,pombredanne/milk
|
e676877492057d7b370431f6896154702c8459f1
|
webshack/auto_inject.py
|
webshack/auto_inject.py
|
from urllib.parse import urljoin
from urllib.request import urlopen
from urllib.error import URLError
import sys
GITHUB_USERS = [('Polymer', '0.5.2')]
def resolve_missing_user(user, branch, package):
assets = ["{}.html".format(package),
"{}.css".format(package),
"{}.js".format(package)]
base_url = "https://raw.githubusercontent.com/{user}/{package}/{branch}/".format(**locals())
matched_assets = []
for asset in assets:
asset_url = urljoin(base_url, asset)
try:
with urlopen(asset_url):
pass
matched_assets.append(asset)
except URLError:
pass
if matched_assets:
print(" Matched.")
data = {'base': base_url, 'assets': {a: a for a in matched_assets}}
print('---')
print('{}:'.format(package))
print(' base: {}'.format(base_url))
print(' assets:')
for asset in matched_assets:
print(' {0}: {0}'.format(asset))
print('---')
return True
return False
def resolve_missing(package):
print('Trying to resolve missing package from GitHub repositories...')
for user, branch in GITHUB_USERS:
print(' {}...'.format(user))
if resolve_missing_user(user, branch, package):
return
|
from urllib.parse import urljoin
from urllib.request import urlopen
from urllib.error import URLError
import sys
ENORMOUS_INJECTION_HACK = False
GITHUB_USERS = [('Polymer', '0.5.2')]
def resolve_missing_user(user, branch, package):
assets = ["{}.html".format(package),
"{}.css".format(package),
"{}.js".format(package)]
base_url = "https://raw.githubusercontent.com/{user}/{package}/{branch}/".format(**locals())
matched_assets = []
for asset in assets:
asset_url = urljoin(base_url, asset)
try:
with urlopen(asset_url):
pass
matched_assets.append(asset)
except URLError:
pass
if matched_assets:
print(" Matched.")
data = {'base': base_url, 'assets': {a: a for a in matched_assets}}
if ENORMOUS_INJECTION_HACK:
target = open('webshack/standard_packages.yaml', 'a')
else:
target = sys.stdout
print('---')
print('{}:'.format(package), file=target)
print(' base: {}'.format(base_url), file=target)
print(' assets:', file=target)
for asset in matched_assets:
print(' {0}: {0}'.format(asset), file=target)
if not ENORMOUS_INJECTION_HACK:
print('---')
return True
return False
def resolve_missing(package):
print('Trying to resolve missing package from GitHub repositories...')
for user, branch in GITHUB_USERS:
print(' {}...'.format(user))
if resolve_missing_user(user, branch, package):
return
|
Add a hack to auto-inject new deps
|
Add a hack to auto-inject new deps
|
Python
|
mit
|
prophile/webshack
|
0e53ae11cb1cc53979edb1f17162e8b1d89ad809
|
user/models.py
|
user/models.py
|
from django.db import models
# Create your models here.
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
# Defines user's email notification settings.
class Notifications(models.Model):
    update = models.BooleanField(default=True)
    response = models.BooleanField(default=True)
# Extends User model. Defines sn and notifications for a User.
# Notifications is declared first so the ForeignKey below can reference
# the class directly; referencing it before its definition raised
# NameError as soon as this module was imported.
class Profile(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    sn = models.CharField(max_length=60)
    notifications = models.ForeignKey(Notifications, on_delete=models.CASCADE)
#
# The following functions define signals so that the Profile model
# will be automatically created/updated whenever the Django User object
# is created/updated. This makes it so you never have to call the Profile
# object's save method, all saving is done with the User model.
#
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    if created:
        Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    instance.profile.save()
|
Define initial schema for user and email notifications
|
Define initial schema for user and email notifications
|
Python
|
apache-2.0
|
ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints
|
172feb5997a826181a0ec381c171a0a2cc854e4c
|
yolapy/configuration.py
|
yolapy/configuration.py
|
"""Configuration.
Yolapy.configuration provides a key-value store used by the Yola client.
Data is stored here in the module, benefits include:
* Configuration is decoupled from application logic.
* When instantiating multiple service models, each contains its own client.
This module allows for configuration to happen once then consumed multiple
times by each client.
See README for example of use.
"""
config = {}
_default = object()
def configure(**kwargs):
"""Save all keyword arguments as (key=value) configuration."""
config.update(kwargs)
def get_config(key, default=_default):
    """Lookup the value of a configuration key using an optional default.

    Raises KeyError if *key* is absent and no default was supplied.
    """
    value = config.get(key, default)
    # Sentinel check must use identity (`is`), not equality: a configured
    # value whose __eq__ compared equal to the sentinel would otherwise be
    # misreported as "not configured".
    if value is _default:
        raise KeyError('%s is not configured' % key)
    return value
|
"""Configuration.
Yolapy.configuration provides a key-value store used by the Yola client.
Data is stored here in the module, benefits include:
* Configuration is decoupled from application logic.
* When instantiating multiple service models, each contains its own client.
This module allows for configuration to happen once then consumed multiple
times by each client.
See README for example of use.
"""
config = {}
_missing = object()
def configure(**kwargs):
"""Save all keyword arguments as (key=value) configuration."""
config.update(kwargs)
def get_config(key, default=_missing):
    """Lookup the value of a configuration key using an optional default.

    Raises KeyError if *key* is absent and no default was supplied.
    """
    value = config.get(key, default)
    # Compare against the sentinel by identity (`is`), not equality, so a
    # stored value that happens to compare equal cannot masquerade as
    # "missing".
    if value is _missing:
        raise KeyError('%s is not configured' % key)
    return value
|
Improve varname for missing config
|
Improve varname for missing config
|
Python
|
mit
|
yola/yolapy
|
b96cb194c8edd54fda9868d69fda515ac8beb29f
|
vumi/dispatchers/__init__.py
|
vumi/dispatchers/__init__.py
|
"""The vumi.dispatchers API."""
__all__ = ["BaseDispatchWorker", "BaseDispatchRouter", "SimpleDispatchRouter",
"TransportToTransportRouter", "ToAddrRouter",
"FromAddrMultiplexRouter", "UserGroupingRouter"]
from vumi.dispatchers.base import (BaseDispatchWorker, BaseDispatchRouter,
SimpleDispatchRouter,
TransportToTransportRouter, ToAddrRouter,
FromAddrMultiplexRouter,
UserGroupingRouter)
|
"""The vumi.dispatchers API."""
__all__ = ["BaseDispatchWorker", "BaseDispatchRouter", "SimpleDispatchRouter",
"TransportToTransportRouter", "ToAddrRouter",
"FromAddrMultiplexRouter", "UserGroupingRouter",
"ContentKeywordRouter"]
from vumi.dispatchers.base import (BaseDispatchWorker, BaseDispatchRouter,
SimpleDispatchRouter,
TransportToTransportRouter, ToAddrRouter,
FromAddrMultiplexRouter,
UserGroupingRouter, ContentKeywordRouter)
|
Add ContentKeywordRouter to vumi.dispatchers API.
|
Add ContentKeywordRouter to vumi.dispatchers API.
|
Python
|
bsd-3-clause
|
harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix
|
041e1545c99681c8cf9e43d364877d1ff43342d0
|
augur/datasources/augur_db/test_augur_db.py
|
augur/datasources/augur_db/test_augur_db.py
|
import os
import pytest
@pytest.fixture(scope="module")
def augur_db():
import augur
augur_app = augur.Application()
return augur_app['augur_db']()
# def test_repoid(augur_db):
# assert ghtorrent.repoid('rails', 'rails') >= 1000
# def test_userid(augur_db):
# assert ghtorrent.userid('howderek') >= 1000
"""
Pandas testing format
assert ghtorrent.<function>('owner', 'repo').isin(['<data that should be in dataframe>']).any
The tests check if a value is anywhere in the dataframe
"""
|
import os
import pytest
@pytest.fixture(scope="module")
def augur_db():
import augur
augur_app = augur.Application()
return augur_app['augur_db']()
# def test_repoid(augur_db):
# assert ghtorrent.repoid('rails', 'rails') >= 1000
# def test_userid(augur_db):
# assert ghtorrent.userid('howderek') >= 1000
"""
Pandas testing format
assert ghtorrent.<function>('owner', 'repo').isin(['<data that should be in dataframe>']).any
The tests check if a value is anywhere in the dataframe
"""
def test_issues_first_time_opened(augur_db):
    """Check first-time issue openers appear for both repo and repo-group scope."""
    # `.any` without parentheses is a bound-method object, which is always
    # truthy -- the original assertions could never fail.  It must be
    # CALLED; axis=None collapses a DataFrame result to a single bool.
    # repo_id
    assert augur_db.issues_first_time_opened(
        1, repo_id=25001, period='day').isin(
        ["2019-05-23 00:00:00+00:00"]).any(axis=None)
    assert augur_db.issues_first_time_opened(
        1, repo_id=25001, period='week').isin(
        ["2019-05-20 00:00:00+00:00"]).any(axis=None)
    # repo_group_id
    assert augur_db.issues_first_time_opened(1, period='day').isin(
        ["2019-05-23 00:00:00+00:00"]).any(axis=None)
|
Add Unit test for new contributors of issues
|
Add Unit test for new contributors of issues
Signed-off-by: Bingwen Ma <27def536c643ce1f88ca2c07ff6169767bd9a90f@gmail.com>
|
Python
|
mit
|
OSSHealth/ghdata,OSSHealth/ghdata,OSSHealth/ghdata
|
cd1c3645d733ab16355fe516bb2e505f87d49ace
|
backdrop/contrib/evl_upload.py
|
backdrop/contrib/evl_upload.py
|
from datetime import datetime
import itertools
from tests.support.test_helpers import d_tz
def ceg_volumes(rows):
    """Transform raw CEG spreadsheet rows into backdrop records.

    Yields a header row of field keys, then one data row per month
    column from April 2012 onwards.

    ``rows`` may be any iterable of rows; it is materialised up front
    because the helpers below access cells by index (``rows[3][col]``
    etc.), which would raise ``TypeError`` on a generator — only
    ``IndexError`` is handled.
    """
    rows = list(rows)

    def ceg_keys(rows):
        # Field names for the emitted records, in emission order.
        return [
            "_timestamp", "timeSpan", "relicensing_web", "relicensing_ivr",
            "relicensing_agent", "sorn_web", "sorn_ivr", "sorn_agent",
            "agent_automated_dupes", "calls_answered_by_advisor"
        ]

    def ceg_rows(rows):
        # Data columns start at index 3; stop at the first column whose
        # date cell is missing or not a datetime (end of the data range).
        for column in itertools.count(3):
            date = ceg_date(rows, column)
            if not isinstance(date, datetime):
                return
            if date >= d_tz(2012, 4, 1):
                yield [
                    date, "month", rows[5][column], rows[6][column],
                    rows[9][column], rows[11][column], rows[12][column],
                    rows[13][column], rows[15][column], rows[17][column]
                ]

    def ceg_date(rows, column):
        # The date header lives in spreadsheet row 3; IndexError means
        # we ran past the last populated column.
        try:
            return rows[3][column]
        except IndexError:
            return None

    yield ceg_keys(rows)
    for row in ceg_rows(rows):
        yield row
|
from datetime import datetime
import itertools
from tests.support.test_helpers import d_tz
def ceg_volumes(rows):
    """Yield a header row of field keys, then one CEG volume row per month."""
    # Materialise so that cells can be accessed by index even when the
    # caller passes a generator.
    cells = list(rows)

    def date_for(col):
        # The date header lives in spreadsheet row 3; IndexError means
        # we ran past the last populated column.
        try:
            return cells[3][col]
        except IndexError:
            return None

    yield [
        "_timestamp", "timeSpan", "relicensing_web", "relicensing_ivr",
        "relicensing_agent", "sorn_web", "sorn_ivr", "sorn_agent",
        "agent_automated_dupes", "calls_answered_by_advisor"
    ]

    # Data columns start at index 3; stop at the first column whose
    # date cell is missing or not a datetime.
    for col in itertools.count(3):
        when = date_for(col)
        if not isinstance(when, datetime):
            break
        if when >= d_tz(2012, 4, 1):
            yield [
                when, "month", cells[5][col], cells[6][col],
                cells[9][col], cells[11][col], cells[12][col],
                cells[13][col], cells[15][col], cells[17][col]
            ]
|
Convert rows to list in EVL CEG parser
|
Convert rows to list in EVL CEG parser
It needs to access cells directly
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
7a04bb7692b4838e0abe9ba586fc4748ed9cd5d4
|
tests/integration/blueprints/site/test_homepage.py
|
tests/integration/blueprints/site/test_homepage.py
|
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
    """By default nothing is mounted on `/`; the app must answer 404."""
    with http_client(site_app) as client:
        response = client.get('/')
        # Not a server error — the application booted; there is simply
        # nothing mounted at the root path.
        assert response.status_code == 404
|
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
    """Without a root redirect configured, `/` 404s and sets no Location."""
    with http_client(site_app) as client:
        response = client.get('/')
        # The application booted and answered; nothing is mounted on `/`
        # by default, and no redirect target is set.
        assert response.status_code == 404
        assert response.location is None
def test_homepage_with_root_redirect(make_site_app, site):
    """With ROOT_REDIRECT_TARGET set, `/` answers with a 307 redirect."""
    app = make_site_app(ROOT_REDIRECT_TARGET='welcome')
    with http_client(app) as client:
        response = client.get('/')
        assert response.status_code == 307
        assert response.location == 'http://www.acmecon.test/welcome'
|
Test custom root path redirect
|
Test custom root path redirect
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
cdfb5c0c074e9143eeb84d914225dbcfb63151ba
|
common/djangoapps/dark_lang/models.py
|
common/djangoapps/dark_lang/models.py
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
    """
    Configuration for the dark_lang django app
    """
    released_languages = models.TextField(
        blank=True,
        help_text="A comma-separated list of language codes to release to the public."
    )

    @property
    def released_languages_list(self):
        """
        ``released_languages`` as a list of language codes.

        Example: ['it', 'de-at', 'es', 'pt-br']
        """
        raw = self.released_languages  # pylint: disable=no-member
        if not raw.strip():
            # Blank/whitespace-only field means no languages are released.
            return []
        return [code.strip() for code in raw.split(',')]
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
    """
    Configuration for the dark_lang django app
    """
    released_languages = models.TextField(
        blank=True,
        help_text="A comma-separated list of language codes to release to the public."
    )

    @property
    def released_languages_list(self):
        """
        ``released_languages`` as a list of language codes, sorted
        alphabetically.

        Example: ['it', 'de-at', 'es', 'pt-br']
        """
        raw = self.released_languages  # pylint: disable=no-member
        if not raw.strip():
            # Blank/whitespace-only field means no languages are released.
            return []
        # Alphabetical order, same as building the list then list.sort().
        return sorted(code.strip() for code in raw.split(','))
|
Put language modal in alphabetical order LMS-2302
|
Put language modal in alphabetical order LMS-2302
|
Python
|
agpl-3.0
|
Softmotions/edx-platform,rismalrv/edx-platform,ovnicraft/edx-platform,jbzdak/edx-platform,nttks/jenkins-test,philanthropy-u/edx-platform,dkarakats/edx-platform,AkA84/edx-platform,kursitet/edx-platform,eestay/edx-platform,atsolakid/edx-platform,kxliugang/edx-platform,zadgroup/edx-platform,B-MOOC/edx-platform,romain-li/edx-platform,martynovp/edx-platform,deepsrijit1105/edx-platform,rhndg/openedx,doganov/edx-platform,cognitiveclass/edx-platform,ahmadiga/min_edx,chand3040/cloud_that,LICEF/edx-platform,utecuy/edx-platform,Unow/edx-platform,jelugbo/tundex,zerobatu/edx-platform,IndonesiaX/edx-platform,dcosentino/edx-platform,ampax/edx-platform-backup,cselis86/edx-platform,DNFcode/edx-platform,stvstnfrd/edx-platform,cpennington/edx-platform,fly19890211/edx-platform,sameetb-cuelogic/edx-platform-test,ESOedX/edx-platform,xingyepei/edx-platform,zhenzhai/edx-platform,Kalyzee/edx-platform,cselis86/edx-platform,tiagochiavericosta/edx-platform,prarthitm/edxplatform,devs1991/test_edx_docmode,eemirtekin/edx-platform,motion2015/a3,DNFcode/edx-platform,kmoocdev/edx-platform,shubhdev/edx-platform,kmoocdev/edx-platform,amir-qayyum-khan/edx-platform,eduNEXT/edx-platform,kursitet/edx-platform,Ayub-Khan/edx-platform,fintech-circle/edx-platform,shubhdev/edxOnBaadal,auferack08/edx-platform,OmarIthawi/edx-platform,UXE/local-edx,jamiefolsom/edx-platform,dcosentino/edx-platform,openfun/edx-platform,Lektorium-LLC/edx-platform,kxliugang/edx-platform,hkawasaki/kawasaki-aio8-0,arbrandes/edx-platform,ampax/edx-platform,hkawasaki/kawasaki-aio8-0,jazkarta/edx-platform,eemirtekin/edx-platform,ahmedaljazzar/edx-platform,jruiperezv/ANALYSE,CredoReference/edx-platform,bdero/edx-platform,wwj718/ANALYSE,Stanford-Online/edx-platform,MSOpenTech/edx-platform,JCBarahona/edX,alu042/edx-platform,Shrhawk/edx-platform,kmoocdev2/edx-platform,jelugbo/tundex,Endika/edx-platform,don-github/edx-platform,Kalyzee/edx-platform,angelapper/edx-platform,marcore/edx-platform,waheedahmed/edx-platform,UXE/local-edx,jazztpt/edx-p
latform,zhenzhai/edx-platform,IndonesiaX/edx-platform,chauhanhardik/populo_2,antonve/s4-project-mooc,procangroup/edx-platform,antonve/s4-project-mooc,mcgachey/edx-platform,zerobatu/edx-platform,shubhdev/openedx,appsembler/edx-platform,SivilTaram/edx-platform,DefyVentures/edx-platform,ferabra/edx-platform,kmoocdev2/edx-platform,edx-solutions/edx-platform,devs1991/test_edx_docmode,Semi-global/edx-platform,AkA84/edx-platform,Softmotions/edx-platform,Endika/edx-platform,inares/edx-platform,yokose-ks/edx-platform,shurihell/testasia,halvertoluke/edx-platform,waheedahmed/edx-platform,xuxiao19910803/edx,olexiim/edx-platform,eestay/edx-platform,ubc/edx-platform,edry/edx-platform,knehez/edx-platform,deepsrijit1105/edx-platform,Shrhawk/edx-platform,4eek/edx-platform,carsongee/edx-platform,tanmaykm/edx-platform,carsongee/edx-platform,nttks/edx-platform,Endika/edx-platform,don-github/edx-platform,chauhanhardik/populo,waheedahmed/edx-platform,zadgroup/edx-platform,eduNEXT/edunext-platform,adoosii/edx-platform,jazkarta/edx-platform-for-isc,nikolas/edx-platform,nanolearningllc/edx-platform-cypress,caesar2164/edx-platform,amir-qayyum-khan/edx-platform,jelugbo/tundex,ampax/edx-platform-backup,longmen21/edx-platform,beni55/edx-platform,vasyarv/edx-platform,ovnicraft/edx-platform,jonathan-beard/edx-platform,solashirai/edx-platform,bdero/edx-platform,motion2015/edx-platform,appsembler/edx-platform,appliedx/edx-platform,chand3040/cloud_that,torchingloom/edx-platform,nanolearningllc/edx-platform-cypress-2,bigdatauniversity/edx-platform,B-MOOC/edx-platform,procangroup/edx-platform,Shrhawk/edx-platform,dcosentino/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,rue89-tech/edx-platform,devs1991/test_edx_docmode,gsehub/edx-platform,fintech-circle/edx-platform,gsehub/edx-platform,TeachAtTUM/edx-platform,CourseTalk/edx-platform,Edraak/circleci-edx-platform,EDUlib/edx-platform,eestay/edx-platform,Lektorium-LLC/edx-platform,shubhdev/openedx,jbzdak/edx-platform,benpatterson/edx-platform,j
swope00/griffinx,jazkarta/edx-platform-for-isc,vismartltd/edx-platform,hkawasaki/kawasaki-aio8-2,simbs/edx-platform,xuxiao19910803/edx-platform,antonve/s4-project-mooc,arbrandes/edx-platform,IONISx/edx-platform,ampax/edx-platform-backup,OmarIthawi/edx-platform,knehez/edx-platform,itsjeyd/edx-platform,ahmadio/edx-platform,longmen21/edx-platform,Semi-global/edx-platform,nttks/edx-platform,wwj718/edx-platform,solashirai/edx-platform,miptliot/edx-platform,zhenzhai/edx-platform,ahmadiga/min_edx,bitifirefly/edx-platform,DefyVentures/edx-platform,teltek/edx-platform,auferack08/edx-platform,sudheerchintala/LearnEraPlatForm,doismellburning/edx-platform,zerobatu/edx-platform,solashirai/edx-platform,chauhanhardik/populo_2,mbareta/edx-platform-ft,jjmiranda/edx-platform,jonathan-beard/edx-platform,pepeportela/edx-platform,shurihell/testasia,etzhou/edx-platform,rue89-tech/edx-platform,vasyarv/edx-platform,prarthitm/edxplatform,jswope00/GAI,simbs/edx-platform,eduNEXT/edunext-platform,olexiim/edx-platform,motion2015/a3,arbrandes/edx-platform,louyihua/edx-platform,mitocw/edx-platform,ahmadiga/min_edx,hkawasaki/kawasaki-aio8-1,cecep-edu/edx-platform,chand3040/cloud_that,xinjiguaike/edx-platform,msegado/edx-platform,beni55/edx-platform,beni55/edx-platform,vikas1885/test1,utecuy/edx-platform,SivilTaram/edx-platform,nanolearning/edx-platform,procangroup/edx-platform,RPI-OPENEDX/edx-platform,SivilTaram/edx-platform,playm2mboy/edx-platform,jswope00/griffinx,pabloborrego93/edx-platform,jazkarta/edx-platform,JioEducation/edx-platform,hastexo/edx-platform,jzoldak/edx-platform,dkarakats/edx-platform,analyseuc3m/ANALYSE-v1,B-MOOC/edx-platform,Edraak/edraak-platform,longmen21/edx-platform,motion2015/edx-platform,angelapper/edx-platform,vikas1885/test1,DefyVentures/edx-platform,gsehub/edx-platform,jolyonb/edx-platform,knehez/edx-platform,ovnicraft/edx-platform,LICEF/edx-platform,ferabra/edx-platform,JCBarahona/edX,bdero/edx-platform,ubc/edx-platform,edx-solutions/edx-platform,nikolas/edx-platfor
m,MSOpenTech/edx-platform,AkA84/edx-platform,ESOedX/edx-platform,adoosii/edx-platform,halvertoluke/edx-platform,mahendra-r/edx-platform,arifsetiawan/edx-platform,shabab12/edx-platform,Semi-global/edx-platform,lduarte1991/edx-platform,CredoReference/edx-platform,eemirtekin/edx-platform,MSOpenTech/edx-platform,iivic/BoiseStateX,knehez/edx-platform,jzoldak/edx-platform,openfun/edx-platform,Ayub-Khan/edx-platform,MakeHer/edx-platform,ak2703/edx-platform,nttks/edx-platform,doganov/edx-platform,simbs/edx-platform,defance/edx-platform,Edraak/circleci-edx-platform,jamiefolsom/edx-platform,10clouds/edx-platform,shubhdev/edx-platform,atsolakid/edx-platform,ZLLab-Mooc/edx-platform,romain-li/edx-platform,mcgachey/edx-platform,antoviaque/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,chauhanhardik/populo_2,hkawasaki/kawasaki-aio8-2,wwj718/ANALYSE,MakeHer/edx-platform,synergeticsedx/deployment-wipro,kmoocdev/edx-platform,valtech-mooc/edx-platform,lduarte1991/edx-platform,hkawasaki/kawasaki-aio8-0,romain-li/edx-platform,devs1991/test_edx_docmode,nagyistoce/edx-platform,shashank971/edx-platform,mushtaqak/edx-platform,cselis86/edx-platform,vasyarv/edx-platform,zubair-arbi/edx-platform,mjirayu/sit_academy,mtlchun/edx,vikas1885/test1,ovnicraft/edx-platform,angelapper/edx-platform,antoviaque/edx-platform,cpennington/edx-platform,prarthitm/edxplatform,deepsrijit1105/edx-platform,y12uc231/edx-platform,nagyistoce/edx-platform,zofuthan/edx-platform,unicri/edx-platform,nanolearningllc/edx-platform-cypress,cecep-edu/edx-platform,gsehub/edx-platform,carsongee/edx-platform,xingyepei/edx-platform,msegado/edx-platform,hamzehd/edx-platform,doganov/edx-platform,sudheerchintala/LearnEraPlatForm,wwj718/edx-platform,jazkarta/edx-platform-for-isc,don-github/edx-platform,leansoft/edx-platform,ampax/edx-platform-backup,zerobatu/edx-platform,shabab12/edx-platform,IndonesiaX/edx-platform,SivilTaram/edx-platform,franosincic/edx-platform,olexiim/edx-platform,shubhdev/edxOnBaadal,nttks/edx-platform,mcg
achey/edx-platform,peterm-itr/edx-platform,zhenzhai/edx-platform,kmoocdev2/edx-platform,mcgachey/edx-platform,kamalx/edx-platform,amir-qayyum-khan/edx-platform,eemirtekin/edx-platform,arifsetiawan/edx-platform,mitocw/edx-platform,chudaol/edx-platform,alexthered/kienhoc-platform,adoosii/edx-platform,J861449197/edx-platform,a-parhom/edx-platform,jswope00/GAI,benpatterson/edx-platform,ubc/edx-platform,rue89-tech/edx-platform,y12uc231/edx-platform,torchingloom/edx-platform,TeachAtTUM/edx-platform,sudheerchintala/LearnEraPlatForm,nanolearningllc/edx-platform-cypress-2,nikolas/edx-platform,mjirayu/sit_academy,jjmiranda/edx-platform,wwj718/edx-platform,beni55/edx-platform,abdoosh00/edraak,analyseuc3m/ANALYSE-v1,teltek/edx-platform,UXE/local-edx,playm2mboy/edx-platform,jazztpt/edx-platform,zubair-arbi/edx-platform,LearnEra/LearnEraPlaftform,hmcmooc/muddx-platform,fly19890211/edx-platform,mahendra-r/edx-platform,OmarIthawi/edx-platform,naresh21/synergetics-edx-platform,rismalrv/edx-platform,mitocw/edx-platform,chauhanhardik/populo_2,synergeticsedx/deployment-wipro,Edraak/edraak-platform,openfun/edx-platform,rismalrv/edx-platform,lduarte1991/edx-platform,arifsetiawan/edx-platform,zubair-arbi/edx-platform,miptliot/edx-platform,Softmotions/edx-platform,arifsetiawan/edx-platform,cyanna/edx-platform,fly19890211/edx-platform,jazkarta/edx-platform-for-isc,torchingloom/edx-platform,yokose-ks/edx-platform,fintech-circle/edx-platform,ahmadio/edx-platform,OmarIthawi/edx-platform,nanolearningllc/edx-platform-cypress-2,CourseTalk/edx-platform,bitifirefly/edx-platform,bitifirefly/edx-platform,mushtaqak/edx-platform,hamzehd/edx-platform,Edraak/circleci-edx-platform,edry/edx-platform,eestay/edx-platform,Semi-global/edx-platform,pabloborrego93/edx-platform,IONISx/edx-platform,alu042/edx-platform,jjmiranda/edx-platform,vikas1885/test1,fly19890211/edx-platform,shashank971/edx-platform,jbzdak/edx-platform,shabab12/edx-platform,martynovp/edx-platform,mahendra-r/edx-platform,a-parhom/edx-platform
,unicri/edx-platform,jamesblunt/edx-platform,ESOedX/edx-platform,stvstnfrd/edx-platform,tanmaykm/edx-platform,bitifirefly/edx-platform,4eek/edx-platform,ahmadiga/min_edx,hkawasaki/kawasaki-aio8-1,marcore/edx-platform,DNFcode/edx-platform,xuxiao19910803/edx-platform,jzoldak/edx-platform,kmoocdev2/edx-platform,jruiperezv/ANALYSE,Lektorium-LLC/edx-platform,IONISx/edx-platform,Livit/Livit.Learn.EdX,jamiefolsom/edx-platform,Edraak/edx-platform,zubair-arbi/edx-platform,knehez/edx-platform,nttks/jenkins-test,DNFcode/edx-platform,jbassen/edx-platform,jazztpt/edx-platform,don-github/edx-platform,alexthered/kienhoc-platform,jolyonb/edx-platform,jamesblunt/edx-platform,stvstnfrd/edx-platform,chrisndodge/edx-platform,xinjiguaike/edx-platform,10clouds/edx-platform,teltek/edx-platform,nanolearningllc/edx-platform-cypress,etzhou/edx-platform,Edraak/circleci-edx-platform,Endika/edx-platform,kmoocdev2/edx-platform,bdero/edx-platform,MSOpenTech/edx-platform,mtlchun/edx,torchingloom/edx-platform,jbzdak/edx-platform,jamesblunt/edx-platform,yokose-ks/edx-platform,polimediaupv/edx-platform,mjirayu/sit_academy,kamalx/edx-platform,eestay/edx-platform,DNFcode/edx-platform,mjirayu/sit_academy,pabloborrego93/edx-platform,romain-li/edx-platform,dcosentino/edx-platform,edx/edx-platform,vasyarv/edx-platform,cognitiveclass/edx-platform,alu042/edx-platform,zadgroup/edx-platform,playm2mboy/edx-platform,unicri/edx-platform,cyanna/edx-platform,raccoongang/edx-platform,tiagochiavericosta/edx-platform,Edraak/edraak-platform,xuxiao19910803/edx-platform,mbareta/edx-platform-ft,adoosii/edx-platform,beacloudgenius/edx-platform,xuxiao19910803/edx-platform,nagyistoce/edx-platform,SravanthiSinha/edx-platform,kursitet/edx-platform,rhndg/openedx,mushtaqak/edx-platform,xuxiao19910803/edx-platform,dkarakats/edx-platform,nagyistoce/edx-platform,sudheerchintala/LearnEraPlatForm,sameetb-cuelogic/edx-platform-test,philanthropy-u/edx-platform,olexiim/edx-platform,morenopc/edx-platform,SravanthiSinha/edx-platform,ZLLab
-Mooc/edx-platform,LearnEra/LearnEraPlaftform,mtlchun/edx,WatanabeYasumasa/edx-platform,jelugbo/tundex,mjirayu/sit_academy,abdoosh00/edraak,xinjiguaike/edx-platform,analyseuc3m/ANALYSE-v1,jswope00/GAI,utecuy/edx-platform,doismellburning/edx-platform,Unow/edx-platform,tiagochiavericosta/edx-platform,beacloudgenius/edx-platform,edry/edx-platform,nagyistoce/edx-platform,proversity-org/edx-platform,JioEducation/edx-platform,raccoongang/edx-platform,martynovp/edx-platform,polimediaupv/edx-platform,synergeticsedx/deployment-wipro,peterm-itr/edx-platform,doismellburning/edx-platform,RPI-OPENEDX/edx-platform,IONISx/edx-platform,Ayub-Khan/edx-platform,vismartltd/edx-platform,hkawasaki/kawasaki-aio8-1,4eek/edx-platform,ak2703/edx-platform,proversity-org/edx-platform,kamalx/edx-platform,antoviaque/edx-platform,motion2015/a3,mushtaqak/edx-platform,gymnasium/edx-platform,don-github/edx-platform,ESOedX/edx-platform,louyihua/edx-platform,openfun/edx-platform,mahendra-r/edx-platform,jazztpt/edx-platform,JCBarahona/edX,LICEF/edx-platform,leansoft/edx-platform,waheedahmed/edx-platform,ferabra/edx-platform,shubhdev/edx-platform,xinjiguaike/edx-platform,Softmotions/edx-platform,hastexo/edx-platform,wwj718/ANALYSE,etzhou/edx-platform,polimediaupv/edx-platform,Kalyzee/edx-platform,ZLLab-Mooc/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearning/edx-platform,ferabra/edx-platform,nanolearningllc/edx-platform-cypress-2,halvertoluke/edx-platform,JCBarahona/edX,jbassen/edx-platform,BehavioralInsightsTeam/edx-platform,RPI-OPENEDX/edx-platform,inares/edx-platform,ahmadio/edx-platform,edx-solutions/edx-platform,IndonesiaX/edx-platform,hmcmooc/muddx-platform,dsajkl/123,hmcmooc/muddx-platform,raccoongang/edx-platform,naresh21/synergetics-edx-platform,utecuy/edx-platform,miptliot/edx-platform,eemirtekin/edx-platform,rue89-tech/edx-platform,Edraak/edx-platform,eduNEXT/edx-platform,chauhanhardik/populo_2,tanmaykm/edx-platform,olexiim/edx-platform,UOMx/edx-platform,xuxiao19910803/edx,jjmiranda/
edx-platform,vasyarv/edx-platform,appliedx/edx-platform,Kalyzee/edx-platform,hkawasaki/kawasaki-aio8-2,nikolas/edx-platform,chudaol/edx-platform,vismartltd/edx-platform,jruiperezv/ANALYSE,synergeticsedx/deployment-wipro,TeachAtTUM/edx-platform,prarthitm/edxplatform,pomegranited/edx-platform,Stanford-Online/edx-platform,zadgroup/edx-platform,Semi-global/edx-platform,kxliugang/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,wwj718/ANALYSE,marcore/edx-platform,Softmotions/edx-platform,rue89-tech/edx-platform,cyanna/edx-platform,ak2703/edx-platform,xinjiguaike/edx-platform,shashank971/edx-platform,jazkarta/edx-platform,andyzsf/edx,andyzsf/edx,appliedx/edx-platform,zofuthan/edx-platform,chauhanhardik/populo,kamalx/edx-platform,nanolearningllc/edx-platform-cypress,iivic/BoiseStateX,dsajkl/reqiop,rhndg/openedx,zofuthan/edx-platform,pepeportela/edx-platform,Unow/edx-platform,nanolearningllc/edx-platform-cypress-2,ZLLab-Mooc/edx-platform,vismartltd/edx-platform,doismellburning/edx-platform,edry/edx-platform,jazztpt/edx-platform,doganov/edx-platform,hamzehd/edx-platform,tiagochiavericosta/edx-platform,valtech-mooc/edx-platform,shubhdev/edxOnBaadal,jruiperezv/ANALYSE,simbs/edx-platform,motion2015/edx-platform,jonathan-beard/edx-platform,etzhou/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,Livit/Livit.Learn.EdX,caesar2164/edx-platform,SivilTaram/edx-platform,BehavioralInsightsTeam/edx-platform,cpennington/edx-platform,longmen21/edx-platform,Unow/edx-platform,bigdatauniversity/edx-platform,Stanford-Online/edx-platform,zofuthan/edx-platform,eduNEXT/edx-platform,franosincic/edx-platform,WatanabeYasumasa/edx-platform,ampax/edx-platform-backup,tanmaykm/edx-platform,mbareta/edx-platform-ft,kmoocdev/edx-platform,valtech-mooc/edx-platform,10clouds/edx-platform,louyihua/edx-platform,louyihua/edx-platform,chand3040/cloud_that,mahendra-r/edx-platform,Kalyzee/edx-platform,10clouds/edx-platform,Livit/Livit.Learn.EdX,dsajkl/reqiop,auferack08/edx-platform,waheedah
med/edx-platform,SravanthiSinha/edx-platform,andyzsf/edx,shubhdev/openedx,leansoft/edx-platform,iivic/BoiseStateX,peterm-itr/edx-platform,jolyonb/edx-platform,defance/edx-platform,chrisndodge/edx-platform,xingyepei/edx-platform,chudaol/edx-platform,vismartltd/edx-platform,gymnasium/edx-platform,deepsrijit1105/edx-platform,jswope00/griffinx,msegado/edx-platform,nttks/jenkins-test,IONISx/edx-platform,procangroup/edx-platform,msegado/edx-platform,J861449197/edx-platform,chudaol/edx-platform,polimediaupv/edx-platform,pabloborrego93/edx-platform,shubhdev/edxOnBaadal,shashank971/edx-platform,gymnasium/edx-platform,shubhdev/edx-platform,morenopc/edx-platform,ZLLab-Mooc/edx-platform,LICEF/edx-platform,morenopc/edx-platform,ahmadio/edx-platform,zhenzhai/edx-platform,valtech-mooc/edx-platform,auferack08/edx-platform,cognitiveclass/edx-platform,AkA84/edx-platform,shubhdev/edxOnBaadal,shubhdev/edx-platform,Edraak/edx-platform,pomegranited/edx-platform,alexthered/kienhoc-platform,JioEducation/edx-platform,TeachAtTUM/edx-platform,Edraak/edraak-platform,RPI-OPENEDX/edx-platform,LearnEra/LearnEraPlaftform,nttks/jenkins-test,bitifirefly/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,halvertoluke/edx-platform,CredoReference/edx-platform,cecep-edu/edx-platform,wwj718/edx-platform,devs1991/test_edx_docmode,antonve/s4-project-mooc,motion2015/edx-platform,ovnicraft/edx-platform,yokose-ks/edx-platform,jonathan-beard/edx-platform,doismellburning/edx-platform,Livit/Livit.Learn.EdX,unicri/edx-platform,naresh21/synergetics-edx-platform,nttks/jenkins-test,4eek/edx-platform,dsajkl/123,chauhanhardik/populo,appsembler/edx-platform,cognitiveclass/edx-platform,ferabra/edx-platform,edx/edx-platform,pomegranited/edx-platform,jamiefolsom/edx-platform,mtlchun/edx,dsajkl/123,cselis86/edx-platform,shabab12/edx-platform,RPI-OPENEDX/edx-platform,jelugbo/tundex,lduarte1991/edx-platform,edx-solutions/edx-platform,itsjeyd/edx-platform,UOMx/edx-platform,mushtaqak/edx-platform,franosincic/edx-platform,pro
versity-org/edx-platform,longmen21/edx-platform,philanthropy-u/edx-platform,playm2mboy/edx-platform,utecuy/edx-platform,MakeHer/edx-platform,openfun/edx-platform,BehavioralInsightsTeam/edx-platform,nanolearning/edx-platform,JioEducation/edx-platform,Shrhawk/edx-platform,a-parhom/edx-platform,amir-qayyum-khan/edx-platform,halvertoluke/edx-platform,morenopc/edx-platform,zofuthan/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,AkA84/edx-platform,jazkarta/edx-platform-for-isc,alexthered/kienhoc-platform,CredoReference/edx-platform,simbs/edx-platform,zerobatu/edx-platform,4eek/edx-platform,kxliugang/edx-platform,jbassen/edx-platform,analyseuc3m/ANALYSE-v1,cyanna/edx-platform,hastexo/edx-platform,Edraak/edx-platform,philanthropy-u/edx-platform,iivic/BoiseStateX,EDUlib/edx-platform,DefyVentures/edx-platform,ahmadiga/min_edx,devs1991/test_edx_docmode,y12uc231/edx-platform,jbassen/edx-platform,kxliugang/edx-platform,hkawasaki/kawasaki-aio8-2,appliedx/edx-platform,kmoocdev/edx-platform,ubc/edx-platform,inares/edx-platform,Lektorium-LLC/edx-platform,tiagochiavericosta/edx-platform,zubair-arbi/edx-platform,Stanford-Online/edx-platform,xuxiao19910803/edx,jbzdak/edx-platform,unicri/edx-platform,franosincic/edx-platform,cyanna/edx-platform,shubhdev/openedx,benpatterson/edx-platform,solashirai/edx-platform,cecep-edu/edx-platform,peterm-itr/edx-platform,ahmedaljazzar/edx-platform,vikas1885/test1,JCBarahona/edX,nikolas/edx-platform,ak2703/edx-platform,DefyVentures/edx-platform,yokose-ks/edx-platform,CourseTalk/edx-platform,eduNEXT/edx-platform,pepeportela/edx-platform,Edraak/edx-platform,xingyepei/edx-platform,BehavioralInsightsTeam/edx-platform,UOMx/edx-platform,bigdatauniversity/edx-platform,MakeHer/edx-platform,inares/edx-platform,jazkarta/edx-platform,hastexo/edx-platform,mtlchun/edx,beacloudgenius/edx-platform,zadgroup/edx-platform,itsjeyd/edx-platform,IndonesiaX/edx-platform,jamiefolsom/edx-platform,teltek/edx-platform,ampax/edx-platform,benpatterson/edx-platform,L
ICEF/edx-platform,leansoft/edx-platform,rhndg/openedx,xuxiao19910803/edx,EDUlib/edx-platform,iivic/BoiseStateX,chudaol/edx-platform,bigdatauniversity/edx-platform,benpatterson/edx-platform,J861449197/edx-platform,LearnEra/LearnEraPlaftform,sameetb-cuelogic/edx-platform-test,devs1991/test_edx_docmode,abdoosh00/edraak,SravanthiSinha/edx-platform,kamalx/edx-platform,shurihell/testasia,cselis86/edx-platform,martynovp/edx-platform,marcore/edx-platform,xingyepei/edx-platform,hamzehd/edx-platform,antoviaque/edx-platform,morenopc/edx-platform,mitocw/edx-platform,pepeportela/edx-platform,carsongee/edx-platform,cecep-edu/edx-platform,dsajkl/reqiop,hamzehd/edx-platform,ahmadio/edx-platform,beni55/edx-platform,naresh21/synergetics-edx-platform,Shrhawk/edx-platform,y12uc231/edx-platform,jswope00/griffinx,atsolakid/edx-platform,rismalrv/edx-platform,J861449197/edx-platform,SravanthiSinha/edx-platform,angelapper/edx-platform,CourseTalk/edx-platform,bigdatauniversity/edx-platform,B-MOOC/edx-platform,cognitiveclass/edx-platform,edry/edx-platform,dcosentino/edx-platform,nanolearning/edx-platform,kursitet/edx-platform,etzhou/edx-platform,martynovp/edx-platform,alu042/edx-platform,WatanabeYasumasa/edx-platform,torchingloom/edx-platform,arifsetiawan/edx-platform,adoosii/edx-platform,dkarakats/edx-platform,shurihell/testasia,J861449197/edx-platform,miptliot/edx-platform,chrisndodge/edx-platform,Ayub-Khan/edx-platform,franosincic/edx-platform,EDUlib/edx-platform,defance/edx-platform,solashirai/edx-platform,edx/edx-platform,motion2015/a3,hkawasaki/kawasaki-aio8-1,dsajkl/123,jbassen/edx-platform,atsolakid/edx-platform,defance/edx-platform,jolyonb/edx-platform,mcgachey/edx-platform,itsjeyd/edx-platform,polimediaupv/edx-platform,msegado/edx-platform,appsembler/edx-platform,jruiperezv/ANALYSE,rismalrv/edx-platform,motion2015/a3,beacloudgenius/edx-platform,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,abdoosh00/edraak,doganov/edx-platform,a-parhom/edx-platform,ahmedaljazzar/edx-plat
form,jamesblunt/edx-platform,jswope00/griffinx,Edraak/circleci-edx-platform,MSOpenTech/edx-platform,hkawasaki/kawasaki-aio8-0,UXE/local-edx,jamesblunt/edx-platform,shurihell/testasia,leansoft/edx-platform,inares/edx-platform,motion2015/edx-platform,MakeHer/edx-platform,pomegranited/edx-platform,ampax/edx-platform,dsajkl/123,chrisndodge/edx-platform,chand3040/cloud_that,wwj718/edx-platform,fly19890211/edx-platform,rhndg/openedx,beacloudgenius/edx-platform,andyzsf/edx,shubhdev/openedx,dsajkl/reqiop,UOMx/edx-platform,B-MOOC/edx-platform,eduNEXT/edunext-platform,nttks/edx-platform,valtech-mooc/edx-platform,playm2mboy/edx-platform,fintech-circle/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,Ayub-Khan/edx-platform,xuxiao19910803/edx,jzoldak/edx-platform,ahmedaljazzar/edx-platform,WatanabeYasumasa/edx-platform,jazkarta/edx-platform,shashank971/edx-platform,hmcmooc/muddx-platform,kursitet/edx-platform,cpennington/edx-platform,mbareta/edx-platform-ft,jswope00/GAI,ak2703/edx-platform,ubc/edx-platform,ampax/edx-platform,nanolearningllc/edx-platform-cypress,pomegranited/edx-platform,jonathan-beard/edx-platform,gymnasium/edx-platform,dkarakats/edx-platform,appliedx/edx-platform,antonve/s4-project-mooc,alexthered/kienhoc-platform,nanolearning/edx-platform
|
14b9ef43fd244d4709d14478ec0714325ca37cdb
|
tests/builtins/test_sum.py
|
tests/builtins/test_sum.py
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class SumTests(TranspileTestCase):
    """Behavioural tests for the ``sum`` builtin in transpiled code.

    Each test hands a snippet to ``assertCodeExecution``, which runs it
    under both CPython and the transpiler and compares the output.
    """

    def test_sum_list(self):
        # Summing a list of ints.
        self.assertCodeExecution("""
            print(sum([1, 2, 3, 4, 5, 6, 7]))
            """)

    def test_sum_tuple(self):
        # Summing a tuple of ints.
        self.assertCodeExecution("""
            print(sum((1, 2, 3, 4, 5, 6, 7)))
            """)

    def test_sum_iterator(self):
        # The second sum() sees an exhausted iterator and must print 0.
        self.assertCodeExecution("""
            i = iter([1, 2])
            print(sum(i))
            print(sum(i))
            """)

    def test_sum_mix_floats_and_ints(self):
        # Mixed int/float input: the total is a float.
        self.assertCodeExecution("""
            print(sum([1, 1.414, 2, 3.14159]))
            """)
class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    """Auto-generated per-datatype coverage for the ``sum`` builtin."""

    functions = ["sum"]

    # Generated cases the transpiler does not support yet.
    # NOTE(review): 'test_frozenzet' looks like a typo for
    # 'test_frozenset' — confirm against the test names that
    # BuiltinFunctionTestCase generates.
    not_implemented = [
        'test_bytearray',
        'test_frozenzet',
    ]
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class SumTests(TranspileTestCase):
    """Behavioural tests for the ``sum`` builtin in transpiled code.

    Each test hands a snippet to ``assertCodeExecution``, which runs it
    under both CPython and the transpiler and compares the output.
    """

    def test_sum_list(self):
        # Summing a list of ints.
        self.assertCodeExecution("""
            print(sum([1, 2, 3, 4, 5, 6, 7]))
            """)

    def test_sum_tuple(self):
        # Summing a tuple of ints.
        self.assertCodeExecution("""
            print(sum((1, 2, 3, 4, 5, 6, 7)))
            """)

    def test_sum_iterator(self):
        # The second sum() sees an exhausted iterator and must print 0.
        self.assertCodeExecution("""
            i = iter([1, 2])
            print(sum(i))
            print(sum(i))
            """)

    def test_sum_mix_floats_and_ints(self):
        # Mixed int/float input: the total is a float.
        self.assertCodeExecution("""
            print(sum([1, 1.414, 2, 3.14159]))
            """)
class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    """Auto-generated per-datatype coverage for the ``sum`` builtin."""

    functions = ["sum"]

    # Generated cases the transpiler does not support yet.
    # NOTE(review): 'test_frozenzet' looks like a typo for
    # 'test_frozenset' — confirm against the test names that
    # BuiltinFunctionTestCase generates.
    not_implemented = [
        'test_frozenzet',
    ]
|
Fix unexpected success on sum(bytearray())
|
Fix unexpected success on sum(bytearray())
|
Python
|
bsd-3-clause
|
cflee/voc,cflee/voc,freakboy3742/voc,freakboy3742/voc
|
9ff92d0a437e5af08fbf996ed0e3362cbd9cf2c9
|
tests/instrumentdb_test.py
|
tests/instrumentdb_test.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
    """Smoke-test that every instrument database path exists on disk."""

    def test_paths(self):
        """Each path exposed by ``instrumentdb`` must exist.

        A failure message names the missing path, so a bare
        ``AssertionError: False is not true`` never hides which of the
        four locations is absent.
        """
        self.assertTrue(os.path.exists(idb.instrument_db_path()),
                        'Path "{0}" not found'.format(idb.instrument_db_path()))
        for file_name in (idb.focal_plane_db_file_name(),
                          idb.detector_db_file_name(),
                          idb.scanning_strategy_db_file_name()):
            self.assertTrue(os.path.exists(file_name),
                            'File "{0}" not found'.format(file_name))
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
    """Smoke-test that every instrument database path exists on disk."""

    def test_paths(self):
        """Each path exposed by ``instrumentdb`` must exist; the failure
        message names the missing path."""
        db_path = idb.instrument_db_path()
        self.assertTrue(os.path.exists(db_path),
                        'Path "{0}" not found'.format(db_path))
        data_files = [idb.focal_plane_db_file_name(),
                      idb.detector_db_file_name(),
                      idb.scanning_strategy_db_file_name()]
        for data_file in data_files:
            self.assertTrue(os.path.exists(data_file),
                            'File "{0}" not found'.format(data_file))
|
Print more helpful messages when tests fail
|
Print more helpful messages when tests fail
|
Python
|
mit
|
ziotom78/stripeline,ziotom78/stripeline
|
7966f771c4b5450625d5247c6bf5369901457d9a
|
capstone/player/monte_carlo.py
|
capstone/player/monte_carlo.py
|
import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility
class MonteCarlo(Player):
    """Flat Monte-Carlo player: rates each legal move by random rollouts."""

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        # Number of evaluation passes over the set of legal moves.
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        """Return the legal move with the highest accumulated utility."""
        totals = defaultdict(int)
        for _ in range(self.n_sims):
            for candidate in game.legal_moves():
                rollout = game.copy()
                rollout.make_move(candidate)
                # Finish the game with uniformly random play.
                while not rollout.is_over():
                    rollout.make_move(random.choice(rollout.legal_moves()))
                totals[candidate] += utility(rollout, game.cur_player())
        return Counter(totals).most_common(1)[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
|
import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility
class MonteCarlo(Player):
    """Flat Monte-Carlo player: rates each legal move by random rollouts."""

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        # Number of evaluation passes over the set of legal moves.
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        """Return the legal move with the highest accumulated utility."""
        totals = defaultdict(int)
        for _ in range(self.n_sims):
            for candidate in game.legal_moves():
                rollout = game.copy()
                rollout.make_move(candidate)
                # Finish the game with uniformly random play.
                while not rollout.is_over():
                    rollout.make_move(random.choice(rollout.legal_moves()))
                totals[candidate] += utility(rollout, game.cur_player())
        best_move, _count = Counter(totals).most_common(1)[0]
        return best_move
|
Move MonteCarlo move to choose_move
|
Move MonteCarlo move to choose_move
|
Python
|
mit
|
davidrobles/mlnd-capstone-code
|
d0db4010ca9c6d2a6cbc27ae0029dd1ccfc6de42
|
evexml/forms.py
|
evexml/forms.py
|
from django import forms
from django.forms.fields import IntegerField, CharField
import evelink.account
class AddAPIForm(forms.Form):
key_id = IntegerField()
v_code = CharField(max_length=64, min_length=1)
def clean(self):
self._clean()
return super(AddAPIForm, self).clean()
def _clean(self):
"""Check the access mask and characters of the supplied keypair.
"""
key_id = self.cleaned_data.get('key_id')
v_code = self.cleaned_data.get('v_code')
if not (key_id and v_code):
return
api = evelink.api.API(api_key=(key_id, v_code))
account = evelink.account.Account(api)
try:
key_info = account.key_info().result
except evelink.api.APIError as error:
self.add_error(None, error.message)
return
if key_info['type'] != 'account':
self.add_error(None, 'The API key should select Character: All')
if key_info['access_mask'] != 4294967295:
self.add_error(None, 'The API key should have full access')
if key_info['expire_ts']:
self.add_error(None, 'The API key should have no expiry checked')
|
from django import forms
from django.forms.fields import IntegerField, CharField
import evelink.account
class AddAPIForm(forms.Form):
    """Form for registering an EVE API keypair (key id + verification code).

    Validation calls the EVE API and rejects keys that are not full-access,
    account-wide, never-expiring keys.
    """
    key_id = IntegerField()
    v_code = CharField(max_length=64, min_length=1)

    def clean(self):
        # Run the standard field cleaning first so _clean() can rely on
        # cleaned_data being populated, then apply the remote API checks.
        super(AddAPIForm, self).clean()
        self._clean()
        return self.cleaned_data

    def _clean(self):
        """Check the access mask and characters of the supplied keypair.

        Adds non-field errors via add_error(None, ...); never raises for
        API failures, so form.is_valid() simply returns False.
        """
        key_id = self.cleaned_data.get('key_id')
        v_code = self.cleaned_data.get('v_code')
        if not (key_id and v_code):
            # Field-level validation already failed; nothing to verify.
            return
        api = evelink.api.API(api_key=(key_id, v_code))
        account = evelink.account.Account(api)
        try:
            key_info = account.key_info().result
        except evelink.api.APIError as error:
            self.add_error(None, error.message)
            return
        if key_info['type'] != 'account':
            self.add_error(None, 'The API key should select Character: All')
        # 4294967295 == 0xFFFFFFFF, i.e. every access bit set.
        if key_info['access_mask'] != 4294967295:
            self.add_error(None, 'The API key should have full access')
        if key_info['expire_ts']:
            self.add_error(None, 'The API key should have no expiry checked')
|
Swap order of clean calls
|
Swap order of clean calls
|
Python
|
mit
|
randomic/aniauth-tdd,randomic/aniauth-tdd
|
ba0ea7491fab383992013a8379592657eedfe1ce
|
scripts/contrib/model_info.py
|
scripts/contrib/model_info.py
|
#!/usr/bin/env python3
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
return parser.parse_args()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
    """Print model keys/values, or the special Marian YAML config, from a .npz.

    With --special, dumps the embedded YAML (or one key of it with --key);
    otherwise lists array keys, or prints one array with --key.
    Exits with status 1 when the requested node/key is missing.
    """
    args = parse_args()
    model = np.load(args.model)

    if args.special:
        if S2S_SPECIAL_NODE not in model:
            print("No special Marian YAML node found in the model")
            exit(1)

        yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
        if not args.key:
            print(yaml_text)
            exit(0)

        # fix the invalid trailing unicode character '#x0000' added to the YAML
        # string by the C++ cnpy library
        try:
            # safe_load: never execute arbitrary YAML tags from a model file,
            # and plain yaml.load() without a Loader is deprecated in PyYAML.
            yaml_node = yaml.safe_load(yaml_text)
        except yaml.reader.ReaderError:
            yaml_node = yaml.safe_load(yaml_text[:-1])

        print(yaml_node[args.key])
    else:
        if args.key:
            if args.key not in model:
                print("Key not found")
                exit(1)
            print(model[args.key])
        else:
            for key in model:
                print(key)
def parse_args():
    """Build the command-line interface and parse sys.argv."""
    parser = argparse.ArgumentParser(description=DESC)
    parser.add_argument("-m", "--model", required=True,
                        help="model file")
    parser.add_argument("-k", "--key",
                        help="print value for specific key")
    parser.add_argument("-s", "--special", action="store_true",
                        help="print values from special:model.yml node")
    return parser.parse_args()
if __name__ == "__main__":
main()
|
Add printing value for any key from model.npz
|
Add printing value for any key from model.npz
|
Python
|
mit
|
emjotde/amunmt,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunmt,amunmt/marian,emjotde/amunn,amunmt/marian,emjotde/amunn,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/Marian,marian-nmt/marian-train,emjotde/Marian,amunmt/marian
|
48e405f0f2027c82403c96b58023f1308c3f7c14
|
model/orderbook.py
|
model/orderbook.py
|
# -*- encoding:utf8 -*-
import os
from model.oandapy import oandapy
class OrderBook(object):
def get_latest_orderbook(self, instrument, period, history):
oanda_token = os.environ.get('OANDA_TOKEN')
oanda = oandapy.API(environment="practice", access_token=oanda_token)
orders = oanda.get_orderbook(instrument=instrument)
try:
timeset = orders.keys()
timeset.sort()
timeset.reverse()
target_time = timeset[history]
except:
return None
order = orders[target_time]
order['time'] = target_time
return order
|
# -*- encoding:utf8 -*-
import os
from model.oandapy import oandapy
class OrderBook(object):
    """Thin wrapper around the OANDA REST order-book endpoint."""

    def get_latest_orderbook(self, instrument, period, history):
        """Return the `history`-th most recent order-book snapshot, or None.

        `period` is accepted for interface compatibility but unused here.
        Credentials come from OANDA_TOKEN; the API environment from
        OANDA_ENVIRONMENT (default "practice").
        """
        oanda_token = os.environ.get('OANDA_TOKEN')
        oanda_environment = os.environ.get('OANDA_ENVIRONMENT', 'practice')
        oanda = oandapy.API(environment=oanda_environment, access_token=oanda_token)
        orders = oanda.get_orderbook(instrument=instrument)
        try:
            # sorted() works on both Python 2 lists and Python 3 dict views;
            # the previous .keys().sort() raised AttributeError on Python 3,
            # which the old bare `except:` silently turned into None.
            timeset = sorted(orders, reverse=True)
            target_time = timeset[history]
        except (TypeError, IndexError):
            # TypeError: response is not iterable/orderable (error payload);
            # IndexError: `history` is beyond the available snapshots.
            return None
        order = orders[target_time]
        order['time'] = target_time
        return order
|
Add oanda environment selector from runtime environments.
|
Add oanda environment selector from runtime environments.
|
Python
|
mit
|
supistar/OandaOrderbook,supistar/OandaOrderbook,supistar/OandaOrderbook
|
082f366402ca2084542a6306624f1f467297ebae
|
bin/task_usage_index.py
|
bin/task_usage_index.py
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import numpy as np
import task_usage
def main(data_path, index_path, report_each=10000):
print('Looking for data in "{}"...'.format(data_path))
paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
print('Processing {} databases...'.format(len(paths)))
index = []
count = 0
for path in paths:
data = task_usage.count_job_task_samples(path)
for i in range(data.shape[0]):
index.append({
'path': path,
'job': int(data[i, 0]),
'task': int(data[i, 1]),
'length': int(data[i, 2]),
})
count += 1
if count % report_each == 0:
print('Processed: {}'.format(count))
print('Saving into "{}"...'.format(index_path))
with open(index_path, 'w') as file:
json.dump({'index': index}, file, indent=4)
if __name__ == '__main__':
assert(len(sys.argv) == 3)
main(sys.argv[1], sys.argv[2])
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import numpy as np
import task_usage
def main(data_path, index_path, report_each=10000):
    """Scan all task-usage SQLite databases under `data_path` and write a
    JSON index of (path, job, task, sample-count) records to `index_path`.

    `report_each` controls how often progress is printed, in databases.
    """
    print('Looking for data in "{}"...'.format(data_path))
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
    total = len(paths)
    print('Processing {} databases...'.format(total))
    index = []
    count = 0
    for path in paths:
        data = task_usage.count_job_task_samples(path)
        # One index entry per row; columns are (job id, task id, sample count).
        # NOTE(review): column meaning inferred from the key names below --
        # confirm against task_usage.count_job_task_samples.
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'length': int(data[i, 2]),
            })
        count += 1
        # Report every `report_each` databases, and always once at the end
        # so the run finishes with a 100.00% line.
        if count % report_each == 0 or count == total:
            print('Processed: {} ({:.2f}%)'.format(count, 100 * count / total))
    print('Saving into "{}"...'.format(index_path))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)
if __name__ == '__main__':
assert(len(sys.argv) == 3)
main(sys.argv[1], sys.argv[2])
|
Print the percentage from the task-usage-index script
|
Print the percentage from the task-usage-index script
|
Python
|
mit
|
learning-on-chip/google-cluster-prediction
|
0fe990cf476dcd0cdea56c39de1dad6003d81851
|
statbot/mention.py
|
statbot/mention.py
|
#
# mention.py
#
# statbot - Store Discord records for later analysis
# Copyright (c) 2017 Ammon Smith
#
# statbot is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
from enum import auto, Enum
__all__ = [
'MentionType',
]
class MentionType(Enum):
USER = auto()
ROLE = auto()
CHANNEL = auto()
|
#
# mention.py
#
# statbot - Store Discord records for later analysis
# Copyright (c) 2017 Ammon Smith
#
# statbot is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
from enum import Enum, unique
__all__ = [
'MentionType',
]
@unique
class MentionType(Enum):
    """Kind of entity a Discord mention points at.

    Values are fixed integers rather than auto() because they are written
    into stored records and must stay stable across releases; @unique guards
    against two names accidentally sharing a code.
    """
    USER = 0
    ROLE = 1
    CHANNEL = 2
|
Change MentionType to use fixed enum values.
|
Change MentionType to use fixed enum values.
|
Python
|
mit
|
strinking/statbot,strinking/statbot
|
06c2fe1bd836f4adfcff4eb35cc29203e10a729d
|
blinkytape/animation.py
|
blinkytape/animation.py
|
# TBD: Some animations mutate a pattern: shift it, fade it, etc.
# Not all animations need a pattern
# I need a rainbow pattern for fun
# TBD: How do you do random pixels? is it a pattern that is permuted by the
# animation? YES; patterns are static, animations do things with patterns,
# rotate them, scramble them, scale them, sort them, etcetera
class Animation(object):
def __init__(self, frame_period_sec):
if frame_period_sec < 0: raise ValueError
self._frame_period_sec = frame_period_sec
@property
def frame_period_sec(self):
return self._frame_period_sec
def begin(self):
pass
def next_frame(self):
pass
def end(self):
pass
|
# TBD: Some animations mutate a pattern: shift it, fade it, etc.
# Not all animations need a pattern
# I need a rainbow pattern for fun
# TBD: How do you do random pixels? is it a pattern that is permuted by the
# animation? YES; patterns are static, animations do things with patterns,
# rotate them, scramble them, scale them, sort them, etcetera
class Animation(object):
    """Abstract base for frame-driven animations.

    Subclasses must provide the `finished` property and `next_frame`;
    `begin`/`end` are optional lifecycle hooks that default to no-ops.
    """

    def __init__(self, frame_period_sec):
        # A negative period is meaningless; zero means "as fast as possible".
        if frame_period_sec < 0:
            raise ValueError
        self._frame_period_sec = frame_period_sec

    @property
    def frame_period_sec(self):
        """Seconds between successive frames."""
        return self._frame_period_sec

    @property
    def finished(self):
        raise NotImplementedError('Animation must implement finished property')

    def begin(self):
        """Optional hook invoked once before the first frame."""
        pass

    def next_frame(self):
        raise NotImplementedError('Animation must implement next_frame method')

    def end(self):
        """Optional hook invoked once after the last frame."""
        pass
|
Add abstract method exceptions to make Animation inheritance easier
|
Add abstract method exceptions to make Animation inheritance easier
|
Python
|
mit
|
jonspeicher/blinkyfun
|
f2d34fa3153448ab6a893fba45ae48b52d7759db
|
chipy_org/apps/profiles/urls.py
|
chipy_org/apps/profiles/urls.py
|
from django.conf.urls.defaults import *
from django.contrib.auth.decorators import login_required
from profiles.views import (ProfilesList,
ProfileEdit,
)
urlpatterns = patterns("",
url(r'^list/$', ProfilesList.as_view(), name='list'),
url(r'^edit/$', ProfileEdit.as_view(), name='edit'),
)
|
from django.conf.urls.defaults import *
from django.contrib.auth.decorators import login_required
from .views import ProfilesList, ProfileEdit
urlpatterns = patterns("",
    url(r'^list/$', ProfilesList.as_view(), name='list'),
    # login_required must wrap the *view function* returned by as_view();
    # login_required(ProfileEdit).as_view() wraps the class itself, yielding
    # a plain function whose .as_view attribute is an unbound classmethod
    # object -- it fails when the URL is resolved.
    url(r'^edit/$', login_required(ProfileEdit.as_view()), name='edit'),
)
|
Add login required for profile edit
|
Add login required for profile edit
|
Python
|
mit
|
agfor/chipy.org,brianray/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,bharathelangovan/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,chicagopython/chipy.org,tanyaschlusser/chipy.org,agfor/chipy.org,tanyaschlusser/chipy.org,tanyaschlusser/chipy.org,brianray/chipy.org,chicagopython/chipy.org,brianray/chipy.org,agfor/chipy.org
|
3f236d74615dced53c57628ae1b5f2c74f9e1de5
|
examples/rate_limiting_test.py
|
examples/rate_limiting_test.py
|
from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print(item)
def test_rate_limited_printing(self):
print("\nRunning rate-limited print test:")
for item in xrange(1, 11):
self.print_item(item)
|
"""
This test demonstrates the use of the "rate_limited" decorator.
You can use this decorator on any method to rate-limit it.
"""
import unittest
from seleniumbase.common import decorators
class MyTestClass(unittest.TestCase):
    """Demonstrates the @rate_limited decorator on an ordinary method."""

    @decorators.rate_limited(3.5)  # The arg is max calls per second
    def print_item(self, item):
        print(item)

    def test_rate_limited_printing(self):
        print("\nRunning rate-limited print test:")
        # range() instead of the Python-2-only xrange keeps this example
        # runnable under both Python 2 and Python 3.
        for item in range(1, 11):
            self.print_item(item)
|
Update the rate_limited decorator test
|
Update the rate_limited decorator test
|
Python
|
mit
|
seleniumbase/SeleniumBase,possoumous/Watchers,possoumous/Watchers,mdmintz/SeleniumBase,possoumous/Watchers,ktp420/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase,mdmintz/SeleniumBase,ktp420/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/seleniumspot,ktp420/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,possoumous/Watchers
|
2a23e72f7ad01976bcd80aa91f89882e2a37cbf6
|
test/test_model.py
|
test/test_model.py
|
# coding: utf-8
import os, sys
sys.path.append(os.path.join(sys.path[0], '..'))
from carlo import model, entity, generate
def test_minimal_model():
m = model(entity('const', {'int': lambda: 42})).build()
assert [('const', {'int': 42})] == m.create()
m = model(entity('const2', {'str': lambda: 'hello'})).build()
assert [('const2', {'str': 'hello'})] == m.create()
def test_model_with_multiple_entities():
m = model(
entity('first', {'name': lambda: 'elves'}),
entity('second', {'name': lambda: 'humans'})).build()
assert [('first', {'name': 'elves'}),
('second', {'name': 'humans'})] == m.create()
def test_model_with_multiple_params():
m = model(entity('human', {
'head': lambda: 1,
'hands': lambda: 2,
'name': lambda: 'Hurin',
})).build()
assert [('human', {'head': 1, 'hands': 2, 'name': 'Hurin'})] == m.create()
|
# coding: utf-8
import os, sys
sys.path.append(os.path.join(sys.path[0], '..'))
from carlo import model, entity, generate
def test_minimal_model():
    # A model with one constant-valued entity yields exactly that entity;
    # the parameter callables are evaluated at create() time.
    m = model(entity('const', {'int': lambda: 42})).build()
    assert [('const', {'int': 42})] == m.create()
    m = model(entity('const2', {'str': lambda: 'hello'})).build()
    assert [('const2', {'str': 'hello'})] == m.create()
def test_model_with_multiple_entities():
    # Entities come back in declaration order.
    m = model(
        entity('first', {'name': lambda: 'elves'}),
        entity('second', {'name': lambda: 'humans'})).build()
    assert [('first', {'name': 'elves'}),
            ('second', {'name': 'humans'})] == m.create()
def test_model_with_multiple_params():
    # All parameters of an entity are generated into a single dict.
    m = model(entity('human', {
        'head': lambda: 1,
        'hands': lambda: 2,
        'name': lambda: 'Hurin',
    })).build()
    assert [('human', {'head': 1, 'hands': 2, 'name': 'Hurin'})] == m.create()
# error handling
def test_same_entities_should_throw_error():
    # TODO: model() with two entities of the same name should raise.
    # (Function name fixed: was misspelled "enitities".)
    pass


def test_same_params_should_throw_error():
    # TODO: entity() with duplicate parameter names should raise.
    pass
|
Test blueprints for corner cases
|
Test blueprints for corner cases
|
Python
|
mit
|
ahitrin/carlo
|
b6fc4a8db76b3aad100c6e40ab1b0fb9977dfd0d
|
changes/api/project_index.py
|
changes/api/project_index.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
# queryset = Build.query.options(
# joinedload(Build.project),
# joinedload(Build.author),
# ).order_by(Build.date_created.desc(), Build.date_started.desc())
# if change:
# queryset = queryset.filter_by(change=change)
project_list = list(queryset)
context = {
'projects': project_list,
}
return self.respond(context)
def get_stream_channels(self):
return []
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
    """List all projects (alphabetical), each annotated with its three most
    recent builds."""

    def get(self):
        queryset = Project.query.order_by(Project.name.asc())

        project_list = list(queryset)

        context = {
            'projects': [],
        }
        # NOTE(review): this issues one builds query per project (N+1);
        # acceptable for a small project count, revisit if the list grows.
        for project in project_list:
            data = self.serialize(project)
            # Eager-load project/author to avoid per-build lazy loads during
            # serialization; newest three builds first.
            data['recentBuilds'] = list(Build.query.options(
                joinedload(Build.project),
                joinedload(Build.author),
            ).filter_by(
                project=project,
            ).order_by(
                Build.date_created.desc(),
            )[:3])
            context['projects'].append(data)

        return self.respond(context)

    def get_stream_channels(self):
        # Subscribe clients to all build events so recentBuilds stays live.
        return ['builds:*']
|
Add recentBuilds and stream to project index
|
Add recentBuilds and stream to project index
|
Python
|
apache-2.0
|
wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,dropbox/changes
|
4b56e0da85cec4aa89b8105c3a7ca416a2f7919e
|
wdim/client/blob.py
|
wdim/client/blob.py
|
import json
import hashlib
from wdim import orm
from wdim.orm import fields
from wdim.orm import exceptions
class Blob(orm.Storable):
HASH_METHOD = 'sha256'
_id = fields.StringField(unique=True)
data = fields.DictField()
@classmethod
async def create(cls, data):
sha = hashlib.new(cls.HASH_METHOD, json.dumps(data).encode('utf-8')).hexdigest()
try:
# Classmethod supers need arguments for some reason
return await super(Blob, cls).create(_id=sha, data=data)
except exceptions.UniqueViolation:
return await cls.load(sha)
@property
def hash(self):
return self._id
|
import json
import hashlib
from typing import Any, Dict
from wdim import orm
from wdim.orm import fields
from wdim.orm import exceptions
class Blob(orm.Storable):
    """Content-addressed document: the record id is the hash of its data."""

    HASH_METHOD = 'sha256'

    _id = fields.StringField(unique=True)
    data = fields.DictField()

    @classmethod
    async def create(cls, data: Dict[str, Any]) -> 'Blob':
        """Store `data`, deduplicating by content hash.

        If a blob with the same hash already exists, the existing record is
        loaded and returned instead of raising.

        NOTE(review): json.dumps() is called without sort_keys, so dicts with
        the same content but different key order hash differently -- confirm
        this is intended.
        """
        sha = hashlib.new(cls.HASH_METHOD, json.dumps(data).encode('utf-8')).hexdigest()
        try:
            # Classmethod supers need arguments for some reason
            return await super(Blob, cls).create(_id=sha, data=data)
        except exceptions.UniqueViolation:
            return await cls.load(sha)

    @property
    def hash(self) -> str:
        # The document id *is* the content hash.
        return self._id

    def __getitem__(self, key):
        # Convenience: blob['k'] proxies to blob.data['k'].
        return self.data[key]
|
Allow Blob to be accessed with __getitem__
|
Allow Blob to be accessed with __getitem__
|
Python
|
mit
|
chrisseto/Still
|
a78445cfada5cc1f77a7887dc5241071bef69989
|
compass/tests/test_models.py
|
compass/tests/test_models.py
|
from django.test import TestCase
from compass.models import (Category,
Book)
class CategoryTestCase(TestCase):
def test_can_add_category(self,):
Category.create(title="Mock Category")
self.assertEqual(Category.find("Mock Category").count(), 1)
class BookTestCase(TestCase):
def test_can_add_book(self):
category = Category.create(title="Mock Category")
Book.create(title="Mock Book", category=category)
self.assertEqual(Book.find("Mock Book").count(), 1)
|
from django.test import TestCase
from compass.models import (Category,
Book, Compass)
class CategoryTestCase(TestCase):
    """A created category can be found back by its title."""

    def test_can_add_category(self,):
        Category.create(title="Mock Category")
        self.assertEqual(Category.find("Mock Category").count(), 1)
class BookTestCase(TestCase):
    """A created book (attached to a category) can be found back by title."""

    def test_can_add_book(self):
        category = Category.create(title="Mock Category")
        Book.create(title="Mock Book", category=category)
        self.assertEqual(Book.find("Mock Book").count(), 1)
class CompassTestCase(TestCase):
    """Heading text shown above search results, per filter combination."""

    def test_correct_title_if_not_title_and_category(self,):
        # No filters at all: generic heading.
        heading = Compass.heading(title="", category="")
        self.assertEqual(heading, "All books")

    def test_correct_title_if_not_category(self,):
        # Title filter only.
        heading = Compass.heading(title="Title 1", category="")
        self.assertEqual(heading, "All book titles like Title 1")

    def test_correct_title_if_not_title(self,):
        # Category filter only.
        heading = Compass.heading(title="", category="Category 1")
        self.assertEqual(heading, "All book titles under Category 1")
|
Test correct heading returned in search results
|
Test correct heading returned in search results
|
Python
|
mit
|
andela-osule/bookworm,andela-osule/bookworm
|
eaa2ef92eba11d44bf5159342e314b932d79f58d
|
fedora/__init__.py
|
fedora/__init__.py
|
# Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
# Needed for our unit tests
from fedora.wsgi.test import websetup
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg', 'websetup')
|
# Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg', 'websetup')
|
Undo the webtest import... it's causing runtime failure and unit tests are currently broken anyway.
|
Undo the webtest import... it's causing runtime failure and unit tests are
currently broken anyway.
|
Python
|
lgpl-2.1
|
fedora-infra/python-fedora
|
662287761b8549a86d3fb8c05ec37d47491da120
|
flatblocks/urls.py
|
flatblocks/urls.py
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
]
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path(
r"^edit/(?P<pk>\d+)/$",
staff_member_required(edit),
name="flatblocks-edit",
),
]
|
Use raw string notation for regular expression.
|
Use raw string notation for regular expression.
|
Python
|
bsd-3-clause
|
funkybob/django-flatblocks,funkybob/django-flatblocks
|
1cc6ec9f328d3ce045a4a1a50138b11c0b23cc3a
|
pyfr/ctypesutil.py
|
pyfr/ctypesutil.py
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'win32':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
    """Return the name of the platform C runtime library (None if absent)."""
    # Windows ships the C runtime as msvcrt; everywhere else ask the
    # standard resolver for "c".
    if sys.platform != 'win32':
        return ctypes.util.find_library('c')
    return ctypes.util.find_msvcrt()
def load_library(name):
    """Load the shared library `name` and return a ctypes.CDLL handle.

    Resolution order: an explicit PYFR_<NAME>_LIBRARY_PATH override, then
    the system linker search path, then the directories reported by
    platform_libdirs().  Raises OSError if all of these fail.
    """
    # If an explicit override has been given then use it
    override = os.environ.get('PYFR_{0}_LIBRARY_PATH'.format(name.upper()))
    if override:
        return ctypes.CDLL(override)

    libname = platform_libname(name)

    # System search path first
    try:
        return ctypes.CDLL(libname)
    except OSError:
        pass

    # ..then our own candidate directories
    for directory in platform_libdirs():
        candidate = os.path.abspath(os.path.join(directory, libname))
        try:
            return ctypes.CDLL(candidate)
        except OSError:
            pass

    raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
    """Return the platform-specific file name for shared library `name`."""
    templates = {'darwin': 'lib{0}.dylib', 'win32': '{0}.dll'}
    return templates.get(sys.platform, 'lib{0}.so').format(name)
def platform_libdirs():
    """Return the directories to search for shared libraries.

    Taken from the colon-separated PYFR_LIBRARY_PATH variable (empty
    components dropped); on macOS the default MacPorts prefix is appended
    as a last resort.
    """
    env_path = os.environ.get('PYFR_LIBRARY_PATH', '')
    dirs = [d for d in env_path.split(':') if d]
    if sys.platform == 'darwin':
        dirs = dirs + ['/opt/local/lib']
    return dirs
|
Enable library paths to be explicitly specified.
|
Enable library paths to be explicitly specified.
All shared libraries loaded through the load_library function
can now be specified explicitly through a suitable environment
variable
PYFR_<LIB>_LIBRARY_PATH=/path/to/lib.here
where <LIB> corresponds to the name of the library, e.g. METIS.
|
Python
|
bsd-3-clause
|
BrianVermeire/PyFR
|
8237291e194aa900857fe382d0b8cefb7806c331
|
ocradmin/ocrmodels/models.py
|
ocradmin/ocrmodels/models.py
|
from django.db import models
from django.contrib.auth.models import User
from tagging.fields import TagField
import tagging
# OCR model, erm, model
class OcrModel(models.Model):
"""
OCR model objects.
"""
user = models.ForeignKey(User)
derived_from = models.ForeignKey("self", null=True, blank=True)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True)
public = models.BooleanField(default=True)
file = models.FileField(upload_to="models")
type = models.CharField(max_length=20,
choices=[("char", "Character"), ("lang", "Language")])
app = models.CharField(max_length=20,
choices=[("ocropus", "Ocropus"), ("tesseract", "Tesseract")])
def __unicode__(self):
"""
String representation.
"""
return self.name
|
from django.db import models
from django.contrib.auth.models import User
from tagging.fields import TagField
import tagging
# OCR model, erm, model
class OcrModel(models.Model):
    """
    OCR model objects.

    A trained character or language model file uploaded by a user, for use
    with either the Ocropus or Tesseract engine (see `app`).
    """
    user = models.ForeignKey(User)
    # Lineage link when this model was derived from another one.
    derived_from = models.ForeignKey("self", null=True, blank=True)
    tags = TagField()
    name = models.CharField(max_length=100, unique=True)
    description = models.TextField(null=True, blank=True)
    created_on = models.DateField(auto_now_add=True)
    # NOTE(review): not auto_now -- callers appear responsible for setting
    # this on update; confirm.
    updated_on = models.DateField(null=True, blank=True)
    public = models.BooleanField(default=True)
    file = models.FileField(upload_to="models")
    # "char" = character model, "lang" = language model.
    type = models.CharField(max_length=20,
            choices=[("char", "Character"), ("lang", "Language")])
    app = models.CharField(max_length=20,
            choices=[("ocropus", "Ocropus"), ("tesseract", "Tesseract")])

    def __unicode__(self):
        """
        String representation.
        """
        return "<%s: %s>" % (self.__class__.__name__, self.name)
|
Improve unicode method. Whitespace cleanup
|
Improve unicode method. Whitespace cleanup
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
7a99695c7612609de294a6905820fad3e41afc43
|
marketpulse/devices/models.py
|
marketpulse/devices/models.py
|
from django.db import models
class Device(models.Model):
"""Model for FfxOS devices data."""
model = models.CharField(max_length=120)
manufacturer = models.CharField(max_length=120)
def __unicode__(self):
return '{0}, {1}'.format(self.manufacturer, self.model)
|
from django.db import models
class Device(models.Model):
    """Model for FfxOS devices data."""
    # Free-text device model and maker names.
    model = models.CharField(max_length=120)
    manufacturer = models.CharField(max_length=120)

    def __unicode__(self):
        return '{0}, {1}'.format(self.manufacturer, self.model)

    class Meta:
        # Default listing order: group by maker, then by model name.
        ordering = ['manufacturer', 'model']
|
Order devices by manufacturer and model.
|
Order devices by manufacturer and model.
|
Python
|
mpl-2.0
|
johngian/marketpulse,akatsoulas/marketpulse,johngian/marketpulse,mozilla/marketpulse,mozilla/marketpulse,johngian/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse,johngian/marketpulse,akatsoulas/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse
|
a760beb8d66222b456b160344eb0b4b7fccbf84a
|
Lib/test/test_linuxaudiodev.py
|
Lib/test/test_linuxaudiodev.py
|
from test_support import verbose, findfile, TestFailed
import linuxaudiodev
import errno
import os
def play_sound_file(path):
fp = open(path, 'r')
data = fp.read()
fp.close()
try:
a = linuxaudiodev.open('w')
except linuxaudiodev.error, msg:
if msg[0] in (errno.EACCES, errno.ENODEV):
raise ImportError, msg
raise TestFailed, msg
else:
a.write(data)
a.close()
def test():
play_sound_file(findfile('audiotest.au'))
test()
|
from test_support import verbose, findfile, TestFailed, TestSkipped
import linuxaudiodev
import errno
import os
def play_sound_file(path):
    """Play the audio file at `path` through the linuxaudiodev device.

    Raises TestSkipped when no usable audio device exists (permission or
    missing-device errors), TestFailed for any other device error.
    (Python 2 syntax: `except cls, name` / `raise cls, value`.)
    """
    fp = open(path, 'r')
    data = fp.read()
    fp.close()
    try:
        a = linuxaudiodev.open('w')
    except linuxaudiodev.error, msg:
        # EACCES/ENODEV mean the machine simply has no accessible audio
        # device -- an environment problem, not a test failure.
        if msg[0] in (errno.EACCES, errno.ENODEV):
            raise TestSkipped, msg
        raise TestFailed, msg
    else:
        a.write(data)
        a.close()
def test():
play_sound_file(findfile('audiotest.au'))
test()
|
Raise TestSkipped, not ImportError. Honesty's the best policy.
|
Raise TestSkipped, not ImportError.
Honesty's the best policy.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
70db9410173183c83d80ca23e56ceb0d627fcbae
|
scripts/indices.py
|
scripts/indices.py
|
# Indices that need to be added manually:
#
# invoke shell --no-transaction
from pymongo import ASCENDING, DESCENDING
db['user'].create_index([
('emails', ASCENDING),
])
db['user'].create_index([
('external_accounts', ASCENDING),
])
db['user'].create_index([
('emails', ASCENDING),
('username', ASCENDING),
])
db['node'].create_index([
('is_deleted', ASCENDING),
('is_collection', ASCENDING),
('is_public', ASCENDING),
('institution_id', ASCENDING),
('is_registration', ASCENDING),
('contributors', ASCENDING),
])
db['node'].create_index([
('tags', ASCENDING),
('is_public', ASCENDING),
('is_deleted', ASCENDING),
('institution_id', ASCENDING),
])
|
# Indices that need to be added manually:
#
# invoke shell --no-transaction
from pymongo import ASCENDING, DESCENDING
db['storedfilenode'].create_index([
('tags', ASCENDING),
])
db['user'].create_index([
('emails', ASCENDING),
])
db['user'].create_index([
('external_accounts', ASCENDING),
])
db['user'].create_index([
('emails', ASCENDING),
('username', ASCENDING),
])
db['node'].create_index([
('is_deleted', ASCENDING),
('is_collection', ASCENDING),
('is_public', ASCENDING),
('institution_id', ASCENDING),
('is_registration', ASCENDING),
('contributors', ASCENDING),
])
db['node'].create_index([
('tags', ASCENDING),
('is_public', ASCENDING),
('is_deleted', ASCENDING),
('institution_id', ASCENDING),
])
|
Add index on file tags field
|
Add index on file tags field
|
Python
|
apache-2.0
|
baylee-d/osf.io,abought/osf.io,Johnetordoff/osf.io,felliott/osf.io,alexschiller/osf.io,mluo613/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,aaxelb/osf.io,wearpants/osf.io,hmoco/osf.io,mfraezz/osf.io,caseyrollins/osf.io,leb2dg/osf.io,emetsger/osf.io,caneruguz/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,wearpants/osf.io,laurenrevere/osf.io,abought/osf.io,acshi/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,felliott/osf.io,laurenrevere/osf.io,caneruguz/osf.io,mluke93/osf.io,chennan47/osf.io,SSJohns/osf.io,crcresearch/osf.io,SSJohns/osf.io,mluke93/osf.io,hmoco/osf.io,chrisseto/osf.io,kwierman/osf.io,samchrisinger/osf.io,mluo613/osf.io,emetsger/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,erinspace/osf.io,wearpants/osf.io,crcresearch/osf.io,chennan47/osf.io,caneruguz/osf.io,caneruguz/osf.io,rdhyee/osf.io,Nesiehr/osf.io,pattisdr/osf.io,cslzchen/osf.io,samchrisinger/osf.io,chennan47/osf.io,samchrisinger/osf.io,cslzchen/osf.io,Nesiehr/osf.io,aaxelb/osf.io,mluke93/osf.io,amyshi188/osf.io,cslzchen/osf.io,icereval/osf.io,cwisecarver/osf.io,mattclark/osf.io,abought/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,aaxelb/osf.io,alexschiller/osf.io,TomBaxter/osf.io,zamattiac/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,mluo613/osf.io,binoculars/osf.io,emetsger/osf.io,adlius/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,pattisdr/osf.io,mfraezz/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,sloria/osf.io,saradbowman/osf.io,mattclark/osf.io,mfraezz/osf.io,mluo613/osf.io,chrisseto/osf.io,acshi/osf.io,abought/osf.io,hmoco/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,SSJohns/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,jnayak1/osf.io,adlius/osf.io,acshi/osf.io,adlius/osf.io,felliott/osf.io,erinspace/osf.io,J
ohnetordoff/osf.io,mluo613/osf.io,binoculars/osf.io,hmoco/osf.io,caseyrollins/osf.io,mattclark/osf.io,monikagrabowska/osf.io,erinspace/osf.io,mfraezz/osf.io,jnayak1/osf.io,TomBaxter/osf.io,chrisseto/osf.io,Nesiehr/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,samchrisinger/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,kwierman/osf.io,kwierman/osf.io,Nesiehr/osf.io,mluke93/osf.io,acshi/osf.io,kwierman/osf.io,adlius/osf.io,aaxelb/osf.io,cwisecarver/osf.io,rdhyee/osf.io,baylee-d/osf.io,acshi/osf.io,pattisdr/osf.io,alexschiller/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,binoculars/osf.io,zamattiac/osf.io,TomBaxter/osf.io,icereval/osf.io,sloria/osf.io,wearpants/osf.io,felliott/osf.io,icereval/osf.io,monikagrabowska/osf.io
|
ecbabd56f6afc4474402d3293bf11e3b6eb2e8f4
|
server/__init__.py
|
server/__init__.py
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import(
genRESTEndPointsForSlicerCLIsInSubDirs,
genRESTEndPointsForSlicerCLIsInDocker
)
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
genRESTEndPointsForSlicerCLIsInDocker(info,
'HistomicsTK',
'dsarchive/histomicstk')
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import(
genRESTEndPointsForSlicerCLIsInSubDirs,
genRESTEndPointsForSlicerCLIsInDocker
)
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
_ = genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
|
Switch to generating REST end points from docker image
|
Switch to generating REST end points from docker image
|
Python
|
apache-2.0
|
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
|
56dc9af410907780faba79699d274bef96a18675
|
functionaltests/common/base.py
|
functionaltests/common/base.py
|
"""
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tempest_lib.base
from functionaltests.common.config import read_config
class BaseDesignateTest(tempest_lib.base.BaseTestCase):
def __init__(self, *args, **kwargs):
super(BaseDesignateTest, self).__init__(*args, **kwargs)
@classmethod
def setUpClass(cls):
super(BaseDesignateTest, cls).setUpClass()
read_config()
|
"""
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tempest_lib.base
from functionaltests.common.config import read_config
class BaseDesignateTest(tempest_lib.base.BaseTestCase):
@classmethod
def setUpClass(cls):
super(BaseDesignateTest, cls).setUpClass()
read_config()
|
Remove unnecessary __init__ from functionaltests
|
Remove unnecessary __init__ from functionaltests
The __init__ just passes the same arguments, so it is not necessary
to implement it. This patch removes it for the cleanup.
Change-Id: Ib465356c47d06bfc66bef69126b089be24d19474
|
Python
|
apache-2.0
|
openstack/designate,openstack/designate,openstack/designate
|
40ca8cde872704438fecd22ae98bc7db610de1f9
|
services/flickr.py
|
services/flickr.py
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
# (None, 'access only your public photos'),
# ('read', 'access your public and private photos'),
# ('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
https = False
def get_authorize_params(self, redirect_uri):
params = super(Flickr, self).get_authorize_params(redirect_uri)
params['perms'] = self.available_permissions[0][0]
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json[u'person'][u'nsid']
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access only your public photos'),
('read', 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
https = False
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['perms'] = scopes[0]
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json[u'person'][u'nsid']
|
Rewrite Flickr to use the new scope selection system
|
Rewrite Flickr to use the new scope selection system
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
267b0634546c55ebb42d6b1b9c3deca9d7408cc2
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules"""
def main(sdk_path, test_path):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover(test_path)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) != 2:
print 'Error: Exactly 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = "tests"
main(SDK_PATH, TEST_PATH)
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_path):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover(test_path)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) != 1:
print 'Error: Exactly 1 argument required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = "tests"
main(SDK_PATH, TEST_PATH)
|
Fix test runner to accept 1 arg
|
Fix test runner to accept 1 arg
|
Python
|
mit
|
the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-a
lliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance
|
def9d7037a3c629f63e1a0d8c1721501abc110cd
|
linguee_api/downloaders/httpx_downloader.py
|
linguee_api/downloaders/httpx_downloader.py
|
import httpx
from linguee_api.downloaders.interfaces import DownloaderError, IDownloader
class HTTPXDownloader(IDownloader):
"""
Real downloader.
Sends request to linguee.com to read the page.
"""
async def download(self, url: str) -> str:
async with httpx.AsyncClient() as client:
try:
response = await client.get(url)
except httpx.ConnectError as e:
raise DownloaderError(str(e)) from e
if response.status_code != 200:
raise DownloaderError(
f"The Linguee server returned {response.status_code}"
)
return response.text
|
import httpx
from linguee_api.downloaders.interfaces import DownloaderError, IDownloader
ERROR_503 = (
"The Linguee server returned 503. The API proxy was temporarily blocked by "
"Linguee. For more details, see https://github.com/imankulov/linguee-api#"
"the-api-server-returns-the-linguee-server-returned-503"
)
class HTTPXDownloader(IDownloader):
"""
Real downloader.
Sends request to linguee.com to read the page.
"""
async def download(self, url: str) -> str:
async with httpx.AsyncClient() as client:
try:
response = await client.get(url)
except httpx.ConnectError as e:
raise DownloaderError(str(e)) from e
if response.status_code == 503:
raise DownloaderError(ERROR_503)
if response.status_code != 200:
raise DownloaderError(
f"The Linguee server returned {response.status_code}"
)
return response.text
|
Update the 503 error message.
|
Update the 503 error message.
|
Python
|
mit
|
imankulov/linguee-api
|
ffa00eaea02cda8258bf42d4fa733fb8693e2f0c
|
chemtrails/apps.py
|
chemtrails/apps.py
|
# -*- coding: utf-8 -*-
from django.apps import AppConfig
from django.conf import settings
from django.db.models.signals import m2m_changed, post_migrate, post_save, pre_delete
from neomodel import config
config.AUTO_INSTALL_LABELS = False
class ChemTrailsConfig(AppConfig):
name = 'chemtrails'
def ready(self):
from .signals.handlers import (
m2m_changed_handler, post_migrate_handler,
post_save_handler, pre_delete_handler
)
m2m_changed.connect(receiver=m2m_changed_handler,
dispatch_uid='chemtrails.signals.handlers.m2m_changed_handler')
post_save.connect(receiver=post_save_handler,
dispatch_uid='chemtrails.signals.handlers.post_save_handler')
pre_delete.connect(receiver=pre_delete_handler,
dispatch_uid='chemtrails.signals.handlers.pre_delete_handler')
post_migrate.connect(receiver=post_migrate_handler,
dispatch_uid='neomodel.core.install_all_labels')
# Neo4j config
config.DATABASE_URL = getattr(settings, 'NEOMODEL_NEO4J_BOLT_URL', config.DATABASE_URL)
config.FORCE_TIMEZONE = getattr(settings, 'NEOMODEL_FORCE_TIMEZONE', False)
|
# -*- coding: utf-8 -*-
import os
from django.apps import AppConfig
from django.conf import settings
from django.db.models.signals import m2m_changed, post_migrate, post_save, pre_delete
from neomodel import config
config.AUTO_INSTALL_LABELS = False
class ChemTrailsConfig(AppConfig):
name = 'chemtrails'
def ready(self):
from .signals.handlers import (
m2m_changed_handler, post_migrate_handler,
post_save_handler, pre_delete_handler
)
m2m_changed.connect(receiver=m2m_changed_handler,
dispatch_uid='chemtrails.signals.handlers.m2m_changed_handler')
post_save.connect(receiver=post_save_handler,
dispatch_uid='chemtrails.signals.handlers.post_save_handler')
pre_delete.connect(receiver=pre_delete_handler,
dispatch_uid='chemtrails.signals.handlers.pre_delete_handler')
post_migrate.connect(receiver=post_migrate_handler,
dispatch_uid='neomodel.core.install_all_labels')
# Neo4j config
config.DATABASE_URL = getattr(settings, 'NEOMODEL_NEO4J_BOLT_URL',
os.environ.get('NEOMODEL_NEO4J_BOLT_URL', config.DATABASE_URL))
config.FORCE_TIMEZONE = getattr(settings, 'NEOMODEL_FORCE_TIMEZONE',
os.environ.get('NEOMODEL_FORCE_TIMEZONE', False))
|
Read Neo4j config from ENV if present
|
Read Neo4j config from ENV if present
|
Python
|
mit
|
inonit/django-chemtrails,inonit/django-chemtrails,inonit/django-chemtrails
|
7a688f0712ff323668955a21ea335f3308fcc840
|
wurstmineberg.45s.py
|
wurstmineberg.45s.py
|
#!/usr/local/bin/python3
import requests
people = requests.get('https://api.wurstmineberg.de/v2/people.json').json()
status = requests.get('https://api.wurstmineberg.de/v2/world/wurstmineberg/status.json').json()
print(len(status['list']))
print('---')
print('Version: {}|color=gray'.format(status['version']))
for wmb_id in status['list']:
display_name = people['people'].get(wmb_id, {}).get('name', wmb_id)
print('{}|href=https://wurstmineberg.de/people/{} color=#2889be'.format(display_name, wmb_id))
|
#!/usr/local/bin/python3
import requests
people = requests.get('https://api.wurstmineberg.de/v2/people.json').json()
status = requests.get('https://api.wurstmineberg.de/v2/world/wurstmineberg/status.json').json()
print(len(status['list']))
print('---')
print('Version: {}|color=gray'.format(status['version']))
for wmb_id in status['list']:
display_name = people['people'].get(wmb_id, {}).get('name', wmb_id)
print('{}|href=https://wurstmineberg.de/people/{} color=#2889be'.format(display_name, wmb_id))
print('---')
print('Start Minecraft | bash=/usr/bin/open param1=-a param2=Minecraft terminal=false')
|
Add “Start Minecraft” menu item
|
Add “Start Minecraft” menu item
From https://github.com/matryer/bitbar-plugins/blob/master/Games/minecraftplayers.1m.py
|
Python
|
mit
|
wurstmineberg/bitbar-server-status
|
d4db750d2ff2e18c9fced49fffe7a3073880078b
|
InvenTree/common/apps.py
|
InvenTree/common/apps.py
|
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
pass
|
# -*- coding: utf-8 -*-
import logging
from django.apps import AppConfig
logger = logging.getLogger('inventree')
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.clear_restart_flag()
def clear_restart_flag(self):
"""
Clear the SERVER_RESTART_REQUIRED setting
"""
try:
import common.models
if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'):
logger.info("Clearing SERVER_RESTART_REQUIRED flag")
common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None)
except:
pass
|
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
|
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
|
Python
|
mit
|
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
|
ae918211a85654d7eaa848cbd09f717d0339f844
|
database_email_backend/backend.py
|
database_email_backend/backend.py
|
#-*- coding: utf-8 -*-
from email.MIMEBase import MIMEBase
from django.core.mail.backends.base import BaseEmailBackend
from database_email_backend.models import Email, Attachment
class DatabaseEmailBackend(BaseEmailBackend):
def send_messages(self, email_messages):
if not email_messages:
return
for message in email_messages:
email = Email.objects.create(
from_email = message.from_email,
to_emails = ', '.join(message.to),
cc_emails = ', '.join(message.cc),
bcc_emails = ', '.join(message.bcc),
all_recipients = ', '.join(message.recipients()),
subject = message.subject,
body = message.body,
raw = message.message().as_string()
)
for attachment in message.attachments:
if isinstance(attachment, tuple):
filename, content, mimetype = attachment
elif isinstance(attachment, MIMEBase):
filename = attachment.get_filename()
content = attachment.get_payload(decode=True)
mimetype = None
else:
continue
Attachment.objects.create(
email=email,
filename=filename,
content=content,
mimetype=mimetype
)
|
#-*- coding: utf-8 -*-
from email.MIMEBase import MIMEBase
from django.core.mail.backends.base import BaseEmailBackend
from database_email_backend.models import Email, Attachment
class DatabaseEmailBackend(BaseEmailBackend):
def send_messages(self, email_messages):
if not email_messages:
return
for message in email_messages:
email = Email.objects.create(
from_email = u'%s' % message.from_email,
to_emails = u', '.join(message.to),
cc_emails = u', '.join(message.cc),
bcc_emails = u', '.join(message.bcc),
all_recipients = u', '.join(message.recipients()),
subject = u'%s' % message.subject,
body = u'%s' % message.body,
raw = u'%s' % message.message().as_string()
)
for attachment in message.attachments:
if isinstance(attachment, tuple):
filename, content, mimetype = attachment
elif isinstance(attachment, MIMEBase):
filename = attachment.get_filename()
content = attachment.get_payload(decode=True)
mimetype = None
else:
continue
Attachment.objects.create(
email=email,
filename=filename,
content=content,
mimetype=mimetype
)
|
Convert everything to unicode strings before inserting to DB
|
Convert everything to unicode strings before inserting to DB
|
Python
|
mit
|
machtfit/django-database-email-backend,machtfit/django-database-email-backend,jbinary/django-database-email-backend,stefanfoulis/django-database-email-backend,jbinary/django-database-email-backend
|
b4c97d3b7b914c193c018a1d808f0815778996b4
|
keystone/common/sql/data_migration_repo/versions/002_password_created_at_not_nullable.py
|
keystone/common/sql/data_migration_repo/versions/002_password_created_at_not_nullable.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# A null initial migration to open this repo. Do not re-use replace this with
# a real migration, add additional ones in subsequent version scripts.
def upgrade(migrate_engine):
pass
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def upgrade(migrate_engine):
pass
|
Remove comment from previous migration
|
Remove comment from previous migration
The migration was using a comment from the first one.
Change-Id: I25dc9ca79f30f156bfc4296c44e141991119635e
|
Python
|
apache-2.0
|
ilay09/keystone,rajalokan/keystone,mahak/keystone,openstack/keystone,ilay09/keystone,openstack/keystone,mahak/keystone,mahak/keystone,openstack/keystone,rajalokan/keystone,rajalokan/keystone,ilay09/keystone
|
cd0b6af73dd49b4da851a75232b5829b91b9030c
|
genome_designer/conf/demo_settings.py
|
genome_designer/conf/demo_settings.py
|
"""
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
|
"""
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.refresh_materialized_variant_table',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
|
Allow refresh materialized view in DEMO_MODE.
|
Allow refresh materialized view in DEMO_MODE.
|
Python
|
mit
|
woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,churchlab/millstone,churchlab/millstone
|
e073e020d46953e15f0fb30d2947028c42261fc1
|
cropimg/widgets.py
|
cropimg/widgets.py
|
from django.forms.widgets import Input, ClearableFileInput
from django.template.loader import render_to_string
class CIImgWidget(ClearableFileInput):
def render(self, name, value, attrs=None):
try:
attrs["data-value"] = getattr(value, "url", "")
except ValueError: # attribute has no file associated with it.
attrs["data-value"] = ""
return super(CIImgWidget, self).render(name, value, attrs)
class CIThumbnailWidget(Input):
input_type = "text"
def render(self, name, value, attrs=None, renderer=None):
if attrs:
attrs.update(self.attrs)
attrs["type"] = "hidden"
input_field = super(CIThumbnailWidget, self).render(name, value, attrs)
return render_to_string("cropimg/cropimg_widget.html",
{
"name": name, "value": value, "attrs": attrs,
"input_field": input_field
})
class Media:
js = ("cropimg/js/jquery_init.js", "cropimg/js/cropimg.jquery.js",
"cropimg/js/cropimg_init.js")
css = {"all": ["cropimg/resource/cropimg.css"]}
|
from django.forms.widgets import Input, ClearableFileInput
from django.template.loader import render_to_string
class CIImgWidget(ClearableFileInput):
def render(self, name, value, attrs=None, renderer=None, **kwargs):
try:
attrs["data-value"] = getattr(value, "url", "")
except ValueError: # attribute has no file associated with it.
attrs["data-value"] = ""
return super(CIImgWidget, self).render(name, value, attrs)
class CIThumbnailWidget(Input):
input_type = "text"
def render(self, name, value, attrs=None, renderer=None, **kwargs):
if attrs:
attrs.update(self.attrs)
attrs["type"] = "hidden"
input_field = super(CIThumbnailWidget, self).render(name, value, attrs)
return render_to_string("cropimg/cropimg_widget.html",
{
"name": name, "value": value, "attrs": attrs,
"input_field": input_field
})
class Media:
js = ("cropimg/js/jquery_init.js", "cropimg/js/cropimg.jquery.js",
"cropimg/js/cropimg_init.js")
css = {"all": ["cropimg/resource/cropimg.css"]}
|
Make sure that the admin widget also supports Django 2
|
Make sure that the admin widget also supports Django 2
|
Python
|
mit
|
rewardz/cropimg-django,rewardz/cropimg-django,rewardz/cropimg-django
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.