commit stringlengths 40 40 | old_file stringlengths 4 236 | new_file stringlengths 4 236 | old_contents stringlengths 1 3.26k | new_contents stringlengths 16 4.43k | subject stringlengths 16 624 | message stringlengths 17 3.29k | lang stringclasses 5
values | license stringclasses 13
values | repos stringlengths 5 91.5k |
|---|---|---|---|---|---|---|---|---|---|
c23553f48652ed3ed65e473c79732dddc6c5341b | sample_code.py | sample_code.py | @commands.command
async def my_cmd():
await client.say('hi') | import discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run('token')
| Set sample code to discord.py basic example | Set sample code to discord.py basic example
| Python | mit | TheTrain2000/async2rewrite |
d63302f10bf9972680c189a25f995b713e72562f | demo/apps/catalogue/models.py | demo/apps/catalogue/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
| Set name field on save | Set name field on save
| Python | mit | pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo |
50b6778ae43b8945b2073630e351ab759b007a3e | tests/social/youtube/test_tasks.py | tests/social/youtube/test_tasks.py | # -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
| # -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
| Switch to Jen's channel to (hopefully) make these tests faster. | Switch to Jen's channel to (hopefully) make these tests faster.
| Python | apache-2.0 | hello-base/web,hello-base/web,hello-base/web,hello-base/web |
6666351757c2c2083a88158a132f446112109b9d | tests/test_redshift/test_server.py | tests/test_redshift/test_server.py | from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
json_data = json.loads(res.data.decode("utf-8"))
clusters = json_data['DescribeClustersResponse'][
'DescribeClustersResult']['Clusters']
list(clusters).should.equal([])
| from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
result = res.data.decode("utf-8")
result.should.contain("<DescribeClustersResponse><DescribeClustersResult><Clusters></Clusters></DescribeClustersResult")
| Fix redshift server to default to xml. | Fix redshift server to default to xml.
| Python | apache-2.0 | heddle317/moto,kefo/moto,botify-labs/moto,kefo/moto,Affirm/moto,ZuluPro/moto,Affirm/moto,Brett55/moto,2rs2ts/moto,dbfr3qs/moto,dbfr3qs/moto,okomestudio/moto,heddle317/moto,heddle317/moto,ZuluPro/moto,okomestudio/moto,botify-labs/moto,whummer/moto,william-richard/moto,gjtempleton/moto,dbfr3qs/moto,heddle317/moto,rocky4570/moto,gjtempleton/moto,gjtempleton/moto,okomestudio/moto,2rs2ts/moto,whummer/moto,spulec/moto,ZuluPro/moto,rocky4570/moto,rocky4570/moto,rocky4570/moto,kefo/moto,william-richard/moto,botify-labs/moto,kefo/moto,william-richard/moto,okomestudio/moto,Brett55/moto,Brett55/moto,ZuluPro/moto,gjtempleton/moto,william-richard/moto,botify-labs/moto,2rs2ts/moto,Brett55/moto,spulec/moto,okomestudio/moto,whummer/moto,dbfr3qs/moto,Brett55/moto,botify-labs/moto,rocky4570/moto,2rs2ts/moto,Affirm/moto,ZuluPro/moto,rocky4570/moto,whummer/moto,heddle317/moto,ZuluPro/moto,Affirm/moto,kefo/moto,spulec/moto,Affirm/moto,spulec/moto,william-richard/moto,botify-labs/moto,dbfr3qs/moto,william-richard/moto,dbfr3qs/moto,2rs2ts/moto,Brett55/moto,spulec/moto,Affirm/moto,gjtempleton/moto,spulec/moto,okomestudio/moto,whummer/moto,whummer/moto |
f7059eb02ee93bdd0f998acde385a04ac91c63df | sparrow.py | sparrow.py | #!/usr/bin/env python
from ConfigParser import SafeConfigParser
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
config_file_name = 'config.ini'
#SECURE YOUR CONFIG FILE - Don't put it in source code
parser = SafeConfigParser()
parser.read(config_file_name)
API_KEY = parser.get(config_file_name,'API_KEY') #AKA 'Consumer Key'
API_SECRET = parser.get(config_file_name,'API_SECRET') #AKA 'Consumer Secret'
ACCESS_TOKEN = parser.get(config_file_name,'ACCESS_TOKEN') #AKA 'OAUTH Token'
ACCESS_TOKEN_SECRET = parser.get(config_file_name,'ACCESS_TOKEN_SECRET') #AKA 'OAUTH Token Secret'
twitter = Twython(API_KEY, API_SECRET,
ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
| #!/usr/bin/env python
import json
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
with open('creds.json') as f:
credentials = json.loads(f.read())
#SECURE YOUR CONFIG FILE - Don't put it in source code
twitter = Twython(credentials["consumer_key"],
credentials["consumer_secret"],
credentials["access_token_key"],
credentials["access_token_secret"])
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
| Update method of loading creds | Update method of loading creds
Changed the way of loading credentials from config.ini file to a json credentials file. | Python | mit | fmcorey/sparrow,fmcorey/sparrow |
c5d656cff3e7ac218cc41805dfb8c19f63cd4250 | run_server.py | run_server.py | #!/usr/bin/env python3
from shorter.web import app
if __name__ == "__main__":
app.run()
| #!/usr/bin/env python3
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
| Create a testing user on starting the server | Create a testing user on starting the server
| Python | agpl-3.0 | mapleoin/shorter |
108c696d032462ac3cdc00e45ead09136e80634a | tests/foomodulegen-auto.py | tests/foomodulegen-auto.py | #! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
if __name__ == '__main__':
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
| #! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
def main():
if sys.argv[1] == '-d':
del sys.argv[1]
import pdb
pdb.set_trace()
my_module_gen()
else:
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
if __name__ == '__main__':
main()
| Add a debug switch (-d) to enable debugger | Add a debug switch (-d) to enable debugger | Python | lgpl-2.1 | gjcarneiro/pybindgen,cawka/pybindgen-old,ftalbrecht/pybindgen,gjcarneiro/pybindgen,cawka/pybindgen-old,ftalbrecht/pybindgen,gjcarneiro/pybindgen,ftalbrecht/pybindgen,cawka/pybindgen-old,gjcarneiro/pybindgen,ftalbrecht/pybindgen,cawka/pybindgen-old |
4387a8a38664abe86f0ff9d531ab3ba937f9adf7 | tests/unit/test_main_views.py | tests/unit/test_main_views.py | import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def setup(self):
self.patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
def test_page_load(self, db_session):
db_session.add(self.patient1)
db_session.commit()
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
| import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def test_setup(self, db_session):
patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
user = User()
consultant = User()
meeting = Meeting()
db_session.add(patient1)
db_session.commit()
def test_page_load(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
def test_kept_in_db(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
assert req_pass.status_code == 200
| Add Unit tests for views | Add Unit tests for views
| Python | mit | stefpiatek/mdt-flask-app,stefpiatek/mdt-flask-app |
a7a1d513003a65c5c9772ba75631247decff444d | utils/utils.py | utils/utils.py | from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
current_site = get_current_site(request)
return add_domain(current_site.domain, path, request.is_secure())
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
| from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
"""Retrieve current site site
Always returns as http (never https)
"""
current_site = get_current_site(request)
site_url = add_domain(current_site.domain, path, request.is_secure())
return site_url.replace('https', 'http')
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
| Make site url be http, not https | Make site url be http, not https
| Python | bsd-3-clause | uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam |
6d635a94121a9038c7c5b80b9851a086e69728b6 | scripts/wiggle_to_binned_array.py | scripts/wiggle_to_binned_array.py | #!/usr/bin/env python
"""
usage: %prog score_file out_file
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def read_scores( f ):
scores_by_chrom = dict()
return scores_by_chrom
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
| #!/usr/bin/env python
"""
usage: %prog score_file out_file
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
| Allow specifying compression type on command line. | Allow specifying compression type on command line.
| Python | mit | uhjish/bx-python,uhjish/bx-python,uhjish/bx-python |
dc981dbd1b29d9586453f325f99b1c413c494800 | account/managers.py | account/managers.py | from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address = self.create(user=user, email=email, **kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
| from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address, __ = self.get_or_create(user=user, email=email, default=kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
| Use get_or_create instead of just create | Use get_or_create instead of just create
| Python | mit | gem/geonode-user-accounts,gem/geonode-user-accounts,gem/geonode-user-accounts |
80df385acb9f39d0a5f01dc41954b7035ecafb2d | upnp_inspector/__init__.py | upnp_inspector/__init__.py | # -*- coding: utf-8 -*-
__version_info__ = (0, 2, 3)
__version__ = '%d.%d.%d' % __version_info__[:3]
| # -*- coding: utf-8 -*-
__version__ = "0.3.dev0"
| Switch to PEP 440 compliant version string and update to 0.3.dev0. | Switch to PEP 440 compliant version string and update to 0.3.dev0.
Update to 0.3.dev0 since this is the version already stated in the
NEWS file.
| Python | mit | coherence-project/UPnP-Inspector |
16c1352ecf8583615e482c431ec5183fdb718f67 | split_file.py | split_file.py | from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
| from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
| Make splitting more robust to periods in strings | Make splitting more robust to periods in strings
| Python | mit | JasonGross/coq-tools,JasonGross/coq-tools |
5f49fb8c7c0f9e7a05d4f9b730d7f3e872229d60 | test/completion/definition.py | test/completion/definition.py | """
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
| """
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
| Add blackbox tests using column number | Add blackbox tests using column number
| Python | mit | flurischt/jedi,WoLpH/jedi,mfussenegger/jedi,dwillmer/jedi,tjwei/jedi,jonashaag/jedi,jonashaag/jedi,tjwei/jedi,flurischt/jedi,dwillmer/jedi,mfussenegger/jedi,WoLpH/jedi |
192e24cdafff2bb780ef9cc87853c48e9e41cb4a | stationspinner/accounting/management/commands/characters.py | stationspinner/accounting/management/commands/characters.py | from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('CharacterID\t\t {0} APIKey\t\t {1}'.format(char.pk, char.owner_key.pk)) | from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('{0}\t\tCharacterID\t\t {1} APIKey\t\t {2}'.format(char.name,
char.pk,
char.owner_key.pk)) | Simplify the output for copypaste | Simplify the output for copypaste
| Python | agpl-3.0 | kriberg/stationspinner,kriberg/stationspinner |
e0de6546fb58af113d18cf7e836407e3f8a1a985 | contrib/bosco/bosco-cluster-remote-hosts.py | contrib/bosco/bosco-cluster-remote-hosts.py | #!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
import htcondor
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
| #!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
| Delete unused import htcondor (SOFTWARE-4687) | Delete unused import htcondor (SOFTWARE-4687)
| Python | apache-2.0 | brianhlin/htcondor-ce,matyasselmeci/htcondor-ce,matyasselmeci/htcondor-ce,brianhlin/htcondor-ce,matyasselmeci/htcondor-ce,brianhlin/htcondor-ce |
ded6f27721e54f2c7dab3016209927678d85b90d | aldryn_faq/forms.py | aldryn_faq/forms.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
def clean_slug(self):
slug = self.cleaned_data['slug']
translations_model = Category._meta.translations_model
categories_with_slug = translations_model.objects.filter(slug=slug)
if self.instance.pk:
# Make sure to exclude references from this master :)
categories_with_slug = categories_with_slug.exclude(
master_id=self.instance.pk)
if categories_with_slug.exists():
raise forms.ValidationError(
'A category with this slug already exists.')
return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
# def clean_slug(self):
# slug = self.cleaned_data['slug']
# translations_model = Category._meta.translations_model
# categories_with_slug = translations_model.objects.filter(slug=slug)
# if self.instance.pk:
# # Make sure to exclude references from this master :)
# categories_with_slug = categories_with_slug.exclude(
# master_id=self.instance.pk)
# if categories_with_slug.exists():
# raise forms.ValidationError(
# 'A category with this slug already exists.')
# return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
| Remove no longer used code | Remove no longer used code
| Python | bsd-3-clause | czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq |
db134f36ae54ef135037d0c912068fc678df54cf | examples/controllers.py | examples/controllers.py | #!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1' )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
| #!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1', port=6633 )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
| Allow RemoteController to connect to correct port. | Allow RemoteController to connect to correct port.
Fixes #584
| Python | bsd-3-clause | mininet/mininet,mininet/mininet,mininet/mininet |
d9db4735a1c879e967af5fff30c8322ea3f5121a | hackfmi/urls.py | hackfmi/urls.py | from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
| from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
| Add url for searching user by name | Add url for searching user by name
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum |
9dab9c08c57ab0548beaa32765a2f064d2ec6544 | tests/app/test_application.py | tests/app/test_application.py | """
Tests for the application infrastructure
"""
from flask import json
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
| """
Tests for the application infrastructure
"""
import mock
import pytest
from flask import json
from elasticsearch.exceptions import ConnectionError
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
@mock.patch('elasticsearch.transport.Urllib3HttpConnection.perform_request', side_effect=ConnectionError(500))
def test_elastic_search_client_performs_retries_on_connection_error(self, perform_request):
with pytest.raises(ConnectionError):
self.client.get('/')
# FlaskElasticsearch attaches the es client to the context in flask_elasticsearch.py
from flask import _app_ctx_stack
assert perform_request.call_count == 1 + _app_ctx_stack.top.elasticsearch.transport.max_retries
assert perform_request.call_count == 1 + 3
| Add a test to indicate/ ensure that flask is performing retries … | Add a test to indicate/ ensure that flask is performing retries …
The retry functionality is buried in the elasticsearch.transport.Transport class and
can be effected by passing max_retries to the elasticsearch.client.ElasticSearch object
(https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/client/__init__.py#L184)
or the flask_elasticsearch.FlaskElasticsearch.init_app method. This test is an attempt
to surface this behaviour.
| Python | mit | alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api |
a2b4b732c15c3cfefb345354bca8fc6de47d4820 | appengine_config.py | appengine_config.py | """`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib') | """`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
import os
# Called only if the current namespace is not set.
def namespace_manager_default_namespace_for_request():
# The returned string will be used as the Google Apps domain.
applicationVersion="Default"
if "CURRENT_VERSION_ID" in os.environ:
applicationVersion = os.environ["CURRENT_VERSION_ID"].split('.')[0]
return applicationVersion
| Enable NDB Shared memory namespace partioning using engine Version ID | Enable NDB Shared memory namespace partioning using engine Version ID
| Python | apache-2.0 | dbs/schemaorg,vholland/schemaorg,schemaorg/schemaorg,vholland/schemaorg,tfrancart/schemaorg,schemaorg/schemaorg,unor/schemaorg,schemaorg/schemaorg,dbs/schemaorg,vholland/schemaorg,dbs/schemaorg,tfrancart/schemaorg,tfrancart/schemaorg,vholland/schemaorg,schemaorg/schemaorg,dbs/schemaorg,tfrancart/schemaorg,unor/schemaorg,schemaorg/schemaorg,unor/schemaorg |
ed6146566d57105af88855c6b8668b4f76e98dbf | xmanager/xm_local/__init__.py | xmanager/xm_local/__init__.py | # Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
| # Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
| Make `DockerOptions` part of the `xm_local` module | Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548
| Python | apache-2.0 | deepmind/xmanager,deepmind/xmanager |
55bc355fc97eb5e034e86e7c55919d8cca0edb2b | feincms/context_processors.py | feincms/context_processors.py | from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
| from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
| Remove deprecated appcontent_parameters context processor | Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.
| Python | bsd-3-clause | matthiask/feincms2-content,mjl/feincms,feincms/feincms,joshuajonah/feincms,matthiask/feincms2-content,matthiask/feincms2-content,joshuajonah/feincms,matthiask/django-content-editor,michaelkuty/feincms,pjdelport/feincms,nickburlett/feincms,michaelkuty/feincms,michaelkuty/feincms,feincms/feincms,feincms/feincms,matthiask/django-content-editor,mjl/feincms,nickburlett/feincms,pjdelport/feincms,nickburlett/feincms,nickburlett/feincms,joshuajonah/feincms,michaelkuty/feincms,matthiask/django-content-editor,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor |
3170407aaaeffbc76e31e5fc78d4dacd008e27d2 | backbone_calendar/ajax/mixins.py | backbone_calendar/ajax/mixins.py | from django import http
from django.utils import simplejson as json
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
| import json
from django import http
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
| Use json and not simplejson | Use json and not simplejson
| Python | agpl-3.0 | rezometz/django-backbone-calendar,rezometz/django-backbone-calendar,rezometz/django-backbone-calendar |
52fddb061bf5f282da75df4462dd735d9fdc041a | sgfs/actions/create_structure.py | sgfs/actions/create_structure.py | from sgfs import SGFS
from sgactions.utils import notify
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title='Preview Folders' if dry_run else 'Creating Folders',
message='\n'.join(commands) or 'Everything is up to date.',
)
| from sgfs import SGFS
from sgactions.utils import notify, progress
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
title='Preview Folders' if dry_run else 'Creating Folders'
progress(title=title, message='Running; please wait...')
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title=title,
message='\n'.join(commands) or 'Everything is up to date.',
)
| Use new sgactions progress dialog | Use new sgactions progress dialog
| Python | bsd-3-clause | westernx/sgfs,westernx/sgfs |
3b15911c669d072bee1a171696636162d23bd07e | spec/openpassword/config_spec.py | spec/openpassword/config_spec.py | from nose.tools import assert_equals
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
assert_equals(cfg.get_path(), "path/to/keychain")
| from nose.tools import *
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
eq_(cfg.get_path(), "path/to/keychain")
| Update config test to use eq_ matcher | Update config test to use eq_ matcher
| Python | mit | openpassword/blimey,openpassword/blimey |
c073131ac4b951affdac454824bb3eed913cd931 | huxley/api/tests/committee.py | huxley/api/tests/committee.py | import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
| # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
| Add copyright header to CommitteeDetailGetTestCase. | Add copyright header to CommitteeDetailGetTestCase.
| Python | bsd-3-clause | bmun/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,ctmunwebmaster/huxley,bmun/huxley,bmun/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,bmun/huxley,nathanielparke/huxley,nathanielparke/huxley,ctmunwebmaster/huxley |
2a68505e36358900e045f74a8b2885486f6a302e | framework/guid/model.py | framework/guid/model.py | from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField(backref='guid')
_meta = {
'optimistic': True
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def __init__(self, *args, **kwargs):
"""Overridden constructor. When a GuidStoredObject is instantiated,
create a new Guid if the object doesn't already have one, then attach
the Guid to the StoredObject.
Note: This requires saving the StoredObject once and the Guid twice to
ensure correct back-references; this could be made more efficient if
modular-odm could handle back-references of objects that have not been
saved.
"""
# Call superclass constructor
super(GuidStoredObject, self).__init__(*args, **kwargs)
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
self.save()
# Add self to GUID
guid.referent = self
guid.save()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
| from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def __init__(self, *args, **kwargs):
""" Ensure GUID after initialization. """
super(GuidStoredObject, self).__init__(*args, **kwargs)
self._ensure_guid()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
| Remove backref on GUID; factor out _ensure_guid | Remove backref on GUID; factor out _ensure_guid
| Python | apache-2.0 | arpitar/osf.io,rdhyee/osf.io,lyndsysimon/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,abought/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,emetsger/osf.io,kwierman/osf.io,barbour-em/osf.io,icereval/osf.io,Johnetordoff/osf.io,jolene-esposito/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,binoculars/osf.io,acshi/osf.io,sbt9uc/osf.io,kushG/osf.io,mluo613/osf.io,KAsante95/osf.io,RomanZWang/osf.io,amyshi188/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,icereval/osf.io,dplorimer/osf,brandonPurvis/osf.io,samchrisinger/osf.io,hmoco/osf.io,ckc6cz/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,samanehsan/osf.io,TomBaxter/osf.io,arpitar/osf.io,GaryKriebel/osf.io,jmcarp/osf.io,alexschiller/osf.io,revanthkolli/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,mattclark/osf.io,hmoco/osf.io,GaryKriebel/osf.io,zkraime/osf.io,sbt9uc/osf.io,kch8qx/osf.io,mluo613/osf.io,himanshuo/osf.io,monikagrabowska/osf.io,reinaH/osf.io,TomHeatwole/osf.io,samchrisinger/osf.io,emetsger/osf.io,arpitar/osf.io,asanfilippo7/osf.io,lamdnhan/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,reinaH/osf.io,danielneis/osf.io,billyhunt/osf.io,jmcarp/osf.io,ckc6cz/osf.io,erinspace/osf.io,ticklemepierce/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,GageGaskins/osf.io,mattclark/osf.io,caseyrygt/osf.io,himanshuo/osf.io,emetsger/osf.io,kch8qx/osf.io,cosenal/osf.io,jinluyuan/osf.io,felliott/osf.io,HarryRybacki/osf.io,bdyetton/prettychart,emetsger/osf.io,barbour-em/osf.io,jolene-esposito/osf.io,revanthkolli/osf.io,MerlinZhang/osf.io,zkraime/osf.io,crcresearch/osf.io,doublebits/osf.io,chennan47/osf.io,GageGaskins/osf.io,pattisdr/osf.io,SSJohns/osf.io,njantrania/osf.io,dplorimer/osf,fabianvf/osf.io,caneruguz/osf.io,rdhyee/osf.io,hmoco/osf.io,mfraezz/osf.io,zkraime/osf.io,Ghalko/osf.io,kushG/osf.io,fabianvf/osf.io,samchri
singer/osf.io,AndrewSallans/osf.io,jinluyuan/osf.io,chennan47/osf.io,amyshi188/osf.io,jinluyuan/osf.io,leb2dg/osf.io,chrisseto/osf.io,danielneis/osf.io,acshi/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,ZobairAlijan/osf.io,jeffreyliu3230/osf.io,njantrania/osf.io,CenterForOpenScience/osf.io,lamdnhan/osf.io,bdyetton/prettychart,dplorimer/osf,mattclark/osf.io,caseyrollins/osf.io,mluke93/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,KAsante95/osf.io,doublebits/osf.io,jnayak1/osf.io,doublebits/osf.io,zachjanicki/osf.io,caneruguz/osf.io,adlius/osf.io,jolene-esposito/osf.io,cldershem/osf.io,felliott/osf.io,saradbowman/osf.io,Ghalko/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,sloria/osf.io,ckc6cz/osf.io,binoculars/osf.io,cslzchen/osf.io,bdyetton/prettychart,cslzchen/osf.io,cosenal/osf.io,brandonPurvis/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,MerlinZhang/osf.io,fabianvf/osf.io,sloria/osf.io,DanielSBrown/osf.io,reinaH/osf.io,kwierman/osf.io,zamattiac/osf.io,zamattiac/osf.io,hmoco/osf.io,ckc6cz/osf.io,zamattiac/osf.io,danielneis/osf.io,alexschiller/osf.io,zkraime/osf.io,GaryKriebel/osf.io,erinspace/osf.io,mluo613/osf.io,alexschiller/osf.io,fabianvf/osf.io,aaxelb/osf.io,Nesiehr/osf.io,doublebits/osf.io,baylee-d/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,sbt9uc/osf.io,mluke93/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,jeffreyliu3230/osf.io,felliott/osf.io,acshi/osf.io,jnayak1/osf.io,lamdnhan/osf.io,jolene-esposito/osf.io,erinspace/osf.io,amyshi188/osf.io,crcresearch/osf.io,acshi/osf.io,mluo613/osf.io,samanehsan/osf.io,zachjanicki/osf.io,bdyetton/prettychart,brandonPurvis/osf.io,njantrania/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,abought/osf.io,MerlinZhang/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,kch8qx/osf.io,lamdnhan/osf.io,leb2dg/osf.io,revanthkolli/osf.io
,zachjanicki/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,chrisseto/osf.io,njantrania/osf.io,saradbowman/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,cosenal/osf.io,HarryRybacki/osf.io,jeffreyliu3230/osf.io,revanthkolli/osf.io,jnayak1/osf.io,icereval/osf.io,cwisecarver/osf.io,cslzchen/osf.io,chennan47/osf.io,jnayak1/osf.io,kch8qx/osf.io,wearpants/osf.io,kwierman/osf.io,kushG/osf.io,jinluyuan/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,AndrewSallans/osf.io,danielneis/osf.io,petermalcolm/osf.io,petermalcolm/osf.io,wearpants/osf.io,cwisecarver/osf.io,HarryRybacki/osf.io,Ghalko/osf.io,dplorimer/osf,mfraezz/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,felliott/osf.io,Nesiehr/osf.io,acshi/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,jeffreyliu3230/osf.io,doublebits/osf.io,RomanZWang/osf.io,arpitar/osf.io,billyhunt/osf.io,barbour-em/osf.io,MerlinZhang/osf.io,himanshuo/osf.io,TomBaxter/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,ticklemepierce/osf.io,caseyrollins/osf.io,leb2dg/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,GageGaskins/osf.io,KAsante95/osf.io,binoculars/osf.io,zamattiac/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,samanehsan/osf.io,KAsante95/osf.io,cosenal/osf.io,adlius/osf.io,baylee-d/osf.io,RomanZWang/osf.io,baylee-d/osf.io,SSJohns/osf.io,zachjanicki/osf.io,chrisseto/osf.io,sloria/osf.io,SSJohns/osf.io,TomHeatwole/osf.io,adlius/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,chrisseto/osf.io,jmcarp/osf.io,kushG/osf.io,cldershem/osf.io,TomHeatwole/osf.io,mluo613/osf.io,reinaH/osf.io,abought/osf.io,mluke93/osf.io,jmcarp/osf.io,cldershem/osf.io,crcresearch/osf.io,kwierman/osf.io,Ghalko/osf.io,himanshuo/osf.io,wearpants/osf.io,laurenrevere/osf.io,leb2dg/osf.io,HarryRybacki/osf.io,adlius/osf.io,pattisdr/osf.io,abought/osf.io,ZobairAlijan/osf
.io |
21b022362a09c4e408b9375a38505975e8c7f965 | comet/utility/test/test_whitelist.py | comet/utility/test/test_whitelist.py | from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0))
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0))
)
| from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
None
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0)),
None
)
| Remove assertIsNone for Python 2.6 compatibility | Remove assertIsNone for Python 2.6 compatibility
| Python | bsd-2-clause | jdswinbank/Comet,jdswinbank/Comet |
40616138673205b3b4f3150a659ab02830b2bbc0 | tests/test_player_creation.py | tests/test_player_creation.py | from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
| from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
post_response = app.post('/players', params)
assert post_response.status_int == 201
get_response = app.get('/players')
assert get_response.status_int == 200
assert get_response.content_type == 'application/json'
assert get_response.json == expected
| Update player creation test to verify POST status code. | Update player creation test to verify POST status code.
| Python | mit | dropshot/dropshot-server |
4921d58775faa65423fac321ef68f065b2499813 | experiments/hydrotrend-uq-1/plot_results.py | experiments/hydrotrend-uq-1/plot_results.py | #!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
tab_file = 'dakota.dat'
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-dace-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_stdev.png')
| #!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
from dakota_utils.convert import has_interface_column, strip_interface_column
tab_file = 'dakota.dat'
if has_interface_column(tab_file):
strip_interface_column(tab_file)
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-uq-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_stdev.png')
| Update script for Dakota 6.1 tabular output file | Update script for Dakota 6.1 tabular output file
| Python | mit | mcflugen/dakota-experiments,mdpiper/dakota-experiments,mdpiper/dakota-experiments,mdpiper/dakota-experiments,mcflugen/dakota-experiments |
a263926614a2f9c0c5c41d19282db79ac5e79e7e | gittip/orm/__init__.py | gittip/orm/__init__.py | from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback() | from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback() | Remove the convenience functions, reorganize around the SQLAlchemy class | Remove the convenience functions, reorganize around the SQLAlchemy class
| Python | mit | eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,MikeFair/www.gittip.com,eXcomm/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,bountysource/www.gittip.com,bountysource/www.gittip.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,studio666/gratipay.com,bountysource/www.gittip.com |
c8779edcb4078c799b7112625b5495f63a00e428 | l10n_ro_partner_unique/models/res_partner.py | l10n_ro_partner_unique/models/res_partner.py | # Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
| # Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
| Add vat unique per comapny | Add vat unique per comapny
| Python | agpl-3.0 | OCA/l10n-romania,OCA/l10n-romania |
548cfea821bf1b0b92ce09c54405554d264b5395 | tests/integration/session/test_timeout.py | tests/integration/session/test_timeout.py | import time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase
class TestTimeout(IntegrationTestCase):
def setUp(self):
settings.EQ_SESSION_TIMEOUT_SECONDS = 1
settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
super().setUp()
def test_timeout_continue_returns_200(self):
self.launchSurvey('test', 'timeout')
self.get('/timeout-continue')
self.assertStatusOK()
def test_when_session_times_out_server_side_401_is_returned(self):
self.launchSurvey('test', 'timeout')
time.sleep(2)
self.get(self.last_url)
self.assertStatusUnauthorised()
def test_schema_defined_timeout_is_used(self):
self.launchSurvey('test', 'timeout')
self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')
def test_schema_defined_timeout_cant_be_higher_than_server(self):
self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
self.launchSurvey('test', 'timeout')
self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
| import time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase
class TestTimeout(IntegrationTestCase):
def setUp(self):
settings.EQ_SESSION_TIMEOUT_SECONDS = 1
settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
super().setUp()
def tearDown(self):
settings.EQ_SESSION_TIMEOUT_SECONDS = 45 * 60
settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 30
super().tearDown()
def test_timeout_continue_returns_200(self):
self.launchSurvey('test', 'timeout')
self.get('/timeout-continue')
self.assertStatusOK()
def test_when_session_times_out_server_side_401_is_returned(self):
self.launchSurvey('test', 'timeout')
time.sleep(2)
self.get(self.last_url)
self.assertStatusUnauthorised()
def test_schema_defined_timeout_is_used(self):
self.launchSurvey('test', 'timeout')
self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')
def test_schema_defined_timeout_cant_be_higher_than_server(self):
self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
self.launchSurvey('test', 'timeout')
self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
| Fix CSRF missing errors that happen occasionally in tests | Fix CSRF missing errors that happen occasionally in tests
| Python | mit | ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner |
36c2e7449b7817a66b60eaff4c8518ae6d4f4a01 | categories/tests.py | categories/tests.py | from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
| Remove categoy_list test until urls will fixed. | Remove categoy_list test until urls will fixed.
| Python | apache-2.0 | belatrix/BackendAllStars |
40958981df401a898a39ddad45c2b48669a44ee7 | setup.py | setup.py | #!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=[
"parsimonious>=0.5,<0.6",
]
)
| #!/usr/bin/env python
import os
import sys
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
_install_requires = [
"parsimonious>=0.5,<0.6",
]
if sys.version_info[:2] <= (2, 6):
_install_requires.append("argparse==1.2.1")
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=_install_requires,
)
| Support CLI on Python 2.6 | Support CLI on Python 2.6
| Python | bsd-2-clause | mwilliamson/python-mammoth,JoshBarr/python-mammoth |
139d09ecd83694dd92d393b64d1d9b0ad05e9f4c | setup.py | setup.py | import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
| import distutils.core
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
| Add instructions for bumping the version. | Add instructions for bumping the version.
| Python | mit | kalekundert/nonstdlib,KenKundert/nonstdlib,kalekundert/nonstdlib,KenKundert/nonstdlib |
1f9bc1b6f9a796458d104c01b9a344cbb0c84a9b | Lib/fontParts/fontshell/groups.py | Lib/fontParts/fontshell/groups.py | import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
| import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
| Add defcon implementation of group lookup methods. | Add defcon implementation of group lookup methods.
| Python | mit | robofab-developers/fontParts,robofab-developers/fontParts |
fbfd656d0c11bfbc6500fcdffdfae422ab50a08f | lancet/contrib/dploi.py | lancet/contrib/dploi.py | import click
@click.command()
@click.argument('environment')
@click.pass_obj
def ssh(lancet, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
lancet.defer_to_shell('ssh', '-p', str(config.get('port', 20)), host)
| from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
| Allow to print the ssh command | Allow to print the ssh command
| Python | mit | GaretJax/lancet,GaretJax/lancet |
d03657217cfd019bb55a4895a4cc6b0a80068ff0 | bluebottle/bb_projects/migrations/0003_auto_20160815_1658.py | bluebottle/bb_projects/migrations/0003_auto_20160815_1658.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
try:
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
except ProjectPhase.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
| Make the status data migration optional | Make the status data migration optional
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle |
12c833b1097579ca4a0162dca0d789b787f7d237 | oscar/core/compat.py | oscar/core/compat.py | from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in forieng key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
| from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
| Add two settings related to custom user model | Add two settings related to custom user model
| Python | bsd-3-clause | josesanch/django-oscar,binarydud/django-oscar,Idematica/django-oscar,QLGu/django-oscar,dongguangming/django-oscar,kapari/django-oscar,jmt4/django-oscar,amirrpp/django-oscar,WadeYuChen/django-oscar,rocopartners/django-oscar,rocopartners/django-oscar,amirrpp/django-oscar,ka7eh/django-oscar,ahmetdaglarbas/e-commerce,jinnykoo/christmas,michaelkuty/django-oscar,django-oscar/django-oscar,saadatqadri/django-oscar,jinnykoo/christmas,michaelkuty/django-oscar,monikasulik/django-oscar,pdonadeo/django-oscar,pasqualguerrero/django-oscar,QLGu/django-oscar,kapari/django-oscar,pasqualguerrero/django-oscar,vovanbo/django-oscar,eddiep1101/django-oscar,saadatqadri/django-oscar,faratro/django-oscar,ka7eh/django-oscar,spartonia/django-oscar,ka7eh/django-oscar,WillisXChen/django-oscar,rocopartners/django-oscar,nickpack/django-oscar,bschuon/django-oscar,adamend/django-oscar,amirrpp/django-oscar,manevant/django-oscar,bschuon/django-oscar,jinnykoo/wuyisj,jlmadurga/django-oscar,taedori81/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,WadeYuChen/django-oscar,john-parton/django-oscar,faratro/django-oscar,MatthewWilkes/django-oscar,mexeniz/django-oscar,jlmadurga/django-oscar,jinnykoo/wuyisj.com,pdonadeo/django-oscar,sonofatailor/django-oscar,QLGu/django-oscar,anentropic/django-oscar,nfletton/django-oscar,manevant/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,itbabu/django-oscar,saadatqadri/django-oscar,okfish/django-oscar,nfletton/django-oscar,Jannes123/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,anentropic/django-oscar,jinnykoo/wuyisj,anentropic/django-oscar,kapari/django-oscar,sasha0/django-oscar,thechampanurag/django-oscar,amirrpp/django-oscar,bnprk/django-oscar,ademuk/django-oscar,kapari/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,adamend/django-oscar,pdonadeo/django-oscar,makielab
/django-oscar,kapt/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,spartonia/django-oscar,mexeniz/django-oscar,makielab/django-oscar,jinnykoo/wuyisj,WillisXChen/django-oscar,jmt4/django-oscar,marcoantoniooliveira/labweb,WadeYuChen/django-oscar,bnprk/django-oscar,makielab/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,pdonadeo/django-oscar,binarydud/django-oscar,nickpack/django-oscar,marcoantoniooliveira/labweb,ademuk/django-oscar,Bogh/django-oscar,adamend/django-oscar,nickpack/django-oscar,rocopartners/django-oscar,michaelkuty/django-oscar,DrOctogon/unwash_ecom,john-parton/django-oscar,faratro/django-oscar,spartonia/django-oscar,sonofatailor/django-oscar,Bogh/django-oscar,vovanbo/django-oscar,monikasulik/django-oscar,ademuk/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,itbabu/django-oscar,WadeYuChen/django-oscar,mexeniz/django-oscar,WillisXChen/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,DrOctogon/unwash_ecom,eddiep1101/django-oscar,Jannes123/django-oscar,faratro/django-oscar,QLGu/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,machtfit/django-oscar,kapt/django-oscar,adamend/django-oscar,jmt4/django-oscar,Bogh/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,bnprk/django-oscar,okfish/django-oscar,jinnykoo/wuyisj.com,vovanbo/django-oscar,thechampanurag/django-oscar,josesanch/django-oscar,makielab/django-oscar,saadatqadri/django-oscar,nfletton/django-oscar,okfish/django-oscar,taedori81/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,manevant/django-oscar,Jannes123/django-oscar,binarydud/django-oscar,bschuon/django-oscar,django-oscar/django-oscar,jinnykoo/christmas,machtfit/django-oscar,jlmadurga/django-oscar,marcoantoniooliveira/labweb,lijoantony/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj,DrOctogon/unwash_ecom,spartonia/django-oscar,MatthewWilkes/django-oscar,nickpack/django-oscar,ka7eh/django-oscar,mexeniz/django-oscar,thechampanurag/django-oscar,jmt4/djang
o-oscar,elliotthill/django-oscar,bnprk/django-oscar,pasqualguerrero/django-oscar,pasqualguerrero/django-oscar,WillisXChen/django-oscar,Jannes123/django-oscar,kapt/django-oscar,sonofatailor/django-oscar,jinnykoo/wuyisj.com,Idematica/django-oscar,sasha0/django-oscar,okfish/django-oscar,marcoantoniooliveira/labweb,thechampanurag/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,lijoantony/django-oscar,michaelkuty/django-oscar,elliotthill/django-oscar,Idematica/django-oscar,ahmetdaglarbas/e-commerce,monikasulik/django-oscar,MatthewWilkes/django-oscar,ahmetdaglarbas/e-commerce,django-oscar/django-oscar,manevant/django-oscar,dongguangming/django-oscar,monikasulik/django-oscar,machtfit/django-oscar,bschuon/django-oscar,elliotthill/django-oscar,eddiep1101/django-oscar,ahmetdaglarbas/e-commerce |
04f36fab2168fb9cd34d3c6fc7f31533c90b9149 | app/clients/statsd/statsd_client.py | app/clients/statsd/statsd_client.py | from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
super(StatsClient, self).timing(stat, delta, rate)
| from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
| Format the stat name with environmenbt | Format the stat name with environmenbt
| Python | mit | alphagov/notifications-api,alphagov/notifications-api |
da03ad3386d45d310514f2b5ef3145fbcf5b773d | dashboard/ratings/tests/factories.py | dashboard/ratings/tests/factories.py | """
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = timezone.now() - datetime.timedelta(days=random.randint(0, 5))
submission_date = timezone.now() + datetime.timedelta(days=random.randint(1, 5))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = random.randint(0,100)
documentation = random.randint(0,100)
problem_solving = random.randint(0,100)
effort = random.randint(0,100)
creativity = random.randint(0,100)
originality = random.randint(0,100)
submission = factory.SubFactory(SubmissionFactory)
| """
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import factory.fuzzy
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=30))
submission_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=100))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = factory.fuzzy.FuzzyInteger(0,100)
documentation = factory.fuzzy.FuzzyInteger(0,100)
problem_solving = factory.fuzzy.FuzzyInteger(0,100)
effort = factory.fuzzy.FuzzyInteger(0,100)
creativity = factory.fuzzy.FuzzyInteger(0,100)
originality = factory.fuzzy.FuzzyInteger(0,100)
submission = factory.SubFactory(SubmissionFactory)
| Make sure seeder creates random values | Make sure seeder creates random values
| Python | mit | daltonamitchell/rating-dashboard,daltonamitchell/rating-dashboard,daltonamitchell/rating-dashboard |
79b0584887075eb1732770d1732ae07147ec21b6 | tests/mpd/protocol/test_status.py | tests/mpd/protocol/test_status.py | from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
def test_clearerror(self):
self.send_request('clearerror')
self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
def test_currentsong(self):
track = Track()
self.core.tracklist.add([track])
self.core.playback.play()
self.send_request('currentsong')
self.assertInResponse('file: ')
self.assertInResponse('Time: 0')
self.assertInResponse('Artist: ')
self.assertInResponse('Title: ')
self.assertInResponse('Album: ')
self.assertInResponse('Track: 0')
self.assertNotInResponse('Date: ')
self.assertInResponse('Pos: 0')
self.assertInResponse('Id: 0')
self.assertInResponse('OK')
def test_currentsong_without_song(self):
self.send_request('currentsong')
self.assertInResponse('OK')
def test_stats_command(self):
self.send_request('stats')
self.assertInResponse('OK')
def test_status_command(self):
self.send_request('status')
self.assertInResponse('OK')
| from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
def test_clearerror(self):
self.send_request('clearerror')
self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
def test_currentsong(self):
track = Track(uri='dummy:/a')
self.backend.library.dummy_library = [track]
self.core.tracklist.add(uris=[track.uri]).get()
self.core.playback.play()
self.send_request('currentsong')
self.assertInResponse('file: dummy:/a')
self.assertInResponse('Time: 0')
self.assertInResponse('Artist: ')
self.assertInResponse('Title: ')
self.assertInResponse('Album: ')
self.assertInResponse('Track: 0')
self.assertNotInResponse('Date: ')
self.assertInResponse('Pos: 0')
self.assertInResponse('Id: 0')
self.assertInResponse('OK')
def test_currentsong_without_song(self):
self.send_request('currentsong')
self.assertInResponse('OK')
def test_stats_command(self):
self.send_request('stats')
self.assertInResponse('OK')
def test_status_command(self):
self.send_request('status')
self.assertInResponse('OK')
| Stop using tracklist add tracks in mpd status test | tests: Stop using tracklist add tracks in mpd status test
| Python | apache-2.0 | ZenithDK/mopidy,quartz55/mopidy,tkem/mopidy,dbrgn/mopidy,rawdlite/mopidy,ali/mopidy,glogiotatidis/mopidy,quartz55/mopidy,bacontext/mopidy,bencevans/mopidy,kingosticks/mopidy,ZenithDK/mopidy,tkem/mopidy,dbrgn/mopidy,tkem/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,adamcik/mopidy,bacontext/mopidy,bacontext/mopidy,pacificIT/mopidy,diandiankan/mopidy,diandiankan/mopidy,swak/mopidy,ZenithDK/mopidy,tkem/mopidy,dbrgn/mopidy,jmarsik/mopidy,jcass77/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,adamcik/mopidy,mopidy/mopidy,jodal/mopidy,hkariti/mopidy,jcass77/mopidy,ali/mopidy,swak/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mopidy/mopidy,bencevans/mopidy,pacificIT/mopidy,mokieyue/mopidy,diandiankan/mopidy,rawdlite/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,ali/mopidy,jodal/mopidy,jcass77/mopidy,mopidy/mopidy,vrs01/mopidy,jmarsik/mopidy,bacontext/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,mokieyue/mopidy,bencevans/mopidy,adamcik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,quartz55/mopidy,vrs01/mopidy,pacificIT/mopidy,hkariti/mopidy,hkariti/mopidy,swak/mopidy,hkariti/mopidy,mokieyue/mopidy,jmarsik/mopidy,mokieyue/mopidy,dbrgn/mopidy,jodal/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,bencevans/mopidy,quartz55/mopidy,rawdlite/mopidy,vrs01/mopidy,ali/mopidy,swak/mopidy,ZenithDK/mopidy |
31dd9f5ec73db577bf00d7411ecffeba30691d0c | django_lean/lean_analytics/models.py | django_lean/lean_analytics/models.py | from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
| from django.conf import settings
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
| Make it possible to disable enrollment and goal record analytics. | Make it possible to disable enrollment and goal record analytics.
| Python | bsd-3-clause | e-loue/django-lean,e-loue/django-lean |
7da561d7bf3affecce8b10b50818591ccebe0ba2 | dog/core/cog.py | dog/core/cog.py | class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
| import logging
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
self.logger = logging.getLogger('cog.' + type(self).__name__.lower())
| Add logger attribute in Cog baseclass | Add logger attribute in Cog baseclass
I don't feel like refactoring all of my cog code to use this attribute at the moment, so I'll just leave this here for now.
| Python | mit | sliceofcode/dogbot,slice/dogbot,slice/dogbot,sliceofcode/dogbot,slice/dogbot |
eafafd3d90024c552a6a607871c1441e358eb927 | Bar.py | Bar.py | import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
| import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
| Fix bar graph x-axis centering. | Fix bar graph x-axis centering.
| Python | bsd-3-clause | alexras/boomslang |
320214ca1636415bc4d677ba9e3b40f0bf24c8f9 | openprescribing/frontend/migrations/0008_create_searchbookmark.py | openprescribing/frontend/migrations/0008_create_searchbookmark.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_auto_20160908_0811'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
| Fix multiple leaf nodes in migrations | Fix multiple leaf nodes in migrations
| Python | mit | ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc |
106eaf7d22bf4039756c0ae32c125d475eb4c109 | utils/html.py | utils/html.py | #coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.result = []
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html):
s = HTMLTextExtractor()
s.feed(html)
return s.get_text()
| #coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self, newlines=True):
HTMLParser.__init__(self)
self.result = []
self.newlines = newlines
def handle_starttag(self, tag, attrs):
if self.newlines:
if tag == 'br':
self.result.append('\n')
elif tag == 'p':
self.result.append('\n')
def handle_endtag(self, tag):
if self.newlines:
if tag == 'p':
self.result.append('\n')
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html, newlines=True):
s = HTMLTextExtractor(newlines)
s.feed(html)
return s.get_text()
| Add new-line support to HTML text extractor | Add new-line support to HTML text extractor
| Python | artistic-2.0 | UltrosBot/Ultros,UltrosBot/Ultros |
d48fd8b11fe2d9edef0ca7044df8659244a13821 | Telegram/Telegram_Harmonbot.py | Telegram/Telegram_Harmonbot.py |
import telegram
import telegram.ext
import os
import dotenv
version = "0.1.4"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token)
def test(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.2.0"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token, use_context = True)
def test(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
| Update to context based callbacks | [Telegram] Update to context based callbacks
| Python | mit | Harmon758/Harmonbot,Harmon758/Harmonbot |
a174b827b36293d90babfcdf557bdbb9c9d0b655 | ibei/__init__.py | ibei/__init__.py | # -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
| # -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
__version__ = "0.0.2"
| Add version information in module | Add version information in module
| Python | mit | jrsmith3/tec,jrsmith3/ibei,jrsmith3/tec |
aeb3ce72205051039e6339f83a2b7dec37f8b8c9 | idlk/__init__.py | idlk/__init__.py | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
| Normalize filename to NFC before computing the hash | Normalize filename to NFC before computing the hash
| Python | mit | znerol/py-idlk |
9fb8b0a72740ba155c76a5812706612b656980f4 | openprocurement/auctions/flash/constants.py | openprocurement/auctions/flash/constants.py | # -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
| # -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
| Add view_locations for plugins in core | Add view_locations for plugins in core
| Python | apache-2.0 | openprocurement/openprocurement.auctions.flash |
8d9f3214cc5663dc29f7dcf3a03bc373a51d010b | core/admin/start.py | core/admin/start.py | #!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn -w 4 -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
| #!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn --threads ", str(os.cpu_count()),
" -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
| Use threads in gunicorn rather than processes | Use threads in gunicorn rather than processes
This ensures that we share the auth-cache... will enable memory savings
and may improve performances when a higher number of cores is available
"smarter default"
| Python | mit | kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io |
e8ac68b33b3b7bf54baa36b89ac90e9e5a666599 | magnum/conf/services.py | magnum/conf/services.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.StrOpt('host',
help=_('Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
| # Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.HostAddressOpt('host',
help=_('Name of this node. This can be an opaque '
'identifier. It is not necessarily a hostname, '
'FQDN, or IP address. However, the node name '
'must be valid within an AMQP key, and if using '
'ZeroMQ, a valid hostname, FQDN, or IP '
'address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
| Use HostAddressOpt for opts that accept IP and hostnames | Use HostAddressOpt for opts that accept IP and hostnames
Some configuration options were accepting both IP addresses
and hostnames. Since there was no specific OSLO opt type to
support this, we were using ``StrOpt``. The change [1] that
added support for ``HostAddressOpt`` type was merged in Ocata
and became available for use with oslo version 3.22.
This patch changes the opt type of configuration options to use
this more relevant opt type - HostAddressOpt.
[1] I77bdb64b7e6e56ce761d76696bc4448a9bd325eb
Change-Id: Id179ad55d4344a7dc2214896290890862b560e0c
| Python | apache-2.0 | openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum |
381cf72695185fda93d0d9685fad887d445b4a72 | mesonwrap/inventory.py | mesonwrap/inventory.py | RESTRICTED_PROJECTS = [
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
| RESTRICTED_PROJECTS = [
'cidata',
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
| Add cidata to the list of restricted projects | Add cidata to the list of restricted projects
| Python | apache-2.0 | mesonbuild/wrapweb,mesonbuild/wrapweb,mesonbuild/wrapweb |
8ffd6ffecd7ce713446385b6cd108e50fb041403 | __main__.py | __main__.py | from . import *
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
| from . import *
import readline
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
| Add readline support for the REPL | Add readline support for the REPL
| Python | isc | gvx/isle |
c654bc1fdacdb355b7e03c853ebcdc919ac5f91d | tests/capture/test_capture.py | tests/capture/test_capture.py | from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == 'tcp.port==6666,dns' | from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
possible_results = ['tcp.port==8888,http', 'tcp.port==6666,dns']
assert params[decode_index + 1] in possible_results
possible_results.remove(params[decode_index + 1])
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == possible_results[0]
| Fix tests to avoid dict ordering problem | Fix tests to avoid dict ordering problem
| Python | mit | KimiNewt/pyshark,eaufavor/pyshark-ssl |
3e9a4f27ad05b3ecd2a4c013ff0f3b04e5fe44aa | tests/test_list_generators.py | tests/test_list_generators.py | import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(
valid_data.VALID_GENERATOR_CONFIGURATION, self.agent_id
)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
| import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
| Fix agent creation configuration to make tests great again | Fix agent creation configuration to make tests great again
lint
| Python | bsd-3-clause | craft-ai/craft-ai-client-python,craft-ai/craft-ai-client-python |
4420eb020d96004c5373584781c7b130de7b90e9 | reg/__init__.py | reg/__init__.py | # flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
| # flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
from .sentinel import Sentinel
| Make sentinel available to outside. | Make sentinel available to outside.
| Python | bsd-3-clause | taschini/reg,morepath/reg |
268c4458161ce754a82e3986787f6703f9122e3e | trackmybmi/users/factories.py | trackmybmi/users/factories.py | import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
| import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
| Replace User import with call to get_user_model() | Replace User import with call to get_user_model()
| Python | mit | ojh/trackmybmi |
b9ccbb2addd8dcaeb100bb5e95768caa2a97c280 | srttools/core/__init__.py | srttools/core/__init__.py | import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
| import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
version = [int(i) for i in sm.version.version.split('.')]
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
| Set default backend, and minimum statsmodels version | Set default backend, and minimum statsmodels version
| Python | bsd-3-clause | matteobachetti/srt-single-dish-tools |
ab02c54cc713cc10c60f09dde3cae2fca3c2a9a4 | conference/management/commands/make_speaker_profiles_public.py | conference/management/commands/make_speaker_profiles_public.py |
from django.core.management.base import BaseCommand
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
args = '<conference>'
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
from django.core.management.base import BaseCommand
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
def handle(self, *args, **options):
try:
conference = options['conference']
except KeyError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
| Fix script to make speaker profiles public. | Fix script to make speaker profiles public.
| Python | bsd-2-clause | EuroPython/epcon,EuroPython/epcon,EuroPython/epcon,EuroPython/epcon |
b875f457d7a4926f5028428ead4cecc75af90c2e | examples/launch_cloud_harness.py | examples/launch_cloud_harness.py | import json
import os
from osgeo import gdal
from gbdxtools import Interface
from gbdx_task_template import TaskTemplate, Task, InputPort, OutputPort
gbdx = Interface()
# data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
# aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
class RasterMetaApp(TaskTemplate):
task = Task("RasterMetaTask")
task.input_raster = InputPort(value="/Users/michaelconnor/demo_image")
task.output_meta = OutputPort(value="/Users/michaelconnor")
def invoke(self):
images = self.task.input_raster.list_files(extensions=[".tiff", ".tif"])
# Magic Starts here
for img in images:
header = "META FOR %s\n\n" % os.path.basename(img)
gtif = gdal.Open(img)
self.task.output_meta.write('metadata.txt', header)
self.task.output_meta.write('metadata.txt', json.dumps(gtif.GetMetadata(), indent=2))
# Create a cloud-harness
ch_task = gbdx.Task(RasterMetaApp)
# NOTE: This will override the value in the class definition above.
ch_task.inputs.input_raster = 's3://test-tdgplatform-com/data/envi_src/sm_tiff' # Overwrite the value from
workflow = gbdx.Workflow([ch_task])
# workflow = gbdx.Workflow([aoptask, ch_task])
workflow.savedata(ch_task.outputs.output_meta, location='CH_OUT')
# workflow.savedata(aoptask.outputs.data, location='AOP_OUT')
# NOTE: Always required because the source bundle must be uploaded.
ch_task.upload_input_ports()
print(workflow.generate_workflow_description())
print(workflow.execute())
| from gbdxtools import Interface
gbdx = Interface()
# Create a cloud-harness gbdxtools Task
from ch_tasks.cp_task import CopyTask
cp_task = gbdx.Task(CopyTask)
from ch_tasks.raster_meta import RasterMetaTask
ch_task = gbdx.Task(RasterMetaTask)
# NOTE: This will override the value in the class definition.
ch_task.inputs.input_raster = cp_task.outputs.output_data.value # Overwrite the value from
workflow = gbdx.Workflow([cp_task, ch_task])
workflow.savedata(cp_task.outputs.output_data, location='CH_Demo/output_data')
workflow.savedata(ch_task.outputs.output_meta, location='CH_Demo/output_meta')
print(workflow.execute()) # Will upload cloud-harness ports before executing
# print(workflow.generate_workflow_description())
| Remove the cloud-harness task and add second cloud-harness task for chaining. | Remove the cloud-harness task and add second cloud-harness task for chaining.
| Python | mit | michaelconnor00/gbdxtools,michaelconnor00/gbdxtools |
4f46fe7abf5efcd93bc161f2cfccc58df4ab1ee4 | whats_fresh/whats_fresh_api/tests/views/entry/test_list_preparations.py | whats_fresh/whats_fresh_api/tests/views/entry/test_list_preparations.py | from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
| from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
| Rewrite preparations list test to get ID from URL | Rewrite preparations list test to get ID from URL
| Python | apache-2.0 | iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api |
e0d0c9726766dc3281411e265c4d16ff66ecc595 | regression/pages/studio/terms_of_service.py | regression/pages/studio/terms_of_service.py | """
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
    """
    Terms of Service page
    """
    # Studio base URL plus the static TOS page path.
    url = LOGIN_BASE_URL + '/edx-terms-service'

    def is_browser_on_page(self):
        # The page counts as loaded once the TOS intro sentence appears in
        # the first matching page-body element.
        return "Please read these Terms of Service" in self.q(
            css='.field-page-body'
        ).text[0]
| """
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
    """
    Terms of Service page
    """
    url = LOGIN_BASE_URL + '/edx-terms-service'

    def is_browser_on_page(self):
        # Loaded once the TOS intro sentence shows up in the content section.
        section_text = self.q(css='.content-section').text[0]
        return "Please read these Terms of Service" in section_text
| Fix target css for TOS page | Fix target css for TOS page
| Python | agpl-3.0 | edx/edx-e2e-tests,edx/edx-e2e-tests |
649c70527ae602512cfa6ea62b60ebc43fc69797 | lab/run_trace.py | lab/run_trace.py | # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
# Current call depth; used to indent the trace output.
nest = 0

def trace(frame, event, arg):
    """Trace function for sys.settrace: print every event, indented by depth."""
    global nest

    if nest is None:
        # This can happen when Python is shutting down.
        return None

    # One line per event: indent, event name, file, line number, bytecode offset.
    print("%s%s %s %d @%d" % (
        " " * nest,
        event,
        os.path.basename(frame.f_code.co_filename),
        frame.f_lineno,
        frame.f_lasti,
    ))
    if event == 'call':
        nest += 1
    if event == 'return':
        nest -= 1
    # Returning the trace function keeps local tracing enabled.
    return trace
the_program = sys.argv[1]

sys.settrace(trace)
# NOTE(review): execfile() exists only on Python 2; Python 3 needs
# exec(open(the_program).read()) instead.
execfile(the_program)
| # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
# Indentation level for the trace output: one space per active call.
nest = 0

def trace(frame, event, arg):
    """sys.settrace hook: print each event, indented by the call depth."""
    global nest

    if nest is None:
        # This can happen when Python is shutting down.
        return None

    source_name = os.path.basename(frame.f_code.co_filename)
    indent = " " * nest
    print("%s%s %s %d @%d" % (
        indent, event, source_name, frame.f_lineno, frame.f_lasti,
    ))

    if event == 'call':
        nest += 1
    elif event == 'return':
        nest -= 1
    # Returning the trace function keeps local tracing enabled.
    return trace
the_program = sys.argv[1]

# Read the target source up front; the context manager guarantees the file
# handle is closed before tracing starts (the bare open().read() leaked it).
with open(the_program) as source_file:
    code = source_file.read()
sys.settrace(trace)
exec(code)
| Make this useful for py3 also | Make this useful for py3 also
| Python | apache-2.0 | hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,hugovk/coveragepy |
89bbc555ecf520ee34a9b1292a2bdb5c937b18e2 | addons/hw_drivers/iot_handlers/interfaces/PrinterInterface.py | addons/hw_drivers/iot_handlers/interfaces/PrinterInterface.py | from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
    # Poll CUPS for devices every 120 seconds.
    _loop_delay = 120
    connection_type = 'printer'

    def get_devices(self):
        """Return printer device descriptions keyed by a sanitized identifier.

        Printers already configured in CUPS are merged into the raw devices
        CUPS can discover and flagged as supported.
        """
        printer_devices = {}
        # Only one CUPS call may be in flight at a time.
        with cups_lock:
            printers = conn.getPrinters()
            for printer in printers:
                # Normalise configured queues so they look like discovered devices.
                printers[printer]['supported'] = True # these printers are automatically supported
                printers[printer]['device-make-and-model'] = printers[printer]['printer-make-and-model']
                if 'usb' in printers[printer]['device-uri']:
                    printers[printer]['device-class'] = 'direct'
                else:
                    printers[printer]['device-class'] = 'network'
            devices = conn.getDevices()
            if printers:
                # NOTE(review): getPrinters() is keyed by queue name while
                # getDevices() is keyed by URI, so this update may not merge
                # matching entries the way it intends to -- confirm.
                devices.update(printers)
        for path in devices:
            # Build a stable identifier from the most specific part of the URI.
            if 'uuid=' in path:
                identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
            elif 'serial=' in path:
                identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
            else:
                identifier = sub('[^a-zA-Z0-9_]', '', path)
            devices[path]['identifier'] = identifier
            devices[path]['url'] = path
            printer_devices[identifier] = devices[path]
        return printer_devices
| from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
    # Poll CUPS for devices every 120 seconds.
    _loop_delay = 120
    connection_type = 'printer'

    def get_devices(self):
        """Return discovered printer devices keyed by a sanitized identifier.

        Devices whose URI matches a printer already configured in CUPS are
        marked as supported.
        """
        printer_devices = {}
        # Only one CUPS call may be in flight at a time.
        with cups_lock:
            printers = conn.getPrinters()
            devices = conn.getDevices()
            for printer in printers:
                # Match configured queues to discovered devices by their URI.
                path = printers.get(printer).get('device-uri', False)
                if path and path in devices:
                    devices.get(path).update({'supported': True}) # these printers are automatically supported
        for path in devices:
            # Build a stable identifier from the most specific part of the URI.
            if 'uuid=' in path:
                identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
            elif 'serial=' in path:
                identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
            else:
                identifier = sub('[^a-zA-Z0-9_]', '', path)
            devices[path]['identifier'] = identifier
            devices[path]['url'] = path
            printer_devices[identifier] = devices[path]
        return printer_devices
| Fix issue with printer device-id | [FIX] hw_drivers: Fix issue with printer device-id
When we print a ticket status with a thermal printer we need printer's device-id
But if we add manually a printer this device-id doesn't exist
So now we update de devices list with a supported = True if
printer are manually added
closes odoo/odoo#53043
Signed-off-by: Quentin Lejeune (qle) <5d1c09880ce30fdcf63039932dac0c183ef0bdc7@odoo.com>
| Python | agpl-3.0 | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo |
460ed562a64b7aacbd690a2e62f39b11bfcb092f | src/MCPClient/lib/clientScripts/examineContents.py | src/MCPClient/lib/clientScripts/examineContents.py | #!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
    """Run bulk_extractor on *target*, writing results into *output*.

    Returns 0 on success, or the raised exception on failure (the caller
    passes the return value straight to sys.exit()).
    """
    args = [
        'bulk_extractor', target, '-o', output,
        '-M', '250', '-q', '-1'
    ]
    try:
        # bulk_extractor requires the output directory to exist already.
        os.makedirs(output)
        subprocess.call(args)
        return 0
    except Exception as e:
        return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
| #!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
    """Run bulk_extractor on *target* and prune empty logs from *output*.

    Returns 0 on success, or the raised exception on failure (the caller
    passes the return value straight to sys.exit()).
    """
    command = [
        'bulk_extractor', target, '-o', output,
        '-M', '250', '-q', '-1'
    ]
    try:
        os.makedirs(output)  # bulk_extractor needs the directory to exist
        subprocess.call(command)
        # Drop the zero-byte log files BulkExtractor leaves behind.
        for entry in os.listdir(output):
            entry_path = os.path.join(output, entry)
            if os.path.getsize(entry_path) == 0:
                os.remove(entry_path)
    except Exception as exc:
        return exc
    return 0
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
| Remove empty bulk extractor logs | Remove empty bulk extractor logs
Squashed commit of the following:
commit c923667809bb5d828144b09d03bd53554229a9bd
Author: Aaron Elkiss <aelkiss@umich.edu>
Date: Thu Dec 8 09:34:47 2016 -0500
fix spacing & variable name
commit df597f69e19c3a3b4210c1131a79550eb147e412
Author: Aaron Daniel Elkiss <aelkiss@umich.edu>
Date: Mon Oct 31 13:57:32 2016 +0000
remove empty bulkextractor logs
| Python | agpl-3.0 | artefactual/archivematica,artefactual/archivematica,artefactual/archivematica,artefactual/archivematica |
5a15ca8b790dda7b2ea11af5d1c179f9e7d9f2ac | pages/search_indexes.py | pages/search_indexes.py | """Django haystack `SearchIndex` module."""
from pages.models import Page
from django.conf import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
| """Django haystack `SearchIndex` module."""
from pages.models import Page
from gerbi import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
| Use gerbi setting not global settings | Use gerbi setting not global settings
| Python | bsd-3-clause | pombredanne/django-page-cms-1,akaihola/django-page-cms,remik/django-page-cms,akaihola/django-page-cms,batiste/django-page-cms,remik/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,batiste/django-page-cms,pombredanne/django-page-cms-1,remik/django-page-cms,akaihola/django-page-cms |
6c4c3ac1dde0519d08ab461ab60ccc1d8b9d3d38 | CodeFights/createDie.py | CodeFights/createDie.py | #!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
    # NOTE(review): skeleton solution -- Die accepts no constructor
    # arguments, so Die(seed, n) raises TypeError as soon as createDie is
    # called (hence the FAILED fixtures in main() below).
    class Die(object):
        pass

    class Game(object):
        die = Die(seed, n)

    return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
| #!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
    """Return a pseudo-random die roll in [1, n], seeded with *seed*."""

    class Die(object):
        # __new__ hijacks construction so that "instantiating" a Die
        # yields the rolled integer rather than a Die instance.
        def __new__(cls, seed, n):
            random.seed(seed)
            roll = int(random.random() * n) + 1
            return roll

    class Game(object):
        die = Die(seed, n)

    return Game.die
def main():
    """Exercise createDie() against known seed/size/answer fixtures."""
    # Each entry is [seed, number_of_sides, expected_roll].
    tests = [
        [37237, 5, 3],
        [36706, 12, 9],
        [21498, 10, 10],
        [2998, 6, 3],
        [5509, 10, 4]
    ]
    for t in tests:
        res = createDie(t[0], t[1])
        ans = t[2]
        if ans == res:
            print("PASSED: createDie({}, {}) returned {}"
                  .format(t[0], t[1], res))
        else:
            print("FAILED: createDie({}, {}) returned {}, answer: {}"
                  .format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
| Solve Code Fights create die problem | Solve Code Fights create die problem
| Python | mit | HKuz/Test_Code |
b57a599640c6fa8bf23f081c914b7437e3f04dcd | course_discovery/apps/courses/management/commands/refresh_all_courses.py | course_discovery/apps/courses/management/commands/refresh_all_courses.py | import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = 'Refresh course data from external sources.'

    # NOTE(review): option_list/make_option is the optparse-based command
    # interface that Django deprecated in favour of add_arguments().
    option_list = BaseCommand.option_list + (
        make_option('--access_token',
                    action='store',
                    dest='access_token',
                    default=None,
                    help='OAuth2 access token used to authenticate API calls.'),
    )

    def handle(self, *args, **options):
        """Refresh all courses, failing fast when no access token is given."""
        access_token = options.get('access_token')

        if not access_token:
            msg = 'Courses cannot be migrated if no access token is supplied.'
            logger.error(msg)
            raise CommandError(msg)

        Course.refresh_all(access_token=access_token)
| import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = 'Refresh course data from external sources.'

    def add_arguments(self, parser):
        # Optional OAuth2 token; the refusal to run without one is in handle().
        parser.add_argument(
            '--access_token',
            action='store',
            dest='access_token',
            default=None,
            help='OAuth2 access token used to authenticate API calls.'
        )

    def handle(self, *args, **options):
        """Refresh all courses, failing fast when no access token is given."""
        access_token = options.get('access_token')

        if not access_token:
            # Fail loudly: refreshing without credentials would silently skip data.
            msg = 'Courses cannot be migrated if no access token is supplied.'
            logger.error(msg)
            raise CommandError(msg)

        Course.refresh_all(access_token=access_token)
| Switch to argparse for management command argument parsing | Switch to argparse for management command argument parsing
| Python | agpl-3.0 | edx/course-discovery,edx/course-discovery,edx/course-discovery,edx/course-discovery |
e321b47a5ee2252ce71fabb992e50e5f455a217f | blaze/tests/test_blfuncs.py | blaze/tests/test_blfuncs.py | from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
| from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
| Fix usage of urlparse. and re-order list of key, value dict specification. | Fix usage of urlparse. and re-order list of key, value dict specification.
| Python | bsd-3-clause | ContinuumIO/blaze,dwillmer/blaze,dwillmer/blaze,ContinuumIO/blaze,mwiebe/blaze,markflorisson/blaze-core,AbhiAgarwal/blaze,LiaoPan/blaze,ChinaQuants/blaze,markflorisson/blaze-core,FrancescAlted/blaze,caseyclements/blaze,FrancescAlted/blaze,caseyclements/blaze,jcrist/blaze,mwiebe/blaze,AbhiAgarwal/blaze,jcrist/blaze,cpcloud/blaze,cpcloud/blaze,ChinaQuants/blaze,AbhiAgarwal/blaze,maxalbert/blaze,FrancescAlted/blaze,alexmojaki/blaze,aterrel/blaze,jdmcbr/blaze,mwiebe/blaze,aterrel/blaze,mrocklin/blaze,xlhtc007/blaze,FrancescAlted/blaze,LiaoPan/blaze,markflorisson/blaze-core,aterrel/blaze,nkhuyu/blaze,jdmcbr/blaze,mrocklin/blaze,maxalbert/blaze,nkhuyu/blaze,scls19fr/blaze,cowlicks/blaze,xlhtc007/blaze,markflorisson/blaze-core,cowlicks/blaze,AbhiAgarwal/blaze,scls19fr/blaze,mwiebe/blaze,alexmojaki/blaze |
9581334db472c8ad8dbff0766ec74ed6dfa20d6f | tests/test_api_request.py | tests/test_api_request.py | #!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
| #!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
    """Test Withdraw API response Exception"""
    # The withdraw endpoint signals failure via success=False inside a
    # 200 response, which the client must surface as BinanceWithdrawException.
    with pytest.raises(BinanceWithdrawException):
        with requests_mock.mock() as m:
            json_obj = {"success": False, "msg": "Insufficient funds"}
            m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
            client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
| Add test for withdraw exception response | Add test for withdraw exception response
| Python | mit | sammchardy/python-binance |
c73572f2a9b63d35daf8b5935c4a1e6a0422c122 | pinax/documents/receivers.py | pinax/documents/receivers.py | from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
| from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
    """post_save hook: give each newly created user a 50 MB storage quota."""
    if kwargs["created"]:
        user = kwargs["instance"]
        UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
    """Remove the stored file when its Document row is deleted."""
    # Pass false so FileField doesn't save the model.
instance.file.delete(False) | Implement deletion of file object via Document model pre_save signal. | Implement deletion of file object via Document model pre_save signal.
| Python | mit | pinax/pinax-documents |
9c48cd08ee0805cfd9a8115d77da139e8c09d7a9 | plyer/platforms/linux/cpu.py | plyer/platforms/linux/cpu.py | from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
| from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
    def _cpus(self):
        """Count CPUs: 'physical' cores via /proc/cpuinfo, 'logical' via nproc."""
        # Force the C locale so tool output is parseable; restored below.
        # NOTE(review): if LANG was originally unset this restores it as ''
        # instead of removing the variable -- confirm that is acceptable.
        old_lang = environ.get('LANG', '')
        environ['LANG'] = 'C'
        cpus = {
            'physical': None,  # cores
            'logical': None  # cores * threads
        }
        physical = []  # list of CPU ids from kernel
        # open Linux kernel data file for CPU
        with open('/proc/cpuinfo', 'rb') as fle:
            lines = fle.readlines()
        # go through the lines and obtain CPU core ids
        for line in lines:
            line = line.decode('utf-8')
            if 'core id' not in line:
                continue
            cpuid = line.split(':')[1].strip()
            physical.append(cpuid)
        # total cores (socket * core per socket)
        # is the length of unique CPU ids from kernel
        # NOTE(review): 'core id' values can repeat across physical sockets,
        # so multi-socket machines may be under-counted -- verify.
        physical = len(set(physical))
        cpus['physical'] = physical
        logical = Popen(
            ['nproc', '--all'],
            stdout=PIPE
        )
        output = logical.communicate()[0].decode('utf-8').strip()
        if output:
            cpus['logical'] = int(output)
        environ['LANG'] = old_lang
        return cpus
def instance():
    """Return a LinuxProcessors facade, or a no-op CPU stub if nproc is missing."""
    import sys
    if whereis_exe('nproc'):
        return LinuxProcessors()
    sys.stderr.write("nproc not found.")
    return CPU()
| Add CPU count for GNU/Linux | Add CPU count for GNU/Linux
| Python | mit | kivy/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer |
4c7336fbe1e82bd3d7d091429feda40932d73e67 | bin/pear.py | bin/pear.py | """
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
def pear_detect(package):
    """
    Detect if a pear package is installed.
    """
    # NOTE(review): 'which' is neither defined nor imported in this module,
    # so this call raises NameError -- presumably it should be shell.which().
    if which('pear'):
        pear_out = local('pear list -a', True)
        if pear_out.find(package) == -1:
            return False
        else:
            return True
    else:
        print(red('pear is not installed', True))
        return False
| """
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
import shell
def pear_detect(package):
    """
    Detect if a pear package is installed.
    """
    if not shell.which('pear'):
        # Without the pear binary itself there is nothing to query.
        print(red('pear is not installed', True))
        return False
    installed = local('pear list -a', True)
    return installed.find(package) != -1
| Add missing import for shell module | Add missing import for shell module
| Python | mit | hglattergotz/sfdeploy |
60f101e4fc3ac6822c7cf254afa9e98004eb07a1 | bot.py | bot.py | #!/usr/bin/python3
import tweepy
import random
import os
from secrets import *
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
twitter = tweepy.API(auth)
photo_file = os.path.join("polaroids", os.listdir("polaroids")[0])
comment = random.choice([
"Hmm ...",
"Remember this party?",
"Oh dear.",
"Huh.",
"Uh ...",
"I totally forgot about this.",
"Oh geeze.",
"This one's going in my scrapbook.",
"...",
"Oh wow, remember this?",
"Whose house even was this?",
"I don't remember this at all.",
"Er ...",
"Those were the days.",
"I miss that crew."
])
tweet = twitter.update_with_media(photo_file, comment)
os.remove(photo_file)
| #!/usr/bin/python3
"""
Copyright (c) 2017 Finn Ellis.
Free to use and modify under the terms of the MIT license.
See included LICENSE file for details.
"""
import tweepy
import random
import os
from secrets import *
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
twitter = tweepy.API(auth)
photo_file = os.path.join("polaroids", os.listdir("polaroids")[0])
comment = random.choice([
"Hmm ...",
"Remember this party?",
"Oh dear.",
"Huh.",
"Uh ...",
"I totally forgot about this.",
"Oh geeze.",
"This one's going in my scrapbook.",
"...",
"Oh wow, remember this?",
"Whose house even was this?",
"I don't remember this at all.",
"Er ...",
"Those were the days.",
"I miss that crew."
])
tweet = twitter.update_with_media(photo_file, comment)
os.remove(photo_file)
| Add copyright and license information. | Add copyright and license information. | Python | mit | relsqui/awkward_polaroid,relsqui/awkward_polaroid |
cd611cee6843ff9056d98d26d08091188cd20172 | app/rest.py | app/rest.py | from flask import Blueprint, jsonify, current_app
from app import db
from app.errors import register_errors
base_blueprint = Blueprint('', __name__)
register_errors(base_blueprint)
@base_blueprint.route('/')
def get_info():
current_app.logger.info('get_info')
query = 'SELECT version_num FROM alembic_version'
full_name = db.session.execute(query).fetchone()[0]
return jsonify(
environment=current_app.config['ENVIRONMENT'],
info=full_name,
commit=current_app.config['TRAVIS_COMMIT']
)
| from flask import Blueprint, jsonify, current_app
from app import db
from app.errors import register_errors
base_blueprint = Blueprint('', __name__)
register_errors(base_blueprint)
@base_blueprint.route('/')
def get_info():
    """Info endpoint: report environment, schema version and deployed commit."""
    current_app.logger.info('get_info')
    query = 'SELECT version_num FROM alembic_version'
    try:
        full_name = db.session.execute(query).fetchone()[0]
    except Exception as e:
        # Keep the endpoint responsive even when the database is down.
        current_app.logger.error('Database exception: %r', e)
        full_name = 'Database error, check logs'

    return jsonify(
        environment=current_app.config['ENVIRONMENT'],
        info=full_name,
        commit=current_app.config['TRAVIS_COMMIT']
    )
| Handle db exceptions when getting api info | Handle db exceptions when getting api info
| Python | mit | NewAcropolis/api,NewAcropolis/api,NewAcropolis/api |
f779905c1b7a48a8f49da6ad061ae7d67e677052 | cartoframes/viz/legend_list.py | cartoframes/viz/legend_list.py | from .legend import Legend
from .constants import SINGLE_LEGEND
class LegendList:
"""LegendList
Args:
legends (list, Legend): List of legends for a layer.
"""
def __init__(self, legends=None, default_legend=None, geom_type=None):
self._legends = self._init_legends(legends, default_legend, geom_type)
def _init_legends(self, legends, default_legend, layer_type):
if isinstance(legends, list):
legend_list = []
for legend in legends:
if isinstance(legend, Legend):
if legend._type == 'default' or legend._type == 'basic':
legend._type = _get_simple_legend_geometry_type(layer_type)
if legend._type == 'default' and default_legend:
legend._prop = default_legend._prop
legend_list.append(legend)
else:
raise ValueError('Legends list contains invalid elements')
return legend_list
else:
return []
def get_info(self):
legends_info = []
for legend in self._legends:
if legend:
legends_info.append(legend.get_info())
return legends_info
def _get_simple_legend_geometry_type(layer_type):
return SINGLE_LEGEND + '-' + layer_type
| from .legend import Legend
from .constants import SINGLE_LEGEND
class LegendList:
"""LegendList
Args:
legends (list, Legend): List of legends for a layer.
"""
def __init__(self, legends=None, default_legend=None, geom_type=None):
self._legends = self._init_legends(legends, default_legend, geom_type)
def _init_legends(self, legends, default_legend, layer_type):
if isinstance(legends, list):
legend_list = []
for legend in legends:
if isinstance(legend, Legend):
if legend._type == 'basic':
legend._type = _get_simple_legend_geometry_type(layer_type)
elif legend._type == 'default' and default_legend:
legend._type = default_legend._type
legend._prop = default_legend._prop
legend_list.append(legend)
else:
raise ValueError('Legends list contains invalid elements')
return legend_list
else:
return []
def get_info(self):
legends_info = []
for legend in self._legends:
if legend:
legends_info.append(legend.get_info())
return legends_info
def _get_simple_legend_geometry_type(layer_type):
    """Build the geometry-specific name of the single-legend type."""
    return '{0}-{1}'.format(SINGLE_LEGEND, layer_type)
| Fix default legend type detection | Fix default legend type detection
| Python | bsd-3-clause | CartoDB/cartoframes,CartoDB/cartoframes |
4b3ec77a6e1639dc156135fd42ca215c58c082a3 | pyecore/notification.py | pyecore/notification.py | """
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is performed on an observed element.
"""
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
| """
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
try:
from enum34 import unique, Enum
except ImportError:
from enum import unique, Enum
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
| Add conditional import of the enum34 library | Add conditional import of the enum34 library
This lib is used to bing enumerations to Python <= 3.3.
| Python | bsd-3-clause | aranega/pyecore,pyecore/pyecore |
f18ea85f3599e16c60cfc2b652c30ff64997e95b | pytablereader/loadermanager/_base.py | pytablereader/loadermanager/_base.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| Add an interface to get the loader | Add an interface to get the loader
| Python | mit | thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader |
ddb12a892d42e8a6ffdd8146149ec306dea48a12 | pydmrs/pydelphin_interface.py | pydmrs/pydelphin_interface.py | from delphin.interfaces import ace
from delphin.mrs import simplemrs, dmrx
from pydmrs.core import ListDmrs
from pydmrs.utils import load_config, get_config_option
DEFAULT_CONFIG_FILE = 'default_interface.conf'
config = load_config(DEFAULT_CONFIG_FILE)
DEFAULT_ERG_FILE = get_config_option(config, 'Grammar', 'ERG')
def parse(sentence, cls=ListDmrs, erg_file=DEFAULT_ERG_FILE):
results = []
for result in ace.parse(erg_file, sentence)['RESULTS']: # cmdargs=['-r', 'root_informal']
mrs = result['MRS']
xmrs = simplemrs.loads_one(mrs)
dmrs_xml = dmrx.dumps_one(xmrs)[11:-12]
dmrs = cls.loads_xml(dmrs_xml)
results.append(dmrs)
return results
def generate(dmrs, erg_file=DEFAULT_ERG_FILE):
dmrs_xml = '<dmrs-list>' + dmrs.dumps_xml(encoding='utf-8') + '</dmrs-list>'
xmrs = dmrx.loads_one(dmrs_xml)
mrs = simplemrs.dumps_one(xmrs)
results = []
for result in ace.generate(erg_file, mrs)['RESULTS']:
sentence = result['SENT']
results.append(sentence)
return results
| from delphin.interfaces import ace
from delphin.mrs import simplemrs, dmrx
from pydmrs.core import ListDmrs
from pydmrs.utils import load_config, get_config_option
DEFAULT_CONFIG_FILE = 'default_interface.conf'
config = load_config(DEFAULT_CONFIG_FILE)
DEFAULT_ERG_FILE = get_config_option(config, 'Grammar', 'ERG')
def parse(sentence, cls=ListDmrs, erg_file=DEFAULT_ERG_FILE):
results = []
for result in ace.parse(erg_file, sentence).results(): # cmdargs=['-r', 'root_informal']
xmrs = result.mrs()
dmrs_xml = dmrx.dumps_one(xmrs)[11:-12]
dmrs = cls.loads_xml(dmrs_xml)
results.append(dmrs)
return results
def generate(dmrs, erg_file=DEFAULT_ERG_FILE):
dmrs_xml = '<dmrs-list>' + dmrs.dumps_xml(encoding='utf-8') + '</dmrs-list>'
xmrs = dmrx.loads_one(dmrs_xml)
mrs = simplemrs.dumps_one(xmrs)
results = []
for result in ace.generate(erg_file, mrs).results():
sentence = result['surface']
results.append(sentence)
return results
| Update PyDelphin interface to recent version | Update PyDelphin interface to recent version
| Python | mit | delph-in/pydmrs,delph-in/pydmrs,delph-in/pydmrs |
c26a7f83b1e9689496b5cf3b5e42fb85611c1ded | ideascube/conf/idb_aus_queensland.py | ideascube/conf/idb_aus_queensland.py | # -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
| # -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] not in ['server:battery']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'library',
},
{
'id': 'wikipedia',
'languages': ['en']
},
{
'id': 'khanacademy',
},
{
'id': 'gutenberg',
'lang': 'en',
},
{
'id': 'vikidia',
'languages': ['en']
},
]
| Change cards for the new version | Change cards for the new version
We setup a new version of the server and installed the ZIM file with the
catalog, so the cards has to change from the old version to the new
version to match with the ideascube catalog policy
| Python | agpl-3.0 | ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube |
6894bd3cfc010c371478e7ae9e5e0b3ba108e165 | plugins/configuration/configurationtype/configuration_registrar.py | plugins/configuration/configurationtype/configuration_registrar.py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
raise Exception("Not implemented yet.")
def validate_metadata(metadata):
raise Exception("Not implemented yet.") | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
"""
Undoes the registration of a configuration plug-in.
The configuration plug-in will no longer keep track of any configuration.
Existing configuration will be stored persistently.
:param identity: The identity of the plug-in to unregister.
"""
if identity not in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is not registered, so I can't unregister it.", configuration=identity)
return
del _configurations[identity] #The actual unregistration.
def validate_metadata(metadata):
raise Exception("Not implemented yet.") | Implement unregistration of configuration plug-ins | Implement unregistration of configuration plug-ins
Perhaps we should not give a warning, but instead an exception, when registering or unregistering fails?
| Python | cc0-1.0 | Ghostkeeper/Luna |
7b83e8fbe8e6a249ab82db38e358774ba78b4ea8 | pyflation/analysis/__init__.py | pyflation/analysis/__init__.py | """ analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum | """ analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr, scaled_Pzeta
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum,\
Sspectrum, scaled_dP_spectrum, scaled_dPnad_spectrum,\
scaled_S_spectrum | Add new S spectrum functions into package initializer. | Add new S spectrum functions into package initializer.
| Python | bsd-3-clause | ihuston/pyflation,ihuston/pyflation |
6212f78597dff977a7e7348544d09c7a649aa470 | bitbots_transform/src/bitbots_transform/transform_ball.py | bitbots_transform/src/bitbots_transform/transform_ball.py | #!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformLines(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("camera/camera_info", CameraInfo, self._callback_camera_info)
self.line_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
return # No camaraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.line_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformLines()
| #!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformBall(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("minibot/camera/camera_info", CameraInfo, self._callback_camera_info)
self.ball_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
return # No camaraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y - ballinfo.diameter // 2, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.ball_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformBall()
| Transform Ball: Fixed wrong names | Transform Ball: Fixed wrong names
| Python | mit | bit-bots/bitbots_misc,bit-bots/bitbots_misc,bit-bots/bitbots_misc |
b28b4bb834d8ab70e8820c43ed8cf11242c1b5b6 | keystoneclient/v2_0/endpoints.py | keystoneclient/v2_0/endpoints.py | # Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl, internalurl):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
| # Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl=None,
internalurl=None):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
| Make parameters in EndpointManager optional | Make parameters in EndpointManager optional
Change adminurl and internalurl parameters in EndpointManager create()
to optional parameters.
Change-Id: I490e35b89f7ae7c6cdbced6ba8d3b82d5132c19d
Closes-Bug: #1318436
| Python | apache-2.0 | magic0704/python-keystoneclient,jamielennox/python-keystoneclient,klmitch/python-keystoneclient,ging/python-keystoneclient,alexpilotti/python-keystoneclient,klmitch/python-keystoneclient,darren-wang/ksc,alexpilotti/python-keystoneclient,magic0704/python-keystoneclient,Mercador/python-keystoneclient,ging/python-keystoneclient,Mercador/python-keystoneclient,jamielennox/python-keystoneclient,sdpp/python-keystoneclient,sdpp/python-keystoneclient,darren-wang/ksc,jamielennox/python-keystoneclient |
1005a41bd6fb3f854f75bd9d4d6ab69290778ba9 | kolibri/core/lessons/viewsets.py | kolibri/core/lessons/viewsets.py | from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
return Lesson.objects.filter(is_archived=False)
| from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
queryset = Lesson.objects.filter(is_archived=False)
classid = self.request.query_params.get('classid', None)
if classid is not None:
queryset = queryset.filter(collection_id=classid)
return queryset
| Add classid filter for Lessons | Add classid filter for Lessons
| Python | mit | learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,lyw07/kolibri,christianmemije/kolibri,christianmemije/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,jonboiser/kolibri,benjaoming/kolibri,lyw07/kolibri,jonboiser/kolibri,learningequality/kolibri,christianmemije/kolibri,jonboiser/kolibri,DXCanas/kolibri,mrpau/kolibri,christianmemije/kolibri,lyw07/kolibri,benjaoming/kolibri,learningequality/kolibri,benjaoming/kolibri,mrpau/kolibri,indirectlylit/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,benjaoming/kolibri,DXCanas/kolibri,indirectlylit/kolibri,lyw07/kolibri,DXCanas/kolibri |
bd5844aa6c59c8d34df12e358e5e06eefcb55f9d | qiita_pet/handlers/download.py | qiita_pet/handlers/download.py | from tornado.web import authenticated
from os.path import split
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = split(relpath)[-1]
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
| from tornado.web import authenticated
from os.path import basename
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = basename(relpath)
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
| Use basename instead of os.path.split(...)[-1] | Use basename instead of os.path.split(...)[-1]
| Python | bsd-3-clause | ElDeveloper/qiita,josenavas/QiiTa,RNAer/qiita,squirrelo/qiita,RNAer/qiita,ElDeveloper/qiita,antgonza/qiita,adamrp/qiita,wasade/qiita,antgonza/qiita,squirrelo/qiita,biocore/qiita,adamrp/qiita,josenavas/QiiTa,biocore/qiita,ElDeveloper/qiita,adamrp/qiita,antgonza/qiita,RNAer/qiita,squirrelo/qiita,ElDeveloper/qiita,wasade/qiita,josenavas/QiiTa,wasade/qiita,antgonza/qiita,biocore/qiita,josenavas/QiiTa,biocore/qiita,squirrelo/qiita,RNAer/qiita,adamrp/qiita |
23a3f80d44592d4a86878f29eaa873d727ad31ee | london_commute_alert.py | london_commute_alert.py | import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription']
for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
| import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
| Correct for problem on webfaction | Correct for problem on webfaction
| Python | mit | noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit |
598e21a7c397c0c429a78f008a36e5800c1b23e3 | conftest.py | conftest.py | import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
default=os.environ.get("PYTEST_DB_URL"), conn_max_age=600
),
}
| import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
env="PYTEST_DB_URL", conn_max_age=600
),
}
| Fix picking invalid env variable for tests | Fix picking invalid env variable for tests
| Python | bsd-3-clause | mociepka/saleor,mociepka/saleor,mociepka/saleor |
c265f3a24ba26800a15ddf54ad3aa7515695fb3f | app/__init__.py | app/__init__.py | from flask import Flask
from .extensions import db
from . import views
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
# @app.route("/")
# def index():
# return "Hello World!"
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
| from flask import Flask
from flask_user import UserManager
from . import views
from .extensions import db, mail, toolbar
from .models import DataStoreAdapter, UserModel
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
mail.init_app(app)
toolbar.init_app(app)
# Cannot put it in extension files
# due to will create a circular import.
db_adapter = DataStoreAdapter(db, UserModel)
user_manager = UserManager(db_adapter, app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
| Update app init to user flask user, mail and toolbar ext | Update app init to user flask user, mail and toolbar ext
| Python | mit | oldani/nanodegree-blog,oldani/nanodegree-blog,oldani/nanodegree-blog |
c109b41dc76c333bda1973fa2a543688f2fd5141 | braid/config.py | braid/config.py | """
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Loads the passed environment configuration. This task can be invoked before
executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
| """
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Load the configuration for the testing environment.
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Load the configuration for the production environment.
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
| Make docstrings more Fabric friendly | Make docstrings more Fabric friendly
| Python | mit | alex/braid,alex/braid |
97a1e627b682f9aec80134334277b63e81265ddd | tests/test_ircv3.py | tests/test_ircv3.py | import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
    "payload, expected",
    [
        # Valid escape sequences in a tag value: \: -> ';', \s -> ' ',
        # \\ -> a single backslash.
        (
            rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
            {"+example": """raw+:=,escaped; \\"""}
        ),
        # Invalid escapes (\f, \b) drop the backslash but keep the character.
        (
            rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
            {"+example": "foobar"}
        ),
        # Multiple ';'-separated tags, each with a plain (unescaped) value.
        (
            rb'@msgid=796~1602221579~51;account=user123 :user123!user123@((ip) PRIVMSG #user123 :ping',
            {'msgid': '796~1602221579~51', 'account': 'user123'}
        ),
        # Valueless (vendor-prefixed) tags are represented as True.
        (
            rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
            {"inspircd.org/service": True, r"inspircd.org/bot": True}
        )
    ]
)
def test_tagged_message_escape_sequences(payload, expected):
    """Parsing a tagged message must decode IRCv3 tag escape sequences."""
    message = ircv3.tags.TaggedMessage.parse(payload)
    assert message.tags == expected
| import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
    "payload, expected",
    [
        # A tag with an empty value ('empty=') and a tag with no '=' at all
        # ('missing') are both exposed, mapped to True.
        (
            rb'@empty=;missing :irc.example.com NOTICE #channel :Message',
            {'empty': True, 'missing': True}
        ),
        # Valid escape sequences in a tag value: \: -> ';', \s -> ' ',
        # \\ -> a single backslash.
        (
            rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
            {"+example": """raw+:=,escaped; \\"""}
        ),
        # Invalid escapes (\f, \b) drop the backslash but keep the character.
        (
            rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
            {"+example": "foobar"}
        ),
        # Multiple ';'-separated tags, each with a plain (unescaped) value.
        (
            rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
            {'msgid': '796~1602221579~51', 'account': 'user123'}
        ),
        # Valueless (vendor-prefixed) tags are represented as True.
        (
            rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
            {"inspircd.org/service": True, r"inspircd.org/bot": True}
        )
    ]
)
def test_tagged_message_escape_sequences(payload, expected):
    """Parsing a tagged message must decode IRCv3 tag escape sequences."""
    message = ircv3.tags.TaggedMessage.parse(payload)
    assert message.tags == expected
| Add test case for empty and missing IRCv3 tags | Add test case for empty and missing IRCv3 tags
| Python | bsd-3-clause | Shizmob/pydle |
127a3da0d453785bd9c711d738e20dfdc1876df1 | tool/serial_dump.py | tool/serial_dump.py | #!/usr/bin/python
import serial
import string
import io
import time
import sys
# Replay the contents of gps.log over a serial port one byte at a time,
# echoing each byte as hex and sleeping `second` seconds between bytes.
if __name__ == '__main__':
    # Defaults, overridden by the positional command-line arguments below.
    port = "/dev/ttyUSB0"
    baudrate = "57600"
    second = 0.1
    if (len(sys.argv) < 3):
        print("Usage: serial_dump.py /dev/ttyUSB0 57600")
        exit()
    elif (len(sys.argv) == 3):
        port = sys.argv[1]
        baudrate = sys.argv[2]
    elif (len(sys.argv) == 4):
        # Optional third argument: inter-byte delay in seconds.
        port = sys.argv[1]
        baudrate = sys.argv[2]
        second = float(sys.argv[3])
    # NOTE(review): "buadrate" is a typo ("baudrate") in this user-visible
    # message; left untouched since this pass only adds comments.
    print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
    ser = serial.Serial(port, baudrate);
    # Read the whole log up front (this shadows the unused `string` import).
    with open("gps.log","rb") as f:
        string = f.read()
    for byte in string:
        ser.write(byte)
        # NOTE(review): iterating a byte string yields 1-char strs on
        # Python 2 but ints on Python 3, where ord(c) here would raise --
        # confirm the intended interpreter version.
        print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
        print ("{0} ".format(print_byte))
        time.sleep(second)
    ser.close()
| #!/usr/bin/python
import serial
import string
import io
import time
import sys
# Replay the contents of a user-supplied file over a serial port one byte at
# a time, echoing each byte as hex and sleeping `second` seconds per byte.
if __name__ == '__main__':
    # Defaults, overridden by the positional command-line arguments below.
    port = "/dev/ttyUSB0"
    baudrate = "57600"
    second = 0.001
    if (len(sys.argv) < 4 ):
        print("Usage: \n./serial_dump.py /dev/ttyUSB0 57600 file_name 0.01")
        exit()
    elif (len(sys.argv) == 4):
        port = sys.argv[1]
        baudrate = sys.argv[2]
        file_name = sys.argv[3]
    elif (len(sys.argv) == 5):
        # Optional fourth argument: inter-byte delay in seconds.
        port = sys.argv[1]
        baudrate = sys.argv[2]
        file_name = sys.argv[3]
        second = float(sys.argv[4])
    # NOTE(review): "buadrate" is a typo ("baudrate") in this user-visible
    # message; left untouched since this pass only adds comments.
    print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
    ser = serial.Serial(port, baudrate);
    # Read the whole input file up front (shadows the unused `string` import).
    with open(file_name,"rb") as f:
        string = f.read()
    for byte in string:
        ser.write(byte)
        # NOTE(review): iterating a byte string yields 1-char strs on
        # Python 2 but ints on Python 3, where ord(c) here would raise --
        # confirm the intended interpreter version.
        print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
        print ("{0} ".format(print_byte))
        time.sleep(second)
    ser.close()
| Change command option, need to specify file name now | Change command option, need to specify file name now
| Python | mit | ming6842/firmware-new,fboris/firmware,UrsusPilot/firmware,fboris/firmware,UrsusPilot/firmware,fboris/firmware,UrsusPilot/firmware,ming6842/firmware-new,ming6842/firmware-new |
5fb17ccf0311500e5ce14a49e246d1a6cbc427a4 | mopidy/frontends/mpd/__init__.py | mopidy/frontends/mpd/__init__.py | import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
    """
    Frontend exposing Mopidy through the MPD protocol.

    **Settings:**

    - :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
    - :attr:`mopidy.settings.MPD_SERVER_PORT`
    """

    def __init__(self, *args, **kwargs):
        super(MpdFrontend, self).__init__(*args, **kwargs)
        self.dispatcher = MpdDispatcher(self.backend)
        # The server process is not spawned until start() is called.
        self.process = None

    def start(self):
        """Starts the MPD server."""
        self.process = MpdProcess(self.core_queue)
        self.process.start()

    def destroy(self):
        """Destroys the MPD server."""
        self.process.destroy()

    def process_message(self, message):
        """
        Processes messages with the MPD frontend as destination.

        :param message: the message
        :type message: dict
        """
        assert message['to'] == 'frontend', \
            u'Message recipient must be "frontend".'
        if message['command'] != 'mpd_request':
            # Anything other than an MPD request is unexpected here.
            logger.warning(u'Cannot handle message: %s', message)
            return
        response = self.dispatcher.handle_request(message['request'])
        connection = unpickle_connection(message['reply_to'])
        connection.send(response)
| import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
    """
    Frontend exposing Mopidy through the MPD protocol.

    **Settings:**

    - :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
    - :attr:`mopidy.settings.MPD_SERVER_PORT`
    """

    def __init__(self, *args, **kwargs):
        super(MpdFrontend, self).__init__(*args, **kwargs)
        self.dispatcher = MpdDispatcher(self.backend)
        # The server process is not spawned until start() is called.
        self.process = None

    def start(self):
        """Starts the MPD server."""
        self.process = MpdProcess(self.core_queue)
        self.process.start()

    def destroy(self):
        """Destroys the MPD server."""
        self.process.destroy()

    def process_message(self, message):
        """
        Processes messages with the MPD frontend as destination.

        :param message: the message
        :type message: dict
        """
        assert message['to'] == 'frontend', \
            u'Message recipient must be "frontend".'
        if message['command'] != 'mpd_request':
            # Messages addressed to other frontends are silently ignored.
            return
        response = self.dispatcher.handle_request(message['request'])
        connection = unpickle_connection(message['reply_to'])
        connection.send(response)
| Make MpdFrontend ignore unknown messages | Make MpdFrontend ignore unknown messages
| Python | apache-2.0 | diandiankan/mopidy,rawdlite/mopidy,adamcik/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,bencevans/mopidy,abarisain/mopidy,pacificIT/mopidy,bacontext/mopidy,jodal/mopidy,adamcik/mopidy,jcass77/mopidy,jmarsik/mopidy,quartz55/mopidy,quartz55/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,bencevans/mopidy,adamcik/mopidy,swak/mopidy,tkem/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,quartz55/mopidy,glogiotatidis/mopidy,hkariti/mopidy,ZenithDK/mopidy,swak/mopidy,woutervanwijk/mopidy,hkariti/mopidy,rawdlite/mopidy,swak/mopidy,priestd09/mopidy,dbrgn/mopidy,mokieyue/mopidy,bacontext/mopidy,mopidy/mopidy,ZenithDK/mopidy,priestd09/mopidy,diandiankan/mopidy,tkem/mopidy,bencevans/mopidy,mokieyue/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,pacificIT/mopidy,ali/mopidy,priestd09/mopidy,bacontext/mopidy,vrs01/mopidy,pacificIT/mopidy,dbrgn/mopidy,mokieyue/mopidy,dbrgn/mopidy,jcass77/mopidy,liamw9534/mopidy,kingosticks/mopidy,abarisain/mopidy,tkem/mopidy,tkem/mopidy,liamw9534/mopidy,mokieyue/mopidy,ali/mopidy,jodal/mopidy,dbrgn/mopidy,diandiankan/mopidy,kingosticks/mopidy,jmarsik/mopidy,vrs01/mopidy,vrs01/mopidy,woutervanwijk/mopidy,diandiankan/mopidy,jodal/mopidy,ali/mopidy,hkariti/mopidy,quartz55/mopidy,swak/mopidy,jmarsik/mopidy,jcass77/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,bacontext/mopidy,bencevans/mopidy,glogiotatidis/mopidy,mopidy/mopidy,mopidy/mopidy,hkariti/mopidy,vrs01/mopidy,rawdlite/mopidy |
076ef01bd3334d2a1941df369286e4972223901e | PyramidSort.py | PyramidSort.py | import sublime, sublime_plugin
def pyramid_sort(txt):
    """Return the non-blank strings of *txt* ordered by increasing length.

    Strings that are empty or whitespace-only are dropped.  The sort is
    stable, so equally long strings keep their relative order.

    :param txt: iterable of strings
    :return: a new list; the input is never mutated
    """
    # sorted() with key=len replaces the filter/lambda pair of the original
    # and avoids mutating an intermediate list in place.
    return sorted((line for line in txt if line.strip()), key=len)
class PyramidSortCommand(sublime_plugin.TextCommand):
    """Replace each non-empty selection with its lines sorted by length."""

    def run(self, edit):
        # Collect the non-empty selections up front, then rewrite each one.
        selected = [region for region in self.view.sel() if not region.empty()]
        for region in selected:
            sorted_lines = pyramid_sort(self.view.substr(region).splitlines())
            self.view.replace(edit, region, u"\n".join(sorted_lines))
| #
# 123
# 12
# 1
import sublime, sublime_plugin
def pyramid_sort(txt):
    """Return the non-blank strings of *txt* ordered by increasing length.

    Strings that are empty or whitespace-only are dropped.  The sort is
    stable, so equally long strings keep their relative order.

    :param txt: iterable of strings
    :return: a new list; the input is never mutated
    """
    # sorted() with key=len replaces the filter/lambda pair of the original
    # and avoids mutating an intermediate list in place.
    return sorted((line for line in txt if line.strip()), key=len)
class PyramidSortCommand(sublime_plugin.TextCommand):
    """Sort, by increasing length, the lines touched by each non-empty selection."""

    def run(self, edit):
        # Collect the non-empty selections up front, then rewrite each one.
        selected = [region for region in self.view.sel() if not region.empty()]
        for region in selected:
            # view.line() widens the region to the full line(s) it touches.
            line_region = self.view.line(region)
            sorted_lines = pyramid_sort(self.view.substr(line_region).splitlines())
            self.view.replace(edit, line_region, u"\n".join(sorted_lines))
| Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked" | Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
This reverts commit 9c944db3affc8181146fa27d8483a58d2731756b.
| Python | apache-2.0 | kenglxn/PyramidSortSublimeTextPlugin,kenglxn/PyramidSortSublimeTextPlugin |
69b0e1c60eafff596ebb494a7e79a22c6bea374b | polling_stations/apps/data_collection/management/commands/import_hart.py | polling_stations/apps/data_collection/management/commands/import_hart.py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    # Hart District Council -- see "Hart DC" in the data file paths below.
    council_id = 'E07000089'
    # Tab-separated Xpress export; the same file supplies both the address
    # and the polling-station data.
    addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'
| from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    # Hart District Council -- see "Hart DC" in the data file paths below.
    council_id = 'E07000089'
    # Tab-separated Xpress export; the same file supplies both the address
    # and the polling-station data.
    addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'

    def station_record_to_dict(self, record):
        # Override the grid reference for polling place 1914, whose source
        # coordinates appear to be wrong; the easting/northing pair below is
        # the manually corrected location.
        if record.polling_place_id == '1914':
            record = record._replace(polling_place_easting = '479224')
            record = record._replace(polling_place_northing = '154016')
        return super().station_record_to_dict(record)
| Fix dodgy point in Hart | Fix dodgy point in Hart
| Python | bsd-3-clause | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations |
1ac2e2b03048cf89c8df36c838130212f4ac63d3 | server/src/weblab/__init__.py | server/src/weblab/__init__.py | import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
# Base distribution version; extended below with git metadata when a
# generated version.json data file is available.
base_version = "5.0"
__version__ = base_version

if version_filename:
    try:
        # Context manager guarantees the file handle is closed even when
        # json.loads() raises; the original open() call leaked the handle.
        with open(version_filename) as version_file:
            git_version = json.loads(version_file.read())
    except Exception:
        # Catch Exception rather than everything: a bare "except:" would
        # also swallow SystemExit and KeyboardInterrupt.
        git_version = None
    if git_version and 'version' in git_version:
        __version__ = "{0} - {1}".format(base_version, git_version.get('version'))

# NOTE(review): "__ALL__" looks like a typo for "__all__" and has no effect
# on "from weblab import *"; kept as-is to avoid changing module attributes.
__ALL__ = []
| import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
# Base distribution version; extended below with git metadata (version and
# date) when a generated version.json data file is available.
base_version = "5.0"
__version__ = base_version

if version_filename:
    try:
        # Context manager guarantees the file handle is closed even when
        # json.loads() raises; the original open() call leaked the handle.
        with open(version_filename) as version_file:
            git_version = json.loads(version_file.read())
    except Exception:
        # Catch Exception rather than everything: a bare "except:" would
        # also swallow SystemExit and KeyboardInterrupt.
        git_version = None
    if git_version and 'version' in git_version:
        __version__ = "{0} - {1} ({2})".format(base_version, git_version.get('version'), git_version.get('date'))

# NOTE(review): "__ALL__" looks like a typo for "__all__" and has no effect
# on "from weblab import *"; kept as-is to avoid changing module attributes.
__ALL__ = []
| Add date to the version | Add date to the version
| Python | bsd-2-clause | morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.