commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
bc47862e89f73ec152a57bf43126653a981cd411
|
suggestions/tests.py
|
suggestions/tests.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from mks.models import Member
from .models import Suggestion
class SuggestionsTests(TestCase):
def setUp(self):
self.member = Member.objects.create(name='mk_1')
self.regular_user = User.objects.create_user('reg_user')
def test_simple_text_suggestion(self):
MK_SITE = 'http://mk1.example.com'
suggestion = Suggestion.objects.create_suggestion(
suggested_by=self.regular_user,
content_object=self.member,
suggestion_action=Suggestion.UPDATE,
suggested_field='website',
suggested_text=MK_SITE
)
self.assertIsNone(self.member.website)
suggestion.auto_apply()
mk = Member.objects.get(pk=self.member.pk)
self.assertEqual(mk.website, MK_SITE)
|
from django.test import TestCase
from django.contrib.auth.models import User
from mks.models import Member
from .models import Suggestion
class SuggestionsTests(TestCase):
def setUp(self):
self.member = Member.objects.create(name='mk_1')
self.regular_user = User.objects.create_user('reg_user')
def test_simple_text_suggestion(self):
MK_SITE = 'http://mk1.example.com'
suggestion = Suggestion.objects.create_suggestion(
suggested_by=self.regular_user,
content_object=self.member,
suggestion_action=Suggestion.UPDATE,
suggested_field='website',
suggested_text=MK_SITE
)
self.assertIsNone(self.member.website)
suggestion.auto_apply()
mk = Member.objects.get(pk=self.member.pk)
self.assertEqual(mk.website, MK_SITE)
# cleanup
mk.website = None
mk.save()
self.member = mk
|
Undo member changes in test
|
Undo member changes in test
|
Python
|
bsd-3-clause
|
MeirKriheli/Open-Knesset,jspan/Open-Knesset,navotsil/Open-Knesset,navotsil/Open-Knesset,DanaOshri/Open-Knesset,alonisser/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,daonb/Open-Knesset,habeanf/Open-Knesset,noamelf/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,otadmor/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,ofri/Open-Knesset,Shrulik/Open-Knesset,navotsil/Open-Knesset,Shrulik/Open-Knesset,ofri/Open-Knesset,ofri/Open-Knesset,otadmor/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,daonb/Open-Knesset,MeirKriheli/Open-Knesset,navotsil/Open-Knesset,noamelf/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,OriHoch/Open-Knesset,OriHoch/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,otadmor/Open-Knesset,daonb/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,noamelf/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,habeanf/Open-Knesset,jspan/Open-Knesset,DanaOshri/Open-Knesset
|
f69bc50985a644f90c3f59d06cb7b99a6aeb3b53
|
migrations/versions/0209_email_branding_update.py
|
migrations/versions/0209_email_branding_update.py
|
"""
Revision ID: 0209_email_branding_update
Revises: 84c3b6eb16b3
Create Date: 2018-07-25 16:08:15.713656
"""
from alembic import op
import sqlalchemy as sa
revision = '0209_email_branding_update'
down_revision = '84c3b6eb16b3'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('email_branding', sa.Column('text', sa.String(length=255), nullable=True))
op.execute('UPDATE email_branding SET text = name')
op.execute('UPDATE email_branding SET name = NULL')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('email_branding', 'text')
# ### end Alembic commands ###
|
"""
Revision ID: 0209_email_branding_update
Revises: 84c3b6eb16b3
Create Date: 2018-07-25 16:08:15.713656
"""
from alembic import op
import sqlalchemy as sa
revision = '0209_email_branding_update'
down_revision = '84c3b6eb16b3'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('email_branding', sa.Column('text', sa.String(length=255), nullable=True))
op.execute('UPDATE email_branding SET text = name')
op.execute('UPDATE email_branding SET name = NULL')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute('UPDATE email_branding SET name = text')
op.drop_column('email_branding', 'text')
# ### end Alembic commands ###
|
Move data back before dropping the column for downgrade
|
Move data back before dropping the column for downgrade
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
a97c6ebda62762501fdf5f18326c8c518d73635f
|
securedrop/source_app/forms.py
|
securedrop/source_app/forms.py
|
from flask_babel import gettext
from flask_wtf import FlaskForm
from wtforms import PasswordField
from wtforms.validators import InputRequired, Regexp, Length
from db import Source
class LoginForm(FlaskForm):
codename = PasswordField('codename', validators=[
InputRequired(message=gettext('This field is required.')),
Length(1, Source.MAX_CODENAME_LEN,
message=gettext('Field must be between 1 and '
'{max_codename_len} characters long. '.format(
max_codename_len=Source.MAX_CODENAME_LEN))),
# The regex here allows either whitespace (\s) or
# alphanumeric characters (\W) except underscore (_)
Regexp(r'(\s|[^\W_])+$', message=gettext('Invalid input.'))
])
|
from flask_babel import gettext
from flask_wtf import FlaskForm
from wtforms import PasswordField
from wtforms.validators import InputRequired, Regexp, Length
from db import Source
class LoginForm(FlaskForm):
codename = PasswordField('codename', validators=[
InputRequired(message=gettext('This field is required.')),
Length(1, Source.MAX_CODENAME_LEN,
message=gettext('Field must be between 1 and '
'{max_codename_len} characters long. '.format(
max_codename_len=Source.MAX_CODENAME_LEN))),
# Make sure to allow dashes since some words in the wordlist have them
Regexp(r'[\sA-Za-z0-9-]+$', message=gettext('Invalid input.'))
])
|
Use @dachary's much clearer regex to validate codenames
|
Use @dachary's much clearer regex to validate codenames
|
Python
|
agpl-3.0
|
micahflee/securedrop,conorsch/securedrop,garrettr/securedrop,heartsucker/securedrop,conorsch/securedrop,heartsucker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,micahflee/securedrop,garrettr/securedrop,ehartsuyker/securedrop,garrettr/securedrop,garrettr/securedrop,micahflee/securedrop,ehartsuyker/securedrop,conorsch/securedrop,micahflee/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,heartsucker/securedrop
|
52a3ab97f888734db3c602ac69a33660e6026bb6
|
linked-list/remove-k-from-list.py
|
linked-list/remove-k-from-list.py
|
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node
else:
self.head = new_node
def __repr__(self):
current_node = self.head
output_arr = []
while current_node:
output_arr.append(str(current_node.data))
current_node = current_node.next
return "->".join(output_arr)
|
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
class Node(object):
def __init__(self, value):
self.value = value
self.next = None
def remove_k_from_list(l, k):
fake_head = Node(None)
fake_head.next = l
current_node = fake_head
while current_node:
while current_node.next and current_node.next.value == k:
current_node.next = current_node.next.next
current_node = current_node.next
return fake_head.next
|
Remove linked list class and implement algorithm just using single method
|
Remove linked list class and implement algorithm just using single method
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
03d07a20928997ecc136884110311453217443c3
|
reportlab/platypus/__init__.py
|
reportlab/platypus/__init__.py
|
#copyright ReportLab Inc. 2000
#see license.txt for license details
#history http://cvs.sourceforge.net/cgi-bin/cvsweb.cgi/reportlab/platypus/__init__.py?cvsroot=reportlab
#$Header: /tmp/reportlab/reportlab/platypus/__init__.py,v 1.12 2000/11/29 17:28:50 rgbecker Exp $
__version__=''' $Id: __init__.py,v 1.12 2000/11/29 17:28:50 rgbecker Exp $ '''
__doc__=''
from reportlab.platypus.flowables import Flowable, Image, Macro, PageBreak, Preformatted, Spacer, XBox, \
CondPageBreak, KeepTogether
from reportlab.platypus.paragraph import Paragraph, cleanBlockQuotedText, ParaLines
from reportlab.platypus.paraparser import ParaFrag
from reportlab.platypus.tables import Table, TableStyle, CellStyle
from reportlab.platypus.frames import Frame
from reportlab.platypus.doctemplate import BaseDocTemplate, NextPageTemplate, PageTemplate, ActionFlowable, \
SimpleDocTemplate, FrameBreak
from xpreformatted import XPreformatted
|
#copyright ReportLab Inc. 2000
#see license.txt for license details
#history http://cvs.sourceforge.net/cgi-bin/cvsweb.cgi/reportlab/platypus/__init__.py?cvsroot=reportlab
#$Header: /tmp/reportlab/reportlab/platypus/__init__.py,v 1.13 2002/03/15 09:03:37 rgbecker Exp $
__version__=''' $Id: __init__.py,v 1.13 2002/03/15 09:03:37 rgbecker Exp $ '''
__doc__=''
from reportlab.platypus.flowables import Flowable, Image, Macro, PageBreak, Preformatted, Spacer, XBox, \
CondPageBreak, KeepTogether
from reportlab.platypus.paragraph import Paragraph, cleanBlockQuotedText, ParaLines
from reportlab.platypus.paraparser import ParaFrag
from reportlab.platypus.tables import Table, TableStyle, CellStyle
from reportlab.platypus.frames import Frame
from reportlab.platypus.doctemplate import BaseDocTemplate, NextPageTemplate, PageTemplate, ActionFlowable, \
SimpleDocTemplate, FrameBreak, PageBegin
from xpreformatted import XPreformatted
|
Add PageBegin to pkg exports
|
Add PageBegin to pkg exports
|
Python
|
bsd-3-clause
|
makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile
|
f678f5c1a197c504ae6703f3b4e5658f9e2db1f6
|
setuptools/tests/py26compat.py
|
setuptools/tests/py26compat.py
|
import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: self.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
tarfile_open = _tarfile_open_ex
|
import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
Remove spurious reference to self. Remove debugging code.
|
Remove spurious reference to self. Remove debugging code.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
427dab842e2d8aea1610c3e23d792119dc60c94b
|
moksha/widgets/jquery_ui_theme.py
|
moksha/widgets/jquery_ui_theme.py
|
from tw.api import Widget, CSSLink
class JQueryUITheme(Widget):
css = [CSSLink(link='/css/jquery-ui/ui.theme.css', modname=__name__)]
template = ''
|
"""
:mod:`moksha.widgets.jquery_ui_theme` - jQuery UI Theme
=======================================================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from tw.api import Widget, CSSLink, CSSLink
ui_theme_css = CSSLink(link='/css/jquery-ui/ui.theme.css', modname=__name__)
ui_base_css = CSSLink(link='/css/jquery-ui/ui.base.css',
css=[ui_theme_css],
modname=__name__)
|
Update our customized jQuery ui widgets
|
Update our customized jQuery ui widgets
|
Python
|
apache-2.0
|
pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,mokshaproject/moksha,lmacken/moksha,pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,ralphbean/moksha,ralphbean/moksha,mokshaproject/moksha,ralphbean/moksha,pombredanne/moksha,pombredanne/moksha
|
79d8c1e95f3c876e600e1637253c7afcf3f36763
|
nn_patterns/explainer/__init__.py
|
nn_patterns/explainer/__init__.py
|
from .base import *
from .gradient_based import *
from .misc import *
from .pattern_based import *
from .relevance_based import *
def create_explainer(name,
output_layer, patterns=None, to_layer=None, **kwargs):
return {
# Gradient based
"gradient": GradientExplainer,
"deconvnet": DeConvNetExplainer,
"guided": GuidedBackpropExplainer,
"gradient.alt": AlternativeGradientExplainer,
# Relevance based
"lrp.z": LRPZExplainer,
"lrp.eps": LRPEpsExplainer,
# Pattern based
"patternnet": PatternNetExplainer,
"patternnet.guided": GuidedPatternNetExplainer,
"patternlrp": PatternLRPExplainer,
}[name](output_layer, patterns=patterns, to_layer=to_layer, **kwargs)
|
from .base import *
from .gradient_based import *
from .misc import *
from .pattern_based import *
from .relevance_based import *
def create_explainer(name,
output_layer, patterns=None, to_layer=None, **kwargs):
return {
# Utility.
"input": InputExplainer,
"random": RandomExplainer,
# Gradient based
"gradient": GradientExplainer,
"deconvnet": DeConvNetExplainer,
"guided": GuidedBackpropExplainer,
"gradient.alt": AlternativeGradientExplainer,
# Relevance based
"lrp.z": LRPZExplainer,
"lrp.eps": LRPEpsExplainer,
# Pattern based
"patternnet": PatternNetExplainer,
"patternnet.guided": GuidedPatternNetExplainer,
"patternlrp": PatternLRPExplainer,
}[name](output_layer, patterns=patterns, to_layer=to_layer, **kwargs)
|
Add input and random explainer to utility function.
|
Add input and random explainer to utility function.
|
Python
|
mit
|
pikinder/nn-patterns
|
689417cef23297e54b5f082e31539bd2381798bf
|
Persistence/RedisPersist.py
|
Persistence/RedisPersist.py
|
import redis
class RedisPersist:
_redis_connection = None
def __init__(self, host="localhost", port=6379, db=0):
self._redis_connection = redis.StrictRedis(
host=host,
port=port,
db=db
)
self._redis_connection.set('tmp_validate', 'tmp_validate')
def save(self, key=None, jsonstr=None):
if key is None:
raise ValueError("Key must be present to persist game.")
if jsonstr is None:
raise ValueError("JSON is badly formed or not present")
self._redis_connection.set(key, str(jsonstr))
def load(self, key=None):
if key is None:
raise ValueError("Key must be present to load game")
return_result = self._redis_connection.get(key)
if return_result is not None:
return_result = str(return_result)
return return_result
|
import redis
class RedisPersist:
_redis_connection = None
def __init__(self, host="localhost", port=6379, db=0):
self._redis_connection = redis.StrictRedis(
host=host,
port=port,
db=db
)
self._redis_connection.set('tmp_validate', 'tmp_validate')
def save(self, key=None, jsonstr=None):
if key is None:
raise ValueError("Key must be present to persist game.")
if jsonstr is None:
raise ValueError("JSON is badly formed or not present")
self._redis_connection.set(key, str(jsonstr), ex=(60*60))
def load(self, key=None):
if key is None:
raise ValueError("Key must be present to load game")
return_result = self._redis_connection.get(key)
if return_result is not None:
return_result = str(return_result)
return return_result
|
Remove debugging statements and provide support for Python 2.7
|
Remove debugging statements and provide support for Python 2.7
|
Python
|
apache-2.0
|
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
|
5f430b076ad70c23c430017a6aa7a7893530e995
|
deflect/management/commands/checkurls.py
|
deflect/management/commands/checkurls.py
|
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.bad_redirect_text(url, e)
mail_managers('go.corban.edu URL report', message)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
domain = Site.objects.get_current().domain
def handle_noargs(self, *args, **options):
message = ''
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
message += self.url_exception_text(url, e)
mail_managers('URL report for %s' % self.domain, message)
def url_exception_text(self, url, exception):
"""Return text block for a URL exception."""
base = 'http://%s' % self.domain
return """
Redirect {key} with target {target} returned {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
Improve subject and text of URL report email
|
Improve subject and text of URL report email
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
a28b2bc45b69503a8133b0df98ffa96d9aa4e229
|
helusers/migrations/0002_add_oidcbackchannellogoutevent.py
|
helusers/migrations/0002_add_oidcbackchannellogoutevent.py
|
# Generated by Django 3.2.4 on 2021-06-21 05:46
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("helusers", "0001_add_ad_groups"),
]
operations = [
migrations.CreateModel(
name="OIDCBackChannelLogoutEvent",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
("iss", models.CharField(db_index=True, max_length=4096)),
("sub", models.CharField(blank=True, db_index=True, max_length=4096)),
("sid", models.CharField(blank=True, db_index=True, max_length=4096)),
],
options={
"unique_together": {("iss", "sub", "sid")},
},
),
]
|
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("helusers", "0001_add_ad_groups"),
]
operations = [
migrations.CreateModel(
name="OIDCBackChannelLogoutEvent",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
("iss", models.CharField(db_index=True, max_length=4096)),
("sub", models.CharField(blank=True, db_index=True, max_length=4096)),
("sid", models.CharField(blank=True, db_index=True, max_length=4096)),
],
options={
"verbose_name": "OIDC back channel logout event",
"verbose_name_plural": "OIDC back channel logout events",
"unique_together": {("iss", "sub", "sid")},
},
),
]
|
Modify migration file to include meta data changes
|
Modify migration file to include meta data changes
The OIDCBackChannelLogoutEvent model's meta data was changed in commit
f62a72b29f. Although this has no effect on the database, Django still
wants to include the meta data in migrations. Since this migration file
isn't yet included in any release, it can be modified, instead of
creating a new migration file only for the meta data change.
|
Python
|
bsd-2-clause
|
City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers
|
83cd6a78a61a81bb2e431ee493dbe9b443e05927
|
fireplace/cards/wog/neutral_legendary.py
|
fireplace/cards/wog/neutral_legendary.py
|
from ..utils import *
##
# Minions
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
|
from ..utils import *
##
# Minions
|
Fix copypaste error in card definitions
|
Fix copypaste error in card definitions
|
Python
|
agpl-3.0
|
NightKev/fireplace,beheh/fireplace,jleclanche/fireplace
|
e8f2f1c9db328dd8116a44d9d934ecef3bc7fb5e
|
enum34_custom.py
|
enum34_custom.py
|
from enum import Enum, EnumMeta
from functools import total_ordering
class _MultiValueMeta(EnumMeta):
def __init__(self, cls, bases, classdict):
# make sure we only have tuple values, not single values
for member in self.__members__.values():
if not isinstance(member.value, tuple):
raise TypeError('{} = {!r}, should be tuple!'
.format(member.name, member.value))
def __call__(cls, value):
"""Return the appropriate instance with any of the values listed."""
for member in cls:
if value in member.value:
return member
# raise ValueError otherwise
return super().__call__(value)
class MultiValueEnum(Enum, metaclass=_MultiMeta):
"""Enum subclass where members are declared as tuples."""
@total_ordering
class OrderableMixin:
"""Mixin for comparable Enums. The order is the definition order
from smaller to bigger.
"""
def __eq__(self, other):
if self.__class__ is other.__class__:
return self.value == other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
names = self.__class__._member_names_
return names.index(self.name) < names.index(other.name)
return NotImplemented
|
from enum import Enum, EnumMeta
from functools import total_ordering
class _MultiValueMeta(EnumMeta):
def __init__(self, cls, bases, classdict):
# make sure we only have tuple values, not single values
for member in self.__members__.values():
if not isinstance(member.value, tuple):
raise TypeError('{} = {!r}, should be tuple!'
.format(member.name, member.value))
def __call__(cls, value):
"""Return the appropriate instance with any of the values listed."""
for member in cls:
if value in member.value:
return member
else:
raise ValueError("%s is not a valid %s" % (value, cls.__name__))
class MultiValueEnum(Enum, metaclass=_MultiMeta):
"""Enum subclass where members are declared as tuples."""
@total_ordering
class OrderableMixin:
"""Mixin for comparable Enums. The order is the definition order
from smaller to bigger.
"""
def __eq__(self, other):
if self.__class__ is other.__class__:
return self.value == other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
names = self.__class__._member_names_
return names.index(self.name) < names.index(other.name)
return NotImplemented
|
Raise ValueError explicitly from __call__ rather than with super()
|
Raise ValueError explicitly from __call__ rather than with super()
because super() would make another lookup, but we already know the value isn't there.
|
Python
|
mit
|
kissgyorgy/enum34-custom
|
739a5a85a455105f01013b20762b1b493c4d5027
|
deflect/views.py
|
deflect/views.py
|
from __future__ import unicode_literals
import base32_crockford
import logging
from django.db.models import F
from django.http import Http404
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from .models import ShortURL
from .models import VanityURL
from .utils import add_query_params
logger = logging.getLogger(__name__)
def redirect(request, key):
"""
Given the short URL key, update the statistics and redirect the
user to the destination URL, including available Google Analytics
parameters.
"""
try:
alias = VanityURL.objects.select_related().get(alias=key.upper())
key_id = alias.redirect.id
except VanityURL.DoesNotExist:
try:
key_id = base32_crockford.decode(key)
except ValueError as e:
logger.warning("Error decoding redirect '%s': %s" % (key, e))
raise Http404
redirect = get_object_or_404(ShortURL, pk=key_id)
ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1,
last_used=now())
# Inject Google campaign parameters
utm_params = {'utm_source': redirect.key,
'utm_campaign': redirect.campaign,
'utm_content': redirect.content,
'utm_medium': redirect.medium}
url = add_query_params(redirect.long_url, utm_params)
return HttpResponsePermanentRedirect(url)
|
from __future__ import unicode_literals
import base32_crockford
import logging
from django.db.models import F
from django.http import Http404
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from .models import ShortURL
from .models import VanityURL
from .utils import add_query_params
logger = logging.getLogger(__name__)
def redirect(request, key):
"""
Given the short URL key, update the statistics and redirect the
user to the destination URL, including available Google Analytics
parameters.
"""
try:
alias = VanityURL.objects.select_related().get(alias=key.upper())
key_id = alias.redirect.id
except VanityURL.DoesNotExist:
try:
key_id = base32_crockford.decode(key)
except ValueError as e:
logger.warning("Error decoding redirect: %s" % e)
raise Http404
redirect = get_object_or_404(ShortURL, pk=key_id)
ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1,
last_used=now())
# Inject Google campaign parameters
utm_params = {'utm_source': redirect.key,
'utm_campaign': redirect.campaign,
'utm_content': redirect.content,
'utm_medium': redirect.medium}
url = add_query_params(redirect.long_url, utm_params)
return HttpResponsePermanentRedirect(url)
|
Simplify invalid decode warning text
|
Simplify invalid decode warning text
The string is already displayed in the error text, so there's no
reason to duplicate it.
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
53ba55615fbd02e83212aecaa0c37d1887adfc73
|
tests/test_tracer.py
|
tests/test_tracer.py
|
import unittest
import sys
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
def check(body):
with self.assertRaises(ValueError):
exec("""
from birdseye.tracer import TreeTracerBase
@TreeTracerBase()
async def f(): """ + body)
check('pass')
if sys.version_info >= (3, 6):
check('yield 1')
|
import sys
import unittest
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
from birdseye.tracer import TreeTracerBase
tracer = TreeTracerBase()
with self.assertRaises(ValueError):
exec("""
@tracer
async def f(): pass""")
if sys.version_info >= (3, 6):
with self.assertRaises(ValueError):
exec("""
@tracer
async def f(): yield 1""")
|
Fix inner exec syntax error in python 2.7
|
Fix inner exec syntax error in python 2.7
|
Python
|
mit
|
alexmojaki/birdseye,alexmojaki/birdseye,alexmojaki/birdseye,alexmojaki/birdseye
|
31d1e9a991923dcd748f26b3533f2736f04f6454
|
tests/test_typing.py
|
tests/test_typing.py
|
import typing
from trafaretrecord import TrafaretRecord
def test_initialization():
class A(TrafaretRecord):
a: int
b: str
c: typing.List[int]
tmp = A(a=1, b='1', c=[1, 2, 3])
assert repr(tmp) == "A(a=1, b='1', c=[1, 2, 3])"
assert tmp._field_types == {'a': int, 'b': str, 'c': typing.List[int]}
def test_class_properties():
class A(TrafaretRecord):
a: int
b: str
c: typing.List[int]
@property
def B(self):
return self.b.upper()
@staticmethod
def static():
return 'static method result'
@classmethod
def initialize(cls, *args, **kwargs):
return cls(*args, **kwargs)
tmp = A(a=1, b='b', c=[1, 2, 3])
assert tmp.b == 'b'
assert tmp.B == 'B'
assert tmp.static() == 'static method result'
assert A.initialize(a=3, b='B', c=[4, 5, 6]) == A(a=3, b='B', c=[4, 5, 6])
|
import typing
from trafaretrecord import TrafaretRecord
def test_initialization():
class A(TrafaretRecord):
a: int
b: str
c: typing.List[int]
tmp = A(a=1, b='1', c=[1, 2, 3])
assert repr(tmp) == "A(a=1, b='1', c=[1, 2, 3])"
assert tmp._field_types == {'a': int, 'b': str, 'c': typing.List[int]}
def test_class_properties():
class A(TrafaretRecord):
a: int
b: str
c: typing.List[int]
@property
def B(self):
return self.b.upper()
@B.setter
def B(self, value):
self._replace(b=value.lower())
@staticmethod
def static():
return 'static method result'
@classmethod
def initialize(cls, *args, **kwargs):
return cls(*args, **kwargs)
tmp = A(a=1, b='b', c=[1, 2, 3])
assert tmp.b == 'b'
assert tmp.B == 'B'
tmp.B = 'BB'
assert tmp.b == 'bb'
assert tmp.B == 'BB'
assert tmp.static() == 'static method result'
assert A.initialize(a=3, b='B', c=[4, 5, 6]) == A(a=3, b='B', c=[4, 5, 6])
|
Add test for property setter
|
Add test for property setter
|
Python
|
mit
|
vovanbo/trafaretrecord,vovanbo/trafaretrecord
|
57ead9af05c95cee2354c55bb73f5fe26be3a256
|
rasterio/rio/main.py
|
rasterio/rio/main.py
|
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
# Find and load all entry points in the rasterio.rio_commands group.
# This includes the standard commands included with Rasterio as well
# as commands provided by other packages.
#
# At a mimimum, commands must use the rasterio.rio.cli.cli command
# group decorator like so:
#
# from rasterio.rio.cli import cli
#
# @cli.command()
# def foo(...):
# ...
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
|
# main: loader of all the command entry points.
import sys
import traceback
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
# Find and load all entry points in the rasterio.rio_commands group.
# This includes the standard commands included with Rasterio as well
# as commands provided by other packages.
#
# At a mimimum, commands must use the rasterio.rio.cli.cli command
# group decorator like so:
#
# from rasterio.rio.cli import cli
#
# @cli.command()
# def foo(...):
# ...
for entry_point in iter_entry_points('rasterio.rio_commands'):
try:
entry_point.load()
except ImportError:
# Catch this so a busted plugin doesn't take down the CLI.
# Handled by registering a stub that does nothing other than
# explain the error.
msg = (
"Warning: plugin module could not be loaded. Contact "
"its author for help.\n\n\b\n"
+ traceback.format_exc())
short_msg = (
"Warning: plugin module could not be loaded. See "
"`rio %s --help` for details." % entry_point.name)
@cli.command(entry_point.name, help=msg, short_help=short_msg)
def cmd_stub():
sys.exit(0)
|
Handle plugin load errors in a helpful way.
|
Handle plugin load errors in a helpful way.
On catching an ImportError, we make a dummy/stub subcommand that
sits in the list of subcommands as expected and reports the error
there.
|
Python
|
bsd-3-clause
|
youngpm/rasterio,johanvdw/rasterio,clembou/rasterio,youngpm/rasterio,njwilson23/rasterio,perrygeo/rasterio,johanvdw/rasterio,njwilson23/rasterio,kapadia/rasterio,perrygeo/rasterio,youngpm/rasterio,kapadia/rasterio,brendan-ward/rasterio,njwilson23/rasterio,kapadia/rasterio,brendan-ward/rasterio,johanvdw/rasterio,clembou/rasterio,clembou/rasterio,perrygeo/rasterio,brendan-ward/rasterio
|
69a339c792e2545cbd12c126a5b0865e4cf1e7e5
|
paystackapi/tests/test_product.py
|
paystackapi/tests/test_product.py
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
# class TestProduct(BaseTestCase):
# @httpretty.activate
# def test_valid_create(self):
# pass
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
class TestProduct(BaseTestCase):
@httpretty.activate
def test_product_create(self):
"""Method defined to test product creation."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Product successfully created"}',
status=201,
)
response = Product.create(
name="Product pypaystack test", description="my test description",
price=500000, currency="NGN"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_product_list(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}',
status=201,
)
response = Product.list()
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.fetch(5499)
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.update(product_id=5499, name="Product pypaystack test",
description="my test description", price=500000000,
currency="USD"
)
self.assertEqual(response['status'], True)
|
Add test cases for product.
|
Add test cases for product.
|
Python
|
mit
|
andela-sjames/paystack-python
|
096564c95371510769a7dec31cd5d90bf2c56955
|
scripts/migration/migrate_confirmed_user_emails.py
|
scripts/migration/migrate_confirmed_user_emails.py
|
"""Ensure that users with User.emails == [] have User.username inserted.
"""
import logging
import sys
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with username not in confirmed emails")
for user in get_users_with_username_not_in_emails():
add_username_to_emails(user)
logger.info(repr(user))
if not dry_run:
user.save()
def get_users_with_username_not_in_emails():
return models.User.find(
Q('date_confirmed', 'ne', None)
& Q('emails', 'eq', [])
)
def add_username_to_emails(user):
user.emails.append(user.username)
if __name__ == '__main__':
main()
|
"""Ensure that confirmed users' usernames are included in their emails field.
"""
import logging
import sys
from modularodm import Q
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
count = 0
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Finding users with username not in confirmed emails")
for user in get_users_with_username_not_in_emails():
user.emails.append(user.username)
logger.info(repr(user))
if not dry_run:
user.save()
count += 1
logger.info('Migrated {} users'.format(count))
def get_users_with_username_not_in_emails():
return (
user for user in
models.User.find(Q('date_confirmed', 'ne', None))
if user.is_active and
user.username.lower() not in [email.lower() for email in user.emails] and
user.username is not None
)
if __name__ == '__main__':
main()
|
Update migration script for users whose usernames aren't in emails field
|
Update migration script for users whose usernames aren't in emails field
OSF-5462
Previously the script only migrated users who had an empty emails
field. This updates the script to also handle users whose username
isn't in the emails field, even when the emails field isn't empty
|
Python
|
apache-2.0
|
wearpants/osf.io,chrisseto/osf.io,felliott/osf.io,samchrisinger/osf.io,saradbowman/osf.io,jnayak1/osf.io,alexschiller/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,abought/osf.io,icereval/osf.io,caneruguz/osf.io,RomanZWang/osf.io,abought/osf.io,TomBaxter/osf.io,hmoco/osf.io,Nesiehr/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,aaxelb/osf.io,amyshi188/osf.io,chennan47/osf.io,mattclark/osf.io,abought/osf.io,doublebits/osf.io,sloria/osf.io,crcresearch/osf.io,doublebits/osf.io,TomHeatwole/osf.io,wearpants/osf.io,Nesiehr/osf.io,leb2dg/osf.io,alexschiller/osf.io,RomanZWang/osf.io,erinspace/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,mfraezz/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,acshi/osf.io,sloria/osf.io,mluke93/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,mluke93/osf.io,icereval/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,binoculars/osf.io,zachjanicki/osf.io,caneruguz/osf.io,cslzchen/osf.io,chrisseto/osf.io,jnayak1/osf.io,zachjanicki/osf.io,mluo613/osf.io,KAsante95/osf.io,baylee-d/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,chrisseto/osf.io,cwisecarver/osf.io,brandonPurvis/osf.io,emetsger/osf.io,mluo613/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,samchrisinger/osf.io,felliott/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,hmoco/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,billyhunt/osf.io,adlius/osf.io,emetsger/osf.io,TomHeatwole/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,hmoco/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,erinspace/osf.io,leb2dg/osf.io,rdhyee/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,icereval/osf.io,RomanZWang/osf.io,amyshi188/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,alexschiller/osf.io,kch8qx/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,zachjanicki/os
f.io,mattclark/osf.io,chennan47/osf.io,DanielSBrown/osf.io,amyshi188/osf.io,adlius/osf.io,cslzchen/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,pattisdr/osf.io,caseyrollins/osf.io,kwierman/osf.io,binoculars/osf.io,zamattiac/osf.io,aaxelb/osf.io,caneruguz/osf.io,kch8qx/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,mluo613/osf.io,baylee-d/osf.io,jnayak1/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,cslzchen/osf.io,asanfilippo7/osf.io,DanielSBrown/osf.io,acshi/osf.io,acshi/osf.io,SSJohns/osf.io,jnayak1/osf.io,adlius/osf.io,kch8qx/osf.io,chrisseto/osf.io,mattclark/osf.io,brianjgeiger/osf.io,doublebits/osf.io,GageGaskins/osf.io,caneruguz/osf.io,baylee-d/osf.io,mluo613/osf.io,KAsante95/osf.io,kwierman/osf.io,SSJohns/osf.io,mluo613/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,zamattiac/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,TomBaxter/osf.io,emetsger/osf.io,mfraezz/osf.io,wearpants/osf.io,laurenrevere/osf.io,alexschiller/osf.io,acshi/osf.io,leb2dg/osf.io,saradbowman/osf.io,kwierman/osf.io,adlius/osf.io,GageGaskins/osf.io,crcresearch/osf.io,crcresearch/osf.io,erinspace/osf.io,leb2dg/osf.io,mluke93/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,hmoco/osf.io,kch8qx/osf.io,KAsante95/osf.io,pattisdr/osf.io,abought/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,emetsger/osf.io,rdhyee/osf.io,billyhunt/osf.io,felliott/osf.io,rdhyee/osf.io,felliott/osf.io,alexschiller/osf.io,GageGaskins/osf.io,sloria/osf.io,wearpants/osf.io,doublebits/osf.io,caseyrollins/osf.io,acshi/osf.io,DanielSBrown/osf.io,KAsante95/osf.io
|
4053e98a8d337628760233c40915fde43f22d1e2
|
events/models.py
|
events/models.py
|
from django.db import models
from django.conf import settings
# Create your models here.
#Events :
# Des users peuvent participer à un event
# Les gens peuvnet être "intéressés"
# Utiliser https://github.com/thoas/django-sequere ?
# API hackeragenda
class Event(models.Model):
STATUS_CHOICES = (
("i", "in preparation"),
("r", "ready"),
("p", "planned"),
("j", "just an idea"),
)
place = models.CharField(max_length=300)
start = models.DateTimeField()
stop = models.DateTimeField()
title = models.CharField(max_length=300)
status = models.CharField(max_length=1, choices=STATUS_CHOICES)
organizer = models.OneToOneField(settings.AUTH_USER_MODEL)
|
from django.db import models
from django.conf import settings
# Create your models here.
#Events :
# Des users peuvent participer à un event
# Les gens peuvnet être "intéressés"
# Utiliser https://github.com/thoas/django-sequere ?
# API hackeragenda
class Event(models.Model):
STATUS_CHOICES = (
("i", "in preparation"),
("r", "ready"),
("p", "planned"),
("j", "just an idea"),
)
place = models.CharField(max_length=300)
start = models.DateTimeField()
stop = models.DateTimeField()
title = models.CharField(max_length=300)
status = models.CharField(max_length=1, choices=STATUS_CHOICES)
organizer = models.ForeignKey(settings.AUTH_USER_MODEL)
|
Use ForeignKey instead of OneToOneField for event organizer
|
[fix] Use ForeignKey instead of OneToOneField for event organizer
|
Python
|
agpl-3.0
|
UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator
|
352aeadf68c102b03dc7fcc243e46c3442132c1d
|
pychecker/test_input/test70.py
|
pychecker/test_input/test70.py
|
'test checking constant conditions'
# __pychecker__ = ''
def func1(x):
'should not produce a warning'
if 1:
pass
while 1:
print x
break
assert x, 'test'
return 0
def func2(x):
'should produce a warning'
__pychecker__ = 'constant1'
if 1:
pass
while 1:
print x
break
return 0
def func3(x):
'should produce a warning'
if 21:
return 1
if 31:
return 2
assert(x, 'test')
assert(5, 'test')
assert 5, 'test'
if 'str':
return 3
return 4
def func4(x):
'should not produce a warning'
if x == 204 or x == 201 or 200 <= x < 300:
x = 0
if x == 1:
pass
while x == 'str':
print x
break
return 0
|
'test checking constant conditions'
# __pychecker__ = ''
def func1(x):
'should not produce a warning'
if 1:
pass
while 1:
print x
break
assert x, 'test'
return 0
def func2(x):
'should produce a warning'
__pychecker__ = 'constant1'
if 1:
pass
while 1:
print x
break
return 0
def func3(x):
'should produce a warning'
if 21:
return 1
if 31:
return 2
assert(x, 'test')
assert(5, 'test')
assert 5, 'test'
if 'str':
return 3
return 4
def func4(x):
'should not produce a warning'
if x == 204 or x == 201 or 200 <= x < 300:
x = 0
if x == 1:
pass
while x == 'str':
print x
break
return 0
def func5(need_quotes, text):
'should not produce a warning'
return (need_quotes) and ('"%s"' % text) or (text)
|
Fix a problem reported by Greg Ward and pointed out by John Machin when doing:
|
Fix a problem reported by Greg Ward and pointed out by John Machin when doing:
return (need_quotes) and ('"%s"' % text) or (text)
The following warning was generated:
Using a conditional statement with a constant value ("%s")
This was because even the stack wasn't modified after a BINARY_MODULO
to say the value on the stack was no longer const.
|
Python
|
bsd-3-clause
|
smspillaz/pychecker,smspillaz/pychecker,smspillaz/pychecker
|
55646644c18fe5e10669743025cc00b8225f9908
|
south/introspection_plugins/__init__.py
|
south/introspection_plugins/__init__.py
|
# This module contains built-in introspector plugins for various common
# Django apps.
# These imports trigger the lower-down files
import south.introspection_plugins.geodjango
import south.introspection_plugins.django_tagging
import south.introspection_plugins.django_taggit
import south.introspection_plugins.django_objectpermissions
|
# This module contains built-in introspector plugins for various common
# Django apps.
# These imports trigger the lower-down files
import south.introspection_plugins.geodjango
import south.introspection_plugins.django_tagging
import south.introspection_plugins.django_taggit
import south.introspection_plugins.django_objectpermissions
import south.introspection_plugins.annoying_autoonetoone
|
Add import of django-annoying patch
|
Add import of django-annoying patch
|
Python
|
apache-2.0
|
philipn/django-south,philipn/django-south,nimnull/django-south,RaD/django-south,RaD/django-south,RaD/django-south,nimnull/django-south
|
65a78d5aafdbba03812995f38e31fba0621e350e
|
setup_utils.py
|
setup_utils.py
|
import os
import re
def update_pins(setup_args):
# Use requirements and constraints to set version pins
packages = set()
with open('./requirements.txt') as requirements:
for r in requirements:
if r.lower().strip() == 'dallinger':
continue
if not r.startswith('-') or r.startswith('#'):
packages.add(r.strip().lower())
requirements = []
REQUIREMENT_RE = re.compile(r'^(([^=]+)==[^#]+)(#.*)?$')
with open('./constraints.txt') as constraints:
for c in constraints:
matches = REQUIREMENT_RE.match(c.strip())
if not matches:
continue
match = matches.group(2).lower().strip()
req = matches.group(1).strip()
if match in packages:
requirements.append(req)
# pin extra requirements
for extra in setup_args['extras_require']:
extra_packages = setup_args['extras_require'][extra]
for i, package in enumerate(extra_packages[:]):
if package.lower() == match:
extra_packages[i] = req
if requirements:
setup_args['install_requires'] = requirements
# If not on Heroku, install setuptools-markdown.
try:
os.environ["DYNO"]
except KeyError:
setup_args.update({
"setup_requires": ['setuptools-markdown==0.2'],
"long_description_markdown_filename": 'README.md',
})
|
import os
import re
REQUIREMENT_RE = re.compile(r'^(([^=]+)[=<>]+[^#]+)(#.*)?$')
def update_pins(setup_args):
# Use requirements and constraints to set version pins
packages = set()
install_dir = os.path.dirname(__file__)
with open(os.path.join(install_dir, 'requirements.txt')) as requirements:
for r in requirements:
if r.lower().strip() == 'dallinger':
continue
if not r.startswith('-') or r.startswith('#'):
packages.add(r.strip().lower())
requirements = []
with open(os.path.join(install_dir, 'constraints.txt')) as constraints:
for c in constraints:
matches = REQUIREMENT_RE.match(c.strip())
if not matches:
continue
match = matches.group(2).lower().strip()
req = matches.group(1).strip()
if match in packages:
requirements.append(req)
# pin extra requirements
for extra in setup_args['extras_require']:
extra_packages = setup_args['extras_require'][extra]
for i, package in enumerate(extra_packages[:]):
if package.lower() == match:
extra_packages[i] = req
if requirements:
setup_args['install_requires'] = requirements
# If not on Heroku, install setuptools-markdown.
try:
os.environ["DYNO"]
except KeyError:
setup_args.update({
"setup_requires": ['setuptools-markdown==0.2'],
"long_description_markdown_filename": 'README.md',
})
|
Address review concerns: allow range requirements, specify requirments file path explicitly, ...
|
Address review concerns: allow range requirements, specify requirments file path explicitly, ...
|
Python
|
mit
|
Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger
|
14edc2e547f3dbad0777c8fccc23a0d0b6a0019f
|
plugins/star.py
|
plugins/star.py
|
import urllib.request
import urllib.error
import json
import plugin
import command
import message
import os
def onInit(plugin):
star_command = command.command(plugin, 'star', shortdesc='Post a random picture of Star Butterfly to the channel')
return plugin.plugin.plugin(plugin, 'star', [star_command])
def onCommand(message_in):
if message_in.command == 'star':
try:
f = urllib.request.urlopen("https://sydneyerickson.me/starapi/rand.php").read().decode("utf-8")
except urllib.error.URLError as e:
return message.message(body='There was an issue connecting to XKCD'.format(message_in.body))
imageName = f.split('/')
if os.path.isfile('cache/star_' + imageName[-1]):
pass
else:
urllib.request.urlretrieve(f, 'cache/star_' + imageName[-1])
return message.message(file='cache/star_' + imageName[-1])
|
import urllib.request
import urllib.error
import json
import plugin
import command
import message
import caching
import os
def onInit(plugin):
star_command = command.command(plugin, 'star', shortdesc='Post a random picture of Star Butterfly to the channel')
return plugin.plugin.plugin(plugin, 'star', [star_command])
def onCommand(message_in):
if message_in.command == 'star':
try:
f = urllib.request.urlopen("https://sydneyerickson.me/starapi/rand.php").read().decode("utf-8")
except urllib.error.URLError as e:
return message.message(body='There was an issue connecting to Starapi'.format(message_in.body))
imageName = f.split('/')
caching.downloadToCache(f, imageName[-1])
return message.message(file='cache/star_' + imageName[-1])
|
Update Star plugin to use new caching API
|
Update Star plugin to use new caching API
|
Python
|
apache-2.0
|
dhinakg/BitSTAR,dhinakg/BitSTAR,StarbotDiscord/Starbot,StarbotDiscord/Starbot
|
0433623b8e15559fe304e6406e58b1cd2639493f
|
apps/polls/tests.py
|
apps/polls/tests.py
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
|
import datetime
from django.utils import timezone
from django.test import TestCase
from apps.polls.models import Poll
class PollMethodTests(TestCase):
def test_was_published_recently_with_future_poll(self):
"""
was_published_recently() should return False for polls whose
pub_date is in the future
"""
future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30))
self.assertEqual(future_poll.was_published_recently(), False)
|
Create a test to expose the bug
|
Create a test to expose the bug
|
Python
|
bsd-3-clause
|
datphan/teracy-tutorial
|
da952803636a0701331008a025b6789de89ce152
|
modloader/modclass.py
|
modloader/modclass.py
|
import modinfo
class Mod():
"""The Mod class
This is supposed to act like a superclass for mods.
Execution order is as follows:
mod_load -> mod_complete
"""
def mod_info(self):
"""Get the mod info
Returns:
A tuple with the name, version, and author
"""
raise Exception("Mod info isn't overriden")
def mod_load(self):
"""Executes when a mod is loaded
This is where you put patcher code
Other mods may not be fully loaded yet. If you want this functionality, see mod_complete
"""
pass
def mod_complete(self):
"""Executes when all mods are loaded"""
pass
def loadable_mod(modclass):
"""Annotation to add a Mod subclass to the mod list
Args:
modclass (Mod): The Mod class
Raises:
Exception: If the given class is not a subclass of Mod
"""
if not issubclass(modclass, Mod):
raise Exception("Class must be a subclass of Mod")
mod = modclass() # Create a new instance of the class
mod_name, _, _ = mod.mod_info() # Get just the mod name
mod.mod_load() # Load the mod
modinfo.add_mod(mod_name, mod)
|
import modinfo
import sys
class Mod():
"""The Mod class
This is supposed to act like a superclass for mods.
Execution order is as follows:
mod_load -> mod_complete
"""
def mod_info(self):
"""Get the mod info
Returns:
A tuple with the name, version, and author
"""
raise Exception("Mod info isn't overriden")
def mod_load(self):
"""Executes when a mod is loaded
This is where you put patcher code
Other mods may not be fully loaded yet. If you want this functionality, see mod_complete
"""
pass
def mod_complete(self):
"""Executes when all mods are loaded"""
pass
def loadable_mod(modclass):
"""Annotation to add a Mod subclass to the mod list
Args:
modclass (Mod): The Mod class
Raises:
Exception: If the given class is not a subclass of Mod
"""
if not issubclass(modclass, Mod):
raise Exception("Class must be a subclass of Mod")
mod = modclass() # Create a new instance of the class
mod_name, version, author = mod.mod_info()
mod.mod_load() # Load the mod
modinfo.add_mod(modclass.__module__, (mod, mod_name, version, author, sys.modules[modclass.__module__]))
|
Store more data about a mod in the registry
|
Store more data about a mod in the registry
|
Python
|
mit
|
AWSW-Modding/AWSW-Modtools
|
17655f4b099ac840712dd95ad989f7b41301b83c
|
case_conversion/__init__.py
|
case_conversion/__init__.py
|
from case_conversion import (
camelcase, pascalcase, snakecase, dashcase, kebabcase, spinalcase,
constcase, dotcase, separate_words, slashcase, backslashcase)
|
from __future__ import absolute_import
from .case_conversion import (
camelcase, pascalcase, snakecase, dashcase, kebabcase, spinalcase,
constcase, dotcase, separate_words, slashcase, backslashcase)
|
Fix import errors from implicit-relative imports.
|
Fix import errors from implicit-relative imports.
|
Python
|
mit
|
AlejandroFrias/case-conversion
|
2945ae3bb8dd85bd96546cef4ff1e297774d7190
|
checker/checker/__init__.py
|
checker/checker/__init__.py
|
#!/usr/bin/python3
from checker.local import LocalChecker as BaseChecker
#from checker.contest import ContestChecker as BaseChecker
OK = 0
TIMEOUT = 1
NOTWORKING = 2
NOTFOUND = 3
|
#!/usr/bin/python3
from checker.local import LocalChecker as BaseChecker
#from checker.contest import ContestChecker as BaseChecker
OK = 0
TIMEOUT = 1
NOTWORKING = 2
NOTFOUND = 3
_mapping = ["OK", "TIMEOUT", "NOTWORKING", "NOTFOUND"]
def string_to_result(strresult):
return _mapping.index(strresult)
def result_to_string(result):
return _mapping[result]
|
Add forward/reverse mapping of checkerstati
|
Add forward/reverse mapping of checkerstati
|
Python
|
isc
|
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
|
1a837e84a129e99f7734fe0ffdc6ff3a239ecc4a
|
ci/generate_pipeline_yml.py
|
ci/generate_pipeline_yml.py
|
#!/usr/bin/env python
import os
from jinja2 import Template
clusters = ['2_3', '2_4', '2_5', '2_6']
# Commenting out this as we only have one example and it breaks
tiles = [] # [d for d in os.listdir('../examples') if os.path.isdir(os.path.join('../examples', d))]
with open('pipeline.yml.jinja2', 'r') as f:
t = Template(f.read());
with open('pipeline.yml', 'w') as f:
f.write(t.render(clusters=clusters, tiles=tiles))
print("Successfully generated pipeline.yml")
|
#!/usr/bin/env python
import os
from jinja2 import Template
clusters = ['2_5', '2_6', '2_7']
# Commenting out this as we only have one example and it breaks
tiles = [] # [d for d in os.listdir('../examples') if os.path.isdir(os.path.join('../examples', d))]
with open('pipeline.yml.jinja2', 'r') as f:
t = Template(f.read());
with open('pipeline.yml', 'w') as f:
f.write(t.render(clusters=clusters, tiles=tiles))
print("Successfully generated pipeline.yml")
|
Remove 2.3 and 2.4 from CI pipeline
|
Remove 2.3 and 2.4 from CI pipeline
|
Python
|
apache-2.0
|
cf-platform-eng/tile-generator,cf-platform-eng/tile-generator,cf-platform-eng/tile-generator,cf-platform-eng/tile-generator
|
60317dda9795391dd6468b573f5e1038ae1fe384
|
src/apps/utils/db.py
|
src/apps/utils/db.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:return: a list of entities
"""
# Retrieve from RDBMS
entities = model.objects.in_bulk(ids)
#TODO: prefetch_related
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# Filter not found entities
filtered_entities = filter(None, ordered_entities)
return filtered_entities
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
def retrieve_in_order_from_db(model, ids, prefetch=True):
"""
Retrieve entities of the given model from the RDBMS in order given their ids.
:param model: model of the entities
:param ids: ids of the entities
:param prefetch: prefetch many-to-many relationships
:return: a list of entities
"""
# Prefetch related
if prefetch:
relationships = [m2m.attname for m2m in model._meta._many_to_many()]
entities = model.objects.all().prefetch_related(*relationships).in_bulk(ids)
else:
entities = model.objects.in_bulk(ids)
# Order by search order
ordered_entities = [entities.get(id, None) for id in ids]
# Filter not found entities
filtered_entities = filter(None, ordered_entities)
return filtered_entities
|
Optimize DB access: use of prefetch_related()
|
Optimize DB access: use of prefetch_related()
|
Python
|
apache-2.0
|
dvalcarce/filmyou-web,dvalcarce/filmyou-web,dvalcarce/filmyou-web
|
b76b3cbe0d86bd5037ccfd21086ab50803606ec2
|
autobuilder/webhooks.py
|
autobuilder/webhooks.py
|
from buildbot.status.web.hooks.github import GitHubEventHandler
from twisted.python import log
import abconfig
class AutobuilderGithubEventHandler(GitHubEventHandler):
def handle_push(self, payload):
# This field is unused:
user = None
# user = payload['pusher']['name']
repo = payload['repository']['name']
repo_url = payload['repository']['url']
# NOTE: what would be a reasonable value for project?
# project = request.args.get('project', [''])[0]
project = abconfig.get_project_for_url(repo_url,
default_if_not_found=payload['repository']['full_name'])
changes = self._process_change(payload, user, repo, repo_url, project)
log.msg("Received %d changes from github" % len(changes))
return changes, 'git'
|
from buildbot.status.web.hooks.github import GitHubEventHandler
from twisted.python import log
import abconfig
def codebasemap(payload):
return abconfig.get_project_for_url(payload['repository']['url'])
class AutobuilderGithubEventHandler(GitHubEventHandler):
def __init__(self, secret, strict codebase=None):
if codebase is None:
codebase = codebasemap
GitHubEventHandler.__init__(self, secret, strict, codebase)
def handle_push(self, payload):
# This field is unused:
user = None
# user = payload['pusher']['name']
repo = payload['repository']['name']
repo_url = payload['repository']['url']
# NOTE: what would be a reasonable value for project?
# project = request.args.get('project', [''])[0]
project = abconfig.get_project_for_url(repo_url,
default_if_not_found=payload['repository']['full_name'])
changes = self._process_change(payload, user, repo, repo_url, project)
log.msg("Received %d changes from github" % len(changes))
return changes, 'git'
|
Add a codebase generator to the Github web hoook handler, to map the URL to the repo name for use as the codebase.
|
Add a codebase generator to the Github web hoook handler,
to map the URL to the repo name for use as the codebase.
|
Python
|
mit
|
madisongh/autobuilder
|
fa1a383aa194f028e9aa6eb4ff474281dd7c5bfe
|
team2/python/rasacalculator.py
|
team2/python/rasacalculator.py
|
import sys;s='%s: lines %d, RaSa: %d'
def u(z):
r=I=0;b=1
for m in open(z):
r+=1
for k in m:
if '{'==k:b+=1
if ';'==k:I+=b
if '}'==k:b-=1
return(r,I)
c=D=0
for z in sys.argv[1:]:
r,I=u(z);c+=r;D+=I;print s%(z,r,I)
print s%('total',c,D)
|
#!/usr/bin/env python
import argparse
def calculate_file_rasa(file_path):
row_count = 0
multiplier = 1
rasa = 0
for line in open(file_path):
row_count += 1
for char in line:
if char == '{':
multiplier += 1
if char == ';':
rasa += multiplier
if char == '}':
multiplier -= 1
return (row_count, rasa)
def main(args):
total_rows = 0
total_rasa = 0
for file_path in args.argument:
row_count, rasa = calculate_file_rasa(file_path)
total_rows += row_count
total_rasa += rasa
print '%s: lines %d, RaSa: %d' % (file_path, row_count, rasa)
print 'total: lines %d, RaSa: %d' % (total_rows, total_rasa)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('argument', nargs='*')
main(parser.parse_args())
|
Revert to try cleanest solution
|
Revert to try cleanest solution
|
Python
|
mit
|
jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015
|
c03241320138fe7b545b43514e93615473270b0d
|
netbox/dcim/fields.py
|
netbox/dcim/fields.py
|
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from netaddr import AddrFormatError, EUI, mac_unix_expanded
import pprint
class ASNField(models.BigIntegerField):
description = "32-bit ASN field"
default_validators = [
MinValueValidator(1),
MaxValueValidator(4294967295),
]
class mac_unix_expanded_uppercase(mac_unix_expanded):
word_fmt = '%.2X'
class MACAddressField(models.Field):
description = "PostgreSQL MAC Address field"
def python_type(self):
return EUI
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def to_python(self, value):
if value is None:
return value
try:
return EUI(value, version=48, dialect=mac_unix_expanded_uppercase)
except AddrFormatError as e:
raise ValidationError("Invalid MAC address format: {}".format(value))
def db_type(self, connection):
return 'macaddr'
def get_prep_value(self, value):
if not value:
return None
return str(self.to_python(value))
|
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from netaddr import AddrFormatError, EUI, mac_unix_expanded
class ASNField(models.BigIntegerField):
description = "32-bit ASN field"
default_validators = [
MinValueValidator(1),
MaxValueValidator(4294967295),
]
class mac_unix_expanded_uppercase(mac_unix_expanded):
word_fmt = '%.2X'
class MACAddressField(models.Field):
description = "PostgreSQL MAC Address field"
def python_type(self):
return EUI
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def to_python(self, value):
if value is None:
return value
try:
return EUI(value, version=48, dialect=mac_unix_expanded_uppercase)
except AddrFormatError as e:
raise ValidationError("Invalid MAC address format: {}".format(value))
def db_type(self, connection):
return 'macaddr'
def get_prep_value(self, value):
if not value:
return None
return str(self.to_python(value))
|
Remove unneeded import from testing.
|
Remove unneeded import from testing.
|
Python
|
apache-2.0
|
lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox
|
4a8b1a7633279e3276fceb3e12eca852dc583764
|
baro.py
|
baro.py
|
from datetime import datetime
import utils
class Baro:
"""This class contains info about the Void Trader and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro's offers
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_end_string(self):
"""Returns a string containing Baro's departure time
"""
return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
"""Returns a string containing Baro's arrival time
"""
return timedelta_to_string(self.start - datetime.now())
|
from datetime import datetime
import utils
class Baro:
"""This class contains info about the Void Trader and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro's offers
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def is_active(self):
"""Returns True if the Void Trader is currently active, False otherwise
"""
return (self.start < datetime.now() and self.end > datetime.now())
def get_end_string(self):
"""Returns a string containing Baro's departure time
"""
return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
"""Returns a string containing Baro's arrival time
"""
return timedelta_to_string(self.start - datetime.now())
|
Add is_active() method to the Baro class
|
Add is_active() method to the Baro class
|
Python
|
mit
|
pabletos/Hubot-Warframe,pabletos/Hubot-Warframe
|
3e8921b2edcf8a675b6ed496cf5e282c76cc2070
|
retrieveData.py
|
retrieveData.py
|
#!/usr/bin/env python
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
#!/usr/bin/env python
import json, os, requests
from models import db, FoodMenu, FoodServices
key = os.environ.get('UWOPENDATA_APIKEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
def retrieve():
payload = {'key': key}
url = os.environ.get('API_URL')
r = requests.get(url, params=payload)
return r
foodMenu = retrieve().text
foodMenuData = FoodMenu(foodMenu)
serviceInfo = getData('FoodServices').text
serviceInfoData = FoodServices(serviceInfo)
db.session.add(foodMenuData)
db.session.add(serviceInfoData)
db.session.commit()
|
Update retrieve() for FoodMenu data
|
Update retrieve() for FoodMenu data
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
566fc15f136076db5c421ca18f8b1fcb3d332229
|
ovp_projects/views.py
|
ovp_projects/views.py
|
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
#if self.action == 'create':
return serializers.ProjectCreateSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
from ovp_projects import serializers
from ovp_projects import models
from ovp_users import models as users_models
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework import response
from rest_framework import status
class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
"""
ProjectResourceViewSet resource endpoint
"""
queryset = models.Project.objects.all()
lookup_field = 'slug'
lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field
def get_serializer_class(self):
if self.action == 'create':
return serializers.ProjectCreateSerializer
return serializers.ProjectSearchSerializer
def create(self, request, *args, **kwargs):
user = users_models.User.objects.all().first()
request.data['owner'] = user.pk
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
headers = self.get_success_headers(serializer.data)
return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
|
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-projects,OpenVolunteeringPlatform/django-ovp-projects
|
9f9441cf43e66780ca7f24197d3cd9ece923dd30
|
kiva/quartz/__init__.py
|
kiva/quartz/__init__.py
|
# :Author: Robert Kern
# :Copyright: 2004, Enthought, Inc.
# :License: BSD Style
from mac_context import get_mac_context
def get_macport(dc):
"""
Returns the Port or the CGContext of a wxDC (or child class) instance.
"""
if 'GetCGContext' in dir(dc):
ptr = dc.GetCGContext()
return int(ptr)
else:
from macport import get_macport as _get_macport
return _get_macport(str(dc.this))
|
# :Author: Robert Kern
# :Copyright: 2004, Enthought, Inc.
# :License: BSD Style
try:
from mac_context import get_mac_context
except ImportError:
get_mac_context = None
def get_macport(dc):
"""
Returns the Port or the CGContext of a wxDC (or child class) instance.
"""
if 'GetCGContext' in dir(dc):
ptr = dc.GetCGContext()
return int(ptr)
else:
from macport import get_macport as _get_macport
return _get_macport(str(dc.this))
|
Allow kiva.quartz to be imported on non-darwin platforms without error.
|
Allow kiva.quartz to be imported on non-darwin platforms without error.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
98b6f81f68ce4338e932afc14b7b9d4c8a810e71
|
src/dirtyfields/dirtyfields.py
|
src/dirtyfields/dirtyfields.py
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
Use field.to_python to do django type conversions on the field before checking if dirty.
|
Use field.to_python to do django type conversions on the field before checking if dirty.
This solves issues where you might have a decimal field that you write a string to, eg:
>>> m = MyModel.objects.get(id=1)
>>> m.my_decimal_field
Decimal('1.00')
>>> m.my_decimal_field = u'1.00' # from a form or something
>>> m.is_dirty() # currently evaluates to True, should evaluate to False
False
This pull request could probably use some unit testing, but it should be safe as the base class for django fields defines to_python as:
def to_python(self, value):
return value
So, any field type that does not have an explicit to_python method will behave as before this change.
|
Python
|
bsd-3-clause
|
mattcaldwell/django-dirtyfields,georgemarshall/django-dirtyfields
|
1a2cabca5be1b9682e39db12bd52c26f5bb8b5b9
|
src/dicomweb_client/ext/gcp/session_utils.py
|
src/dicomweb_client/ext/gcp/session_utils.py
|
"""Session management utilities for Google Cloud Platform (GCP)."""
from typing import Optional, Any
try:
import google.auth
from google.auth.transport import requests as google_requests
except ImportError:
raise ImportError(
'The `dicomweb-client` package needs to be installed with the '
'"gcp" extra requirements to use this module, as follows: '
'`pip install dicomweb-client[gcp]`')
import requests
def create_session_from_gcp_credentials(
google_credentials: Optional[Any] = None
) -> requests.Session:
"""Creates an authorized session for Google Cloud Platform.
Parameters
----------
google_credentials: Any
Google Cloud credentials.
(see https://cloud.google.com/docs/authentication/production
for more information on Google Cloud authentication).
If not set, will be initialized to ``google.auth.default()``.
Returns
-------
requests.Session
Google Cloud authorized session.
"""
if google_credentials is None:
google_credentials, _ = google.auth.default(
scopes=['https://www.googleapis.com/auth/cloud-platform']
)
return google_requests.AuthorizedSession(google_credentials)
|
"""Session management utilities for Google Cloud Platform (GCP)."""
from typing import Optional, Any
try:
import google.auth
from google.auth.transport import requests as google_requests
except ImportError:
raise ImportError(
'The `dicomweb-client` package needs to be installed with the '
'"gcp" extra requirements to use this module, as follows: '
'`pip install dicomweb-client[gcp]`')
import requests
def create_session_from_gcp_credentials(
google_credentials: Optional[Any] = None
) -> requests.Session:
"""Creates an authorized session for Google Cloud Platform.
Parameters
----------
google_credentials: Any
Google Cloud credentials.
(see https://cloud.google.com/docs/authentication/production
for more information on Google Cloud authentication).
If not set, will be initialized to ``google.auth.default()``.
Returns
-------
requests.Session
Google Cloud authorized session.
Note
----
Credentials will be read from environment variable
``GOOGLE_APPLICATION_CREDENTIALS`` if set.
"""
if google_credentials is None:
google_credentials, _ = google.auth.default(
scopes=['https://www.googleapis.com/auth/cloud-platform']
)
return google_requests.AuthorizedSession(google_credentials)
|
Add note to gcp session utils method
|
Add note to gcp session utils method
|
Python
|
mit
|
MGHComputationalPathology/dicomweb-client
|
84ec75ff6262d7926c0de87dffbeddb223fd190b
|
core/settings.py
|
core/settings.py
|
# -*- encoding: UTF-8 -*-
from enum import IntEnum
class LogType(IntEnum):
CVN_STATUS = 0
AUTH_ERROR = 1
LOG_TYPE = (
(LogType.CVN_STATUS, 'CVN_STATUS'),
(LogType.AUTH_ERROR, 'AUTH_ERROR'),
)
BASE_URL_FLATPAGES = '/investigacion/faq/'
|
# -*- encoding: UTF-8 -*-
from enum import IntEnum
class LogType(IntEnum):
CVN_STATUS = 0
AUTH_ERROR = 1
LOG_TYPE = (
(LogType.CVN_STATUS.value, 'CVN_STATUS'),
(LogType.AUTH_ERROR.value, 'AUTH_ERROR'),
)
BASE_URL_FLATPAGES = '/investigacion/faq/'
|
Fix a bug in LogType that broke migrations creation
|
Fix a bug in LogType that broke migrations creation
|
Python
|
agpl-3.0
|
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
|
5a6970349ace3ddcf12cfac6bc72ec6dbc3424a2
|
django_bcrypt/models.py
|
django_bcrypt/models.py
|
import bcrypt
from django.contrib.auth.models import User
from django.conf import settings
try:
rounds = settings.BCRYPT_ROUNDS
except AttributeError:
rounds = 12
_check_password = User.check_password
def bcrypt_check_password(self, raw_password):
if self.password.startswith('bc$'):
salt_and_hash = self.password[3:]
return bcrypt.hashpw(raw_password, salt_and_hash) == salt_and_hash
return _check_password(self, raw_password)
def bcrypt_set_password(self, raw_password):
salt = bcrypt.gensalt(rounds)
self.password = 'bc$' + bcrypt.hashpw(raw_password, salt)
User.check_password = bcrypt_check_password
User.set_password = bcrypt_set_password
|
import bcrypt
from django.contrib.auth.models import User
from django.conf import settings
rounds = getattr(settings, "BCRYPT_ROUNDS", 12)
_check_password = User.check_password
def bcrypt_check_password(self, raw_password):
if self.password.startswith('bc$'):
salt_and_hash = self.password[3:]
return bcrypt.hashpw(raw_password, salt_and_hash) == salt_and_hash
return _check_password(self, raw_password)
def bcrypt_set_password(self, raw_password):
salt = bcrypt.gensalt(rounds)
self.password = 'bc$' + bcrypt.hashpw(raw_password, salt)
User.check_password = bcrypt_check_password
User.set_password = bcrypt_set_password
|
Make the default setting retrieval more elegant.
|
Make the default setting retrieval more elegant.
|
Python
|
mit
|
dwaiter/django-bcrypt
|
c4a0dc9ecc12a82735738fe4b80dc74f991b66d7
|
csft/__main__.py
|
csft/__main__.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
The entry point of csft.
"""
import argparse as ap
from os.path import isdir
from .csft import print_result
def main(argv=None):
parser = ap.ArgumentParser(add_help='add help')
parser.add_argument('path', help='the directory to be analyzed')
args = parser.parse_args(args=argv)
if not isdir(args.path):
raise TypeError('%s is not a directory!', args.path)
return print_result(args.path)
if __name__ == '__main__':
raise SystemExit(main())
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
The entry point of csft.
"""
import argparse as ap
from os.path import isdir
from . import __name__ as _name
from . import __version__ as _version
from .csft import print_result
def main(argv=None):
""" Execute the application CLI. """
parser = ap.ArgumentParser(prog=_name)
parser.add_argument('-V', '--version', action='version', version=_version)
parser.add_argument('path', help='the directory to be analyzed')
args = parser.parse_args(args=argv)
if not isdir(args.path):
raise TypeError('%s is not a directory!', args.path)
return print_result(args.path)
if __name__ == '__main__':
raise SystemExit(main())
|
Add version option to CLI.
|
Add version option to CLI.
|
Python
|
mit
|
yanqd0/csft
|
fb28813fbb906c1ea7c4fb3c52e60219c3ae1f19
|
votacao_with_redis/management/commands/gera_votacao.py
|
votacao_with_redis/management/commands/gera_votacao.py
|
# coding: utf-8
from django.core.management.base import BaseCommand
from ...models import Poll, Option
class Command(BaseCommand):
def handle(self, *args, **kwargs):
Poll.objects.filter(id=1).delete()
Option.objects.filter(id__in=[1, 2, 3, 4]).delete()
question = Poll.objects.create(id=1, title="Quem deve ser o vencedor")
option1 = Option.objects.create(id=1, name="Mario", pool=question, votes=0)
option2 = Option.objects.create(id=2, name="Luigi", pool=question, votes=0)
option3 = Option.objects.create(id=3, name="Yoshi", pool=question, votes=0)
option4 = Option.objects.create(id=4, name="Princesa", pool=question, votes=0)
question.save()
option1.save()
option2.save()
option3.save()
option4.save()
print "Pesquisa e Opções cadastradas com sucesso"
|
# coding: utf-8
from django.core.management.base import BaseCommand
from ...models import Poll, Option
import redis
cache = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)
class Command(BaseCommand):
def handle(self, *args, **kwargs):
options = [1, 2, 3, 4]
Poll.objects.filter(id=1).delete()
Option.objects.filter(id__in=options).delete()
[cache.delete('votacao:option:{}'.format(opt)) for opt in options]
question = Poll.objects.create(id=1, title="Quem deve ser o vencedor")
option1 = Option.objects.create(id=1, name="Mario", pool=question, votes=0)
option2 = Option.objects.create(id=2, name="Luigi", pool=question, votes=0)
option3 = Option.objects.create(id=3, name="Yoshi", pool=question, votes=0)
option4 = Option.objects.create(id=4, name="Princesa", pool=question, votes=0)
question.save()
option1.save()
option2.save()
option3.save()
option4.save()
print "Pesquisa e Opções cadastradas com sucesso"
|
Remove chaves do redis referentes a votação
|
Remove chaves do redis referentes a votação
|
Python
|
mit
|
douglasbastos/votacao_with_redis,douglasbastos/votacao_with_redis
|
e861def07da1f0dea7f5273d06e7dc674a79025f
|
adventure/urls.py
|
adventure/urls.py
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>.*)/$', views.index),
]
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'players', PlayerViewSet)
router.register(r'adventures', AdventureViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet)
router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', views.index, name='index'),
url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'),
# this route is a catch-all for compatibility with the Angular routes. It must be last in the list.
# NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404.
# NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app.
url(r'^(?P<path>[^\.]*)/$', views.index),
]
|
Update Django catch-all URL path to not catch URLs with a . in them.
|
Update Django catch-all URL path to not catch URLs with a . in them.
This makes missing JS files 404 properly instead of returning the HTML 404 page which confuses the parser.
|
Python
|
mit
|
kdechant/eamon,kdechant/eamon,kdechant/eamon,kdechant/eamon
|
b0236a2cb936df9571139f074b35c178e2573593
|
dadi/__init__.py
|
dadi/__init__.py
|
import numpy
# This gives a nicer printout for masked arrays.
numpy.ma.default_real_fill_value = numpy.nan
import Integration
import PhiManip
import Numerics
import SFS
import ms
try:
import Plotting
except ImportError:
pass
try:
import os
__DIRECTORY__ = os.path.dirname(Integration.__file__)
__svn_file__ = os.path.join(__DIRECTORY__, 'svnversion')
__SVNVERSION__ = file(__svn_file__).read().strip()
except:
__SVNVERSION__ = 'Unknown'
|
import Integration
import PhiManip
import Numerics
import SFS
import ms
try:
import Plotting
except ImportError:
pass
try:
import os
__DIRECTORY__ = os.path.dirname(Integration.__file__)
__svn_file__ = os.path.join(__DIRECTORY__, 'svnversion')
__SVNVERSION__ = file(__svn_file__).read().strip()
except:
__SVNVERSION__ = 'Unknown'
|
Remove extraneous setting of masked fill value.
|
Remove extraneous setting of masked fill value.
git-svn-id: 4c7b13231a96299fde701bb5dec4bd2aaf383fc6@115 979d6bd5-6d4d-0410-bece-f567c23bd345
|
Python
|
bsd-3-clause
|
RyanGutenkunst/dadi,niuhuifei/dadi,cheese1213/dadi,yangjl/dadi,yangjl/dadi,ChenHsiang/dadi,paulirish/dadi,beni55/dadi,ChenHsiang/dadi,beni55/dadi,RyanGutenkunst/dadi,paulirish/dadi,cheese1213/dadi,niuhuifei/dadi
|
468ce899542197f8ab7ae51800b56132e6e81bd4
|
problem_2/solution.py
|
problem_2/solution.py
|
def sum_even_fibonacci_numbers_1():
f1, f2, s, = 0, 1, 0,
while f2 < 4000000:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
return s
def sum_even_fibonacci_numbers_2():
s, a, b = 0, 1, 1
c = a + b
while c < 4000000:
s += c
a = b + c
b = a + c
c = a + b
return s
|
from timeit import timeit
def sum_even_fibonacci_numbers_1():
f1, f2, s, = 0, 1, 0,
while f2 < 4000000:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
return s
def sum_even_fibonacci_numbers_2():
s, a, b = 0, 1, 1
c = a + b
while c < 4000000:
s += c
a = b + c
b = a + c
c = a + b
return s
print "sum_even_fibonacci_numbers_1: {0}".format(timeit("sum_even_fibonacci_numbers_1()", "from __main__ import sum_even_fibonacci_numbers_1;"))
print "sum_even_fibonacci_numbers_2: {0}".format(timeit("sum_even_fibonacci_numbers_2()", "from __main__ import sum_even_fibonacci_numbers_2;"))
|
Add timeit to measure each python implementation of problem 2
|
Add timeit to measure each python implementation of problem 2
|
Python
|
mit
|
mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler
|
34fe4bb5cd5c4c35a659698e8d258c78da01887a
|
pynexus/api_client.py
|
pynexus/api_client.py
|
import requests
class ApiClient:
def __init__(self, host, username, password):
self.uri = host + '/nexus/service/local/'
self.username = username
self.password = password
def get_all_repositories(self):
r = requests.get(self.uri + 'all_repositories', headers={'Accept': 'application/json'})
return r
def get_status(self):
r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'})
return r
|
import requests
class ApiClient:
def __init__(self, host, username, password):
self.uri = host + '/nexus/service/local/'
self.username = username
self.password = password
def get_all_repositories(self):
r = requests.get(self.uri + 'all_repositories', headers={'Accept': 'application/json'})
return r
def get_status(self):
r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'})
return r
def get_users(self):
r = requests.get(self.uri + 'users', auth=(self.username, self.password), headers={'Accept': 'application/json'})
return r
|
Add get_users method to get a list of users
|
Add get_users method to get a list of users
|
Python
|
apache-2.0
|
rcarrillocruz/pynexus
|
6eeecb5e36e5551ba3a3c35a9c7f52393d2f9d14
|
src/puzzle/problems/problem.py
|
src/puzzle/problems/problem.py
|
from src.data import meta
class Problem(object):
def __init__(self, name, lines):
self.name = name
self.lines = lines
self._solutions = None
self._constraints = []
def constrain(self, fn):
self._constraints.append(fn)
# Invalidate solutions.
self._solutions = None
def solutions(self):
if self._solutions is None:
self._solutions = meta.Meta(
(k, v) for k, v in self._solve().items() if all(
[fn(k, v) for fn in self._constraints]
)
)
return self._solutions
def _solve(self):
"""Solves Problem.
Returns:
dict Dict mapping solution to score.
"""
raise NotImplementedError()
|
from src.data import meta
class Problem(object):
def __init__(self, name, lines):
self.name = name
self.lines = lines
self._solutions = None
self._constraints = []
@property
def kind(self):
return str(type(self)).strip("'<>").split('.').pop()
@property
def solution(self):
return self.solutions().peek()
def constrain(self, fn):
self._constraints.append(fn)
# Invalidate solutions.
self._solutions = None
def solutions(self):
if self._solutions is None:
self._solutions = meta.Meta(
(k, v) for k, v in self._solve().items() if all(
[fn(k, v) for fn in self._constraints]
)
)
return self._solutions
def _solve(self):
"""Solves Problem.
Returns:
dict Dict mapping solution to score.
"""
raise NotImplementedError()
|
Add simple helper properties to Problem.
|
Add simple helper properties to Problem.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
0d89712bda6e85901e839dec3e639c16aea42d48
|
tests/test_proxy_pagination.py
|
tests/test_proxy_pagination.py
|
import json
from django.test import TestCase
from rest_framework import status
from tests.models import TestModel
class ProxyPaginationTests(TestCase):
"""
Tests for drf-proxy-pagination
"""
def setUp(self):
for n in range(200):
TestModel.objects.create(n=n)
def test_without_pager_param(self):
resp = self.client.get('/data/', HTTP_ACCEPT='application/json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp['Content-Type'], 'application/json')
content = json.loads(resp.content)
self.assertIn('next', content)
self.assertIn('count', content)
self.assertIn('page=', content['next'])
self.assertNotIn('cursor=', content['next'])
def test_with_pager_param(self):
resp = self.client.get('/data/?pager=cursor', HTTP_ACCEPT='application/json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp['Content-Type'], 'application/json')
self.assertNotIn('count', resp.content)
content = json.loads(resp.content)
self.assertIn('next', content)
self.assertNotIn('page=', content['next'])
self.assertIn('cursor=', content['next'])
|
import json
from django.test import TestCase
from django.utils import six
from rest_framework import status
from tests.models import TestModel
class ProxyPaginationTests(TestCase):
"""
Tests for drf-proxy-pagination
"""
def setUp(self):
for n in range(200):
TestModel.objects.create(n=n)
def test_without_pager_param(self):
resp = self.client.get('/data/', HTTP_ACCEPT='application/json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp['Content-Type'], 'application/json')
content = json.loads(str(resp.content, encoding='utf8') if six.PY3 else resp.content)
self.assertIn('next', content)
self.assertIn('count', content)
self.assertIn('page=', content['next'])
self.assertNotIn('cursor=', content['next'])
def test_with_pager_param(self):
resp = self.client.get('/data/?pager=cursor', HTTP_ACCEPT='application/json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp['Content-Type'], 'application/json')
content = json.loads(str(resp.content, encoding='utf8') if six.PY3 else resp.content)
self.assertIn('next', content)
self.assertNotIn('count', content)
self.assertNotIn('page=', content['next'])
self.assertIn('cursor=', content['next'])
|
Fix tests failing with Python 3
|
Fix tests failing with Python 3
|
Python
|
mit
|
tuffnatty/drf-proxy-pagination
|
8f2e1ef30a62c19fc91eed48adc38ecfcdbc37d6
|
pinax/api/registry.py
|
pinax/api/registry.py
|
from __future__ import unicode_literals
registry = {}
bound_registry = {}
def register(cls):
registry[cls.api_type] = cls
return cls
def bind(parent=None, resource=None):
def wrapper(endpointset):
if parent is not None:
endpointset.parent = parent
endpointset.url.parent = parent.url
if resource is not None:
BoundResource = type(
str("Bound{}".format(resource.__class__.__name__)),
(resource,),
{"endpointset": endpointset},
)
endpointset.resource_class = BoundResource
# override registry with bound resource (typically what we want)
registry[resource.api_type] = BoundResource
endpointset.relationships = getattr(endpointset, "relationships", {})
return endpointset
return wrapper
|
from __future__ import unicode_literals
registry = {}
bound_registry = {}
def register(cls):
registry[cls.api_type] = cls
def as_jsonapi(self):
return cls(self).serialize()
cls.model.as_jsonapi = as_jsonapi
return cls
def bind(parent=None, resource=None):
def wrapper(endpointset):
if parent is not None:
endpointset.parent = parent
endpointset.url.parent = parent.url
if resource is not None:
BoundResource = type(
str("Bound{}".format(resource.__class__.__name__)),
(resource,),
{"endpointset": endpointset},
)
endpointset.resource_class = BoundResource
# override registry with bound resource (typically what we want)
registry[resource.api_type] = BoundResource
endpointset.relationships = getattr(endpointset, "relationships", {})
return endpointset
return wrapper
|
Attach as_jsonapi to models for easy serialization
|
Attach as_jsonapi to models for easy serialization
|
Python
|
mit
|
pinax/pinax-api
|
ae5db950c683501c1ec77fee430b818293e6c603
|
csv2ofx/mappings/gls.py
|
csv2ofx/mappings/gls.py
|
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
'date': lambda r: r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' + r['Buchungstag'][-4:], # Chop up the dotted German date format and put it in ridiculous M/D/Y order
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'), # locale.atof does not actually know how to deal with German separators, so we do it this way
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r: r['Buchungstag'][3:5] + '/'
+ r['Buchungstag'][:2] + '/'
+ r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
Split up lines to pass the linter
|
Split up lines to pass the linter
|
Python
|
mit
|
reubano/csv2ofx,reubano/csv2ofx
|
131033fa3ab170ac2a66c1dd89074ea74702fb52
|
icekit/page_types/articles/migrations/0002_auto_20161012_2231.py
|
icekit/page_types/articles/migrations/0002_auto_20161012_2231.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
field=models.SlugField(max_length=255, default='woo'),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
field=models.CharField(max_length=255, default='woo'),
preserve_default=False,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='slug',
field=models.SlugField(max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='title',
field=models.CharField(max_length=255),
preserve_default=False,
),
]
|
Remove vestigial (?) "woo" default for article slug and title fields.
|
Remove vestigial (?) "woo" default for article slug and title fields.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
cde02a3129d276d02054e04c1b0a0b605b837d32
|
eodatasets3/__init__.py
|
eodatasets3/__init__.py
|
# coding=utf-8
from __future__ import absolute_import
from ._version import get_versions
REPO_URL = "https://github.com/GeoscienceAustralia/eo-datasets.git"
__version__ = get_versions()["version"]
del get_versions
|
# coding=utf-8
from __future__ import absolute_import
from ._version import get_versions
from .assemble import DatasetAssembler
REPO_URL = "https://github.com/GeoscienceAustralia/eo-datasets.git"
__version__ = get_versions()["version"]
del get_versions
__all__ = (DatasetAssembler, REPO_URL, __version__)
|
Allow assembler to be imported from eodatasets3 root
|
Allow assembler to be imported from eodatasets3 root
|
Python
|
apache-2.0
|
GeoscienceAustralia/eo-datasets,jeremyh/eo-datasets,jeremyh/eo-datasets,GeoscienceAustralia/eo-datasets
|
98f8a8fb51ae539aad6a3e2faebced4b806c3f0c
|
filer/utils/generate_filename.py
|
filer/utils/generate_filename.py
|
from __future__ import unicode_literals
try:
from django.utils.encoding import force_text
except ImportError:
# Django < 1.5
from django.utils.encoding import force_unicode as force_text
from django.utils.timezone import now
from filer.utils.files import get_valid_filename
import os
def by_date(instance, filename):
datepart = force_text(now().strftime("%Y/%m/%d"))
return os.path.join(datepart, get_valid_filename(filename))
def randomized(instance, filename):
import uuid
uuid_str = str(uuid.uuid4())
random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str)
return os.path.join(random_path, get_valid_filename(filename))
class prefixed_factory(object):
def __init__(self, upload_to, prefix):
self.upload_to = upload_to
self.prefix = prefix
def __call__(self, instance, filename):
if callable(self.upload_to):
upload_to_str = self.upload_to(instance, filename)
else:
upload_to_str = self.upload_to
if not self.prefix:
return upload_to_str
return os.path.join(self.prefix, upload_to_str)
|
from __future__ import unicode_literals
try:
from django.utils.encoding import force_text
except ImportError:
# Django < 1.5
from django.utils.encoding import force_unicode as force_text
from django.utils.timezone import now
from filer.utils.files import get_valid_filename
import os
def by_date(instance, filename):
datepart = force_text(now().strftime("%Y/%m/%d"))
return os.path.join(datepart, get_valid_filename(filename))
def randomized(instance, filename):
import uuid
uuid_str = str(uuid.uuid4())
return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str,
get_valid_filename(filename))
class prefixed_factory(object):
def __init__(self, upload_to, prefix):
self.upload_to = upload_to
self.prefix = prefix
def __call__(self, instance, filename):
if callable(self.upload_to):
upload_to_str = self.upload_to(instance, filename)
else:
upload_to_str = self.upload_to
if not self.prefix:
return upload_to_str
return os.path.join(self.prefix, upload_to_str)
|
Build random path using os.path.join
|
Build random path using os.path.join
|
Python
|
bsd-3-clause
|
o-zander/django-filer,nimbis/django-filer,nimbis/django-filer,webu/django-filer,divio/django-filer,matthiask/django-filer,skirsdeda/django-filer,stefanfoulis/django-filer,sopraux/django-filer,Flight/django-filer,sopraux/django-filer,belimawr/django-filer,matthiask/django-filer,o-zander/django-filer,DylannCordel/django-filer,jakob-o/django-filer,lory87/django-filer,stefanfoulis/django-filer,o-zander/django-filer,skirsdeda/django-filer,nephila/django-filer,DylannCordel/django-filer,mkoistinen/django-filer,stefanfoulis/django-filer,DylannCordel/django-filer,skirsdeda/django-filer,divio/django-filer,vechorko/django-filer,nephila/django-filer,jakob-o/django-filer,nimbis/django-filer,jakob-o/django-filer,webu/django-filer,Flight/django-filer,belimawr/django-filer,civicresourcegroup/django-filer,kriwil/django-filer,belimawr/django-filer,civicresourcegroup/django-filer,mkoistinen/django-filer,mkoistinen/django-filer,Flight/django-filer,lory87/django-filer,kriwil/django-filer,stefanfoulis/django-filer,Flight/django-filer,vstoykov/django-filer,civicresourcegroup/django-filer,lory87/django-filer,Flight/django-filer,vstoykov/django-filer,skirsdeda/django-filer,kriwil/django-filer,divio/django-filer,webu/django-filer,nephila/django-filer,sopraux/django-filer,o-zander/django-filer,vechorko/django-filer,DylannCordel/django-filer,mkoistinen/django-filer,vstoykov/django-filer,jakob-o/django-filer,kriwil/django-filer,nimbis/django-filer,vechorko/django-filer,stefanfoulis/django-filer,belimawr/django-filer,skirsdeda/django-filer,webu/django-filer,matthiask/django-filer,lory87/django-filer,sopraux/django-filer,vechorko/django-filer,divio/django-filer,civicresourcegroup/django-filer,DylannCordel/django-filer,matthiask/django-filer,jakob-o/django-filer
|
f8e375bdc07b6fdefdae589f2d75c4ec0f5f3864
|
website/search/mutation_result.py
|
website/search/mutation_result.py
|
from models import Protein, Mutation
class SearchResult:
def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs):
self.protein = protein
self.mutation = mutation
self.is_mutation_novel = is_mutation_novel
self.type = type
self.meta_user = None
self.__dict__.update(kwargs)
def __getstate__(self):
state = self.__dict__.copy()
state['protein_refseq'] = self.protein.refseq
del state['protein']
state['mutation_kwargs'] = {
'position': self.mutation.position,
'alt': self.mutation.alt
}
del state['mutation']
state['meta_user'].mutation = None
return state
def __setstate__(self, state):
state['protein'] = Protein.query.filter_by(
refseq=state['protein_refseq']
).one()
del state['protein_refseq']
state['mutation'] = Mutation.query.filter_by(
protein=state['protein'],
**state['mutation_kwargs']
).one()
del state['mutation_kwargs']
state['meta_user'].mutation = state['mutation']
state['mutation'].meta_user = state['meta_user']
self.__dict__.update(state)
|
from models import Protein, Mutation
from database import get_or_create
class SearchResult:
def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs):
self.protein = protein
self.mutation = mutation
self.is_mutation_novel = is_mutation_novel
self.type = type
self.meta_user = None
self.__dict__.update(kwargs)
def __getstate__(self):
state = self.__dict__.copy()
state['protein_refseq'] = self.protein.refseq
del state['protein']
state['mutation_kwargs'] = {
'position': self.mutation.position,
'alt': self.mutation.alt
}
del state['mutation']
state['meta_user'].mutation = None
return state
def __setstate__(self, state):
state['protein'] = Protein.query.filter_by(
refseq=state['protein_refseq']
).one()
del state['protein_refseq']
state['mutation'], created = get_or_create(
Mutation,
protein=state['protein'],
**state['mutation_kwargs']
)
del state['mutation_kwargs']
state['meta_user'].mutation = state['mutation']
state['mutation'].meta_user = state['meta_user']
self.__dict__.update(state)
|
Fix result loading for novel mutations
|
Fix result loading for novel mutations
|
Python
|
lgpl-2.1
|
reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations
|
dad38c399c4687c93c69255df0f9d69d1bb386c4
|
yawf/models.py
|
yawf/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from yawf.config import INITIAL_STATE
from yawf.base_model import WorkflowAwareModelBase
class WorkflowAwareModel(WorkflowAwareModelBase):
class Meta:
abstract = True
state = models.CharField(default=INITIAL_STATE,
max_length=32, db_index=True, editable=False,
verbose_name=_('state'))
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from yawf.config import INITIAL_STATE
from yawf.base_model import WorkflowAwareModelBase
class WorkflowAwareModel(WorkflowAwareModelBase, models.Model):
class Meta:
abstract = True
state = models.CharField(default=INITIAL_STATE,
max_length=32, db_index=True, editable=False,
verbose_name=_('state'))
|
Add missing parent for WorkflowAwareModel
|
Add missing parent for WorkflowAwareModel
|
Python
|
mit
|
freevoid/yawf
|
32d4ea22c1bca4a96a8d826f0225dfee2a4c21d2
|
django_tenants/tests/__init__.py
|
django_tenants/tests/__init__.py
|
from .test_routes import *
from .test_tenants import *
from .test_cache import *
|
from .files import *
from .staticfiles import *
from .template import *
from .test_routes import *
from .test_tenants import *
from .test_cache import *
|
Include static file-related tests in 'test' package.
|
fix(tests): Include static file-related tests in 'test' package.
|
Python
|
mit
|
tomturner/django-tenants,tomturner/django-tenants,tomturner/django-tenants
|
47310125c53d80e4ff09af6616955ccb2d9e3bc8
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class ConanUsingSilicium(ConanFile):
settings = "os", "compiler", "build_type", "arch"
requires = "Boost/1.59.0@lasote/stable", "silicium/0.1@tyroxx/testing"
generators = "cmake"
default_options = "Boost:shared=True"
|
from conans import ConanFile, CMake
class ConanUsingSilicium(ConanFile):
settings = "os", "compiler", "build_type", "arch"
requires = "silicium/0.1@tyroxx/testing"
generators = "cmake"
default_options = "Boost:shared=True"
|
Boost should be referenced automatically by silicium
|
Boost should be referenced automatically by silicium
|
Python
|
mit
|
TyRoXx/conan_using_silicium,TyRoXx/conan_using_silicium
|
538f8e3382e274402f2f71ba79439fae0828b3cf
|
IPython/html.py
|
IPython/html.py
|
"""
Shim to maintain backwards compatibility with old IPython.html imports.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn
warn("The `IPython.html` package has been deprecated. "
"You should import from jupyter_notebook instead.")
from IPython.utils.shimmodule import ShimModule
sys.modules['IPython.html'] = ShimModule(
src='IPython.html', mirror='jupyter_notebook')
if __name__ == '__main__':
from jupyter_notebook import notebookapp as app
app.launch_new_instance()
|
"""
Shim to maintain backwards compatibility with old IPython.html imports.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn
warn("The `IPython.html` package has been deprecated. "
"You should import from `jupyter_notebook` and `jupyter_widgets` instead.")
from IPython.utils.shimmodule import ShimModule
sys.modules['IPython.html'] = ShimModule(
src='IPython.html', mirror='jupyter_notebook')
sys.modules['IPython.html.widgets'] = ShimModule(
src='IPython.html.widgets', mirror='jupyter_widgets')
if __name__ == '__main__':
from jupyter_notebook import notebookapp as app
app.launch_new_instance()
|
Add shim to new widgets repository.
|
Add shim to new widgets repository.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
0b5a657339870c7669082c39f8290c88732aa92e
|
extractor.py
|
extractor.py
|
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
if __name__ == '__main__':
runner = ExtractionRunner()
runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
|
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
def get_extraction_runner():
runner = ExtractionRunner()
runner.add_runnable(grobid.GrobidPlainTextExtractor)
# OR
# runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
return runner
if __name__ == '__main__':
runner = get_extraction_runner()
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
|
Make code a little cleaner
|
Make code a little cleaner
|
Python
|
apache-2.0
|
Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,Tiger66639/new-csx-extractor,Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,SeerLabs/new-csx-extractor
|
af182857b4a70245b0b06bbf37e2d67e0ded493f
|
ez_gpg/ui.py
|
ez_gpg/ui.py
|
import gi
import gnupg # Requires python3-gnupg
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class MainWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="EZ GPG")
self.connect("delete-event", Gtk.main_quit)
self.set_border_width(30)
gpg_keys_list = Gtk.ListStore(str, str)
for key in self._get_gpg_keys():
gpg_keys_list.append([key['keyid'], "%s %s" % (key['keyid'], key['uids'][0])])
gpg_key_combo_box = Gtk.ComboBox.new_with_model_and_entry(gpg_keys_list)
gpg_key_combo_box.set_entry_text_column(1)
self.add(gpg_key_combo_box)
def _get_gpg_keys(self):
gpg = gnupg.GPG()
return gpg.list_keys()
class EzGpg(Gtk.Window):
def launch(self):
MainWindow().show_all()
Gtk.main()
|
import gi
import gnupg # Requires python3-gnupg
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class GpgKeyList(Gtk.ComboBox):
def __init__(self):
Gtk.ComboBox.__init__(self)
gpg_keys_list = Gtk.ListStore(str, str)
for key in self._get_gpg_keys():
key_id = key['keyid']
key_name = "%s %s" % (key['keyid'], key['uids'][0])
gpg_keys_list.append([key_id, key_name])
cell = Gtk.CellRendererText()
self.pack_start(cell, True)
self.add_attribute(cell, 'text', 1)
self.set_model(gpg_keys_list)
self.set_entry_text_column(1)
def _get_gpg_keys(self):
gpg = gnupg.GPG()
return gpg.list_keys()
class MainWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="EZ GPG")
self.connect("delete-event", Gtk.main_quit)
self.set_border_width(30)
self.set_position(Gtk.WindowPosition.CENTER)
gpg_key_combo = GpgKeyList()
self.add(gpg_key_combo)
class EzGpg(Gtk.Window):
def launch(self):
MainWindow().show_all()
Gtk.main()
|
Split out gpg key list into its own class
|
Split out gpg key list into its own class
This will make it easy to break out into a module when we need it. In
the process, window was also set to be in the center of the user's
screen.
|
Python
|
lgpl-2.1
|
sgnn7/ez_gpg,sgnn7/ez_gpg
|
1dfbe495972a5f4d02ce374131f40d4474f24cc6
|
website/ember_osf_web/views.py
|
website/ember_osf_web/views.py
|
# -*- coding: utf-8 -*-
import os
import json
import requests
from flask import send_from_directory, Response, stream_with_context
from framework.sessions import session
from website.settings import EXTERNAL_EMBER_APPS, PROXY_EMBER_APPS, EXTERNAL_EMBER_SERVER_TIMEOUT
ember_osf_web_dir = os.path.abspath(os.path.join(os.getcwd(), EXTERNAL_EMBER_APPS['ember_osf_web']['path']))
routes = [
'/quickfiles/',
'/<uid>/quickfiles/'
]
def use_ember_app(**kwargs):
if PROXY_EMBER_APPS:
resp = requests.get(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT)
resp = Response(stream_with_context(resp.iter_content()), resp.status_code)
else:
resp = send_from_directory(ember_osf_web_dir, 'index.html')
if session.data.get('status'):
status = [{'id': stat.id if stat.id else stat.message, 'class': stat.css_class, 'jumbo': stat.jumbotron, 'dismiss': stat.dismissible, 'extra': stat.extra} for stat in session.data['status']]
resp.set_cookie('status', json.dumps(status))
return resp
|
# -*- coding: utf-8 -*-
import os
import json
import requests
from flask import send_from_directory, Response, stream_with_context
from framework.sessions import session
from website.settings import EXTERNAL_EMBER_APPS, PROXY_EMBER_APPS, EXTERNAL_EMBER_SERVER_TIMEOUT
ember_osf_web_dir = os.path.abspath(os.path.join(os.getcwd(), EXTERNAL_EMBER_APPS['ember_osf_web']['path']))
routes = [
'/quickfiles/',
'/<uid>/quickfiles/'
]
def use_ember_app(**kwargs):
if PROXY_EMBER_APPS:
resp = requests.get(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT)
resp = Response(stream_with_context(resp.iter_content()), resp.status_code)
else:
resp = send_from_directory(ember_osf_web_dir, 'index.html')
if session.data.get('status'):
status = [{'id': stat[5] if stat[5] else stat[0], 'class': stat[2], 'jumbo': stat[1], 'dismiss': stat[3], 'extra': stat[6]} for stat in session.data['status']]
resp.set_cookie('status', json.dumps(status))
return resp
|
Revert "Use namedtuple's getattr rather than indexing"
|
Revert "Use namedtuple's getattr rather than indexing"
This reverts commit 5c4f93207c1fbfe9b9a478082d5f039a9e5ba720.
|
Python
|
apache-2.0
|
Johnetordoff/osf.io,adlius/osf.io,aaxelb/osf.io,felliott/osf.io,mfraezz/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,icereval/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,mattclark/osf.io,aaxelb/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,sloria/osf.io,felliott/osf.io,felliott/osf.io,brianjgeiger/osf.io,binoculars/osf.io,pattisdr/osf.io,adlius/osf.io,sloria/osf.io,Johnetordoff/osf.io,mattclark/osf.io,mfraezz/osf.io,adlius/osf.io,mfraezz/osf.io,icereval/osf.io,cslzchen/osf.io,aaxelb/osf.io,erinspace/osf.io,aaxelb/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,icereval/osf.io,binoculars/osf.io,baylee-d/osf.io,baylee-d/osf.io,caseyrollins/osf.io,felliott/osf.io,erinspace/osf.io,caseyrollins/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,adlius/osf.io,baylee-d/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,sloria/osf.io
|
6a8068942d985f0c125749d5f58ad7cb9cd189be
|
scanpointgenerator/linegenerator_step.py
|
scanpointgenerator/linegenerator_step.py
|
from linegenerator import LineGenerator
import math as m
class StepLineGenerator(LineGenerator):
def __init__(self, name, units, start, end, step):
num = int(m.floor((end - start)/step))
super(StepLineGenerator, self).__init__(name, units, start, step, num)
|
from linegenerator import LineGenerator
class StepLineGenerator(LineGenerator):
def __init__(self, name, units, start, end, step):
num = int((end - start)/step) + 1
super(StepLineGenerator, self).__init__(name, units, start, step, num)
|
Add extra point to include start
|
Add extra point to include start
|
Python
|
apache-2.0
|
dls-controls/scanpointgenerator
|
acd5a676b08e070c804bdae78abba266b47c67b5
|
libvcs/__about__.py
|
libvcs/__about__.py
|
__title__ = 'libvcs'
__package_name__ = 'libvcs'
__description__ = 'vcs abstraction layer'
__version__ = '0.3.0'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Tony Narlock'
|
__title__ = 'libvcs'
__package_name__ = 'libvcs'
__description__ = 'vcs abstraction layer'
__version__ = '0.3.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/vcs-python/libvcs'
__pypi__ = 'https://pypi.org/project/libvcs/'
__email__ = 'tony@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
|
Add pypi + github to metadata
|
Add pypi + github to metadata
|
Python
|
mit
|
tony/libvcs
|
7efcc9987f827eec56677d95bc7ad873208b392f
|
saw/parser/sentences.py
|
saw/parser/sentences.py
|
import base
from blocks import Blocks
import re
class Sentences(base.Base):
_type = 'sentences'
child_class = Blocks
@staticmethod
def parse(text):
#re.split('\!|\?|\. | \.',text)
result = []
prev = 0
# we allow .09 as not end of sentences
#for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text):
for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text):
curr, _next = m.start(), m.end()
# if prev position of delimiter < current - between exists text
# at least 1 symbol.
if prev < curr:
node = text[prev:curr].strip()
if node != '':
result.append(node)
result.append(list( text[curr:_next].strip() ))
prev = _next
if len(text) > prev:
result.append(text[prev:].strip())
return result
|
import base
from blocks import Blocks
import re
class Sentences(base.Base):
_type = 'sentences'
child_class = Blocks
@staticmethod
def parse(text):
_len = len(text)
result = []
prev = 0
# we allow .09 as not end of sentences
for m in re.finditer('[\!\?\.]+', text):
curr, _next = m.start(), m.end()
items = list( text[curr: _next].strip() )
if (_len > _next) and not (text[_next] == ' '):
# delete ending '.' if they not before space or end of string
while (len(items) > 0) and (items[-1] == '.'):
items.pop()
_next = _next - 1
if len(items) > 0:
# if prev position of delimiter < current - between exists text
# at least 1 symbol.
if prev < curr:
node = text[prev:curr].strip()
if node != '':
result.append(node)
result.append( items )
prev = _next
if _len > prev:
result.append(text[prev:].strip())
return result
|
Optimize from 5-6s to 2.9-3.0
|
Optimize from 5-6s to 2.9-3.0
|
Python
|
mit
|
diNard/Saw
|
7c9fbdb62c6b045476064fd4193fd133ed0de7c3
|
genderator/utils.py
|
genderator/utils.py
|
from unidecode import unidecode
class Normalizer:
def normalize(text):
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
# text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
return unidecode(text)
|
from unidecode import unidecode
class Normalizer:
def normalize(text):
"""
Normalize a given text applying all normalizations.
Params:
text: The text to be processed.
Returns:
The text normalized.
"""
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
"""
Remove hyphens from input text.
Params:
text: The text to be processed.
Returns:
The text without hyphens.
"""
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
"""
Remove extra whitespaces from input text.
This function removes whitespaces from the beginning and the end of
the string, but also duplicated whitespaces between words.
Params:
text: The text to be processed.
Returns:
The text without extra whitespaces.
"""
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
"""
Remove accent marks from input text.
Params:
text: The text to be processed.
Returns:
The text without accent marks.
"""
return unidecode(text)
|
Add accent marks normalization and missing docstrings
|
Add accent marks normalization and missing docstrings
|
Python
|
mit
|
davidmogar/genderator
|
0cab34e5f87b4484e0309aba8860d651afe06fb0
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask, request, redirect
from flask.ext.bootstrap import Bootstrap
from config import configs
from dmutils import apiclient, init_app, flask_featureflags
from dmutils.content_loader import ContentLoader
bootstrap = Bootstrap()
data_api_client = apiclient.DataAPIClient()
search_api_client = apiclient.SearchAPIClient()
feature_flags = flask_featureflags.FeatureFlag()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
data_api_client=data_api_client,
feature_flags=feature_flags,
search_api_client=search_api_client
)
questions_builder = ContentLoader(
"app/helpers/questions_manifest.yml",
"app/content/g6/"
).get_builder()
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
main_blueprint.config = {
'BASE_TEMPLATE_DATA': application.config['BASE_TEMPLATE_DATA'],
'QUESTIONS_BUILDER': questions_builder
}
@application.before_request
def remove_trailing_slash():
if request.path != '/' and request.path.endswith('/'):
if request.query_string:
return redirect(
'{}?{}'.format(
request.path[:-1],
request.query_string.decode('utf-8')
),
code=301
)
else:
return redirect(request.path[:-1], code=301)
return application
|
from flask import Flask, request, redirect
from flask.ext.bootstrap import Bootstrap
from config import configs
from dmutils import apiclient, init_app, flask_featureflags
from dmutils.content_loader import ContentLoader
bootstrap = Bootstrap()
data_api_client = apiclient.DataAPIClient()
search_api_client = apiclient.SearchAPIClient()
feature_flags = flask_featureflags.FeatureFlag()
questions_loader = ContentLoader(
"app/helpers/questions_manifest.yml",
"app/content/g6/"
)
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
data_api_client=data_api_client,
feature_flags=feature_flags,
search_api_client=search_api_client
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
main_blueprint.config = {
'BASE_TEMPLATE_DATA': application.config['BASE_TEMPLATE_DATA'],
}
@application.before_request
def remove_trailing_slash():
if request.path != '/' and request.path.endswith('/'):
if request.query_string:
return redirect(
'{}?{}'.format(
request.path[:-1],
request.query_string.decode('utf-8')
),
code=301
)
else:
return redirect(request.path[:-1], code=301)
return application
|
Move QUESTIONS_BUILDER from blueprint to a global variable
|
Move QUESTIONS_BUILDER from blueprint to a global variable
|
Python
|
mit
|
mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend
|
e65ed7382c691d8ee19a22659ddb6deaa064e85b
|
kmip/__init__.py
|
kmip/__init__.py
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
# Dynamically set __version__
version_path = os.path.join(os.path.dirname(
os.path.realpath(__file__)), 'version.py')
with open(version_path, 'r') as version_file:
mo = re.search(r"^.*= '(\d\.\d\.\d)'$", version_file.read(), re.MULTILINE)
__version__ = mo.group(1)
__all__ = ['core', 'demos', 'services']
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
from kmip.core import enums
# Dynamically set __version__
version_path = os.path.join(os.path.dirname(
os.path.realpath(__file__)), 'version.py')
with open(version_path, 'r') as version_file:
mo = re.search(r"^.*= '(\d\.\d\.\d)'$", version_file.read(), re.MULTILINE)
__version__ = mo.group(1)
__all__ = [
'core',
'demos',
'enums',
'services'
]
|
Update the kmip package to allow importing enums globally
|
Update the kmip package to allow importing enums globally
This change updates the root-level kmip package, allowing users
to now import enums directly from the kmip package:
from kmip import enums
Enumerations are used throughout the codebase and user applications
and this will simplify usage and help obfuscate internal package
details that may change in the future.
|
Python
|
apache-2.0
|
OpenKMIP/PyKMIP,OpenKMIP/PyKMIP
|
3e0fbefa021c4c97024da30963845b201ff35089
|
dmaws/commands/paasmanifest.py
|
dmaws/commands/paasmanifest.py
|
import click
import os
from ..cli import cli_command
from ..utils import load_file, template_string
@cli_command('paas-manifest', max_apps=1)
@click.option('--template', '-t', default='paas/manifest.j2',
type=click.Path(exists=True),
help="Manifest Jinja2 template file")
@click.option('--out-file', '-o',
help="Output file, if empty the template content is printed to the stdout")
def paas_manifest(ctx, template, out_file):
"""Generate a PaaS manifest file from a Jinja2 template"""
app = ctx.apps[0]
if app not in ctx.variables:
raise ValueError('Application configuration not found')
templace_content = load_file(template)
variables = {
'environment': ctx.environment,
'app': app.replace('_', '-')
}
variables.update(ctx.variables[app])
manifest_content = template_string(templace_content, variables)
if out_file is not None:
with open(out_file, 'w') as f:
f.write(manifest_content)
os.chmod(out_file, 0o600)
else:
print(manifest_content)
|
import click
import os
from ..cli import cli_command
from ..utils import load_file, template_string, merge_dicts
@cli_command('paas-manifest', max_apps=1)
@click.option('--out-file', '-o',
help="Output file, if empty the template content is printed to the stdout")
def paas_manifest(ctx, out_file):
"""Generate a PaaS manifest file from a Jinja2 template"""
app = ctx.apps[0]
if app not in ctx.variables:
raise ValueError('Application configuration not found')
variables = {
'environment': ctx.environment,
'app': app.replace('_', '-')
}
template_content = load_file('paas/{}.j2'.format(variables['app']))
variables = merge_dicts(variables, ctx.variables)
variables = merge_dicts(variables, ctx.variables[app])
manifest_content = template_string(template_content, variables, templates_path='paas/')
if out_file is not None:
with open(out_file, 'w') as f:
f.write(manifest_content)
os.chmod(out_file, 0o600)
else:
print(manifest_content)
|
Update paas-manifest command to load per-app manifests
|
Update paas-manifest command to load per-app manifests
Removes template file option in favour of the app-specific manifests.
Changes the way variables are set for the manifest template. Once
the relevant variable files are loaded and merged the command will
update the top-level namespace with the values from the application.
This allows us to use the same base manifest template referencing
generic top-level variables (eg `subdomain`, `instances`, `path`)
that are overridden by application-specific values. Previously
this was accomplished by using `stacks.yml` as the middle layer.
The template variable change means that we can run into issues if
we use clashing variable names accidentally, but at the same time
it allows us to set common values for all applications. Eg:
```
instances: 3
api:
instances: 5
```
sets instance counts to 3 for all applications, but since the context
will be updated with the `api` values the api manifest will only see
`instances: 5` value.
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
89a7a834638a1384bd9f1a560902b4d3aab29423
|
smoked/loader.py
|
smoked/loader.py
|
# coding: utf-8
from __future__ import unicode_literals
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def load_test_module():
"""
Import test module and trigger registration of tests. Test module is
defined in `SMOKE_TESTS` setting.
"""
test_module = getattr(settings, 'SMOKE_TESTS')
if not test_module:
raise ImproperlyConfigured('Missing SMOKE_TESTS in settings.')
try:
import_module(test_module)
except ImportError as e:
msg = "Can't import '{0}' module. Exception: {1}"
raise ImproperlyConfigured(msg.format(test_module, e))
|
# coding: utf-8
from __future__ import unicode_literals
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def load_test_module():
"""
Import test module and trigger registration of tests. Test module is
defined in `SMOKE_TESTS` setting.
"""
test_module = getattr(settings, 'SMOKE_TESTS')
if not test_module:
raise ImproperlyConfigured('Missing SMOKE_TESTS in settings.')
try:
import_module(test_module)
except ImportError as e:
msg = "Can't import '{0}' module. Exception: {1}"
raise ImproperlyConfigured(msg.format(test_module, e))
|
Fix import of import_module for Py2.6
|
Fix import of import_module for Py2.6
|
Python
|
mit
|
djentlemen/django-smoked
|
d2339fa094062c0672aef0ce938572aa3f5aead3
|
nintendo/sead/random.py
|
nintendo/sead/random.py
|
class Random:
def __init__(self, seed):
multiplier = 0x6C078965
temp = seed
self.state = []
for i in range(1, 5):
temp ^= temp >> 30
temp = (temp * multiplier + i) & 0xFFFFFFFF
self.state.append(temp)
def u32(self):
temp = self.state[0]
temp = (temp ^ (temp << 11)) & 0xFFFFFFFF
temp ^= temp >> 8
temp ^= self.state[3]
temp ^= self.state[3] >> 19
self.state[0] = self.state[1]
self.state[1] = self.state[2]
self.state[2] = self.state[3]
self.state[3] = temp
return temp
def uint(self, max):
return (self.u32() * max) >> 32
|
class Random:
def __init__(self, *param):
if len(param) == 1: self.set_seed(param[0])
elif len(param) == 4: self.set_state(*param)
else:
raise TypeError("Random.__init__ takes either 1 or 4 arguments")
def set_seed(self, seed):
multiplier = 0x6C078965
temp = seed
self.state = []
for i in range(1, 5):
temp ^= temp >> 30
temp = (temp * multiplier + i) & 0xFFFFFFFF
self.state.append(temp)
def set_state(self, s0, s1, s2, s3):
self.state = [s0, s1, s2, s3]
def u32(self):
temp = self.state[0]
temp = (temp ^ (temp << 11)) & 0xFFFFFFFF
temp ^= temp >> 8
temp ^= self.state[3]
temp ^= self.state[3] >> 19
self.state[0] = self.state[1]
self.state[1] = self.state[2]
self.state[2] = self.state[3]
self.state[3] = temp
return temp
def uint(self, max):
return (self.u32() * max) >> 32
|
Allow sead.Random to be constructed by internal state
|
Allow sead.Random to be constructed by internal state
|
Python
|
mit
|
Kinnay/NintendoClients
|
3e913e4267fd7750516edcbed1aa687e0cbd17fe
|
edx_repo_tools/oep2/__init__.py
|
edx_repo_tools/oep2/__init__.py
|
"""
Top-level definition of the ``oep2`` commandline tool.
"""
import click
from . import explode_repos_yaml
from .report import cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
cli.add_command(cli.cli, 'report')
|
"""
Top-level definition of the ``oep2`` commandline tool.
"""
import click
from . import explode_repos_yaml
from .report.cli import cli as report_cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
cli.add_command(report_cli, 'report')
|
Make oep-2 checker run again
|
Make oep-2 checker run again
|
Python
|
apache-2.0
|
edx/repo-tools,edx/repo-tools
|
d85947ee083b0a5d7156b4e49fd5677ebeea33c7
|
brew/monitor.py
|
brew/monitor.py
|
import time
import threading
from . import app, mongo, controller
from bson.objectid import ObjectId
class Monitor(object):
def __init__(self, timeout=10):
self.thread = None
self.exit_event = None
self.timeout = timeout
def temperature(self, brew_id):
if self.thread:
raise RuntimeError("Brew still ongoing")
def run_in_background():
while True:
if self.exit_event.wait(self.timeout):
break
with app.app_context():
temperature = controller.get_temperature()
now = time.time()
query = {'_id': ObjectId(brew_id)}
op = {'$push': {'temperatures': (now, temperature)}}
mongo.db.brews.update(query, op)
self.exit_event = threading.Event()
self.thread = threading.Thread(target=run_in_background)
self.thread.start()
def stop(self):
self.exit_event.set()
self.thread.join()
|
import time
import threading
from . import app, mongo, controller
from bson.objectid import ObjectId
class Monitor(object):
def __init__(self, timeout=10):
self.thread = None
self.exit_event = None
self.timeout = timeout
def temperature(self, brew_id):
if self.thread:
raise RuntimeError("Brew still ongoing")
def run_in_background():
while True:
if self.exit_event.wait(self.timeout):
break
with app.app_context():
temperature = controller.get_temperature()
now = time.time()
query = {'_id': ObjectId(brew_id)}
op = {'$push': {'temperatures': (now, temperature)}}
mongo.db.brews.update(query, op)
self.exit_event = threading.Event()
self.thread = threading.Thread(target=run_in_background)
self.thread.start()
def stop(self):
self.exit_event.set()
self.thread.join()
self.thread = None
|
Fix problem after stopping process
|
Fix problem after stopping process
|
Python
|
mit
|
brewpeople/brewmeister,brewpeople/brewmeister,brewpeople/brewmeister
|
ccafafbd51422979ed93ed197135bf03b7d0be81
|
opps/images/__init__.py
|
opps/images/__init__.py
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Image')
settings.INSTALLED_APPS += ('thumbor',)
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Image')
|
Remove thumbor use on init image, thumbor not django application
|
Remove thumbor use on init image, thumbor not django application
|
Python
|
mit
|
YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,williamroot/opps,williamroot/opps,jeanmask/opps,opps/opps
|
715a3c7005130b4fea8ac46132e2d2505f1901cf
|
pdf_generator/styles.py
|
pdf_generator/styles.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
Add a image shortcut to compute height and width with a ratio
|
Add a image shortcut to compute height and width with a ratio
|
Python
|
mit
|
cecedille1/PDF_generator
|
46de02b77c25c633b254dc81ed35da2443b287a9
|
lighty/wsgi/__init__.py
|
lighty/wsgi/__init__.py
|
import functools
from .handler import handler
from .urls import load_urls, resolve
def WSGIApplication(app_settings):
'''Create main application handler
'''
class Application(object):
settings = app_settings
urls = load_urls(settings.urls)
resolve_url = functools.partial(resolve, urls)
return functools.partial(handler, Application, Application.resolve_url)
|
import functools
import os
from ..templates.loaders import FSLoader
from .handler import handler
from .urls import load_urls, resolve
class BaseApplication(object):
'''Base application class contains obly settings, urls and resolve_url
method
'''
def __init__(self, settings):
self.settings = settings
self.urls = load_urls(settings.urls)
self.resolve_url = functools.partial(resolve, self.urls)
class ComplexApplication(BaseApplication):
'''Application loads also templates and database connection
'''
def __init__(self, settings):
super(ComplexApplication, self).__init__(settings)
self.apps = settings.section('APPS')
template_dirs = []
for app in self.apps:
module = __import__(app, globals(), locals(), app.split('.')[-1])
template_dir = os.path.join(module.__path__[0], 'templates')
if os.path.exists(template_dir):
template_dirs.append(template_dir)
try:
template_dirs += settings.section('TEMPLATE_DIRS')
except:
pass
self.template_loader = FSLoader(template_dirs)
self.get_template = self.template_loader.get_template
def WSGIApplication(app_settings):
'''Create main application handler
'''
application = ComplexApplication(app_settings)
return functools.partial(handler, application, application.resolve_url)
|
Add ComplexApplication class for WSGI apps that uses not only urls resolving.
|
Add ComplexApplication class for WSGI apps that uses not only urls resolving.
|
Python
|
bsd-3-clause
|
GrAndSE/lighty
|
a1b4526f48fbd9e7f48c8bb6bc1a4763cc710448
|
fabric_bolt/web_hooks/tables.py
|
fabric_bolt/web_hooks/tables.py
|
import django_tables2 as tables
from fabric_bolt.core.mixins.tables import ActionsColumn, PaginateTable
from fabric_bolt.web_hooks import models
class HookTable(PaginateTable):
"""Table used to show the configurations
Also provides actions to edit and delete"""
actions = ActionsColumn([
{'title': '<i class="glyphicon glyphicon-pencil"></i>', 'url': 'hooks_hook_update', 'args': [tables.A('pk')],
'attrs':{'data-toggle': 'tooltip', 'title': 'Edit Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}},
{'title': '<i class="glyphicon glyphicon-trash"></i>', 'url': 'hooks_hook_delete', 'args': [tables.A('pk')],
'attrs':{'data-toggle': 'tooltip', 'title': 'Delete Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}},
], delimiter='   ')
class Meta:
model = models.Hook
attrs = {"class": "table table-striped"}
sequence = fields = (
'url',
)
|
import django_tables2 as tables
from fabric_bolt.core.mixins.tables import ActionsColumn, PaginateTable
from fabric_bolt.web_hooks import models
class HookTable(PaginateTable):
"""Table used to show the configurations
Also provides actions to edit and delete"""
actions = ActionsColumn([
{'title': '<i class="glyphicon glyphicon-pencil"></i>', 'url': 'hooks_hook_update', 'args': [tables.A('pk')],
'attrs':{'data-toggle': 'tooltip', 'title': 'Edit Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}},
{'title': '<i class="glyphicon glyphicon-trash"></i>', 'url': 'hooks_hook_delete', 'args': [tables.A('pk')],
'attrs':{'data-toggle': 'tooltip', 'title': 'Delete Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}},
], delimiter='   ')
class Meta:
model = models.Hook
attrs = {"class": "table table-striped"}
sequence = fields = (
'project',
'url',
)
|
Add project to hook table so it's a little more clear what it's a global one.
|
Add project to hook table so it's a little more clear what it's a global one.
|
Python
|
mit
|
worthwhile/fabric-bolt,jproffitt/fabric-bolt,gvangool/fabric-bolt,qdqmedia/fabric-bolt,damoguyan8844/fabric-bolt,qdqmedia/fabric-bolt,npardington/fabric-bolt,fabric-bolt/fabric-bolt,maximon93/fabric-bolt,leominov/fabric-bolt,lethe3000/fabric-bolt,worthwhile/fabric-bolt,maximon93/fabric-bolt,damoguyan8844/fabric-bolt,lethe3000/fabric-bolt,maximon93/fabric-bolt,jproffitt/fabric-bolt,damoguyan8844/fabric-bolt,brajput24/fabric-bolt,gvangool/fabric-bolt,worthwhile/fabric-bolt,paperreduction/fabric-bolt,leominov/fabric-bolt,paperreduction/fabric-bolt,brajput24/fabric-bolt,leominov/fabric-bolt,fabric-bolt/fabric-bolt,npardington/fabric-bolt,lethe3000/fabric-bolt,gvangool/fabric-bolt,npardington/fabric-bolt,jproffitt/fabric-bolt,brajput24/fabric-bolt,fabric-bolt/fabric-bolt,qdqmedia/fabric-bolt,paperreduction/fabric-bolt
|
377fa94c2963a9c2522164ff374431dbe836217e
|
indra/sources/rlimsp/api.py
|
indra/sources/rlimsp/api.py
|
__all__ = ['process_pmc']
import logging
import requests
from .processor import RlimspProcessor
logger = logging.getLogger(__name__)
RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc'
class RLIMSP_Error(Exception):
pass
def process_pmc(pmcid, with_grounding=True):
"""Get an output from RLIMS-p for the given pmic id.
Parameters
----------
pmcid : str
A PMCID, with the prefix PMC, of the paper to be "read".
with_grounding : bool
The RLIMS-P web service provides two endpoints, one pre-grounded, the
other not so much. The grounded endpoint returns far less content, and
may perform some grounding that can be handled by the grounding mapper.
"""
if with_grounding:
resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid)
else:
resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid)
if resp.status_code != 200:
raise RLIMSP_Error("Bad status code: %d - %s"
% (resp.status_code, resp.reason))
rp = RlimspProcessor(resp.json())
return rp
|
__all__ = ['process_from_webservice']
import logging
import requests
from .processor import RlimspProcessor
logger = logging.getLogger(__name__)
RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/'
class RLIMSP_Error(Exception):
pass
def process_from_webservice(id_val, id_type='pmcid', source='pmc',
with_grounding=True):
"""Get an output from RLIMS-p for the given pmic id.
Parameters
----------
id_val : str
A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to
be "read".
id_type : str
Either 'pmid' or 'pmcid'. The default is 'pmcid'.
source : str
Either 'pmc' or 'medline', whether you want pmc fulltext or medline
abstracts.
with_grounding : bool
The RLIMS-P web service provides two endpoints, one pre-grounded, the
other not so much. The grounded endpoint returns far less content, and
may perform some grounding that can be handled by the grounding mapper.
"""
if with_grounding:
fmt = '%s.normed/%s/%s'
else:
fmt = '%s/%s/%s'
resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val))
if resp.status_code != 200:
raise RLIMSP_Error("Bad status code: %d - %s"
% (resp.status_code, resp.reason))
rp = RlimspProcessor(resp.json())
return rp
|
Add capability to read pmids and get medline.
|
Add capability to read pmids and get medline.
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,bgyori/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra
|
c17aed93f3dd5a1a46dfb871268ebda4e56b1bee
|
lib/excel.py
|
lib/excel.py
|
"""EcoData Retriever Excel Functions
This module contains optional functions for importing data from Excel.
"""
class Excel:
@staticmethod
def empty_cell(cell):
"""Tests whether an excel cell is empty or contains only
whitespace"""
if cell.ctype == 0:
return True
if str(cell.value).strip() == "":
return True
return False
@staticmethod
def cell_value(cell):
"""Returns the string value of an excel spreadsheet cell"""
return str(cell.value).strip()
|
"""EcoData Retriever Excel Functions
This module contains optional functions for importing data from Excel.
"""
class Excel:
@staticmethod
def empty_cell(cell):
"""Tests whether an excel cell is empty or contains only
whitespace"""
if cell.ctype == 0:
return True
if str(cell.value).strip() == "":
return True
return False
@staticmethod
def cell_value(cell):
"""Returns the string value of an excel spreadsheet cell"""
if (cell.value).__class__.__name__ == 'unicode':
return (cell.value).encode('utf-8').strip()
return str(cell.value).strip()
|
Handle special characters in xls cell values
|
Handle special characters in xls cell values
|
Python
|
mit
|
davharris/retriever,goelakash/retriever,embaldridge/retriever,henrykironde/deletedret,davharris/retriever,goelakash/retriever,embaldridge/retriever,davharris/retriever,henrykironde/deletedret,embaldridge/retriever
|
b874a5d3f54ef7ba71af18474a96e835d97bb846
|
chat/views.py
|
chat/views.py
|
from datetime import datetime, timedelta
import jwt
import os
from django.shortcuts import render
from django.conf import settings
from django.views.generic.base import TemplateView
key = os.path.join(
os.path.dirname(__file__),
'ecc',
'key.pem',
)
with open(key, 'r') as fh:
ecc_private = fh.read()
# Create your views here.
class NabuView(TemplateView):
template_name = 'chat/nabu.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
data = {
'sub': 'Kromey',
'iss': self.request.headers['Host'],
'aud': self.request.headers['Host'],
'exp': datetime.utcnow() + timedelta(seconds=30),
}
token = jwt.encode(data, ecc_private, algorithm='ES256')
context['token'] = token.decode('utf-8')
return context
|
from datetime import datetime, timedelta
import jwt
import os
from django.shortcuts import render
from django.conf import settings
from django.views.generic.base import TemplateView
key = os.path.join(
os.path.dirname(__file__),
'ecc',
'key.pem',
)
with open(key, 'r') as fh:
ecc_private = fh.read()
# Create your views here.
class NabuView(TemplateView):
template_name = 'chat/nabu.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
data = {
'sub': 'Kromey',
'iss': settings.NABU['jwt']['iss'],
'aud': settings.NABU['jwt']['aud'],
'exp': datetime.utcnow() + timedelta(**settings.NABU['jwt']['exp']),
}
token = jwt.encode(data, ecc_private, algorithm='ES256')
context['token'] = token.decode('utf-8')
return context
|
Use Nabu settings in token generation
|
Use Nabu settings in token generation
|
Python
|
mit
|
Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters
|
d8702486851c59d8b030a63aefee2b5ca152772e
|
test_projects/django14/pizzagigi/urls.py
|
test_projects/django14/pizzagigi/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import (
PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView,
PizzaUpdateView,
ChickenWingsListView
)
urlpatterns = patterns('', # NOQA
url(r'^$', PizzaListView.as_view(), name='list'),
url(r'^create/$', PizzaCreateView.as_view(), name='create'),
url(r'^created/$', TemplateView.as_view(
template_name='pizzagigi/pizza_created.html'), name='created'),
url(r'^detail/(?P<pk>[0-9]*)$', PizzaDetailView.as_view(), name='detail'),
url(r'^update/(?P<pk>[0-9]*)$', PizzaUpdateView.as_view(), name='update'),
url(r'^updated/$', TemplateView.as_view(
template_name='pizzagigi/pizza_updated.html'), name='updated'),
url(r'^delete/(?P<pk>[0-9]*)$', PizzaDeleteView.as_view(), name='delete'),
url(r'^deleted/$', TemplateView.as_view(
template_name='pizzagigi/pizza_deleted.html'), name='deleted'),
url(r'^wings/$', ChickenWingsListView.as_view(), name='chickenwings_list'),
)
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import (
PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView,
PizzaUpdateView
)
urlpatterns = patterns('', # NOQA
url(r'^$', PizzaListView.as_view(), name='list'),
url(r'^create/$', PizzaCreateView.as_view(), name='create'),
url(r'^created/$', TemplateView.as_view(
template_name='pizzagigi/pizza_created.html'), name='created'),
url(r'^detail/(?P<pk>[0-9]*)$', PizzaDetailView.as_view(), name='detail'),
url(r'^update/(?P<pk>[0-9]*)$', PizzaUpdateView.as_view(), name='update'),
url(r'^updated/$', TemplateView.as_view(
template_name='pizzagigi/pizza_updated.html'), name='updated'),
url(r'^delete/(?P<pk>[0-9]*)$', PizzaDeleteView.as_view(), name='delete'),
url(r'^deleted/$', TemplateView.as_view(
template_name='pizzagigi/pizza_deleted.html'), name='deleted'),
)
|
Move chickens to other app
|
Move chickens to other app
|
Python
|
bsd-3-clause
|
kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field
|
4bd930b8bc6410a9966327c8e73e0b1849c71157
|
sympy/conftest.py
|
sympy/conftest.py
|
import sys
sys._running_pytest = True
from sympy.core.cache import clear_cache
def pytest_terminal_summary(terminalreporter):
if (terminalreporter.stats.get('error', None) or
terminalreporter.stats.get('failed', None)):
terminalreporter.write_sep(' ', 'DO *NOT* COMMIT!', red=True, bold=True)
def pytest_runtest_teardown():
clear_cache()
|
import sys
sys._running_pytest = True
from sympy.core.cache import clear_cache
def pytest_report_header(config):
from sympy.utilities.misc import ARCH
s = "architecture: %s\n" % ARCH
from sympy.core.cache import USE_CACHE
s += "cache: %s\n" % USE_CACHE
from sympy.polys.domains import GROUND_TYPES
s += "ground types: %s\n" % GROUND_TYPES
return s
def pytest_terminal_summary(terminalreporter):
if (terminalreporter.stats.get('error', None) or
terminalreporter.stats.get('failed', None)):
terminalreporter.write_sep(' ', 'DO *NOT* COMMIT!', red=True, bold=True)
def pytest_runtest_teardown():
clear_cache()
|
Add more info to pytest header
|
Add more info to pytest header
|
Python
|
bsd-3-clause
|
moble/sympy,ahhda/sympy,chaffra/sympy,saurabhjn76/sympy,saurabhjn76/sympy,ga7g08/sympy,AkademieOlympia/sympy,sampadsaha5/sympy,hrashk/sympy,jbbskinny/sympy,chaffra/sympy,yukoba/sympy,Designist/sympy,abhiii5459/sympy,atsao72/sympy,pandeyadarsh/sympy,kevalds51/sympy,postvakje/sympy,sahmed95/sympy,beni55/sympy,Vishluck/sympy,asm666/sympy,rahuldan/sympy,garvitr/sympy,AunShiLord/sympy,MechCoder/sympy,shipci/sympy,bukzor/sympy,ga7g08/sympy,chaffra/sympy,ga7g08/sympy,sahilshekhawat/sympy,Gadal/sympy,emon10005/sympy,madan96/sympy,MechCoder/sympy,jaimahajan1997/sympy,MridulS/sympy,kaushik94/sympy,bukzor/sympy,oliverlee/sympy,lidavidm/sympy,Shaswat27/sympy,drufat/sympy,Gadal/sympy,Designist/sympy,cccfran/sympy,hargup/sympy,souravsingh/sympy,atsao72/sympy,diofant/diofant,dqnykamp/sympy,farhaanbukhsh/sympy,hrashk/sympy,abloomston/sympy,Titan-C/sympy,abhiii5459/sympy,farhaanbukhsh/sympy,mafiya69/sympy,lidavidm/sympy,pbrady/sympy,jamesblunt/sympy,iamutkarshtiwari/sympy,yashsharan/sympy,Titan-C/sympy,drufat/sympy,pandeyadarsh/sympy,dqnykamp/sympy,maniteja123/sympy,sunny94/temp,debugger22/sympy,meghana1995/sympy,ahhda/sympy,cswiercz/sympy,meghana1995/sympy,AkademieOlympia/sympy,jbbskinny/sympy,hargup/sympy,toolforger/sympy,kaushik94/sympy,mcdaniel67/sympy,Vishluck/sympy,kaichogami/sympy,wyom/sympy,lindsayad/sympy,Shaswat27/sympy,atsao72/sympy,srjoglekar246/sympy,Davidjohnwilson/sympy,sahilshekhawat/sympy,Davidjohnwilson/sympy,dqnykamp/sympy,sahmed95/sympy,moble/sympy,farhaanbukhsh/sympy,rahuldan/sympy,ChristinaZografou/sympy,skidzo/sympy,MridulS/sympy,kevalds51/sympy,shipci/sympy,jamesblunt/sympy,VaibhavAgarwalVA/sympy,beni55/sympy,hrashk/sympy,ChristinaZografou/sympy,pbrady/sympy,MechCoder/sympy,sampadsaha5/sympy,Designist/sympy,Curious72/sympy,souravsingh/sympy,wyom/sympy,lidavidm/sympy,abhiii5459/sympy,moble/sympy,wyom/sympy,Mitchkoens/sympy,Mitchkoens/sympy,liangjiaxing/sympy,Arafatk/sympy,hargup/sympy,yukoba/sympy,jbbskinny/sympy,postvakje/sympy,garvitr/sympy,grevutiu-gabriel/
sympy,MridulS/sympy,mcdaniel67/sympy,Titan-C/sympy,cccfran/sympy,shipci/sympy,amitjamadagni/sympy,Curious72/sympy,cswiercz/sympy,toolforger/sympy,Arafatk/sympy,kmacinnis/sympy,asm666/sympy,iamutkarshtiwari/sympy,atreyv/sympy,kumarkrishna/sympy,atreyv/sympy,debugger22/sympy,wanglongqi/sympy,VaibhavAgarwalVA/sympy,saurabhjn76/sympy,pandeyadarsh/sympy,Gadal/sympy,aktech/sympy,shikil/sympy,amitjamadagni/sympy,VaibhavAgarwalVA/sympy,madan96/sympy,Mitchkoens/sympy,kumarkrishna/sympy,drufat/sympy,skidzo/sympy,maniteja123/sympy,kmacinnis/sympy,wanglongqi/sympy,kmacinnis/sympy,toolforger/sympy,skirpichev/omg,jerli/sympy,liangjiaxing/sympy,cswiercz/sympy,lindsayad/sympy,mafiya69/sympy,beni55/sympy,atreyv/sympy,abloomston/sympy,yukoba/sympy,cccfran/sympy,rahuldan/sympy,postvakje/sympy,shikil/sympy,shikil/sympy,Sumith1896/sympy,lindsayad/sympy,sunny94/temp,yashsharan/sympy,ahhda/sympy,asm666/sympy,AunShiLord/sympy,jamesblunt/sympy,grevutiu-gabriel/sympy,madan96/sympy,jaimahajan1997/sympy,sunny94/temp,liangjiaxing/sympy,jerli/sympy,emon10005/sympy,Vishluck/sympy,yashsharan/sympy,kaichogami/sympy,skidzo/sympy,kevalds51/sympy,mafiya69/sympy,AunShiLord/sympy,vipulroxx/sympy,vipulroxx/sympy,kumarkrishna/sympy,oliverlee/sympy,debugger22/sympy,grevutiu-gabriel/sympy,Davidjohnwilson/sympy,sampadsaha5/sympy,sahilshekhawat/sympy,Shaswat27/sympy,maniteja123/sympy,pbrady/sympy,emon10005/sympy,aktech/sympy,ChristinaZografou/sympy,bukzor/sympy,flacjacket/sympy,Curious72/sympy,mcdaniel67/sympy,oliverlee/sympy,Arafatk/sympy,sahmed95/sympy,souravsingh/sympy,Sumith1896/sympy,garvitr/sympy,abloomston/sympy,meghana1995/sympy,kaushik94/sympy,Sumith1896/sympy,AkademieOlympia/sympy,kaichogami/sympy,aktech/sympy,wanglongqi/sympy,iamutkarshtiwari/sympy,jerli/sympy,vipulroxx/sympy,jaimahajan1997/sympy
|
f5fb36875b09926effdae46a92497d01fa04e777
|
src/models/lm.py
|
src/models/lm.py
|
from keras.layers import LSTM, Input, Reshape
from keras.models import Model
from ..layers import LMMask, Projection
class LanguageModel(Model):
def __init__(self, n_batch, d_W, d_L, trainable=True):
"""
n_batch :: batch size for model application
d_L :: language model state dimension (and output vector size)
d_W :: input word embedding size (word features)
"""
w_n = Input(batch_shape=(n_batch, d_W), name='w_n', dtype='floatX')
w_nmask = Input(batch_shape=(n_batch, 1), name='w_nmask', dtype='int8')
# Prevent padded samples to affect internal state (and cause NaN loss in worst
# case) by masking them by using another input value
w_nmasked = LMMask(0.)([Reshape((1, d_W))(w_n), w_nmask])
# Using stateful LSTM for language model - model fitting code resets the
# state after each sentence
w_np1Ei = LSTM(d_L,
trainable=trainable,
return_sequences=False,
stateful=True,
consume_less='gpu')(w_nmasked)
w_np1E = Projection(d_W)(w_np1Ei)
super(LanguageModel, self).__init__(input=[w_n, w_nmask], output=w_np1E, name='LanguageModel')
|
from keras.layers import LSTM, Input, Reshape
from keras.models import Model
from ..layers import LMMask, Projection
class LanguageModel(Model):
def __init__(self, n_batch, d_W, d_L, trainable=True):
"""
n_batch :: batch size for model application
d_L :: language model state dimension (and output vector size)
d_W :: input word embedding size (word features)
"""
w_n = Input(batch_shape=(n_batch, d_W), name='w_n', dtype='floatX')
w_nmask = Input(batch_shape=(n_batch, 1), name='w_nmask', dtype='int8')
# Prevent padded samples to affect internal state (and cause NaN loss in worst
# case) by masking them by using w_nmask masking values
w_nmasked = LMMask(0.)([Reshape((1, d_W))(w_n), w_nmask])
# Using stateful LSTM for language model - model fitting code resets the
# state after each sentence
w_np1Ei = LSTM(d_L,
trainable=trainable,
return_sequences=True,
stateful=True,
consume_less='gpu')(w_nmasked)
w_np1Ei = LSTM(d_L,
trainable=trainable,
return_sequences=False,
stateful=True,
consume_less='gpu')(w_np1Ei)
w_np1E = Projection(d_W)(w_np1Ei)
super(LanguageModel, self).__init__(input=[w_n, w_nmask], output=w_np1E, name='LanguageModel')
|
Use two LSTM LM’s instead of single huge one
|
Use two LSTM LM’s instead of single huge one
|
Python
|
mit
|
milankinen/c2w2c,milankinen/c2w2c
|
f16ce4235e124fa9ea5d335665221514a2fcdcce
|
examples/cpp/clion.py
|
examples/cpp/clion.py
|
#!/usr/bin/env python3
"""This is a **proof-of-concept** CLion project generator."""
import functools
import json
import subprocess
subprocess.check_call(['cook', '--results'])
with open('results.json') as file:
content = json.load(file)
with open('CMakeLists.txt', 'w') as file:
w = functools.partial(print, file=file)
w('cmake_minimum_required(VERSION 2.8.8)')
w()
w('add_custom_target(COOK COMMAND cook '
'WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})')
w()
outputs = {}
for primary, result in content.items():
for output in result['outputs']:
outputs[output] = primary
for primary, result in content.items():
if result.get('type') == 'cpp.object':
cpp = [file for file in result['inputs'] if file.endswith('.cpp')]
w('add_library({} OBJECT {})'.format(primary, ' '.join(cpp)))
defines = ' '.join(name + '=' + str(val) for name, val
in result['define'].items())
if defines:
w('target_compile_definitions({} PRIVATE {})'
.format(primary, defines))
includes = result['include']
if includes:
w('target_include_directories({} PRIVATE {})'.format(
primary, ' '.join(includes)
))
w()
|
#!/usr/bin/env python3
"""This is a **proof-of-concept** CLion project generator."""
import functools
import json
import subprocess
import sys
subprocess.check_call(['cook', '--results'])
with open('results.json') as file:
content = json.load(file)
with open('CMakeLists.txt', 'w') as file:
w = functools.partial(print, file=file)
w('cmake_minimum_required(VERSION 2.8.8)')
w()
w('add_custom_target(COOK COMMAND ' + sys.executable + ' clion.py COMMAND cook '
'WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})')
w()
outputs = {}
for primary, result in content.items():
for output in result['outputs']:
outputs[output] = primary
for primary, result in content.items():
if result.get('type') == 'cpp.object':
cpp = [file for file in result['inputs'] if file.endswith('.cpp')]
w('add_library({} OBJECT {})'.format(primary, ' '.join(cpp)))
defines = ' '.join(name + '=' + str(val) for name, val
in result['define'].items())
if defines:
w('target_compile_definitions({} PRIVATE {})'
.format(primary, defines))
includes = result['include']
if includes:
w('target_include_directories({} PRIVATE {})'.format(
primary, ' '.join(includes)
))
w()
|
Add automatic regeneration for CLion
|
Add automatic regeneration for CLion
|
Python
|
mit
|
jachris/cook
|
c0df1342b6625cdc2a205f2ba13ee201e8d0b02a
|
tests/conftest.py
|
tests/conftest.py
|
from __future__ import absolute_import
import pytest
import os
import mock
import json
import app.mapping
with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f:
_services_mapping_definition = json.load(f)
@pytest.fixture(scope="function")
def services_mapping():
"""Provide a services mapping fixture, and patch it into the global singleton getter."""
mock_services_mapping_getter_patch = mock.patch('app.mapping.get_services_mapping')
mock_services_mapping_getter = mock_services_mapping_getter_patch.start()
mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services')
yield mock_services_mapping_getter.return_value
mock_services_mapping_getter_patch.stop()
|
from __future__ import absolute_import
import pytest
import os
import mock
import json
import app.mapping
with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f:
_services_mapping_definition = json.load(f)
@pytest.fixture(scope="function")
def services_mapping():
"""Provide a services mapping fixture, and patch it into the global singleton getter."""
with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter:
mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services')
yield mock_services_mapping_getter.return_value
|
Use with block to start/stop the patch context manager.
|
Use with block to start/stop the patch context manager.
- this is less code, hopefully is just as clear why we need to 'yield'
rather than just 'return'.
https://trello.com/c/OpWI068M/380-after-g9-go-live-removal-of-old-filters-from-search-api-mapping
|
Python
|
mit
|
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
|
d613ca02bef0572d7581c843eb5466443410decf
|
test_settings.py
|
test_settings.py
|
import os
from django.urls import (
include,
path,
)
BASE_DIR = os.path.dirname(__file__)
STATIC_URL = "/static/"
INSTALLED_APPS = (
'gcloudc',
'djangae',
'djangae.commands', # Takes care of emulator setup
'djangae.tasks',
)
DATABASES = {
'default': {
'ENGINE': 'gcloudc.db.backends.datastore',
'INDEXES_FILE': os.path.join(os.path.abspath(os.path.dirname(__file__)), "djangaeidx.yaml"),
"PROJECT": "test",
"NAMESPACE": "ns1", # Use a non-default namespace to catch edge cases where we forget
}
}
SECRET_KEY = "secret_key_for_testing"
USE_TZ = True
CSRF_USE_SESSIONS = True
CLOUD_TASKS_LOCATION = "[LOCATION]"
# Define two required task queues
CLOUD_TASKS_QUEUES = [
{
"name": "default"
},
{
"name": "another"
}
]
# Point the URL conf at this file
ROOT_URLCONF = __name__
urlpatterns = [
path('tasks/', include('djangae.tasks.urls')),
]
|
import os
from django.urls import (
include,
path,
)
BASE_DIR = os.path.dirname(__file__)
STATIC_URL = "/static/"
# Default Django middleware
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'djangae.tasks.middleware.task_environment_middleware',
]
INSTALLED_APPS = (
'django.contrib.sessions',
'gcloudc',
'djangae',
'djangae.commands', # Takes care of emulator setup
'djangae.tasks',
)
DATABASES = {
'default': {
'ENGINE': 'gcloudc.db.backends.datastore',
'INDEXES_FILE': os.path.join(os.path.abspath(os.path.dirname(__file__)), "djangaeidx.yaml"),
"PROJECT": "test",
"NAMESPACE": "ns1", # Use a non-default namespace to catch edge cases where we forget
}
}
SECRET_KEY = "secret_key_for_testing"
USE_TZ = True
CSRF_USE_SESSIONS = True
CLOUD_TASKS_LOCATION = "[LOCATION]"
# Define two required task queues
CLOUD_TASKS_QUEUES = [
{
"name": "default"
},
{
"name": "another"
}
]
# Point the URL conf at this file
ROOT_URLCONF = __name__
urlpatterns = [
path('tasks/', include('djangae.tasks.urls')),
]
|
Set default Django middleware in test settings
|
Set default Django middleware in test settings
|
Python
|
bsd-3-clause
|
potatolondon/djangae,potatolondon/djangae
|
2bef67ad0a4fb0db4bdf11d24b3c63e37558e7b9
|
poker/_common.py
|
poker/_common.py
|
import random
from enum import Enum
from enum34_custom import _MultiValueMeta, OrderableMixin, CaseInsensitiveMultiValueEnum
from types import DynamicClassAttribute
class _MultiMeta(_MultiValueMeta):
def make_random(cls):
return random.choice(list(cls))
class _MultiValueEnum(OrderableMixin, Enum, metaclass=_MultiMeta):
def __str__(self):
return str(self.value)
def __repr__(self):
apostrophe = "'" if isinstance(self.value, str) else ''
return "{0}({1}{2}{1})".format(self.__class__.__qualname__, apostrophe, self)
@DynamicClassAttribute
def value(self):
"""The value of the Enum member."""
return self._value_[0]
class _CaseInsensitiveMultiValueEnum(CaseInsensitiveMultiValueEnum):
def __str__(self):
return str(self.value[0])
class _ReprMixin:
def __repr__(self):
return "{}('{}')".format(self.__class__.__qualname__, self)
def _make_float(string):
return float(string.strip().replace(',', ''))
def _make_int(string):
return int(string.strip().replace(',', ''))
|
import random
from enum import Enum
from enum34_custom import (
_MultiValueMeta, OrderableMixin, CaseInsensitiveMultiValueEnum, MultiValueEnum
)
from types import DynamicClassAttribute
class _RandomMultiValueMeta(_MultiValueMeta):
def make_random(cls):
return random.choice(list(cls))
class _MultiValueEnum(OrderableMixin, MultiValueEnum, metaclass=_RandomMultiValueMeta):
def __str__(self):
return str(self.value)
def __repr__(self):
apostrophe = "'" if isinstance(self.value, str) else ''
return "{0}({1}{2}{1})".format(self.__class__.__qualname__, apostrophe, self)
@DynamicClassAttribute
def value(self):
"""The value of the Enum member."""
return self._value_[0]
class _CaseInsensitiveMultiValueEnum(CaseInsensitiveMultiValueEnum):
def __str__(self):
return str(self.value[0])
class _ReprMixin:
def __repr__(self):
return "{}('{}')".format(self.__class__.__qualname__, self)
def _make_float(string):
return float(string.strip().replace(',', ''))
def _make_int(string):
return int(string.strip().replace(',', ''))
|
Clarify what _MultiVAlueEnum does and where it comes from.
|
Clarify what _MultiVAlueEnum does and where it comes from.
|
Python
|
mit
|
pokerregion/poker,Seanmcn/poker,marchon/poker
|
62a55c9e4c46aac647e0c7bc3d8143f1a6bd41ca
|
groups/admin.py
|
groups/admin.py
|
from django.contrib import admin
from .models import Discussion, Group
admin.site.register(Discussion)
admin.site.register(Group)
|
from django.contrib import admin
from .models import Discussion, Group
class GroupAdmin(admin.ModelAdmin):
filter_horizontal = ('moderators', 'watchers', 'members_if_private')
class Meta:
model = Group
class DiscussionAdmin(admin.ModelAdmin):
filter_horizontal = ('subscribers', 'ignorers')
class Meta:
model = Discussion
admin.site.register(Group, GroupAdmin)
admin.site.register(Discussion, DiscussionAdmin)
|
Use filter_horizontal for many-to-many fields.
|
Use filter_horizontal for many-to-many fields.
|
Python
|
bsd-2-clause
|
incuna/incuna-groups,incuna/incuna-groups
|
992e39a6e669fd448034fe4d844b1bcc87d75721
|
comics/comics/oots.py
|
comics/comics/oots.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "The Order of the Stick"
language = "en"
url = "http://www.giantitp.com/"
start_date = "2003-09-30"
rights = "Rich Burlew"
class Crawler(CrawlerBase):
history_capable_days = 1
time_zone = "US/Eastern"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.giantitp.com/comics/oots.rss")
if len(feed.all()):
entry = feed.all()[0]
page = self.parse_page(entry.link)
url = page.src('img[src*="/comics/images/"]')
title = entry.title
return CrawlerImage(url, title)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "The Order of the Stick"
language = "en"
url = "http://www.giantitp.com/"
start_date = "2003-09-30"
rights = "Rich Burlew"
class Crawler(CrawlerBase):
history_capable_days = 10
time_zone = "US/Eastern"
headers = {"User-Agent": "Mozilla/5.0"}
def crawl(self, pub_date):
feed = self.parse_feed("http://www.giantitp.com/comics/oots.rss")
if len(feed.all()):
entry = feed.all()[0]
page = self.parse_page(entry.link)
url = page.src('img[src*="/comics/oots/"]')
title = entry.title
return CrawlerImage(url, title)
|
Update "The Order of the Stick" after site change
|
Update "The Order of the Stick" after site change
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics
|
030e129fd60b5ab2255b10e8115ab4e3e973ae05
|
utils/gyb_syntax_support/protocolsMap.py
|
utils/gyb_syntax_support/protocolsMap.py
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
],
'TokenSyntax': [
'BinaryOperatorExpr'
]
}
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'AccessorList': [
'AccessorBlock'
],
'CodeBlockItemList': [
'CodeBlock'
],
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'FunctionCallExpr': [
'CodeBlockItem',
'ExprBuildable'
],
'SequenceExpr': [
'CodeBlockItem',
'ExprBuildable',
'TupleExprElement'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
],
'TokenSyntax': [
'BinaryOperatorExpr',
'DeclModifier'
]
}
|
Add more types in protocol map
|
Add more types in protocol map
|
Python
|
apache-2.0
|
JGiola/swift,apple/swift,JGiola/swift,glessard/swift,apple/swift,ahoppen/swift,JGiola/swift,ahoppen/swift,ahoppen/swift,roambotics/swift,apple/swift,benlangmuir/swift,atrick/swift,atrick/swift,rudkx/swift,gregomni/swift,JGiola/swift,rudkx/swift,benlangmuir/swift,roambotics/swift,rudkx/swift,benlangmuir/swift,gregomni/swift,JGiola/swift,gregomni/swift,gregomni/swift,atrick/swift,roambotics/swift,glessard/swift,glessard/swift,benlangmuir/swift,rudkx/swift,ahoppen/swift,rudkx/swift,rudkx/swift,atrick/swift,glessard/swift,ahoppen/swift,glessard/swift,benlangmuir/swift,ahoppen/swift,gregomni/swift,apple/swift,JGiola/swift,apple/swift,glessard/swift,apple/swift,atrick/swift,gregomni/swift,roambotics/swift,benlangmuir/swift,atrick/swift,roambotics/swift,roambotics/swift
|
0cf4aff4702ad580f9709b33c96cd115f34b028d
|
spacy/tests/conftest.py
|
spacy/tests/conftest.py
|
import pytest
import os
from ..en import English
from ..de import German
@pytest.fixture(scope="session")
def EN():
return English(path=False)
@pytest.fixture(scope="session")
def DE():
return German(path=False)
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
import pytest
import os
from ..en import English
from ..de import German
@pytest.fixture(scope="session")
def EN():
return English()
@pytest.fixture(scope="session")
def DE():
return German()
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
Set default path in EN/DE tests.
|
Set default path in EN/DE tests.
|
Python
|
mit
|
Gregory-Howard/spaCy,explosion/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,spacy-io/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,Gregory-Howard/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,raphael0202/spaCy,raphael0202/spaCy,raphael0202/spaCy,banglakit/spaCy,banglakit/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,aikramer2/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy
|
edbbf93222fc4061a18f81718a6a7233c6b840ec
|
tests/test_callbacks.py
|
tests/test_callbacks.py
|
import pytest
from aiotg import TgBot
from aiotg import MESSAGE_TYPES
API_TOKEN = "test_token"
def text_msg(text):
return {
"message_id": 0,
"from": {},
"chat": { "id": 0, "type": "private" },
"text": text
}
def test_command():
bot = TgBot(API_TOKEN)
called_with = None
@bot.command(r"/echo (.+)")
def echo(chat, match):
nonlocal called_with
called_with = match.group(1)
bot._process_message(text_msg("/echo foo"))
assert called_with == "foo"
def test_default():
bot = TgBot(API_TOKEN)
called_with = None
@bot.default
def default(chat, message):
nonlocal called_with
called_with = message["text"]
bot._process_message(text_msg("foo bar"))
assert called_with == "foo bar"
|
import pytest
import random
from aiotg import TgBot
from aiotg import MESSAGE_TYPES
API_TOKEN = "test_token"
bot = TgBot(API_TOKEN)
def custom_msg(msg):
template = {
"message_id": 0,
"from": {},
"chat": { "id": 0, "type": "private" }
}
template.update(msg)
return template
def text_msg(text):
return custom_msg({ "text": text })
def test_command():
called_with = None
@bot.command(r"/echo (.+)")
def echo(chat, match):
nonlocal called_with
called_with = match.group(1)
bot._process_message(text_msg("/echo foo"))
assert called_with == "foo"
def test_default():
called_with = None
@bot.default
def default(chat, message):
nonlocal called_with
called_with = message["text"]
bot._process_message(text_msg("foo bar"))
assert called_with == "foo bar"
@pytest.mark.parametrize("mt", MESSAGE_TYPES)
def test_handle(mt):
called_with = None
@bot.handle(mt)
def handle(chat, media):
nonlocal called_with
called_with = media
value = random.random()
bot._process_message(custom_msg({ mt: value }))
assert called_with == value
|
Add test for media handlers
|
Add test for media handlers
|
Python
|
mit
|
SijmenSchoon/aiotg,szastupov/aiotg,derfenix/aiotg
|
29f6a260e49a6955dd12d354400d9ee6cfd6ddc7
|
tests/qtcore/qstatemachine_test.py
|
tests/qtcore/qstatemachine_test.py
|
#!/usr/bin/python
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
class QStateMachineTest(unittest.TestCase):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
app = QCoreApplication([])
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, app.quit)
app.exec_()
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/python
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
from helper import UsesQCoreApplication
class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
Add UsesQCoreApplication in state machine test
|
Add UsesQCoreApplication in state machine test
|
Python
|
lgpl-2.1
|
M4rtinK/pyside-bb10,enthought/pyside,M4rtinK/pyside-android,PySide/PySide,IronManMark20/pyside2,PySide/PySide,M4rtinK/pyside-bb10,RobinD42/pyside,BadSingleton/pyside2,PySide/PySide,qtproject/pyside-pyside,enthought/pyside,pankajp/pyside,pankajp/pyside,M4rtinK/pyside-android,PySide/PySide,BadSingleton/pyside2,gbaty/pyside2,qtproject/pyside-pyside,enthought/pyside,enthought/pyside,RobinD42/pyside,pankajp/pyside,pankajp/pyside,enthought/pyside,M4rtinK/pyside-android,M4rtinK/pyside-bb10,enthought/pyside,gbaty/pyside2,qtproject/pyside-pyside,PySide/PySide,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,M4rtinK/pyside-android,qtproject/pyside-pyside,gbaty/pyside2,RobinD42/pyside,BadSingleton/pyside2,RobinD42/pyside,enthought/pyside,RobinD42/pyside,gbaty/pyside2,IronManMark20/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,RobinD42/pyside,IronManMark20/pyside2,BadSingleton/pyside2,pankajp/pyside,M4rtinK/pyside-android,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-android,gbaty/pyside2,RobinD42/pyside,qtproject/pyside-pyside
|
5eefa21699f2dc7b75a919b5899a25ec7ef5c5b7
|
tests/unit/test_adapter_session.py
|
tests/unit/test_adapter_session.py
|
import pytest
from wagtail_personalisation import adapters
from tests.factories.segment import SegmentFactory
@pytest.mark.django_db
def test_get_segments(rf, monkeypatch):
request = rf.get('/')
adapter = adapters.SessionSegmentsAdapter(request)
segment_1 = SegmentFactory(name='segment-1', persistent=True)
segment_2 = SegmentFactory(name='segment-2', persistent=True)
adapter.set_segments([segment_1, segment_2])
assert len(request.session['segments']) == 2
segments = adapter.get_segments()
assert segments == [segment_1, segment_2]
@pytest.mark.django_db
def test_get_segment_by_id(rf, monkeypatch):
request = rf.get('/')
adapter = adapters.SessionSegmentsAdapter(request)
segment_1 = SegmentFactory(name='segment-1', persistent=True)
segment_2 = SegmentFactory(name='segment-2', persistent=True)
adapter.set_segments([segment_1, segment_2])
segment_x = adapter.get_segment_by_id(segment_2.pk)
assert segment_x == segment_2
|
import pytest
from wagtail_personalisation import adapters
from tests.factories.segment import SegmentFactory
@pytest.mark.django_db
def test_get_segments(rf, monkeypatch):
request = rf.get('/')
adapter = adapters.SessionSegmentsAdapter(request)
segment_1 = SegmentFactory(name='segment-1', persistent=True)
segment_2 = SegmentFactory(name='segment-2', persistent=True)
adapter.set_segments([segment_1, segment_2])
assert len(request.session['segments']) == 2
segments = adapter.get_segments()
assert segments == [segment_1, segment_2]
@pytest.mark.django_db
def test_get_segment_by_id(rf, monkeypatch):
request = rf.get('/')
adapter = adapters.SessionSegmentsAdapter(request)
segment_1 = SegmentFactory(name='segment-1', persistent=True)
segment_2 = SegmentFactory(name='segment-2', persistent=True)
adapter.set_segments([segment_1, segment_2])
segment_x = adapter.get_segment_by_id(segment_2.pk)
assert segment_x == segment_2
@pytest.mark.django_db
def test_refresh_removes_disabled(rf, monkeypatch):
request = rf.get('/')
adapter = adapters.SessionSegmentsAdapter(request)
segment_1 = SegmentFactory(name='segment-1', persistent=True)
segment_2 = SegmentFactory(name='segment-2', persistent=True)
adapter.set_segments([segment_1, segment_2])
adapter = adapters.SessionSegmentsAdapter(request)
segment_1.status = segment_1.STATUS_DISABLED
segment_1.save()
adapter.refresh()
assert adapter.get_segments() == [segment_2]
|
Add test for sessionadapter.refresh when segment is disable
|
Add test for sessionadapter.refresh when segment is disable
|
Python
|
mit
|
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
|
5fb365333711f7e999f71d53061ae14c386e575c
|
src/waldur_core/core/api_groups_mapping.py
|
src/waldur_core/core/api_groups_mapping.py
|
API_GROUPS = {
'authentication': ['/api-auth/', '/api/auth-valimo/',],
'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',],
'organization': [
'/api/customers/',
'/api/customer-permissions-log/',
'/api/customer-permissions-reviews/',
'/api/customer-permissions/',
],
'marketplace': [
'/api/marketplace-bookings/',
'/api/marketplace-cart-items/',
'/api/marketplace-categories/',
'/api/marketplace-category-component-usages/',
'/api/marketplace-checklists-categories/',
'/api/marketplace-checklists/',
'/api/marketplace-component-usages/',
'/api/marketplace-offering-files/',
'/api/marketplace-offerings/',
'/api/marketplace-order-items/',
'/api/marketplace-orders/',
'/api/marketplace-plans/',
'/api/marketplace-plugins/',
'/api/marketplace-public-api/',
'/api/marketplace-resource-offerings/',
'/api/marketplace-resources/',
'/api/marketplace-screenshots/',
'/api/marketplace-service-providers/',
],
'reporting': [
'/api/support-feedback-average-report/',
'/api/support-feedback-report/',
],
}
|
API_GROUPS = {
'authentication': ['/api-auth/', '/api/auth-valimo/',],
'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',],
'organization': [
'/api/customers/',
'/api/customer-permissions-log/',
'/api/customer-permissions-reviews/',
'/api/customer-permissions/',
],
'marketplace': [
'/api/marketplace-bookings/',
'/api/marketplace-cart-items/',
'/api/marketplace-categories/',
'/api/marketplace-category-component-usages/',
'/api/marketplace-checklists-categories/',
'/api/marketplace-checklists/',
'/api/marketplace-component-usages/',
'/api/marketplace-offering-files/',
'/api/marketplace-offerings/',
'/api/marketplace-order-items/',
'/api/marketplace-orders/',
'/api/marketplace-plans/',
'/api/marketplace-plugins/',
'/api/marketplace-public-api/',
'/api/marketplace-resource-offerings/',
'/api/marketplace-resources/',
'/api/marketplace-screenshots/',
'/api/marketplace-service-providers/',
],
'reporting': [
'/api/support-feedback-average-report/',
'/api/support-feedback-report/',
],
'accounting': ['/api/invoices/', '/api/invoice-items/',],
}
|
Add accounting group to apidocs
|
Add accounting group to apidocs
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
563645019adaf20ee66af0b4cc13e8b08bcc9d32
|
lino_noi/lib/tickets/__init__.py
|
lino_noi/lib/tickets/__init__.py
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
super(Plugin, self).setup_main_menu(site, profile, m)
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
super(Plugin, self).get_dashboard_items(user)
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
Fix menu items for noi/tickets
|
Fix menu items for noi/tickets
|
Python
|
bsd-2-clause
|
khchine5/noi,lsaffre/noi,lsaffre/noi,lino-framework/noi,lino-framework/noi,khchine5/noi,lsaffre/noi
|
a5cd2110283ba699f36548c42b83aa86e6b50aab
|
configuration.py
|
configuration.py
|
# -*- coding: utf-8 -*-
"""
configuration.py
"""
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Integer('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
|
# -*- coding: utf-8 -*-
"""
configuration.py
"""
from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
from trytond.transaction import Transaction
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
|
Migrate account_id from integer field to char field
|
Migrate account_id from integer field to char field
|
Python
|
bsd-3-clause
|
priyankarani/trytond-shipping-endicia,fulfilio/trytond-shipping-endicia,prakashpp/trytond-shipping-endicia
|
819f36493e1e0112c3bbe4f92f87f1771cc4af3f
|
moa/base.py
|
moa/base.py
|
'''
* when dispatching events, returning True stops it.
'''
from weakref import ref
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaException(Exception):
pass
class MoaBase(EventDispatcher):
named_moas = {}
''' A weakref.ref to the named moa instances.
Read only.
'''
_last_name = ''
def __init__(self, **kwargs):
super(MoaBase, self).__init__(**kwargs)
def verfiy_name(instance, value):
named_moas = MoaBase.named_moas
old_name = self._last_name
if value == old_name:
return
if old_name:
del named_moas[old_name]
if value:
if value in named_moas and named_moas[value]() is not None:
raise ValueError('Moa instance with name {} already '
'exists: {}'.format(value, named_moas[value]()))
else:
named_moas[value] = ref(self)
self._last_name = value
self.bind(name=verfiy_name)
verfiy_name(self, self.name)
name = StringProperty('')
''' Unique name across all Moa objects
'''
logger = ObjectProperty(logging.getLogger('moa'),
baseclass=logging.Logger)
source = StringProperty('')
''' E.g. a filename to load that interpreted by the subclass.
'''
|
'''
* when dispatching events, returning True stops it.
'''
__all__ = ('MoaBase', )
from weakref import ref
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaBase(EventDispatcher):
named_moas = {}
''' A weakref.ref to the named moa instances.
Read only.
'''
_last_name = ''
def __init__(self, **kwargs):
super(MoaBase, self).__init__(**kwargs)
def verfiy_name(instance, value):
named_moas = MoaBase.named_moas
old_name = self._last_name
if value == old_name:
return
if old_name:
del named_moas[old_name]
if value:
if value in named_moas and named_moas[value]() is not None:
raise ValueError('Moa instance with name {} already '
'exists: {}'.format(value, named_moas[value]()))
else:
named_moas[value] = ref(self)
self._last_name = value
self.bind(name=verfiy_name)
verfiy_name(self, self.name)
name = StringProperty('')
''' Unique name across all Moa objects
'''
logger = ObjectProperty(logging.getLogger('moa'),
baseclass=logging.Logger)
source = StringProperty('')
''' E.g. a filename to load that interpreted by the subclass.
'''
|
Remove unused moa exception class.
|
Remove unused moa exception class.
|
Python
|
mit
|
matham/moa
|
166c002f9129c9c244532f8d490b55a884c6708b
|
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
|
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from ...models import (
Transcript, TranscriptPhraseVote
)
from ...tasks import update_transcript_stats
class Command(BaseCommand):
help = 'Creates random votes for 5 phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript(in_progress=False).first()
phrases = transcript.phrases.all()[:5]
for phrase in phrases:
for user in users:
TranscriptPhraseVote.objects.create(
transcript_phrase=phrase,
user=user,
upvote=random.choice([True, False])
)
update_transcript_stats(transcript)
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from ...models import (
Transcript, TranscriptPhraseVote
)
from ...tasks import update_transcript_stats
class Command(BaseCommand):
    """Management command that fabricates a small amount of vote data.

    Uses only the first five users: each casts a random up/down vote on
    each of the first five phrases of a random finished transcript, after
    which the transcript's aggregate stats are refreshed.
    """
    help = 'Creates random votes for 5 phrases in a random transcript'

    def handle(self, *args, **options):
        voters = User.objects.all()[:5]
        transcript = Transcript.objects.random_transcript(
            in_progress=False
        ).first()
        # Only the first five phrases are seeded with votes.
        for phrase in transcript.phrases.all()[:5]:
            for voter in voters:
                TranscriptPhraseVote.objects.create(
                    transcript_phrase=phrase,
                    user=voter,
                    upvote=random.choice((True, False)),
                )
        # Recompute cached statistics now that votes exist.
        update_transcript_stats(transcript)
|
Use a smaller set of users in fake game one gameplay
|
Use a smaller set of users in fake game one gameplay
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
5fb609b13cf65ef3c29502b9b406b73f03873ab0
|
pathfinder/tests/BugTracker/Tests/stream-document.SF-2804823.XQUERY.py
|
pathfinder/tests/BugTracker/Tests/stream-document.SF-2804823.XQUERY.py
|
import os, sys
# Prefer the standard-library subprocess module; fall back to the bundled
# private copy only on very old Python versions that lack it.
try:
    import subprocess  # BUG FIX: was "import sybprocess" (typo), which always failed
except ImportError:
    # user private copy for old Python versions
    import MonetDBtesting.subprocess26 as subprocess
def client(cmd, input=None):
    """Run *cmd* through the shell, feed it *input* on stdin, and echo the
    child's stdout and stderr onto our own streams."""
    proc = subprocess.Popen(
        cmd,
        shell=True,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    out_text, err_text = proc.communicate(input)
    sys.stdout.write(out_text)
    sys.stderr.write(err_text)
def main():
    """Load a test document, list all documents, then delete it again.

    The client executable is taken from the XQUERY_CLIENT environment
    variable (assumed to be set by the test harness — TODO confirm).
    """
    xq = os.getenv('XQUERY_CLIENT')
    client('%s --input=my-document --collection=my-collection' % xq,
           '<document>test document</document>')
    client('%s -s "pf:documents()"' % xq)
    client('%s -s "pf:del-doc(\'my-document\')"' % xq)
main()
|
import os, sys
# Prefer the standard-library subprocess module; fall back to the bundled
# private copy only on very old Python versions that lack it.
try:
    import subprocess  # BUG FIX: was "import sybprocess" (typo), which always failed
except ImportError:
    # user private copy for old Python versions
    import MonetDBtesting.subprocess26 as subprocess
def client(cmd, input=None):
    """Run *cmd* (an argv list, no shell), feed it *input* on stdin, and
    echo the child's stdout and stderr onto our own streams."""
    proc = subprocess.Popen(
        cmd,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    out_text, err_text = proc.communicate(input)
    sys.stdout.write(out_text)
    sys.stderr.write(err_text)
def main():
    """Store a test document, query for it by URL, then remove it.

    The client command line is taken from the XQUERY_CLIENT environment
    variable and split into an argv prefix so extra arguments can be
    appended without shell quoting.
    """
    base = os.getenv('XQUERY_CLIENT').split()
    client(base + ['--input=my-document', '--collection=my-collection'],
           '<document>test document</document>')
    client(base + ['-s', 'for $doc in pf:documents() where $doc/@url = "my-document" return $doc'])
    client(base + ['-s', 'pf:del-doc("my-document")'])
main()
|
Make test independent of whatever else is in the database. Also, use a different way of calling subprocess.Popen so that we can use quotes and dollars without having to do difficult cross-architectural escaping.
|
Make test independent of whatever else is in the database.
Also, use a different way of calling subprocess.Popen so that we can
use quotes and dollars without having to do difficult
cross-architectural escaping.
|
Python
|
mpl-2.0
|
zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.