| commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | ndiff | instruction | content | diff |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1d486d8035e918a83dce5a70c83149a06d982a9f | Instanssi/admin_calendar/models.py | Instanssi/admin_calendar/models.py |
from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
from imagekit.models import ImageSpec
from imagekit.processors import resize
class CalendarEvent(models.Model):
user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.')
end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True)
description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
image_small = ImageSpec([resize.Fit(48, 48)], image_field='imagefile_original', format='PNG')
EVENT_TYPES = (
(0, u'Aikaraja'),
(1, u'Aikavaraus'),
)
type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0)
try:
admin.site.register(CalendarEvent)
except:
pass |
from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill
class CalendarEvent(models.Model):
user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.')
end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True)
description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
image_small = ImageSpecField([ResizeToFill(48, 48)], image_field='imagefile_original', format='PNG')
EVENT_TYPES = (
(0, u'Aikaraja'),
(1, u'Aikavaraus'),
)
type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0)
try:
admin.site.register(CalendarEvent)
except:
pass | Fix to work on the latest django-imagekit | admin_calendar: Fix to work on the latest django-imagekit
| Python | mit | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org |
from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
- from imagekit.models import ImageSpec
+ from imagekit.models import ImageSpecField
- from imagekit.processors import resize
+ from imagekit.processors import ResizeToFill
class CalendarEvent(models.Model):
user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.')
end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True)
description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
- image_small = ImageSpec([resize.Fit(48, 48)], image_field='imagefile_original', format='PNG')
+ image_small = ImageSpecField([ResizeToFill(48, 48)], image_field='imagefile_original', format='PNG')
EVENT_TYPES = (
(0, u'Aikaraja'),
(1, u'Aikavaraus'),
)
type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0)
try:
admin.site.register(CalendarEvent)
except:
pass | Fix to work on the latest django-imagekit | ## Code Before:
from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
from imagekit.models import ImageSpec
from imagekit.processors import resize
class CalendarEvent(models.Model):
user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.')
end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True)
description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
image_small = ImageSpec([resize.Fit(48, 48)], image_field='imagefile_original', format='PNG')
EVENT_TYPES = (
(0, u'Aikaraja'),
(1, u'Aikavaraus'),
)
type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0)
try:
admin.site.register(CalendarEvent)
except:
pass
## Instruction:
Fix to work on the latest django-imagekit
## Code After:
from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill
class CalendarEvent(models.Model):
user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.')
end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True)
description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
image_small = ImageSpecField([ResizeToFill(48, 48)], image_field='imagefile_original', format='PNG')
EVENT_TYPES = (
(0, u'Aikaraja'),
(1, u'Aikavaraus'),
)
type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0)
try:
admin.site.register(CalendarEvent)
except:
pass |
from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
- from imagekit.models import ImageSpec
+ from imagekit.models import ImageSpecField
? +++++
- from imagekit.processors import resize
? ^
+ from imagekit.processors import ResizeToFill
? ^ ++++++
class CalendarEvent(models.Model):
user = models.ForeignKey(User, verbose_name=u'Käyttäjä')
start = models.DateTimeField(u'Alku', help_text=u'Tapahtuman alkamisaika.')
end = models.DateTimeField(u'Loppe', help_text=u'Tapahtuman loppumisaika.', blank=True)
description = models.TextField(u'Kuvaus', help_text=u'Tapahtuman kuvaus.', blank=True)
title = models.CharField(u'Otsikko', help_text=u'Lyhyt otsikko.', max_length=32)
image_original = models.ImageField(u'Kuva', upload_to='calendar/images/', help_text=u"Kuva tapahtumalle.", blank=True)
- image_small = ImageSpec([resize.Fit(48, 48)], image_field='imagefile_original', format='PNG')
? ^ ^ ^
+ image_small = ImageSpecField([ResizeToFill(48, 48)], image_field='imagefile_original', format='PNG')
? +++++ ^ ^^ ^^
EVENT_TYPES = (
(0, u'Aikaraja'),
(1, u'Aikavaraus'),
)
type = models.IntegerField(u'Tyyppi', help_text=u'Tapahtuman tyyppi', choices=EVENT_TYPES, default=0)
try:
admin.site.register(CalendarEvent)
except:
pass |
2b1cc5b2426994953e8f8b937364d91f4e7aadf2 | MyHub/MyHub/urls.py | MyHub/MyHub/urls.py | from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| Change default URL to display home content. Temporary fix. | Change default URL to display home content. Temporary fix.
| Python | mit | sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com | from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
- url(r'^$', loader_page, name='loader'),
+ url(r'^$', home_page, name='loader'),
- url(r'^home/$', home_page, name='index'),
+ # url(r'^home/$', home_page, name='index'),
- url(r'^resume/$', resume_page, name='resume'),
+ # url(r'^resume/$', resume_page, name='resume'),
- url(r'^projects/$', projects_page, name='projects'),
+ # url(r'^projects/$', projects_page, name='projects'),
- url(r'^contact/$', contact_page, name='contact'),
+ # url(r'^contact/$', contact_page, name='contact'),
- url(r'^admin/', include(admin.site.urls)),
+ # url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| Change default URL to display home content. Temporary fix. | ## Code Before:
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
## Instruction:
Change default URL to display home content. Temporary fix.
## Code After:
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
- url(r'^$', loader_page, name='loader'),
? ^ ^^ -
+ url(r'^$', home_page, name='loader'),
? ^ ^
- url(r'^home/$', home_page, name='index'),
+ # url(r'^home/$', home_page, name='index'),
? ++
- url(r'^resume/$', resume_page, name='resume'),
+ # url(r'^resume/$', resume_page, name='resume'),
? ++
- url(r'^projects/$', projects_page, name='projects'),
+ # url(r'^projects/$', projects_page, name='projects'),
? ++
- url(r'^contact/$', contact_page, name='contact'),
+ # url(r'^contact/$', contact_page, name='contact'),
? ++
- url(r'^admin/', include(admin.site.urls)),
+ # url(r'^admin/', include(admin.site.urls)),
? ++
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) |
6fd8bf7a3113c82c88325dd04fe610ba10049855 | setup.py | setup.py |
from distutils.core import setup
long_description = '''
This module allows you to perform IP subnet calculations, there is support
for both IPv4 and IPv6 CIDR notation.
'''
setup(name='ipcalc',
version='0.4',
description='IP subnet calculator',
long_description=long_description,
author='Wijnand Modderman',
author_email='python@tehmaze.com',
url='http://dev.tehmaze.com/projects/ipcalc',
packages = [''],
package_dir = {'': 'src'},
)
|
from distutils.core import setup
setup(name='ipcalc',
version='0.4',
description='IP subnet calculator',
long_description=file('README.rst').read(),
author='Wijnand Modderman',
author_email='python@tehmaze.com',
url='http://dev.tehmaze.com/projects/ipcalc',
packages = [''],
package_dir = {'': 'src'},
)
| Read README.rst for the long description | Read README.rst for the long description
| Python | bsd-2-clause | panaceya/ipcalc,tehmaze/ipcalc |
from distutils.core import setup
-
- long_description = '''
- This module allows you to perform IP subnet calculations, there is support
- for both IPv4 and IPv6 CIDR notation.
- '''
setup(name='ipcalc',
version='0.4',
description='IP subnet calculator',
- long_description=long_description,
+ long_description=file('README.rst').read(),
author='Wijnand Modderman',
author_email='python@tehmaze.com',
url='http://dev.tehmaze.com/projects/ipcalc',
packages = [''],
package_dir = {'': 'src'},
)
| Read README.rst for the long description | ## Code Before:
from distutils.core import setup
long_description = '''
This module allows you to perform IP subnet calculations, there is support
for both IPv4 and IPv6 CIDR notation.
'''
setup(name='ipcalc',
version='0.4',
description='IP subnet calculator',
long_description=long_description,
author='Wijnand Modderman',
author_email='python@tehmaze.com',
url='http://dev.tehmaze.com/projects/ipcalc',
packages = [''],
package_dir = {'': 'src'},
)
## Instruction:
Read README.rst for the long description
## Code After:
from distutils.core import setup
setup(name='ipcalc',
version='0.4',
description='IP subnet calculator',
long_description=file('README.rst').read(),
author='Wijnand Modderman',
author_email='python@tehmaze.com',
url='http://dev.tehmaze.com/projects/ipcalc',
packages = [''],
package_dir = {'': 'src'},
)
|
from distutils.core import setup
-
- long_description = '''
- This module allows you to perform IP subnet calculations, there is support
- for both IPv4 and IPv6 CIDR notation.
- '''
setup(name='ipcalc',
version='0.4',
description='IP subnet calculator',
- long_description=long_description,
+ long_description=file('README.rst').read(),
author='Wijnand Modderman',
author_email='python@tehmaze.com',
url='http://dev.tehmaze.com/projects/ipcalc',
packages = [''],
package_dir = {'': 'src'},
) |
c08e6a22e589880d97b92048cfaec994c41a23d4 | pylama/lint/pylama_pydocstyle.py | pylama/lint/pylama_pydocstyle.py | """pydocstyle support."""
from pydocstyle import PEP257Checker
from pylama.lint import Linter as Abstract
class Linter(Abstract):
"""Check pydocstyle errors."""
@staticmethod
def run(path, code=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
return [{
'lnum': e.line,
# Remove colon after error code ("D403: ..." => "D403 ...").
'text': (e.message[0:4] + e.message[5:]
if e.message[4] == ':' else e.message),
'type': 'D',
'number': e.code
} for e in PEP257Checker().check_source(code, path)]
| """pydocstyle support."""
THIRD_ARG = True
try:
#: Import for pydocstyle 2.0.0 and newer
from pydocstyle import ConventionChecker as PyDocChecker
except ImportError:
#: Backward compatibility for pydocstyle prior to 2.0.0
from pydocstyle import PEP257Checker as PyDocChecker
THIRD_ARG = False
from pylama.lint import Linter as Abstract
class Linter(Abstract):
"""Check pydocstyle errors."""
@staticmethod
def run(path, code=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
check_source_args = (code, path, None) if THIRD_ARG else (code, path)
return [{
'lnum': e.line,
# Remove colon after error code ("D403: ..." => "D403 ...").
'text': (e.message[0:4] + e.message[5:]
if e.message[4] == ':' else e.message),
'type': 'D',
'number': e.code
} for e in PyDocChecker().check_source(*check_source_args)]
| Update for pydocstyle 2.0.0 compatibility | Update for pydocstyle 2.0.0 compatibility
Fix klen/pylama#96
Adding the newer ignore_decorators argument. Thanks to @not-raspberry for the tip!
| Python | mit | klen/pylama | """pydocstyle support."""
+ THIRD_ARG = True
+ try:
+ #: Import for pydocstyle 2.0.0 and newer
+ from pydocstyle import ConventionChecker as PyDocChecker
+ except ImportError:
+ #: Backward compatibility for pydocstyle prior to 2.0.0
- from pydocstyle import PEP257Checker
+ from pydocstyle import PEP257Checker as PyDocChecker
+ THIRD_ARG = False
from pylama.lint import Linter as Abstract
class Linter(Abstract):
"""Check pydocstyle errors."""
@staticmethod
def run(path, code=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
+ check_source_args = (code, path, None) if THIRD_ARG else (code, path)
return [{
'lnum': e.line,
# Remove colon after error code ("D403: ..." => "D403 ...").
'text': (e.message[0:4] + e.message[5:]
if e.message[4] == ':' else e.message),
'type': 'D',
'number': e.code
- } for e in PEP257Checker().check_source(code, path)]
+ } for e in PyDocChecker().check_source(*check_source_args)]
| Update for pydocstyle 2.0.0 compatibility | ## Code Before:
"""pydocstyle support."""
from pydocstyle import PEP257Checker
from pylama.lint import Linter as Abstract
class Linter(Abstract):
"""Check pydocstyle errors."""
@staticmethod
def run(path, code=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
return [{
'lnum': e.line,
# Remove colon after error code ("D403: ..." => "D403 ...").
'text': (e.message[0:4] + e.message[5:]
if e.message[4] == ':' else e.message),
'type': 'D',
'number': e.code
} for e in PEP257Checker().check_source(code, path)]
## Instruction:
Update for pydocstyle 2.0.0 compatibility
## Code After:
"""pydocstyle support."""
THIRD_ARG = True
try:
#: Import for pydocstyle 2.0.0 and newer
from pydocstyle import ConventionChecker as PyDocChecker
except ImportError:
#: Backward compatibility for pydocstyle prior to 2.0.0
from pydocstyle import PEP257Checker as PyDocChecker
THIRD_ARG = False
from pylama.lint import Linter as Abstract
class Linter(Abstract):
"""Check pydocstyle errors."""
@staticmethod
def run(path, code=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
check_source_args = (code, path, None) if THIRD_ARG else (code, path)
return [{
'lnum': e.line,
# Remove colon after error code ("D403: ..." => "D403 ...").
'text': (e.message[0:4] + e.message[5:]
if e.message[4] == ':' else e.message),
'type': 'D',
'number': e.code
} for e in PyDocChecker().check_source(*check_source_args)]
| """pydocstyle support."""
+ THIRD_ARG = True
+ try:
+ #: Import for pydocstyle 2.0.0 and newer
+ from pydocstyle import ConventionChecker as PyDocChecker
+ except ImportError:
+ #: Backward compatibility for pydocstyle prior to 2.0.0
- from pydocstyle import PEP257Checker
+ from pydocstyle import PEP257Checker as PyDocChecker
? ++++ ++++++++++++++++
+ THIRD_ARG = False
from pylama.lint import Linter as Abstract
class Linter(Abstract):
"""Check pydocstyle errors."""
@staticmethod
def run(path, code=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
+ check_source_args = (code, path, None) if THIRD_ARG else (code, path)
return [{
'lnum': e.line,
# Remove colon after error code ("D403: ..." => "D403 ...").
'text': (e.message[0:4] + e.message[5:]
if e.message[4] == ':' else e.message),
'type': 'D',
'number': e.code
- } for e in PEP257Checker().check_source(code, path)]
? ^^^^^ ^ ^^^ ^^
+ } for e in PyDocChecker().check_source(*check_source_args)]
? ^^^^ + ++++++ ^^^ ^ ^^^
|
7923baf30bcd41e17182599e46b4efd86f4eab49 | tests/conftest.py | tests/conftest.py | import matplotlib
matplotlib.use('pdf')
import pytest
def pytest_addoption(parser):
parser.addoption('--integration', action='store_true',
help='run integration tests')
| import matplotlib
matplotlib.use('pdf')
| Stop making --integration an argument. | Stop making --integration an argument.
| Python | apache-2.0 | probcomp/cgpm,probcomp/cgpm | import matplotlib
matplotlib.use('pdf')
- import pytest
-
- def pytest_addoption(parser):
- parser.addoption('--integration', action='store_true',
- help='run integration tests')
- | Stop making --integration an argument. | ## Code Before:
import matplotlib
matplotlib.use('pdf')
import pytest
def pytest_addoption(parser):
parser.addoption('--integration', action='store_true',
help='run integration tests')
## Instruction:
Stop making --integration an argument.
## Code After:
import matplotlib
matplotlib.use('pdf')
| import matplotlib
matplotlib.use('pdf')
-
- import pytest
-
- def pytest_addoption(parser):
- parser.addoption('--integration', action='store_true',
- help='run integration tests') |
435ac02a320582fb8ede698da579d5c4fdd2d600 | summary_footnotes.py | summary_footnotes.py |
from pelican import signals
from pelican.contents import Content, Article
from BeautifulSoup import BeautifulSoup
from six import text_type
def summary_footnotes(instance):
if "SUMMARY_FOOTNOTES_MODE" in instance.settings:
mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
else:
mode = 'link'
if type(instance) == Article:
summary = BeautifulSoup(instance.summary)
footnote_links = summary.findAll('a', {'rel':'footnote'})
if footnote_links:
for link in footnote_links:
if mode == 'remove':
link.extract()
elif mode == 'link':
link['href'] = "%s/%s%s" % (instance.settings["SITEURL"],
instance.url,
link['href'])
else:
raise Exception("Unknown summary footnote mode: %s" % mode)
instance._summary = text_type(summary)
def register():
signals.content_object_init.connect(summary_footnotes)
|
from pelican import signals
from pelican.contents import Content, Article
from BeautifulSoup import BeautifulSoup
from six import text_type
def initialized(pelican):
from pelican.settings import DEFAULT_CONFIG
DEFAULT_CONFIG.setdefault('SUMMARY_FOOTNOTES_MODE',
'link')
if pelican:
pelican.settings.setdefault('SUMMARY_FOOTNOTES_MODE',
'link')
def transform_summary(summary, article_url, site_url, mode):
summary = BeautifulSoup(summary)
footnote_links = summary.findAll('a', {'rel':'footnote'})
if footnote_links:
for link in footnote_links:
if mode == 'remove':
link.extract()
elif mode == 'link':
link['href'] = "%s/%s%s" % (site_url,
article_url,
link['href'])
else:
raise Exception("Unknown summary_footnote mode: %s" % mode)
return text_type(summary)
return None
def summary_footnotes(instance):
mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
if type(instance) == Article:
# Monkeypatch in the rewrite on the summary because when this is run
# the content might not be ready yet if it depends on other files
# being loaded.
instance._orig_get_summary = instance._get_summary
def _get_summary(self):
summary = self._orig_get_summary()
new_summary = transform_summary(summary,
self.url,
self.settings['SITEURL'],
mode)
if new_summary is not None:
return new_summary
else:
return summary
funcType = type(instance._get_summary)
instance._get_summary = funcType(_get_summary, instance, Article)
def register():
signals.initialized.connect(initialized)
signals.content_object_init.connect(summary_footnotes)
| Rewrite summary as late as possible. | Rewrite summary as late as possible.
Fixes issue where {filename} links would sometimes not work.
| Python | agpl-3.0 | dperelman/summary_footnotes |
from pelican import signals
from pelican.contents import Content, Article
from BeautifulSoup import BeautifulSoup
from six import text_type
+ def initialized(pelican):
+ from pelican.settings import DEFAULT_CONFIG
+ DEFAULT_CONFIG.setdefault('SUMMARY_FOOTNOTES_MODE',
+ 'link')
+ if pelican:
+ pelican.settings.setdefault('SUMMARY_FOOTNOTES_MODE',
+ 'link')
+
+ def transform_summary(summary, article_url, site_url, mode):
+ summary = BeautifulSoup(summary)
+ footnote_links = summary.findAll('a', {'rel':'footnote'})
+ if footnote_links:
+ for link in footnote_links:
+ if mode == 'remove':
+ link.extract()
+ elif mode == 'link':
+ link['href'] = "%s/%s%s" % (site_url,
+ article_url,
+ link['href'])
+ else:
+ raise Exception("Unknown summary_footnote mode: %s" % mode)
+ return text_type(summary)
+ return None
+
def summary_footnotes(instance):
- if "SUMMARY_FOOTNOTES_MODE" in instance.settings:
- mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
+ mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
- else:
- mode = 'link'
if type(instance) == Article:
- summary = BeautifulSoup(instance.summary)
- footnote_links = summary.findAll('a', {'rel':'footnote'})
- if footnote_links:
- for link in footnote_links:
- if mode == 'remove':
- link.extract()
- elif mode == 'link':
- link['href'] = "%s/%s%s" % (instance.settings["SITEURL"],
+ # Monkeypatch in the rewrite on the summary because when this is run
+ # the content might not be ready yet if it depends on other files
+ # being loaded.
+ instance._orig_get_summary = instance._get_summary
+
+ def _get_summary(self):
+ summary = self._orig_get_summary()
+ new_summary = transform_summary(summary,
- instance.url,
+ self.url,
+ self.settings['SITEURL'],
- link['href'])
+ mode)
+ if new_summary is not None:
+ return new_summary
- else:
+ else:
- raise Exception("Unknown summary footnote mode: %s" % mode)
- instance._summary = text_type(summary)
+ return summary
+
+ funcType = type(instance._get_summary)
+ instance._get_summary = funcType(_get_summary, instance, Article)
def register():
+ signals.initialized.connect(initialized)
signals.content_object_init.connect(summary_footnotes)
| Rewrite summary as late as possible. | ## Code Before:
from pelican import signals
from pelican.contents import Content, Article
from BeautifulSoup import BeautifulSoup
from six import text_type
def summary_footnotes(instance):
if "SUMMARY_FOOTNOTES_MODE" in instance.settings:
mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
else:
mode = 'link'
if type(instance) == Article:
summary = BeautifulSoup(instance.summary)
footnote_links = summary.findAll('a', {'rel':'footnote'})
if footnote_links:
for link in footnote_links:
if mode == 'remove':
link.extract()
elif mode == 'link':
link['href'] = "%s/%s%s" % (instance.settings["SITEURL"],
instance.url,
link['href'])
else:
raise Exception("Unknown summary footnote mode: %s" % mode)
instance._summary = text_type(summary)
def register():
signals.content_object_init.connect(summary_footnotes)
## Instruction:
Rewrite summary as late as possible.
## Code After:
from pelican import signals
from pelican.contents import Content, Article
from BeautifulSoup import BeautifulSoup
from six import text_type
def initialized(pelican):
from pelican.settings import DEFAULT_CONFIG
DEFAULT_CONFIG.setdefault('SUMMARY_FOOTNOTES_MODE',
'link')
if pelican:
pelican.settings.setdefault('SUMMARY_FOOTNOTES_MODE',
'link')
def transform_summary(summary, article_url, site_url, mode):
summary = BeautifulSoup(summary)
footnote_links = summary.findAll('a', {'rel':'footnote'})
if footnote_links:
for link in footnote_links:
if mode == 'remove':
link.extract()
elif mode == 'link':
link['href'] = "%s/%s%s" % (site_url,
article_url,
link['href'])
else:
raise Exception("Unknown summary_footnote mode: %s" % mode)
return text_type(summary)
return None
def summary_footnotes(instance):
mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
if type(instance) == Article:
# Monkeypatch in the rewrite on the summary because when this is run
# the content might not be ready yet if it depends on other files
# being loaded.
instance._orig_get_summary = instance._get_summary
def _get_summary(self):
summary = self._orig_get_summary()
new_summary = transform_summary(summary,
self.url,
self.settings['SITEURL'],
mode)
if new_summary is not None:
return new_summary
else:
return summary
funcType = type(instance._get_summary)
instance._get_summary = funcType(_get_summary, instance, Article)
def register():
signals.initialized.connect(initialized)
signals.content_object_init.connect(summary_footnotes)
|
from pelican import signals
from pelican.contents import Content, Article
from BeautifulSoup import BeautifulSoup
from six import text_type
+ def initialized(pelican):
+ from pelican.settings import DEFAULT_CONFIG
+ DEFAULT_CONFIG.setdefault('SUMMARY_FOOTNOTES_MODE',
+ 'link')
+ if pelican:
+ pelican.settings.setdefault('SUMMARY_FOOTNOTES_MODE',
+ 'link')
+
+ def transform_summary(summary, article_url, site_url, mode):
+ summary = BeautifulSoup(summary)
+ footnote_links = summary.findAll('a', {'rel':'footnote'})
+ if footnote_links:
+ for link in footnote_links:
+ if mode == 'remove':
+ link.extract()
+ elif mode == 'link':
+ link['href'] = "%s/%s%s" % (site_url,
+ article_url,
+ link['href'])
+ else:
+ raise Exception("Unknown summary_footnote mode: %s" % mode)
+ return text_type(summary)
+ return None
+
def summary_footnotes(instance):
- if "SUMMARY_FOOTNOTES_MODE" in instance.settings:
- mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
? ----
+ mode = instance.settings["SUMMARY_FOOTNOTES_MODE"]
- else:
- mode = 'link'
if type(instance) == Article:
- summary = BeautifulSoup(instance.summary)
- footnote_links = summary.findAll('a', {'rel':'footnote'})
- if footnote_links:
- for link in footnote_links:
- if mode == 'remove':
- link.extract()
- elif mode == 'link':
- link['href'] = "%s/%s%s" % (instance.settings["SITEURL"],
+ # Monkeypatch in the rewrite on the summary because when this is run
+ # the content might not be ready yet if it depends on other files
+ # being loaded.
+ instance._orig_get_summary = instance._get_summary
+
+ def _get_summary(self):
+ summary = self._orig_get_summary()
+ new_summary = transform_summary(summary,
- instance.url,
? ------ ----
+ self.url,
? ++
+ self.settings['SITEURL'],
- link['href'])
? ^^^^^^^^^^^^ ---
+ mode)
? ^^^
+ if new_summary is not None:
+ return new_summary
- else:
? ----
+ else:
- raise Exception("Unknown summary footnote mode: %s" % mode)
- instance._summary = text_type(summary)
+ return summary
+
+ funcType = type(instance._get_summary)
+ instance._get_summary = funcType(_get_summary, instance, Article)
def register():
+ signals.initialized.connect(initialized)
signals.content_object_init.connect(summary_footnotes) |
3ede075c812b116629c5f514596669b16c4784df | fulltext/backends/__json.py | fulltext/backends/__json.py | import json
from six import StringIO
from six import string_types
from six import integer_types
def _to_text(text, obj):
if isinstance(obj, dict):
for key in sorted(obj.keys()):
_to_text(text, key)
_to_text(text, obj[key])
elif isinstance(obj, list):
for item in obj:
_to_text(text, item)
elif isinstance(obj, string_types):
text.write(obj)
text.write(u' ')
elif isinstance(obj, integer_types):
text.write(str(obj))
text.write(u' ')
def _get_file(f, **kwargs):
text, obj = StringIO(), json.loads(f.read().decode('utf8'))
_to_text(text, obj)
return text.getvalue()
| import json
from six import StringIO
from six import string_types
from six import integer_types
def _to_text(text, obj):
if isinstance(obj, dict):
for key in sorted(obj.keys()):
_to_text(text, key)
_to_text(text, obj[key])
elif isinstance(obj, list):
for item in obj:
_to_text(text, item)
elif isinstance(obj, string_types + integer_types):
text.write(u'%s ' % obj)
else:
raise ValueError('Unrecognized type: %s' % obj.__class__)
def _get_file(f, **kwargs):
text, data = StringIO(), f.read()
obj = json.loads(data.decode('utf8'))
_to_text(text, obj)
return text.getvalue()
| Use format string. Readability. ValueError. | Use format string. Readability. ValueError.
| Python | mit | btimby/fulltext,btimby/fulltext | import json
from six import StringIO
from six import string_types
from six import integer_types
def _to_text(text, obj):
if isinstance(obj, dict):
for key in sorted(obj.keys()):
_to_text(text, key)
_to_text(text, obj[key])
elif isinstance(obj, list):
for item in obj:
_to_text(text, item)
- elif isinstance(obj, string_types):
+ elif isinstance(obj, string_types + integer_types):
- text.write(obj)
- text.write(u' ')
+ text.write(u'%s ' % obj)
+ else:
+ raise ValueError('Unrecognized type: %s' % obj.__class__)
- elif isinstance(obj, integer_types):
- text.write(str(obj))
- text.write(u' ')
def _get_file(f, **kwargs):
- text, obj = StringIO(), json.loads(f.read().decode('utf8'))
+ text, data = StringIO(), f.read()
+ obj = json.loads(data.decode('utf8'))
_to_text(text, obj)
return text.getvalue()
| Use format string. Readability. ValueError. | ## Code Before:
import json
from six import StringIO
from six import string_types
from six import integer_types
def _to_text(text, obj):
if isinstance(obj, dict):
for key in sorted(obj.keys()):
_to_text(text, key)
_to_text(text, obj[key])
elif isinstance(obj, list):
for item in obj:
_to_text(text, item)
elif isinstance(obj, string_types):
text.write(obj)
text.write(u' ')
elif isinstance(obj, integer_types):
text.write(str(obj))
text.write(u' ')
def _get_file(f, **kwargs):
text, obj = StringIO(), json.loads(f.read().decode('utf8'))
_to_text(text, obj)
return text.getvalue()
## Instruction:
Use format string. Readability. ValueError.
## Code After:
import json
from six import StringIO
from six import string_types
from six import integer_types
def _to_text(text, obj):
if isinstance(obj, dict):
for key in sorted(obj.keys()):
_to_text(text, key)
_to_text(text, obj[key])
elif isinstance(obj, list):
for item in obj:
_to_text(text, item)
elif isinstance(obj, string_types + integer_types):
text.write(u'%s ' % obj)
else:
raise ValueError('Unrecognized type: %s' % obj.__class__)
def _get_file(f, **kwargs):
text, data = StringIO(), f.read()
obj = json.loads(data.decode('utf8'))
_to_text(text, obj)
return text.getvalue()
| import json
from six import StringIO
from six import string_types
from six import integer_types
def _to_text(text, obj):
if isinstance(obj, dict):
for key in sorted(obj.keys()):
_to_text(text, key)
_to_text(text, obj[key])
elif isinstance(obj, list):
for item in obj:
_to_text(text, item)
- elif isinstance(obj, string_types):
+ elif isinstance(obj, string_types + integer_types):
? ++++++++++++++++
- text.write(obj)
- text.write(u' ')
+ text.write(u'%s ' % obj)
? ++ ++++++
+ else:
+ raise ValueError('Unrecognized type: %s' % obj.__class__)
- elif isinstance(obj, integer_types):
- text.write(str(obj))
- text.write(u' ')
def _get_file(f, **kwargs):
- text, obj = StringIO(), json.loads(f.read().decode('utf8'))
+ text, data = StringIO(), f.read()
+ obj = json.loads(data.decode('utf8'))
_to_text(text, obj)
return text.getvalue() |
8fa1cae882c0ff020c0b9c3c2fac9e4248d46ce4 | deploy/common/sqlite_wrapper.py | deploy/common/sqlite_wrapper.py | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096. | Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.
| Python | mit | mikispag/bitiodine | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
+ self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
- self.cursor.execute("PRAGMA synchronous=OFF")
+ self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096. | ## Code Before:
import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
## Instruction:
Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.
## Code After:
import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
+ self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
- self.cursor.execute("PRAGMA synchronous=OFF")
? ^^
+ self.cursor.execute("PRAGMA synchronous=NORMAL")
? + ^^^^
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close() |
1ca76f55adaa4ffe305f732df47f1a070449d549 | rpmvenv/extensions/loader.py | rpmvenv/extensions/loader.py | """Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
| """Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
| Fix indeterminate ordering issue for extensions | Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far.
| Python | mit | kevinconway/rpmvenv | """Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
- whitelist = tuple(set(('core',) + tuple(whitelist)))
+ whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
| Fix indeterminate ordering issue for extensions | ## Code Before:
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
## Instruction:
Fix indeterminate ordering issue for extensions
## Code After:
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
| """Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
- whitelist = tuple(set(('core',) + tuple(whitelist)))
? ---------- --
+ whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions |
caab908d8f8948c3035c94018d7a1e31332edbad | udata/tests/frontend/__init__.py | udata/tests/frontend/__init__.py | from __future__ import unicode_literals
import json
import re
from udata.tests import TestCase, WebTestMixin, SearchTestMixin
from udata import frontend, api
class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase):
def create_app(self):
app = super(FrontTestCase, self).create_app()
api.init_app(app)
frontend.init_app(app)
return app
def get_json_ld(self, response):
# In the pattern below, we extract the content of the JSON-LD script
# The first ? is used to name the extracted string
# The second ? is used to express the non-greediness of the extraction
pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
json_ld = re.search(pattern, response.data).group('json_ld')
return json.loads(json_ld)
| from __future__ import unicode_literals
import json
import re
from udata.tests import TestCase, WebTestMixin, SearchTestMixin
from udata import frontend, api
class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase):
def create_app(self):
app = super(FrontTestCase, self).create_app()
api.init_app(app)
frontend.init_app(app)
return app
def get_json_ld(self, response):
# In the pattern below, we extract the content of the JSON-LD script
# The first ? is used to name the extracted string
# The second ? is used to express the non-greediness of the extraction
pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
search = re.search(pattern, response.data)
self.assertIsNotNone(search, (pattern, response.data))
json_ld = search.group('json_ld')
return json.loads(json_ld)
| Add traces if there is no JSON-LD while it was expected | Add traces if there is no JSON-LD while it was expected
| Python | agpl-3.0 | opendatateam/udata,opendatateam/udata,etalab/udata,jphnoel/udata,jphnoel/udata,etalab/udata,davidbgk/udata,davidbgk/udata,jphnoel/udata,etalab/udata,davidbgk/udata,opendatateam/udata | from __future__ import unicode_literals
import json
import re
from udata.tests import TestCase, WebTestMixin, SearchTestMixin
from udata import frontend, api
class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase):
def create_app(self):
app = super(FrontTestCase, self).create_app()
api.init_app(app)
frontend.init_app(app)
return app
def get_json_ld(self, response):
# In the pattern below, we extract the content of the JSON-LD script
# The first ? is used to name the extracted string
# The second ? is used to express the non-greediness of the extraction
pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
- json_ld = re.search(pattern, response.data).group('json_ld')
+ search = re.search(pattern, response.data)
+ self.assertIsNotNone(search, (pattern, response.data))
+ json_ld = search.group('json_ld')
return json.loads(json_ld)
| Add traces if there is no JSON-LD while it was expected | ## Code Before:
from __future__ import unicode_literals
import json
import re
from udata.tests import TestCase, WebTestMixin, SearchTestMixin
from udata import frontend, api
class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase):
def create_app(self):
app = super(FrontTestCase, self).create_app()
api.init_app(app)
frontend.init_app(app)
return app
def get_json_ld(self, response):
# In the pattern below, we extract the content of the JSON-LD script
# The first ? is used to name the extracted string
# The second ? is used to express the non-greediness of the extraction
pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
json_ld = re.search(pattern, response.data).group('json_ld')
return json.loads(json_ld)
## Instruction:
Add traces if there is no JSON-LD while it was expected
## Code After:
from __future__ import unicode_literals
import json
import re
from udata.tests import TestCase, WebTestMixin, SearchTestMixin
from udata import frontend, api
class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase):
def create_app(self):
app = super(FrontTestCase, self).create_app()
api.init_app(app)
frontend.init_app(app)
return app
def get_json_ld(self, response):
# In the pattern below, we extract the content of the JSON-LD script
# The first ? is used to name the extracted string
# The second ? is used to express the non-greediness of the extraction
pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
search = re.search(pattern, response.data)
self.assertIsNotNone(search, (pattern, response.data))
json_ld = search.group('json_ld')
return json.loads(json_ld)
| from __future__ import unicode_literals
import json
import re
from udata.tests import TestCase, WebTestMixin, SearchTestMixin
from udata import frontend, api
class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase):
def create_app(self):
app = super(FrontTestCase, self).create_app()
api.init_app(app)
frontend.init_app(app)
return app
def get_json_ld(self, response):
# In the pattern below, we extract the content of the JSON-LD script
# The first ? is used to name the extracted string
# The second ? is used to express the non-greediness of the extraction
pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
- json_ld = re.search(pattern, response.data).group('json_ld')
? - ^^^^^ -----------------
+ search = re.search(pattern, response.data)
? ^^^^^
+ self.assertIsNotNone(search, (pattern, response.data))
+ json_ld = search.group('json_ld')
return json.loads(json_ld) |
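A note on the commit above: when a page carries no JSON-LD block, re.search returns None, so the old one-liner failed with AttributeError: 'NoneType' object has no attribute 'group', which says nothing about which pattern or page was involved. With assertIsNotNone(search, (pattern, response.data)) the test instead fails with the regex and the full response body in the failure message. A minimal standalone illustration (the HTML string here is made up):

    import re
    pattern = r'<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>'
    page_without_json_ld = '<html><body>no structured data here</body></html>'
    search = re.search(pattern, page_without_json_ld)
    # search is None, so search.group('json_ld') would raise AttributeError;
    # assertIsNotNone(search, ...) fails first and prints the (pattern, body) tuple instead.
    assert search is None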
edb905aec44e3fb2086ae87df960597e7b4c8356 | scoring/machinelearning/neuralnetwork.py | scoring/machinelearning/neuralnetwork.py | from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
| from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
| Add missing methods to NN class | Add missing methods to NN class
| Python | bsd-3-clause | mwojcikowski/opendrugdiscovery | from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
+ from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
- return np.array(self.model.call(input_descriptors)).flatten()
+ return np.array(self.model.call(input_descriptors))
+
+ def score(X, y):
+ return linregress(self.predict(X), y)[2]**2
+
| Add missing methods to NN class | ## Code Before:
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
## Instruction:
Add missing methods to NN class
## Code After:
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
| from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
+ from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
- return np.array(self.model.call(input_descriptors)).flatten()
? - ----------
+ return np.array(self.model.call(input_descriptors))
+
+ def score(X, y):
+ return linregress(self.predict(X), y)[2]**2
+ |
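One caveat about the change above: as committed, score omits the self parameter even though its body calls self.predict, so invoking it as an instance method raises a TypeError before linregress is ever reached. A corrected sketch of what the method presumably intends (the R-squared of predictions against targets) would be:

    def score(self, X, y):
        # linregress returns (slope, intercept, rvalue, pvalue, stderr); rvalue**2 is R-squared
        return linregress(self.predict(X), y)[2] ** 2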
6d645d5b58043d0668721727bbfdcc7ee021b504 | rwt/tests/test_scripts.py | rwt/tests/test_scripts.py | import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
| from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
| Add support for Python 2.7 | Add support for Python 2.7
| Python | mit | jaraco/rwt | + from __future__ import unicode_literals
+
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
| Add support for Python 2.7 | ## Code Before:
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
## Instruction:
Add support for Python 2.7
## Code After:
from __future__ import unicode_literals
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out
| + from __future__ import unicode_literals
+
import textwrap
import sys
import subprocess
def test_pkg_imported(tmpdir):
"""
Create a script that loads cython and ensure it runs.
"""
body = textwrap.dedent("""
import cython
print("Successfully imported cython")
""").lstrip()
script_file = tmpdir / 'script'
script_file.write_text(body, 'utf-8')
pip_args = ['cython']
cmd = [sys.executable, '-m', 'rwt'] + pip_args + ['--', str(script_file)]
out = subprocess.check_output(cmd, universal_newlines=True)
assert 'Successfully imported cython' in out |
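The only functional change in this record is the future import. On Python 2.7 it turns every un-prefixed literal in the module ('script', 'utf-8', the dedented body) into a unicode object, which is presumably what py.path's write_text and the universal_newlines subprocess output expect; on Python 3 the import is a no-op. Roughly:

    # Python 2.7 without the future import:
    #   type('utf-8') is str        (bytes)
    # Python 2.7 with `from __future__ import unicode_literals` at the top of the module:
    #   type('utf-8') is unicode    (text)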
edc88421bec2798d82faeece2df5229888d97db9 | contrib/performance/graph.py | contrib/performance/graph.py |
import sys, pickle
from matplotlib import pyplot
import numpy
from compare import select
def main():
fig = pyplot.figure()
ax = fig.add_subplot(111)
data = []
for fname in sys.argv[1:]:
stats, samples = select(
pickle.load(file(fname)), 'vfreebusy', 1, 'urlopen time')
data.append(samples)
if data:
assert len(samples) == len(data[0])
bars = []
color = iter('rgbcmy').next
w = 1.0 / len(data)
xs = numpy.arange(len(data[0]))
for i, s in enumerate(data):
bars.append(ax.bar(xs + i * w, s, width=w, color=color())[0])
ax.set_xlabel('sample #')
ax.set_ylabel('seconds')
ax.legend(bars, sys.argv[1:])
pyplot.show()
|
import sys, pickle
from matplotlib import pyplot
import numpy
from compare import select
def main():
fig = pyplot.figure()
ax = fig.add_subplot(111)
data = []
for fname in sys.argv[1:]:
fname, bench, param, stat = fname.split(',')
stats, samples = select(
pickle.load(file(fname)), bench, param, stat)
data.append(samples)
if data:
assert len(samples) == len(data[0])
bars = []
color = iter('rgbcmy').next
w = 1.0 / len(data)
xs = numpy.arange(len(data[0]))
for i, s in enumerate(data):
bars.append(ax.bar(xs + i * w, s, width=w, color=color())[0])
ax.set_xlabel('sample #')
ax.set_ylabel('seconds')
ax.legend(bars, sys.argv[1:])
pyplot.show()
| Make the bench/param/stat a commandline parameter | Make the bench/param/stat a commandline parameter
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6150 e27351fd-9f3e-4f54-a53b-843176b1656c
| Python | apache-2.0 | trevor/calendarserver,trevor/calendarserver,trevor/calendarserver |
import sys, pickle
from matplotlib import pyplot
import numpy
from compare import select
def main():
fig = pyplot.figure()
ax = fig.add_subplot(111)
data = []
for fname in sys.argv[1:]:
+ fname, bench, param, stat = fname.split(',')
stats, samples = select(
- pickle.load(file(fname)), 'vfreebusy', 1, 'urlopen time')
+ pickle.load(file(fname)), bench, param, stat)
data.append(samples)
if data:
assert len(samples) == len(data[0])
bars = []
color = iter('rgbcmy').next
w = 1.0 / len(data)
xs = numpy.arange(len(data[0]))
for i, s in enumerate(data):
bars.append(ax.bar(xs + i * w, s, width=w, color=color())[0])
ax.set_xlabel('sample #')
ax.set_ylabel('seconds')
ax.legend(bars, sys.argv[1:])
pyplot.show()
| Make the bench/param/stat a commandline parameter | ## Code Before:
import sys, pickle
from matplotlib import pyplot
import numpy
from compare import select
def main():
fig = pyplot.figure()
ax = fig.add_subplot(111)
data = []
for fname in sys.argv[1:]:
stats, samples = select(
pickle.load(file(fname)), 'vfreebusy', 1, 'urlopen time')
data.append(samples)
if data:
assert len(samples) == len(data[0])
bars = []
color = iter('rgbcmy').next
w = 1.0 / len(data)
xs = numpy.arange(len(data[0]))
for i, s in enumerate(data):
bars.append(ax.bar(xs + i * w, s, width=w, color=color())[0])
ax.set_xlabel('sample #')
ax.set_ylabel('seconds')
ax.legend(bars, sys.argv[1:])
pyplot.show()
## Instruction:
Make the bench/param/stat a commandline parameter
## Code After:
import sys, pickle
from matplotlib import pyplot
import numpy
from compare import select
def main():
fig = pyplot.figure()
ax = fig.add_subplot(111)
data = []
for fname in sys.argv[1:]:
fname, bench, param, stat = fname.split(',')
stats, samples = select(
pickle.load(file(fname)), bench, param, stat)
data.append(samples)
if data:
assert len(samples) == len(data[0])
bars = []
color = iter('rgbcmy').next
w = 1.0 / len(data)
xs = numpy.arange(len(data[0]))
for i, s in enumerate(data):
bars.append(ax.bar(xs + i * w, s, width=w, color=color())[0])
ax.set_xlabel('sample #')
ax.set_ylabel('seconds')
ax.legend(bars, sys.argv[1:])
pyplot.show()
|
import sys, pickle
from matplotlib import pyplot
import numpy
from compare import select
def main():
fig = pyplot.figure()
ax = fig.add_subplot(111)
data = []
for fname in sys.argv[1:]:
+ fname, bench, param, stat = fname.split(',')
stats, samples = select(
- pickle.load(file(fname)), 'vfreebusy', 1, 'urlopen time')
+ pickle.load(file(fname)), bench, param, stat)
data.append(samples)
if data:
assert len(samples) == len(data[0])
bars = []
color = iter('rgbcmy').next
w = 1.0 / len(data)
xs = numpy.arange(len(data[0]))
for i, s in enumerate(data):
bars.append(ax.bar(xs + i * w, s, width=w, color=color())[0])
ax.set_xlabel('sample #')
ax.set_ylabel('seconds')
ax.legend(bars, sys.argv[1:])
pyplot.show() |
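After this commit each positional argument packs four comma-separated values (pickle file, benchmark, parameter, statistic) instead of the previously hard-coded ('vfreebusy', 1, 'urlopen time'). A hypothetical invocation, with made-up file names and shell quoting because the statistic contains a space:

    python graph.py before.pickle,vfreebusy,1,"urlopen time" after.pickle,vfreebusy,1,"urlopen time"

and the per-argument unpacking behaves like:

    fname, bench, param, stat = 'before.pickle,vfreebusy,1,urlopen time'.split(',')
    # fname='before.pickle', bench='vfreebusy', param='1' (a string now, not the int 1), stat='urlopen time'

so select() or the pickled data presumably has to tolerate a string parameter, or the caller should cast it with int().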
ebb0916a7c63c1aaf383c696c203199ca79f70ac | nereid/backend.py | nereid/backend.py | '''
nereid.backend
Backed - Tryton specific features
:copyright: (c) 2010-2012 by Openlabs Technologies & Consulting (P) Ltd.
:license: GPLv3, see LICENSE for more details
'''
class TransactionManager(object):
def __init__(self, database_name, user, context=None):
self.database_name = database_name
self.user = user
self.context = context if context is not None else {}
def __enter__(self):
from trytond.transaction import Transaction
Transaction().start(self.database_name, self.user, self.context.copy())
return Transaction()
def __exit__(self, type, value, traceback):
from trytond.transaction import Transaction
Transaction().stop()
| '''
nereid.backend
Backed - Tryton specific features
:copyright: (c) 2010-2012 by Openlabs Technologies & Consulting (P) Ltd.
:license: GPLv3, see LICENSE for more details
'''
class TransactionManager(object):
def __init__(self, database_name, user, context=None):
self.database_name = database_name
self.user = user
self.context = context if context is not None else {}
def __enter__(self):
from trytond.transaction import Transaction
Transaction().start(
self.database_name, self.user,
readonly=False, context=self.context.copy()
)
return Transaction()
def __exit__(self, type, value, traceback):
from trytond.transaction import Transaction
Transaction().stop()
| Change the way transaction is initiated as readonly support was introduced in version 2.4 | Change the way transaction is initiated as readonly support was introduced in version 2.4
| Python | bsd-3-clause | riteshshrv/nereid,usudaysingh/nereid,usudaysingh/nereid,riteshshrv/nereid,fulfilio/nereid,fulfilio/nereid,prakashpp/nereid,prakashpp/nereid | '''
nereid.backend
Backed - Tryton specific features
:copyright: (c) 2010-2012 by Openlabs Technologies & Consulting (P) Ltd.
:license: GPLv3, see LICENSE for more details
'''
class TransactionManager(object):
def __init__(self, database_name, user, context=None):
self.database_name = database_name
self.user = user
self.context = context if context is not None else {}
def __enter__(self):
from trytond.transaction import Transaction
- Transaction().start(self.database_name, self.user, self.context.copy())
+ Transaction().start(
+ self.database_name, self.user,
+ readonly=False, context=self.context.copy()
+ )
return Transaction()
def __exit__(self, type, value, traceback):
from trytond.transaction import Transaction
Transaction().stop()
| Change the way transaction is initiated as readonly support was introduced in version 2.4 | ## Code Before:
'''
nereid.backend
Backed - Tryton specific features
:copyright: (c) 2010-2012 by Openlabs Technologies & Consulting (P) Ltd.
:license: GPLv3, see LICENSE for more details
'''
class TransactionManager(object):
def __init__(self, database_name, user, context=None):
self.database_name = database_name
self.user = user
self.context = context if context is not None else {}
def __enter__(self):
from trytond.transaction import Transaction
Transaction().start(self.database_name, self.user, self.context.copy())
return Transaction()
def __exit__(self, type, value, traceback):
from trytond.transaction import Transaction
Transaction().stop()
## Instruction:
Change the way transaction is initiated as readonly support was introduced in version 2.4
## Code After:
'''
nereid.backend
Backed - Tryton specific features
:copyright: (c) 2010-2012 by Openlabs Technologies & Consulting (P) Ltd.
:license: GPLv3, see LICENSE for more details
'''
class TransactionManager(object):
def __init__(self, database_name, user, context=None):
self.database_name = database_name
self.user = user
self.context = context if context is not None else {}
def __enter__(self):
from trytond.transaction import Transaction
Transaction().start(
self.database_name, self.user,
readonly=False, context=self.context.copy()
)
return Transaction()
def __exit__(self, type, value, traceback):
from trytond.transaction import Transaction
Transaction().stop()
| '''
nereid.backend
Backed - Tryton specific features
:copyright: (c) 2010-2012 by Openlabs Technologies & Consulting (P) Ltd.
:license: GPLv3, see LICENSE for more details
'''
class TransactionManager(object):
def __init__(self, database_name, user, context=None):
self.database_name = database_name
self.user = user
self.context = context if context is not None else {}
def __enter__(self):
from trytond.transaction import Transaction
- Transaction().start(self.database_name, self.user, self.context.copy())
+ Transaction().start(
+ self.database_name, self.user,
+ readonly=False, context=self.context.copy()
+ )
return Transaction()
def __exit__(self, type, value, traceback):
from trytond.transaction import Transaction
Transaction().stop() |
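For context, the class is a context manager, and the keyword form matters because, as the commit message notes, readonly support in Transaction.start was introduced in Trytond 2.4. A usage sketch with an illustrative database name and user id:

    with TransactionManager('tryton_dev', 1, context={'language': 'en_US'}):
        pass  # ORM work issued here runs inside the started transaction
    # leaving the block triggers __exit__, which calls Transaction().stop()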
e9f2a3c29185466f1c92121e9f4e4b727fb20fd0 | scripts/rename_tutorial_src_files.py | scripts/rename_tutorial_src_files.py | from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
dirs = sorted([Path(f) for f in directory.iterdir()])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
new_name = output_directory / f"tutorial{index}.py"
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
| from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
skip_names = {"bigger_applications"}
skip_dirs = {directory / name for name in skip_names}
dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
f: PurePath
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
if f.name != "__init__.py" and f.name.endswith(".py"):
new_name = output_directory / f"tutorial{index}.py"
else:
new_name = output_directory / f.name
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
| Update tutorial renamer to exclude files | :sparkles: Update tutorial renamer to exclude files
| Python | mit | tiangolo/fastapi,tiangolo/fastapi,tiangolo/fastapi | from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
+ skip_names = {"bigger_applications"}
+ skip_dirs = {directory / name for name in skip_names}
- dirs = sorted([Path(f) for f in directory.iterdir()])
+ dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
+ f: PurePath
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
+ if f.name != "__init__.py" and f.name.endswith(".py"):
- new_name = output_directory / f"tutorial{index}.py"
+ new_name = output_directory / f"tutorial{index}.py"
+ else:
+ new_name = output_directory / f.name
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
| Update tutorial renamer to exclude files | ## Code Before:
from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
dirs = sorted([Path(f) for f in directory.iterdir()])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
new_name = output_directory / f"tutorial{index}.py"
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
## Instruction:
Update tutorial renamer to exclude files
## Code After:
from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
skip_names = {"bigger_applications"}
skip_dirs = {directory / name for name in skip_names}
dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
f: PurePath
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
if f.name != "__init__.py" and f.name.endswith(".py"):
new_name = output_directory / f"tutorial{index}.py"
else:
new_name = output_directory / f.name
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
| from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
+ skip_names = {"bigger_applications"}
+ skip_dirs = {directory / name for name in skip_names}
- dirs = sorted([Path(f) for f in directory.iterdir()])
+ dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs])
? ++++++++++++++++++++++
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
+ f: PurePath
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
+ if f.name != "__init__.py" and f.name.endswith(".py"):
- new_name = output_directory / f"tutorial{index}.py"
+ new_name = output_directory / f"tutorial{index}.py"
? ++++
+ else:
+ new_name = output_directory / f.name
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%% |
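The renaming logic above is easiest to see on a tiny example. Every directory is first emptied into a sibling <name>__out__ directory with renumbered files, and the __out__ directories are then renamed back over the originals; only real tutorial modules get new names, but every file still consumes an index. With a hypothetical listing:

    files = ['__init__.py', 'extra_data.json', 'first_example.py']  # made-up names, already sorted
    for i, name in enumerate(files):
        index = str(i + 1).zfill(3)
        keep = name == '__init__.py' or not name.endswith('.py')
        print(name, '->', name if keep else f'tutorial{index}.py')
    # __init__.py       -> __init__.py
    # extra_data.json   -> extra_data.json
    # first_example.py  -> tutorial003.py

so in a package that contains __init__.py the first renamed module starts at tutorial002.py or later rather than tutorial001.py.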
7c847513155b1bdc29c04a10dbfd2efd669d1507 | async/spam_echo_clients.py | async/spam_echo_clients.py | import socket
import sys
import time
SERVER_HOST = 'localhost'
SERVER_PORT = 40404
sockets = []
msg = b'first message'
for i in range(20):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_HOST, SERVER_PORT))
sockets.append(sock)
time.sleep(0.1)
for sock in sockets:
sock.send(msg)
time.sleep(0.1)
for sock in sockets:
sock.close()
time.sleep(0.1)
| import socket
import sys
import time
SERVER_HOST = 'localhost'
SERVER_PORT = 40404
sockets = []
msg = b'first message'
for i in range(20):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_HOST, SERVER_PORT))
sockets.append(sock)
time.sleep(0.1)
for sock in sockets:
sock.send(msg)
time.sleep(0.1)
for sock in sockets:
data = sock.recv(1024)
if data != msg:
print('Error! No reply to', sock.getsockname())
time.sleep(0.1)
for sock in sockets:
sock.close()
time.sleep(0.1)
| Add reply checks to the spam client too | Add reply checks to the spam client too
| Python | unlicense | eliben/python3-samples | import socket
import sys
import time
SERVER_HOST = 'localhost'
SERVER_PORT = 40404
sockets = []
msg = b'first message'
for i in range(20):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_HOST, SERVER_PORT))
sockets.append(sock)
time.sleep(0.1)
for sock in sockets:
sock.send(msg)
time.sleep(0.1)
for sock in sockets:
+ data = sock.recv(1024)
+ if data != msg:
+ print('Error! No reply to', sock.getsockname())
+ time.sleep(0.1)
+
+ for sock in sockets:
sock.close()
time.sleep(0.1)
| Add reply checks to the spam client too | ## Code Before:
import socket
import sys
import time
SERVER_HOST = 'localhost'
SERVER_PORT = 40404
sockets = []
msg = b'first message'
for i in range(20):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_HOST, SERVER_PORT))
sockets.append(sock)
time.sleep(0.1)
for sock in sockets:
sock.send(msg)
time.sleep(0.1)
for sock in sockets:
sock.close()
time.sleep(0.1)
## Instruction:
Add reply checks to the spam client too
## Code After:
import socket
import sys
import time
SERVER_HOST = 'localhost'
SERVER_PORT = 40404
sockets = []
msg = b'first message'
for i in range(20):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_HOST, SERVER_PORT))
sockets.append(sock)
time.sleep(0.1)
for sock in sockets:
sock.send(msg)
time.sleep(0.1)
for sock in sockets:
data = sock.recv(1024)
if data != msg:
print('Error! No reply to', sock.getsockname())
time.sleep(0.1)
for sock in sockets:
sock.close()
time.sleep(0.1)
| import socket
import sys
import time
SERVER_HOST = 'localhost'
SERVER_PORT = 40404
sockets = []
msg = b'first message'
for i in range(20):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_HOST, SERVER_PORT))
sockets.append(sock)
time.sleep(0.1)
for sock in sockets:
sock.send(msg)
time.sleep(0.1)
for sock in sockets:
+ data = sock.recv(1024)
+ if data != msg:
+ print('Error! No reply to', sock.getsockname())
+ time.sleep(0.1)
+
+ for sock in sockets:
sock.close()
time.sleep(0.1)
|
45c4c1f627f224f36c24acebbec43a17a5c59fcb | nib/plugins/lesscss.py | nib/plugins/lesscss.py | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource
| from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
print("Processing: ", filepath)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource
| Print out file being processed, need to do to other modules, add -v flag | Print out file being processed, need to do to other modules, add -v flag
| Python | mit | jreese/nib | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
+ print("Processing: ", filepath)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource
| Print out file being processed, need to do to other modules, add -v flag | ## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource
## Instruction:
Print out file being processed, need to do to other modules, add -v flag
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
print("Processing: ", filepath)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource
| from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
+ print("Processing: ", filepath)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource |
3f7a9d900a1f2cd2f5522735815c999040a920e0 | pajbot/web/routes/api/users.py | pajbot/web/routes/api/users.py | from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
redis = RedisManager.get()
key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
rank = redis.zrevrank(key, user.username)
if rank is None:
rank = redis.zcard(key)
else:
rank = rank + 1
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
| from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
| Remove dead code in get user API endpoint | Remove dead code in get user API endpoint
| Python | mit | pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot | from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
- redis = RedisManager.get()
- key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
- rank = redis.zrevrank(key, user.username)
- if rank is None:
- rank = redis.zcard(key)
- else:
- rank = rank + 1
-
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
| Remove dead code in get user API endpoint | ## Code Before:
from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
redis = RedisManager.get()
key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
rank = redis.zrevrank(key, user.username)
if rank is None:
rank = redis.zcard(key)
else:
rank = rank + 1
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
## Instruction:
Remove dead code in get user API endpoint
## Code After:
from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>")
| from flask_restful import Resource
from pajbot.managers.redis import RedisManager
from pajbot.managers.user import UserManager
from pajbot.streamhelper import StreamHelper
class APIUser(Resource):
@staticmethod
def get(username):
user = UserManager.find_static(username)
if not user:
return {"error": "Not found"}, 404
- redis = RedisManager.get()
- key = "{streamer}:users:num_lines".format(streamer=StreamHelper.get_streamer())
- rank = redis.zrevrank(key, user.username)
- if rank is None:
- rank = redis.zcard(key)
- else:
- rank = rank + 1
-
return user.jsonify()
def init(api):
api.add_resource(APIUser, "/users/<username>") |
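With the dead rank computation gone, nothing in this module touches Redis or the stream helper any more, so the RedisManager and StreamHelper imports are now unused; a follow-up cleanup would presumably shrink the imports to:

    from flask_restful import Resource
    from pajbot.managers.user import UserManager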
7cf37b966049cfc47ef200ad8ae69763d98185c5 | collector/description/normal/L2.py | collector/description/normal/L2.py | from __future__ import absolute_import
import math, utilities.operator
from ...weight import WeightDict, normalize_exp
from .L1 import phase_description
# Normalised distances and L2-normalised (Euclidean norm) collector sets
collector_weights = \
WeightDict(normalize_exp, (utilities.operator.square, math.sqrt),
tags=('normalized'))
| from __future__ import absolute_import
import math, utilities.operator
from ...weight import WeightDict, normalize_exp
from .L1 import descriptions
# Normalised distances and L2-normalised (Euclidean norm) collector sets
weights = \
WeightDict(normalize_exp, (utilities.operator.square, math.sqrt),
tags=('normalized'))
| Update secondary collector description module | Update secondary collector description module
| Python | mit | davidfoerster/schema-matching | from __future__ import absolute_import
import math, utilities.operator
from ...weight import WeightDict, normalize_exp
- from .L1 import phase_description
+ from .L1 import descriptions
# Normalised distances and L2-normalised (Euclidean norm) collector sets
- collector_weights = \
+ weights = \
WeightDict(normalize_exp, (utilities.operator.square, math.sqrt),
tags=('normalized'))
| Update secondary collector description module | ## Code Before:
from __future__ import absolute_import
import math, utilities.operator
from ...weight import WeightDict, normalize_exp
from .L1 import phase_description
# Normalised distances and L2-normalised (Euclidean norm) collector sets
collector_weights = \
WeightDict(normalize_exp, (utilities.operator.square, math.sqrt),
tags=('normalized'))
## Instruction:
Update secondary collector description module
## Code After:
from __future__ import absolute_import
import math, utilities.operator
from ...weight import WeightDict, normalize_exp
from .L1 import descriptions
# Normalised distances and L2-normalised (Euclidean norm) collector sets
weights = \
WeightDict(normalize_exp, (utilities.operator.square, math.sqrt),
tags=('normalized'))
| from __future__ import absolute_import
import math, utilities.operator
from ...weight import WeightDict, normalize_exp
- from .L1 import phase_description
? ------
+ from .L1 import descriptions
? +
# Normalised distances and L2-normalised (Euclidean norm) collector sets
- collector_weights = \
+ weights = \
WeightDict(normalize_exp, (utilities.operator.square, math.sqrt),
tags=('normalized')) |
2b479927ee33181c57081df941bfdf347cd45423 | test/test_serenata_de_amor.py | test/test_serenata_de_amor.py | from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
| import glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
| Verify existence of *.html and *.py versions for every notebook | Verify existence of *.html and *.py versions for every notebook
| Python | mit | marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor | + import glob
+ import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
+ def setUp(self):
+ self.notebook_files = glob.glob('develop/*.ipynb')
- def test_it_works(self):
- self.assertEqual(4, 2 + 2)
- self.assertNotEqual(5, 2 + 2)
+ def test_html_versions_present(self):
+ """There is a *.html version of every Jupyter notebook."""
+ expected = [filename.replace('.ipynb', '.html')
+ for filename in self.notebook_files]
+ html_files = glob.glob('develop/*.html')
+ self.assertEqual(expected, html_files)
+
+ def test_py_versions_present(self):
+ """There is a *.py version of every Jupyter notebook."""
+ expected = [filename.replace('.ipynb', '.py')
+ for filename in self.notebook_files]
+ py_files = glob.glob('develop/*.py')
+ self.assertEqual(expected, py_files)
+ | Verify existence of *.html and *.py versions for every notebook | ## Code Before:
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
## Instruction:
Verify existence of *.html and *.py versions for every notebook
## Code After:
import glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
| + import glob
+ import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
- def test_it_works(self):
- self.assertEqual(4, 2 + 2)
- self.assertNotEqual(5, 2 + 2)
+ def setUp(self):
+ self.notebook_files = glob.glob('develop/*.ipynb')
+
+ def test_html_versions_present(self):
+ """There is a *.html version of every Jupyter notebook."""
+ expected = [filename.replace('.ipynb', '.html')
+ for filename in self.notebook_files]
+ html_files = glob.glob('develop/*.html')
+ self.assertEqual(expected, html_files)
+
+ def test_py_versions_present(self):
+ """There is a *.py version of every Jupyter notebook."""
+ expected = [filename.replace('.ipynb', '.py')
+ for filename in self.notebook_files]
+ py_files = glob.glob('develop/*.py')
+ self.assertEqual(expected, py_files) |
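Two small observations about the new tests: the subprocess import is never used, and both assertions compare glob.glob results positionally, which relies on the *.ipynb and *.html (or *.py) globs listing files in the same relative order, something the glob module does not promise. An order-insensitive variant of the first check could look like:

    expected = {name.replace('.ipynb', '.html') for name in self.notebook_files}
    self.assertEqual(expected, set(glob.glob('develop/*.html')))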
27723696885319aabea974f83189d3a43770b7d5 | spillway/fields.py | spillway/fields.py | """Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import WritableField
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
| """Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import FileField, WritableField
from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
class NDArrayField(FileField):
type_name = 'NDArrayField'
type_label = 'ndarray'
def to_native(self, value):
params = self.context.get('params', {})
geom = params.get('g')
with Raster(getattr(value, 'path', value)) as r:
arr = r.clip(geom).masked_array() if geom else r.array()
return arr.tolist()
| Add numpy array serializer field | Add numpy array serializer field
| Python | bsd-3-clause | bkg/django-spillway,barseghyanartur/django-spillway,kuzmich/django-spillway | """Serializer fields"""
from django.contrib.gis import forms
- from rest_framework.fields import WritableField
+ from rest_framework.fields import FileField, WritableField
+ from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
+
+ class NDArrayField(FileField):
+ type_name = 'NDArrayField'
+ type_label = 'ndarray'
+
+ def to_native(self, value):
+ params = self.context.get('params', {})
+ geom = params.get('g')
+ with Raster(getattr(value, 'path', value)) as r:
+ arr = r.clip(geom).masked_array() if geom else r.array()
+ return arr.tolist()
+ | Add numpy array serializer field | ## Code Before:
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import WritableField
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
## Instruction:
Add numpy array serializer field
## Code After:
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import FileField, WritableField
from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
class NDArrayField(FileField):
type_name = 'NDArrayField'
type_label = 'ndarray'
def to_native(self, value):
params = self.context.get('params', {})
geom = params.get('g')
with Raster(getattr(value, 'path', value)) as r:
arr = r.clip(geom).masked_array() if geom else r.array()
return arr.tolist()
| """Serializer fields"""
from django.contrib.gis import forms
- from rest_framework.fields import WritableField
+ from rest_framework.fields import FileField, WritableField
? +++++++++++
+ from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
+
+
+ class NDArrayField(FileField):
+ type_name = 'NDArrayField'
+ type_label = 'ndarray'
+
+ def to_native(self, value):
+ params = self.context.get('params', {})
+ geom = params.get('g')
+ with Raster(getattr(value, 'path', value)) as r:
+ arr = r.clip(geom).masked_array() if geom else r.array()
+ return arr.tolist() |
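To see how the new field is meant to be used: to_native takes the model field's value (a file object or plain path), opens it with greenwich's Raster, clips to the geometry passed in the serializer context under params['g'] when one is given, and returns nested lists that render naturally as JSON. A sketch of a serializer declaring it, where the model and field names are invented:

    from rest_framework import serializers

    class RasterStoreSerializer(serializers.ModelSerializer):
        image = NDArrayField()  # pixel values serialized as nested lists

        class Meta:
            model = RasterStore  # hypothetical model with an `image` raster file field
            fields = ('id', 'image')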
c5609fe1b48cdd5740215c1d0783eaafdfe2e76b | listen/__init__.py | listen/__init__.py |
from __future__ import print_function # This API requires Python 2.7 or more recent
import sys
if sys.version < "2.7.0":
print("listen requires Python 2.7 or more recent")
sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.0"
|
# Prepare for deprication of versions < 2.7
#from __future__ import print_function # This API requires Python 2.7 or more recent
#import sys
#if sys.version < "2.7.0":
# print("listen requires Python 2.7 or more recent")
# sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.1"
| Remove requirement on python > 2.7 | Remove requirement on python > 2.7
| Python | mit | antevens/listen,antevens/listen |
+ # Prepare for deprication of versions < 2.7
- from __future__ import print_function # This API requires Python 2.7 or more recent
+ #from __future__ import print_function # This API requires Python 2.7 or more recent
- import sys
+ #import sys
- if sys.version < "2.7.0":
+ #if sys.version < "2.7.0":
- print("listen requires Python 2.7 or more recent")
+ # print("listen requires Python 2.7 or more recent")
- sys.exit(1)
+ # sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
- __version__ = "0.1.0"
+ __version__ = "0.1.1"
| Remove requirement on python > 2.7 | ## Code Before:
from __future__ import print_function # This API requires Python 2.7 or more recent
import sys
if sys.version < "2.7.0":
print("listen requires Python 2.7 or more recent")
sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.0"
## Instruction:
Remove requirement on python > 2.7
## Code After:
# Prepare for deprication of versions < 2.7
#from __future__ import print_function # This API requires Python 2.7 or more recent
#import sys
#if sys.version < "2.7.0":
# print("listen requires Python 2.7 or more recent")
# sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.1"
|
+ # Prepare for deprication of versions < 2.7
- from __future__ import print_function # This API requires Python 2.7 or more recent
+ #from __future__ import print_function # This API requires Python 2.7 or more recent
? +
- import sys
+ #import sys
? +
- if sys.version < "2.7.0":
+ #if sys.version < "2.7.0":
? +
- print("listen requires Python 2.7 or more recent")
+ # print("listen requires Python 2.7 or more recent")
? +
- sys.exit(1)
+ # sys.exit(1)
? +
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
- __version__ = "0.1.0"
? ^
+ __version__ = "0.1.1"
? ^
|
e1e430f74902d653e9c46878a8f254f8feb478ca | example/article/models.py | example/article/models.py | from django.core.urlresolvers import reverse
from django.db import models
from fluent_comments.moderation import moderate_model, comments_are_open, comments_are_moderated
from fluent_comments.models import get_comments_for_model, CommentsRelation
class Article(models.Model):
title = models.CharField("Title", max_length=200)
slug = models.SlugField("Slug", unique=True)
content = models.TextField("Content")
publication_date = models.DateTimeField("Publication date")
enable_comments = models.BooleanField("Enable comments", default=True)
# Optional reverse relation, allow ORM querying:
comments_set = CommentsRelation()
class Meta:
verbose_name = "Article"
verbose_name_plural = "Articles"
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('article-details', kwargs={'slug': self.slug})
# Optional, give direct access to moderation info via the model:
comments = property(get_comments_for_model)
comments_are_open = property(comments_are_open)
comments_are_moderated = property(comments_are_moderated)
# Give the generic app support for moderation by django-fluent-comments:
moderate_model(
Article,
publication_date_field='publication_date',
enable_comments_field='enable_comments'
)
| from django.core.urlresolvers import reverse
from django.db import models
from django.utils.six import python_2_unicode_compatible
from fluent_comments.moderation import moderate_model, comments_are_open, comments_are_moderated
from fluent_comments.models import get_comments_for_model, CommentsRelation
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField("Title", max_length=200)
slug = models.SlugField("Slug", unique=True)
content = models.TextField("Content")
publication_date = models.DateTimeField("Publication date")
enable_comments = models.BooleanField("Enable comments", default=True)
# Optional reverse relation, allow ORM querying:
comments_set = CommentsRelation()
class Meta:
verbose_name = "Article"
verbose_name_plural = "Articles"
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('article-details', kwargs={'slug': self.slug})
# Optional, give direct access to moderation info via the model:
comments = property(get_comments_for_model)
comments_are_open = property(comments_are_open)
comments_are_moderated = property(comments_are_moderated)
# Give the generic app support for moderation by django-fluent-comments:
moderate_model(
Article,
publication_date_field='publication_date',
enable_comments_field='enable_comments'
)
| Fix example Article.__str__ in Python 3 | Fix example Article.__str__ in Python 3
| Python | apache-2.0 | django-fluent/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments,edoburu/django-fluent-comments,django-fluent/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments | from django.core.urlresolvers import reverse
from django.db import models
+ from django.utils.six import python_2_unicode_compatible
+
from fluent_comments.moderation import moderate_model, comments_are_open, comments_are_moderated
from fluent_comments.models import get_comments_for_model, CommentsRelation
+ @python_2_unicode_compatible
class Article(models.Model):
title = models.CharField("Title", max_length=200)
slug = models.SlugField("Slug", unique=True)
content = models.TextField("Content")
publication_date = models.DateTimeField("Publication date")
enable_comments = models.BooleanField("Enable comments", default=True)
# Optional reverse relation, allow ORM querying:
comments_set = CommentsRelation()
class Meta:
verbose_name = "Article"
verbose_name_plural = "Articles"
- def __unicode__(self):
+ def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('article-details', kwargs={'slug': self.slug})
# Optional, give direct access to moderation info via the model:
comments = property(get_comments_for_model)
comments_are_open = property(comments_are_open)
comments_are_moderated = property(comments_are_moderated)
# Give the generic app support for moderation by django-fluent-comments:
moderate_model(
Article,
publication_date_field='publication_date',
enable_comments_field='enable_comments'
)
| Fix example Article.__str__ in Python 3 | ## Code Before:
from django.core.urlresolvers import reverse
from django.db import models
from fluent_comments.moderation import moderate_model, comments_are_open, comments_are_moderated
from fluent_comments.models import get_comments_for_model, CommentsRelation
class Article(models.Model):
title = models.CharField("Title", max_length=200)
slug = models.SlugField("Slug", unique=True)
content = models.TextField("Content")
publication_date = models.DateTimeField("Publication date")
enable_comments = models.BooleanField("Enable comments", default=True)
# Optional reverse relation, allow ORM querying:
comments_set = CommentsRelation()
class Meta:
verbose_name = "Article"
verbose_name_plural = "Articles"
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('article-details', kwargs={'slug': self.slug})
# Optional, give direct access to moderation info via the model:
comments = property(get_comments_for_model)
comments_are_open = property(comments_are_open)
comments_are_moderated = property(comments_are_moderated)
# Give the generic app support for moderation by django-fluent-comments:
moderate_model(
Article,
publication_date_field='publication_date',
enable_comments_field='enable_comments'
)
## Instruction:
Fix example Article.__str__ in Python 3
## Code After:
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.six import python_2_unicode_compatible
from fluent_comments.moderation import moderate_model, comments_are_open, comments_are_moderated
from fluent_comments.models import get_comments_for_model, CommentsRelation
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField("Title", max_length=200)
slug = models.SlugField("Slug", unique=True)
content = models.TextField("Content")
publication_date = models.DateTimeField("Publication date")
enable_comments = models.BooleanField("Enable comments", default=True)
# Optional reverse relation, allow ORM querying:
comments_set = CommentsRelation()
class Meta:
verbose_name = "Article"
verbose_name_plural = "Articles"
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('article-details', kwargs={'slug': self.slug})
# Optional, give direct access to moderation info via the model:
comments = property(get_comments_for_model)
comments_are_open = property(comments_are_open)
comments_are_moderated = property(comments_are_moderated)
# Give the generic app support for moderation by django-fluent-comments:
moderate_model(
Article,
publication_date_field='publication_date',
enable_comments_field='enable_comments'
)
| from django.core.urlresolvers import reverse
from django.db import models
+ from django.utils.six import python_2_unicode_compatible
+
from fluent_comments.moderation import moderate_model, comments_are_open, comments_are_moderated
from fluent_comments.models import get_comments_for_model, CommentsRelation
+ @python_2_unicode_compatible
class Article(models.Model):
title = models.CharField("Title", max_length=200)
slug = models.SlugField("Slug", unique=True)
content = models.TextField("Content")
publication_date = models.DateTimeField("Publication date")
enable_comments = models.BooleanField("Enable comments", default=True)
# Optional reverse relation, allow ORM querying:
comments_set = CommentsRelation()
class Meta:
verbose_name = "Article"
verbose_name_plural = "Articles"
- def __unicode__(self):
? ^^^^^^^
+ def __str__(self):
? ^^^
return self.title
def get_absolute_url(self):
return reverse('article-details', kwargs={'slug': self.slug})
# Optional, give direct access to moderation info via the model:
comments = property(get_comments_for_model)
comments_are_open = property(comments_are_open)
comments_are_moderated = property(comments_are_moderated)
# Give the generic app support for moderation by django-fluent-comments:
moderate_model(
Article,
publication_date_field='publication_date',
enable_comments_field='enable_comments'
) |
dad7508aa6fc3f0b97975f0985c666fdfc191035 | api/__init__.py | api/__init__.py | from flask import Flask
DB_CONNECTION = "host='localhost' port=5432 user='postgres' password='secret' dbname='antismash'"
SQLALCHEMY_DATABASE_URI = 'postgres://postgres:secret@localhost:5432/antismash'
app = Flask(__name__)
app.config.from_object(__name__)
from .models import db
db.init_app(app)
from . import api
from . import error_handlers
| import os
from flask import Flask
SQLALCHEMY_DATABASE_URI = os.getenv('AS_DB_URI', 'postgres://postgres:secret@localhost:5432/antismash')
app = Flask(__name__)
app.config.from_object(__name__)
from .models import db
db.init_app(app)
from . import api
from . import error_handlers
| Allow overriding database URI from command line | api: Allow overriding database URI from command line
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>
| Python | agpl-3.0 | antismash/db-api,antismash/db-api | + import os
from flask import Flask
- DB_CONNECTION = "host='localhost' port=5432 user='postgres' password='secret' dbname='antismash'"
- SQLALCHEMY_DATABASE_URI = 'postgres://postgres:secret@localhost:5432/antismash'
+ SQLALCHEMY_DATABASE_URI = os.getenv('AS_DB_URI', 'postgres://postgres:secret@localhost:5432/antismash')
app = Flask(__name__)
app.config.from_object(__name__)
from .models import db
db.init_app(app)
from . import api
from . import error_handlers
| Allow overriding database URI from command line | ## Code Before:
from flask import Flask
DB_CONNECTION = "host='localhost' port=5432 user='postgres' password='secret' dbname='antismash'"
SQLALCHEMY_DATABASE_URI = 'postgres://postgres:secret@localhost:5432/antismash'
app = Flask(__name__)
app.config.from_object(__name__)
from .models import db
db.init_app(app)
from . import api
from . import error_handlers
## Instruction:
Allow overriding database URI from command line
## Code After:
import os
from flask import Flask
SQLALCHEMY_DATABASE_URI = os.getenv('AS_DB_URI', 'postgres://postgres:secret@localhost:5432/antismash')
app = Flask(__name__)
app.config.from_object(__name__)
from .models import db
db.init_app(app)
from . import api
from . import error_handlers
| + import os
from flask import Flask
- DB_CONNECTION = "host='localhost' port=5432 user='postgres' password='secret' dbname='antismash'"
- SQLALCHEMY_DATABASE_URI = 'postgres://postgres:secret@localhost:5432/antismash'
+ SQLALCHEMY_DATABASE_URI = os.getenv('AS_DB_URI', 'postgres://postgres:secret@localhost:5432/antismash')
? +++++++++++++++++++++++ +
app = Flask(__name__)
app.config.from_object(__name__)
from .models import db
db.init_app(app)
from . import api
from . import error_handlers |
5cf17b6a46a3d4bbf4cecb65e4b9ef43066869d9 | feincms/templatetags/applicationcontent_tags.py | feincms/templatetags/applicationcontent_tags.py | from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
getattr(page.content, region) if isinstance(content, ApplicationContent))
| from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
page.content.all_of_type(ApplicationContent) if content.region == region)
| Use all_of_type instead of isinstance check in feincms_render_region_appcontent | Use all_of_type instead of isinstance check in feincms_render_region_appcontent
| Python | bsd-3-clause | feincms/feincms,joshuajonah/feincms,feincms/feincms,matthiask/feincms2-content,matthiask/django-content-editor,michaelkuty/feincms,mjl/feincms,matthiask/feincms2-content,mjl/feincms,matthiask/django-content-editor,matthiask/django-content-editor,michaelkuty/feincms,nickburlett/feincms,matthiask/django-content-editor,joshuajonah/feincms,matthiask/feincms2-content,nickburlett/feincms,pjdelport/feincms,pjdelport/feincms,joshuajonah/feincms,pjdelport/feincms,michaelkuty/feincms,michaelkuty/feincms,nickburlett/feincms,feincms/feincms,nickburlett/feincms,joshuajonah/feincms,mjl/feincms | from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
- getattr(page.content, region) if isinstance(content, ApplicationContent))
+ page.content.all_of_type(ApplicationContent) if content.region == region)
| Use all_of_type instead of isinstance check in feincms_render_region_appcontent | ## Code Before:
from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
getattr(page.content, region) if isinstance(content, ApplicationContent))
## Instruction:
Use all_of_type instead of isinstance check in feincms_render_region_appcontent
## Code After:
from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
page.content.all_of_type(ApplicationContent) if content.region == region)
| from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
- getattr(page.content, region) if isinstance(content, ApplicationContent))
+ page.content.all_of_type(ApplicationContent) if content.region == region) |
d879d74aa078ca5a89a7e7cbd1bebe095449411d | snobol/constants.py | snobol/constants.py | coeff_BminusV = [-0.823, 5.027, -13.409, 20.133, -18.096, 9.084, -1.950]
coeff_VminusI = [-1.355, 6.262, -2.676, -22.973, 35.524, -15.340]
coeff_BminusI = [-1.096, 3.038, -2.246, -0.497, 0.7078, 0.576, -0.713,
0.239, -0.027]
# Ranges of validity for polynomial fits
min_BminusV = -0.2
max_BminusV = 1.65
min_VminusI = -0.1
max_VminusI = 1.0
min_BminusI = -0.4
max_BminusI = 3.0
# RMS errors in polynomial fits
rms_err_BminusV = 0.113
rms_err_VminusI = 0.109
rms_err_BminusI = 0.091
# Zeropoint for use in the calculation of bolometric magnitude
mbol_zeropoint = 11.64
|
# Coefficients for polynomial fit to bolometric correction - color relation
coeff_BminusV = [-0.823, 5.027, -13.409, 20.133, -18.096, 9.084, -1.950]
coeff_VminusI = [-1.355, 6.262, -2.676, -22.973, 35.524, -15.340]
coeff_BminusI = [-1.096, 3.038, -2.246, -0.497, 0.7078, 0.576, -0.713,
0.239, -0.027]
# Ranges of validity for polynomial fits
min_BminusV = -0.2
max_BminusV = 1.65
min_VminusI = -0.1
max_VminusI = 1.0
min_BminusI = -0.4
max_BminusI = 3.0
# RMS errors in polynomial fits
rms_err_BminusV = 0.113
rms_err_VminusI = 0.109
rms_err_BminusI = 0.091
# Zeropoint for use in the calculation of bolometric magnitude
mbol_zeropoint = 11.64
| Add documentation string for constants module | Add documentation string for constants module
| Python | mit | JALusk/SNoBoL,JALusk/SNoBoL,JALusk/SuperBoL | +
+ # Coefficients for polynomial fit to bolometric correction - color relation
coeff_BminusV = [-0.823, 5.027, -13.409, 20.133, -18.096, 9.084, -1.950]
coeff_VminusI = [-1.355, 6.262, -2.676, -22.973, 35.524, -15.340]
coeff_BminusI = [-1.096, 3.038, -2.246, -0.497, 0.7078, 0.576, -0.713,
0.239, -0.027]
# Ranges of validity for polynomial fits
min_BminusV = -0.2
max_BminusV = 1.65
min_VminusI = -0.1
max_VminusI = 1.0
min_BminusI = -0.4
max_BminusI = 3.0
# RMS errors in polynomial fits
rms_err_BminusV = 0.113
rms_err_VminusI = 0.109
rms_err_BminusI = 0.091
# Zeropoint for use in the calculation of bolometric magnitude
mbol_zeropoint = 11.64
| Add documentation string for constants module | ## Code Before:
coeff_BminusV = [-0.823, 5.027, -13.409, 20.133, -18.096, 9.084, -1.950]
coeff_VminusI = [-1.355, 6.262, -2.676, -22.973, 35.524, -15.340]
coeff_BminusI = [-1.096, 3.038, -2.246, -0.497, 0.7078, 0.576, -0.713,
0.239, -0.027]
# Ranges of validity for polynomial fits
min_BminusV = -0.2
max_BminusV = 1.65
min_VminusI = -0.1
max_VminusI = 1.0
min_BminusI = -0.4
max_BminusI = 3.0
# RMS errors in polynomial fits
rms_err_BminusV = 0.113
rms_err_VminusI = 0.109
rms_err_BminusI = 0.091
# Zeropoint for use in the calculation of bolometric magnitude
mbol_zeropoint = 11.64
## Instruction:
Add documentation string for constants module
## Code After:
# Coefficients for polynomial fit to bolometric correction - color relation
coeff_BminusV = [-0.823, 5.027, -13.409, 20.133, -18.096, 9.084, -1.950]
coeff_VminusI = [-1.355, 6.262, -2.676, -22.973, 35.524, -15.340]
coeff_BminusI = [-1.096, 3.038, -2.246, -0.497, 0.7078, 0.576, -0.713,
0.239, -0.027]
# Ranges of validity for polynomial fits
min_BminusV = -0.2
max_BminusV = 1.65
min_VminusI = -0.1
max_VminusI = 1.0
min_BminusI = -0.4
max_BminusI = 3.0
# RMS errors in polynomial fits
rms_err_BminusV = 0.113
rms_err_VminusI = 0.109
rms_err_BminusI = 0.091
# Zeropoint for use in the calculation of bolometric magnitude
mbol_zeropoint = 11.64
| +
+ # Coefficients for polynomial fit to bolometric correction - color relation
coeff_BminusV = [-0.823, 5.027, -13.409, 20.133, -18.096, 9.084, -1.950]
coeff_VminusI = [-1.355, 6.262, -2.676, -22.973, 35.524, -15.340]
coeff_BminusI = [-1.096, 3.038, -2.246, -0.497, 0.7078, 0.576, -0.713,
0.239, -0.027]
# Ranges of validity for polynomial fits
min_BminusV = -0.2
max_BminusV = 1.65
min_VminusI = -0.1
max_VminusI = 1.0
min_BminusI = -0.4
max_BminusI = 3.0
# RMS errors in polynomial fits
rms_err_BminusV = 0.113
rms_err_VminusI = 0.109
rms_err_BminusI = 0.091
# Zeropoint for use in the calculation of bolometric magnitude
mbol_zeropoint = 11.64 |
3f90d0ec25491eb64f164180139d4baf9ff238a9 | libravatar/context_processors.py | libravatar/context_processors.py |
import settings
"""
Default useful variables for the base page template.
"""
def basepage(request):
context = {}
context["site_name"] = settings.SITE_NAME
context["libravatar_version"] = settings.LIBRAVATAR_VERSION
context["avatar_url"] = settings.AVATAR_URL
context["secure_avatar_url"] = settings.SECURE_AVATAR_URL
context["media_url"] = settings.MEDIA_URL
context["site_url"] = settings.SITE_URL
context["disable_signup"] = settings.DISABLE_SIGNUP
context["analytics_propertyid"] = settings.ANALYTICS_PROPERTYID
context['support_email'] = settings.SUPPORT_EMAIL
return context
|
import settings
"""
Default useful variables for the base page template.
"""
def basepage(request):
context = {}
context['analytics_propertyid'] = settings.ANALYTICS_PROPERTYID
context['avatar_url'] = settings.AVATAR_URL
context['disable_signup'] = settings.DISABLE_SIGNUP
context['libravatar_version'] = settings.LIBRAVATAR_VERSION
context['media_url'] = settings.MEDIA_URL
context['secure_avatar_url'] = settings.SECURE_AVATAR_URL
context['site_name'] = settings.SITE_NAME
context['site_url'] = settings.SITE_URL
context['support_email'] = settings.SUPPORT_EMAIL
return context
| Sort the context list in alphabetical order | Sort the context list in alphabetical order
| Python | agpl-3.0 | libravatar/libravatar,libravatar/libravatar,libravatar/libravatar,libravatar/libravatar,libravatar/libravatar,libravatar/libravatar,libravatar/libravatar |
import settings
"""
Default useful variables for the base page template.
"""
def basepage(request):
context = {}
- context["site_name"] = settings.SITE_NAME
- context["libravatar_version"] = settings.LIBRAVATAR_VERSION
- context["avatar_url"] = settings.AVATAR_URL
- context["secure_avatar_url"] = settings.SECURE_AVATAR_URL
- context["media_url"] = settings.MEDIA_URL
- context["site_url"] = settings.SITE_URL
- context["disable_signup"] = settings.DISABLE_SIGNUP
- context["analytics_propertyid"] = settings.ANALYTICS_PROPERTYID
+ context['analytics_propertyid'] = settings.ANALYTICS_PROPERTYID
+ context['avatar_url'] = settings.AVATAR_URL
+ context['disable_signup'] = settings.DISABLE_SIGNUP
+ context['libravatar_version'] = settings.LIBRAVATAR_VERSION
+ context['media_url'] = settings.MEDIA_URL
+ context['secure_avatar_url'] = settings.SECURE_AVATAR_URL
+ context['site_name'] = settings.SITE_NAME
+ context['site_url'] = settings.SITE_URL
context['support_email'] = settings.SUPPORT_EMAIL
return context
| Sort the context list in alphabetical order | ## Code Before:
import settings
"""
Default useful variables for the base page template.
"""
def basepage(request):
context = {}
context["site_name"] = settings.SITE_NAME
context["libravatar_version"] = settings.LIBRAVATAR_VERSION
context["avatar_url"] = settings.AVATAR_URL
context["secure_avatar_url"] = settings.SECURE_AVATAR_URL
context["media_url"] = settings.MEDIA_URL
context["site_url"] = settings.SITE_URL
context["disable_signup"] = settings.DISABLE_SIGNUP
context["analytics_propertyid"] = settings.ANALYTICS_PROPERTYID
context['support_email'] = settings.SUPPORT_EMAIL
return context
## Instruction:
Sort the context list in alphabetical order
## Code After:
import settings
"""
Default useful variables for the base page template.
"""
def basepage(request):
context = {}
context['analytics_propertyid'] = settings.ANALYTICS_PROPERTYID
context['avatar_url'] = settings.AVATAR_URL
context['disable_signup'] = settings.DISABLE_SIGNUP
context['libravatar_version'] = settings.LIBRAVATAR_VERSION
context['media_url'] = settings.MEDIA_URL
context['secure_avatar_url'] = settings.SECURE_AVATAR_URL
context['site_name'] = settings.SITE_NAME
context['site_url'] = settings.SITE_URL
context['support_email'] = settings.SUPPORT_EMAIL
return context
|
import settings
"""
Default useful variables for the base page template.
"""
def basepage(request):
context = {}
- context["site_name"] = settings.SITE_NAME
- context["libravatar_version"] = settings.LIBRAVATAR_VERSION
- context["avatar_url"] = settings.AVATAR_URL
- context["secure_avatar_url"] = settings.SECURE_AVATAR_URL
- context["media_url"] = settings.MEDIA_URL
- context["site_url"] = settings.SITE_URL
- context["disable_signup"] = settings.DISABLE_SIGNUP
- context["analytics_propertyid"] = settings.ANALYTICS_PROPERTYID
? ^ ^
+ context['analytics_propertyid'] = settings.ANALYTICS_PROPERTYID
? ^ ^
+ context['avatar_url'] = settings.AVATAR_URL
+ context['disable_signup'] = settings.DISABLE_SIGNUP
+ context['libravatar_version'] = settings.LIBRAVATAR_VERSION
+ context['media_url'] = settings.MEDIA_URL
+ context['secure_avatar_url'] = settings.SECURE_AVATAR_URL
+ context['site_name'] = settings.SITE_NAME
+ context['site_url'] = settings.SITE_URL
context['support_email'] = settings.SUPPORT_EMAIL
return context
|
9cb21f98e1b6670d733940ea74d75a7a01a1b38e | misp_modules/modules/expansion/__init__.py | misp_modules/modules/expansion/__init__.py | from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471', 'backscatter_io', 'btc_scam_check']
| from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors']
| Add the new modules to the list of available modules. | fix: Add the new modules to the list of available modules.
| Python | agpl-3.0 | VirusTotal/misp-modules,MISP/misp-modules,VirusTotal/misp-modules,VirusTotal/misp-modules,MISP/misp-modules,amuehlem/misp-modules,MISP/misp-modules,amuehlem/misp-modules,amuehlem/misp-modules | from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
- 'intel471', 'backscatter_io', 'btc_scam_check']
+ 'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors']
| Add the new modules to the list of available modules. | ## Code Before:
from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471', 'backscatter_io', 'btc_scam_check']
## Instruction:
Add the new modules to the list of available modules.
## Code After:
from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors']
| from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
- 'intel471', 'backscatter_io', 'btc_scam_check']
+ 'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors']
? +++++++++++++++++++++++++++++++++++
|
5957999c52f939691cbe6b8dd5aa929980a24501 | tests/unit/test_start.py | tests/unit/test_start.py | import pytest
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
| from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
| Remove the unused pytest import | Remove the unused pytest import
| Python | mit | kiwicom/iwant-bot | - import pytest
-
-
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
| Remove the unused pytest import | ## Code Before:
import pytest
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
## Instruction:
Remove the unused pytest import
## Code After:
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
| - import pytest
-
-
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2 |
699a2d8d97d8c526f9fb269245d5fb593d47d3ca | rasa/nlu/tokenizers/__init__.py | rasa/nlu/tokenizers/__init__.py | class Tokenizer:
pass
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
| import functools
class Tokenizer:
pass
@functools.total_ordering
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
def __eq__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text == other.text
def __lt__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text < other.text
| Fix to make sanitize_examples() be able to sort tokens | Fix to make sanitize_examples() be able to sort tokens
| Python | apache-2.0 | RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu | + import functools
+
+
class Tokenizer:
pass
+ @functools.total_ordering
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
+ def __eq__(self, other):
+ if not isinstance(other, Token):
+ return NotImplemented
+ return self.text == other.text
+
+ def __lt__(self, other):
+ if not isinstance(other, Token):
+ return NotImplemented
+ return self.text < other.text
+ | Fix to make sanitize_examples() be able to sort tokens | ## Code Before:
class Tokenizer:
pass
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
## Instruction:
Fix to make sanitize_examples() be able to sort tokens
## Code After:
import functools
class Tokenizer:
pass
@functools.total_ordering
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
def __eq__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text == other.text
def __lt__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text < other.text
| + import functools
+
+
class Tokenizer:
pass
+ @functools.total_ordering
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
+
+ def __eq__(self, other):
+ if not isinstance(other, Token):
+ return NotImplemented
+ return self.text == other.text
+
+ def __lt__(self, other):
+ if not isinstance(other, Token):
+ return NotImplemented
+ return self.text < other.text |
4adb78fde502faed78350233896f3efd3f42816e | cytoplasm/interpreters.py | cytoplasm/interpreters.py | '''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
ending = ".".join(file.split(".")[:-1])
try:
interpreters.get(ending, shutil.copyfile)(file, destination, **kwargs)
except Exception as exception:
# if the interpreter chokes, raise an InterpreterError with some useful information.
raise InterpreterError("%s on file '%s': %s" %(ending, file, exception))
| '''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
@SaveReturned
def default_interpreter(source, **kwargs):
f = open(source)
source_string = f.read()
f.close()
return source_string
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
ending = file.split(".")[-1]
interpreters.get(ending, default_interpreter)(file, destination, **kwargs)
| Define a default interpreter rather than using shutil.copyfile. | Define a default interpreter rather than using shutil.copyfile.
It would choke before if it was handed a file-like object rather than a
file name. Even more bleh!
| Python | mit | startling/cytoplasm | '''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
+ @SaveReturned
+ def default_interpreter(source, **kwargs):
+ f = open(source)
+ source_string = f.read()
+ f.close()
+ return source_string
+
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
- ending = ".".join(file.split(".")[:-1])
+ ending = file.split(".")[-1]
- try:
- interpreters.get(ending, shutil.copyfile)(file, destination, **kwargs)
+ interpreters.get(ending, default_interpreter)(file, destination, **kwargs)
- except Exception as exception:
- # if the interpreter chokes, raise an InterpreterError with some useful information.
- raise InterpreterError("%s on file '%s': %s" %(ending, file, exception))
| Define a default interpreter rather than using shutil.copyfile. | ## Code Before:
'''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
ending = ".".join(file.split(".")[:-1])
try:
interpreters.get(ending, shutil.copyfile)(file, destination, **kwargs)
except Exception as exception:
# if the interpreter chokes, raise an InterpreterError with some useful information.
raise InterpreterError("%s on file '%s': %s" %(ending, file, exception))
## Instruction:
Define a default interpreter rather than using shutil.copyfile.
## Code After:
'''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
@SaveReturned
def default_interpreter(source, **kwargs):
f = open(source)
source_string = f.read()
f.close()
return source_string
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
ending = file.split(".")[-1]
interpreters.get(ending, default_interpreter)(file, destination, **kwargs)
| '''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
+ @SaveReturned
+ def default_interpreter(source, **kwargs):
+ f = open(source)
+ source_string = f.read()
+ f.close()
+ return source_string
+
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
- ending = ".".join(file.split(".")[:-1])
? --------- - -
+ ending = file.split(".")[-1]
- try:
- interpreters.get(ending, shutil.copyfile)(file, destination, **kwargs)
? ---- ^^ ^^^^ ^^^^
+ interpreters.get(ending, default_interpreter)(file, destination, **kwargs)
? ^^^^ + + ^^^^ ^ +++
- except Exception as exception:
- # if the interpreter chokes, raise an InterpreterError with some useful information.
- raise InterpreterError("%s on file '%s': %s" %(ending, file, exception))
|
8b30f787d3dabb9072ee0517cf0e5e92daa1038f | l10n_ch_dta_base_transaction_id/wizard/create_dta.py | l10n_ch_dta_base_transaction_id/wizard/create_dta.py |
from openerp.osv import orm
class DTAFileGenerator(orm.TransientModel):
_inherit = "create.dta.wizard"
def _set_bank_data(self, cr, uid, data, pline, elec_context,
seq, context=None):
super(DTAFileGenerator, self).\
_set_bank_data(cr, uid, data, pline,
elec_context, seq, context=context)
if pline.move_line_id.transaction_ref:
elec_context['reference'] = pline.move_line_id.transaction_ref
|
from openerp.osv import orm
class DTAFileGenerator(orm.TransientModel):
_inherit = "create.dta.wizard"
def _set_bank_data(self, cr, uid, pline, elec_context,
seq, context=None):
super(DTAFileGenerator, self).\
_set_bank_data(cr, uid, pline,
elec_context, seq, context=context)
if pline.move_line_id.transaction_ref:
elec_context['reference'] = pline.move_line_id.transaction_ref
| Fix TypeError: _set_bank_data() takes at least 7 arguments (7 given) | Fix TypeError: _set_bank_data() takes at least 7 arguments (7 given)
| Python | agpl-3.0 | open-net-sarl/l10n-switzerland,open-net-sarl/l10n-switzerland,BT-ojossen/l10n-switzerland,BT-ojossen/l10n-switzerland |
from openerp.osv import orm
class DTAFileGenerator(orm.TransientModel):
_inherit = "create.dta.wizard"
- def _set_bank_data(self, cr, uid, data, pline, elec_context,
+ def _set_bank_data(self, cr, uid, pline, elec_context,
seq, context=None):
super(DTAFileGenerator, self).\
- _set_bank_data(cr, uid, data, pline,
+ _set_bank_data(cr, uid, pline,
elec_context, seq, context=context)
if pline.move_line_id.transaction_ref:
elec_context['reference'] = pline.move_line_id.transaction_ref
| Fix TypeError: _set_bank_data() takes at least 7 arguments (7 given) | ## Code Before:
from openerp.osv import orm
class DTAFileGenerator(orm.TransientModel):
_inherit = "create.dta.wizard"
def _set_bank_data(self, cr, uid, data, pline, elec_context,
seq, context=None):
super(DTAFileGenerator, self).\
_set_bank_data(cr, uid, data, pline,
elec_context, seq, context=context)
if pline.move_line_id.transaction_ref:
elec_context['reference'] = pline.move_line_id.transaction_ref
## Instruction:
Fix TypeError: _set_bank_data() takes at least 7 arguments (7 given)
## Code After:
from openerp.osv import orm
class DTAFileGenerator(orm.TransientModel):
_inherit = "create.dta.wizard"
def _set_bank_data(self, cr, uid, pline, elec_context,
seq, context=None):
super(DTAFileGenerator, self).\
_set_bank_data(cr, uid, pline,
elec_context, seq, context=context)
if pline.move_line_id.transaction_ref:
elec_context['reference'] = pline.move_line_id.transaction_ref
|
from openerp.osv import orm
class DTAFileGenerator(orm.TransientModel):
_inherit = "create.dta.wizard"
- def _set_bank_data(self, cr, uid, data, pline, elec_context,
? ------
+ def _set_bank_data(self, cr, uid, pline, elec_context,
seq, context=None):
super(DTAFileGenerator, self).\
- _set_bank_data(cr, uid, data, pline,
? ------
+ _set_bank_data(cr, uid, pline,
elec_context, seq, context=context)
if pline.move_line_id.transaction_ref:
elec_context['reference'] = pline.move_line_id.transaction_ref |
76deb311dbb981501a1fa2686ec2cf4c92d7b83b | taggit/admin.py | taggit/admin.py | from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
| from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
| Remove extra inlines from django-taggit | Remove extra inlines from django-taggit | Python | bsd-3-clause | theatlantic/django-taggit,theatlantic/django-taggit2,theatlantic/django-taggit2,theatlantic/django-taggit,decibyte/django-taggit,decibyte/django-taggit | from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
+ extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
| Remove extra inlines from django-taggit | ## Code Before:
from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
## Instruction:
Remove extra inlines from django-taggit
## Code After:
from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
| from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
+ extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin) |
6091fccc90bb6b90c47a2e4fb7ee6821876eb1a1 | synthnotes/generators/lengthgenerator.py | synthnotes/generators/lengthgenerator.py | from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
length_file=resource_filename(__name__,
'resources/note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
| from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
length_file=resource_filename('synthnotes.resources',
'note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
| Change LengthGenerator to get appropriate file path | Change LengthGenerator to get appropriate file path
| Python | mit | ebegoli/SynthNotes | from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
- length_file=resource_filename(__name__,
+ length_file=resource_filename('synthnotes.resources',
- 'resources/note_lengths.csv')):
+ 'note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
| Change LengthGenerator to get appropriate file path | ## Code Before:
from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
length_file=resource_filename(__name__,
'resources/note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
## Instruction:
Change LengthGenerator to get appropriate file path
## Code After:
from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
length_file=resource_filename('synthnotes.resources',
'note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
| from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
- length_file=resource_filename(__name__,
? ^^ ^^ ^^
+ length_file=resource_filename('synthnotes.resources',
? ^^^ ^^^^^ ^^^^^^^^^^^^
- 'resources/note_lengths.csv')):
? ----------
+ 'note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p) |
0a19e2a0dd7bed604e5ddd402d2d9f47b2760d77 | bagpipe/bgp/engine/flowspec.py | bagpipe/bgp/engine/flowspec.py | from exabgp.bgp.message.update.nlri.flow import Flow as ExaBGPFlow
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.reactor.protocol import AFI
from exabgp.reactor.protocol import SAFI
import logging
log = logging.getLogger(__name__)
@NLRI.register(AFI.ipv4, SAFI.flow_vpn, force=True)
@NLRI.register(AFI.ipv6, SAFI.flow_vpn, force=True)
class Flow(ExaBGPFlow):
'''This wraps an ExaBGP Flow so that __eq__ and __hash__
meet the criteria for RouteTableManager (in particular,
not look at actions and nexthop)
'''
def __eq__(self, other):
return self.rd == other.rd and self.rules == other.rules
def __hash__(self):
#FIXME: are dicts hashable ?
log.debug("flow rules: %s", repr(self.rules))
return hash((self.rd, repr(self.rules)))
def FlowRouteFactory(afi, rd):
flowRoute = Flow(afi, safi=SAFI.flow_vpn)
flowRoute.rd = rd
return flowRoute
| from exabgp.bgp.message.update.nlri.flow import Flow as ExaBGPFlow
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.reactor.protocol import AFI
from exabgp.reactor.protocol import SAFI
import logging
log = logging.getLogger(__name__)
@NLRI.register(AFI.ipv4, SAFI.flow_vpn, force=True)
@NLRI.register(AFI.ipv6, SAFI.flow_vpn, force=True)
class Flow(ExaBGPFlow):
'''This wraps an ExaBGP Flow so that __eq__ and __hash__
meet the criteria for RouteTableManager (in particular,
not look at actions and nexthop)
'''
def __eq__(self, other):
return self.pack() == other.pack()
def __hash__(self):
return hash(self.pack())
def __repr__(self):
return str(self)
def FlowRouteFactory(afi, rd):
flowRoute = Flow(afi, safi=SAFI.flow_vpn)
flowRoute.rd = rd
return flowRoute
| Fix eq/hash for FlowSpec NLRI | Fix eq/hash for FlowSpec NLRI
Bogus eq/hash was preventing withdraws from behaving
properly.
| Python | apache-2.0 | openstack/networking-bagpipe,openstack/networking-bagpipe-l2,openstack/networking-bagpipe,stackforge/networking-bagpipe-l2,openstack/networking-bagpipe-l2,stackforge/networking-bagpipe-l2 | from exabgp.bgp.message.update.nlri.flow import Flow as ExaBGPFlow
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.reactor.protocol import AFI
from exabgp.reactor.protocol import SAFI
import logging
log = logging.getLogger(__name__)
@NLRI.register(AFI.ipv4, SAFI.flow_vpn, force=True)
@NLRI.register(AFI.ipv6, SAFI.flow_vpn, force=True)
class Flow(ExaBGPFlow):
'''This wraps an ExaBGP Flow so that __eq__ and __hash__
meet the criteria for RouteTableManager (in particular,
not look at actions and nexthop)
'''
def __eq__(self, other):
- return self.rd == other.rd and self.rules == other.rules
+ return self.pack() == other.pack()
def __hash__(self):
- #FIXME: are dicts hashable ?
- log.debug("flow rules: %s", repr(self.rules))
- return hash((self.rd, repr(self.rules)))
+ return hash(self.pack())
+
+ def __repr__(self):
+ return str(self)
def FlowRouteFactory(afi, rd):
flowRoute = Flow(afi, safi=SAFI.flow_vpn)
flowRoute.rd = rd
return flowRoute
| Fix eq/hash for FlowSpec NLRI | ## Code Before:
from exabgp.bgp.message.update.nlri.flow import Flow as ExaBGPFlow
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.reactor.protocol import AFI
from exabgp.reactor.protocol import SAFI
import logging
log = logging.getLogger(__name__)
@NLRI.register(AFI.ipv4, SAFI.flow_vpn, force=True)
@NLRI.register(AFI.ipv6, SAFI.flow_vpn, force=True)
class Flow(ExaBGPFlow):
'''This wraps an ExaBGP Flow so that __eq__ and __hash__
meet the criteria for RouteTableManager (in particular,
not look at actions and nexthop)
'''
def __eq__(self, other):
return self.rd == other.rd and self.rules == other.rules
def __hash__(self):
#FIXME: are dicts hashable ?
log.debug("flow rules: %s", repr(self.rules))
return hash((self.rd, repr(self.rules)))
def FlowRouteFactory(afi, rd):
flowRoute = Flow(afi, safi=SAFI.flow_vpn)
flowRoute.rd = rd
return flowRoute
## Instruction:
Fix eq/hash for FlowSpec NLRI
## Code After:
from exabgp.bgp.message.update.nlri.flow import Flow as ExaBGPFlow
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.reactor.protocol import AFI
from exabgp.reactor.protocol import SAFI
import logging
log = logging.getLogger(__name__)
@NLRI.register(AFI.ipv4, SAFI.flow_vpn, force=True)
@NLRI.register(AFI.ipv6, SAFI.flow_vpn, force=True)
class Flow(ExaBGPFlow):
'''This wraps an ExaBGP Flow so that __eq__ and __hash__
meet the criteria for RouteTableManager (in particular,
not look at actions and nexthop)
'''
def __eq__(self, other):
return self.pack() == other.pack()
def __hash__(self):
return hash(self.pack())
def __repr__(self):
return str(self)
def FlowRouteFactory(afi, rd):
flowRoute = Flow(afi, safi=SAFI.flow_vpn)
flowRoute.rd = rd
return flowRoute
| from exabgp.bgp.message.update.nlri.flow import Flow as ExaBGPFlow
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.reactor.protocol import AFI
from exabgp.reactor.protocol import SAFI
import logging
log = logging.getLogger(__name__)
@NLRI.register(AFI.ipv4, SAFI.flow_vpn, force=True)
@NLRI.register(AFI.ipv6, SAFI.flow_vpn, force=True)
class Flow(ExaBGPFlow):
'''This wraps an ExaBGP Flow so that __eq__ and __hash__
meet the criteria for RouteTableManager (in particular,
not look at actions and nexthop)
'''
def __eq__(self, other):
- return self.rd == other.rd and self.rules == other.rules
+ return self.pack() == other.pack()
def __hash__(self):
- #FIXME: are dicts hashable ?
- log.debug("flow rules: %s", repr(self.rules))
- return hash((self.rd, repr(self.rules)))
+ return hash(self.pack())
+
+ def __repr__(self):
+ return str(self)
def FlowRouteFactory(afi, rd):
flowRoute = Flow(afi, safi=SAFI.flow_vpn)
flowRoute.rd = rd
return flowRoute |
a5cd7e2bea66003c1223891853077e47df24b7cf | vx_intro.py | vx_intro.py | import vx
import math
from sys import argv
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
| import vx
import math
import os
import sys
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = sys.argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
sys.path.append(os.path.expanduser('~/.python'))
import rc
| Add ~/.python to PYTHONPATH and import rc | Add ~/.python to PYTHONPATH and import rc
| Python | mit | philipdexter/vx,philipdexter/vx | import vx
import math
- from sys import argv
+ import os
+ import sys
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
- vx.files = argv[1:]
+ vx.files = sys.argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
+ sys.path.append(os.path.expanduser('~/.python'))
+ import rc
+ | Add ~/.python to PYTHONPATH and import rc | ## Code Before:
import vx
import math
from sys import argv
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
## Instruction:
Add ~/.python to PYTHONPATH and import rc
## Code After:
import vx
import math
import os
import sys
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = sys.argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
sys.path.append(os.path.expanduser('~/.python'))
import rc
| import vx
import math
- from sys import argv
+ import os
+ import sys
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
- vx.files = argv[1:]
+ vx.files = sys.argv[1:]
? ++++
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
+
+ sys.path.append(os.path.expanduser('~/.python'))
+ import rc |
ef102617e5d73b32c43e4e9422a19917a1d3d717 | molo/polls/wagtail_hooks.py | molo/polls/wagtail_hooks.py | from django.conf.urls import url
from molo.polls.admin import QuestionsModelAdmin
from molo.polls.admin_views import QuestionResultsAdminView
from wagtail.wagtailcore import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
from django.contrib.auth.models import User
@hooks.register('register_admin_urls')
def register_question_results_admin_view_url():
return [
url(r'polls/question/(?P<parent>\d+)/results/$',
QuestionResultsAdminView.as_view(),
name='question-results-admin'),
]
modeladmin_register(QuestionsModelAdmin)
@hooks.register('construct_main_menu')
def show_polls_entries_for_users_have_access(request, menu_items):
if not request.user.is_superuser and not User.objects.filter(
pk=request.user.pk, groups__name='Moderators').exists():
menu_items[:] = [
item for item in menu_items if item.name != 'polls']
| from django.conf.urls import url
from molo.polls.admin import QuestionsModelAdmin
from molo.polls.admin_views import QuestionResultsAdminView
from wagtail.wagtailcore import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
from django.contrib.auth.models import User
@hooks.register('register_admin_urls')
def register_question_results_admin_view_url():
return [
url(r'polls/question/(?P<parent>\d+)/results/$',
QuestionResultsAdminView.as_view(),
name='question-results-admin'),
]
modeladmin_register(QuestionsModelAdmin)
@hooks.register('construct_main_menu')
def show_polls_entries_for_users_have_access(request, menu_items):
if not request.user.is_superuser and not User.objects.filter(
pk=request.user.pk, groups__name='Moderators').exists()\
and not User.objects.filter(
pk=request.user.pk, groups__name='M&E Expert').exists():
menu_items[:] = [
item for item in menu_items if item.name != 'polls']
| Add M&E Expert to polls entries permissions | Add M&E Expert to polls entries permissions
| Python | bsd-2-clause | praekelt/molo.polls,praekelt/molo.polls | from django.conf.urls import url
from molo.polls.admin import QuestionsModelAdmin
from molo.polls.admin_views import QuestionResultsAdminView
from wagtail.wagtailcore import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
from django.contrib.auth.models import User
@hooks.register('register_admin_urls')
def register_question_results_admin_view_url():
return [
url(r'polls/question/(?P<parent>\d+)/results/$',
QuestionResultsAdminView.as_view(),
name='question-results-admin'),
]
modeladmin_register(QuestionsModelAdmin)
@hooks.register('construct_main_menu')
def show_polls_entries_for_users_have_access(request, menu_items):
if not request.user.is_superuser and not User.objects.filter(
- pk=request.user.pk, groups__name='Moderators').exists():
+ pk=request.user.pk, groups__name='Moderators').exists()\
+ and not User.objects.filter(
+ pk=request.user.pk, groups__name='M&E Expert').exists():
menu_items[:] = [
item for item in menu_items if item.name != 'polls']
| Add M&E Expert to polls entries permissions | ## Code Before:
from django.conf.urls import url
from molo.polls.admin import QuestionsModelAdmin
from molo.polls.admin_views import QuestionResultsAdminView
from wagtail.wagtailcore import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
from django.contrib.auth.models import User
@hooks.register('register_admin_urls')
def register_question_results_admin_view_url():
return [
url(r'polls/question/(?P<parent>\d+)/results/$',
QuestionResultsAdminView.as_view(),
name='question-results-admin'),
]
modeladmin_register(QuestionsModelAdmin)
@hooks.register('construct_main_menu')
def show_polls_entries_for_users_have_access(request, menu_items):
if not request.user.is_superuser and not User.objects.filter(
pk=request.user.pk, groups__name='Moderators').exists():
menu_items[:] = [
item for item in menu_items if item.name != 'polls']
## Instruction:
Add M&E Expert to polls entries permissions
## Code After:
from django.conf.urls import url
from molo.polls.admin import QuestionsModelAdmin
from molo.polls.admin_views import QuestionResultsAdminView
from wagtail.wagtailcore import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
from django.contrib.auth.models import User
@hooks.register('register_admin_urls')
def register_question_results_admin_view_url():
return [
url(r'polls/question/(?P<parent>\d+)/results/$',
QuestionResultsAdminView.as_view(),
name='question-results-admin'),
]
modeladmin_register(QuestionsModelAdmin)
@hooks.register('construct_main_menu')
def show_polls_entries_for_users_have_access(request, menu_items):
if not request.user.is_superuser and not User.objects.filter(
pk=request.user.pk, groups__name='Moderators').exists()\
and not User.objects.filter(
pk=request.user.pk, groups__name='M&E Expert').exists():
menu_items[:] = [
item for item in menu_items if item.name != 'polls']
| from django.conf.urls import url
from molo.polls.admin import QuestionsModelAdmin
from molo.polls.admin_views import QuestionResultsAdminView
from wagtail.wagtailcore import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
from django.contrib.auth.models import User
@hooks.register('register_admin_urls')
def register_question_results_admin_view_url():
return [
url(r'polls/question/(?P<parent>\d+)/results/$',
QuestionResultsAdminView.as_view(),
name='question-results-admin'),
]
modeladmin_register(QuestionsModelAdmin)
@hooks.register('construct_main_menu')
def show_polls_entries_for_users_have_access(request, menu_items):
if not request.user.is_superuser and not User.objects.filter(
- pk=request.user.pk, groups__name='Moderators').exists():
? ^
+ pk=request.user.pk, groups__name='Moderators').exists()\
? ^
+ and not User.objects.filter(
+ pk=request.user.pk, groups__name='M&E Expert').exists():
menu_items[:] = [
item for item in menu_items if item.name != 'polls'] |
59b59e75f87942dfd54f8542b04e4185a871cf4b | utils/messaging.py | utils/messaging.py |
def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'):
'Chop a string into even chunks of max_length around the given separator'
max_size = max_length - len(pref) - len(aff)
str_length = len(string)
if str_length <= max_size:
return [pref + string + aff]
else:
split = string.rfind(sep, 0, max_size) + 1
if split:
return ([pref + string[:split] + aff]
+ paginate(string[split:], pref, aff, max_length, sep))
else:
return ([pref + string[:max_size] + aff]
+ paginate(string[max_size:], pref, aff, max_length, sep))
async def notify_owner(bot, messages):
'Send message to the private channel of the owner'
channel = await bot.get_user_info(bot.config.get('owner_id'))
for message in messages:
await bot.send_message(channel, message)
|
def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'):
'Chop a string into even chunks of max_length around the given separator'
max_size = max_length - len(pref) - len(aff)
str_length = len(string)
if str_length <= max_size:
return [pref + string + aff]
else:
split = string.rfind(sep, 0, max_size) + 1
if split:
return ([pref + string[:split] + aff]
+ paginate(string[split:], pref, aff, max_length, sep))
else:
return ([pref + string[:max_size] + aff]
+ paginate(string[max_size:], pref, aff, max_length, sep))
async def notify_owner(bot, messages):
'Send message to the private channel of the owner'
channel = await bot.get_user_info(bot.config.get('owner_id'))
for message in messages:
await bot.send_message(channel, message)
async def message_input(ctx, prompt, timeout=60):
message = await ctx.bot.say(prompt)
password = await ctx.bot.wait_for_message(
timeout=timeout,
author=ctx.message.author,
channel=ctx.message.channel)
if not password:
await ctx.bot.edit_message(
message,
new_content='Timed out, cancelling.')
return password
| Add util function for accepting input by PM | Add util function for accepting input by PM
| Python | mit | randomic/antinub-gregbot |
def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'):
'Chop a string into even chunks of max_length around the given separator'
max_size = max_length - len(pref) - len(aff)
str_length = len(string)
if str_length <= max_size:
return [pref + string + aff]
else:
split = string.rfind(sep, 0, max_size) + 1
if split:
return ([pref + string[:split] + aff]
+ paginate(string[split:], pref, aff, max_length, sep))
else:
return ([pref + string[:max_size] + aff]
+ paginate(string[max_size:], pref, aff, max_length, sep))
async def notify_owner(bot, messages):
'Send message to the private channel of the owner'
channel = await bot.get_user_info(bot.config.get('owner_id'))
for message in messages:
await bot.send_message(channel, message)
+
+ async def message_input(ctx, prompt, timeout=60):
+ message = await ctx.bot.say(prompt)
+ password = await ctx.bot.wait_for_message(
+ timeout=timeout,
+ author=ctx.message.author,
+ channel=ctx.message.channel)
+ if not password:
+ await ctx.bot.edit_message(
+ message,
+ new_content='Timed out, cancelling.')
+ return password
+ | Add util function for accepting input by PM | ## Code Before:
def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'):
'Chop a string into even chunks of max_length around the given separator'
max_size = max_length - len(pref) - len(aff)
str_length = len(string)
if str_length <= max_size:
return [pref + string + aff]
else:
split = string.rfind(sep, 0, max_size) + 1
if split:
return ([pref + string[:split] + aff]
+ paginate(string[split:], pref, aff, max_length, sep))
else:
return ([pref + string[:max_size] + aff]
+ paginate(string[max_size:], pref, aff, max_length, sep))
async def notify_owner(bot, messages):
'Send message to the private channel of the owner'
channel = await bot.get_user_info(bot.config.get('owner_id'))
for message in messages:
await bot.send_message(channel, message)
## Instruction:
Add util function for accepting input by PM
## Code After:
def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'):
'Chop a string into even chunks of max_length around the given separator'
max_size = max_length - len(pref) - len(aff)
str_length = len(string)
if str_length <= max_size:
return [pref + string + aff]
else:
split = string.rfind(sep, 0, max_size) + 1
if split:
return ([pref + string[:split] + aff]
+ paginate(string[split:], pref, aff, max_length, sep))
else:
return ([pref + string[:max_size] + aff]
+ paginate(string[max_size:], pref, aff, max_length, sep))
async def notify_owner(bot, messages):
'Send message to the private channel of the owner'
channel = await bot.get_user_info(bot.config.get('owner_id'))
for message in messages:
await bot.send_message(channel, message)
async def message_input(ctx, prompt, timeout=60):
message = await ctx.bot.say(prompt)
password = await ctx.bot.wait_for_message(
timeout=timeout,
author=ctx.message.author,
channel=ctx.message.channel)
if not password:
await ctx.bot.edit_message(
message,
new_content='Timed out, cancelling.')
return password
|
def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'):
'Chop a string into even chunks of max_length around the given separator'
max_size = max_length - len(pref) - len(aff)
str_length = len(string)
if str_length <= max_size:
return [pref + string + aff]
else:
split = string.rfind(sep, 0, max_size) + 1
if split:
return ([pref + string[:split] + aff]
+ paginate(string[split:], pref, aff, max_length, sep))
else:
return ([pref + string[:max_size] + aff]
+ paginate(string[max_size:], pref, aff, max_length, sep))
async def notify_owner(bot, messages):
'Send message to the private channel of the owner'
channel = await bot.get_user_info(bot.config.get('owner_id'))
for message in messages:
await bot.send_message(channel, message)
+
+
+ async def message_input(ctx, prompt, timeout=60):
+ message = await ctx.bot.say(prompt)
+ password = await ctx.bot.wait_for_message(
+ timeout=timeout,
+ author=ctx.message.author,
+ channel=ctx.message.channel)
+ if not password:
+ await ctx.bot.edit_message(
+ message,
+ new_content='Timed out, cancelling.')
+ return password |
5ff58311b6cf2dc8ad03351e818d05fca9e33e1b | hastexo/migrations/0010_add_user_foreign_key.py | hastexo/migrations/0010_add_user_foreign_key.py | from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
]
| from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
migrations.AlterField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
]
| Apply additional fix to add_user_foreign_key migration | Apply additional fix to add_user_foreign_key migration
The hack in 583fb729b1e201c830579345dca5beca4b131006 modified
0010_add_user_foreign_key in such a way that it ended up *not* setting
a database constraint when it should have.
Enable the database-enforced constraint in the right place.
Co-authored-by: Florian Haas <73262ad0334ab37227b2f7a0205f51db1e606681@cleura.com>
| Python | agpl-3.0 | hastexo/hastexo-xblock,hastexo/hastexo-xblock,hastexo/hastexo-xblock,hastexo/hastexo-xblock | from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
+ migrations.AlterField(
+ model_name='stack',
+ name='learner',
+ field=models.ForeignKey(
+ db_constraint=True,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ to=settings.AUTH_USER_MODEL),
+ ),
]
| Apply additional fix to add_user_foreign_key migration | ## Code Before:
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
]
## Instruction:
Apply additional fix to add_user_foreign_key migration
## Code After:
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
migrations.AlterField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
]
| from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
+ migrations.AlterField(
+ model_name='stack',
+ name='learner',
+ field=models.ForeignKey(
+ db_constraint=True,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ to=settings.AUTH_USER_MODEL),
+ ),
] |
155b1e6b8d431f1169a3e71d08d93d76a3414c59 | turbustat/statistics/vca_vcs/slice_thickness.py | turbustat/statistics/vca_vcs/slice_thickness.py |
import numpy as np
def change_slice_thickness(cube, slice_thickness=1.0):
'''
Degrades the velocity resolution of a data cube. This is to avoid
shot noise by removing velocity fluctuations at small thicknesses.
Parameters
----------
cube : numpy.ndarray
3D data cube to degrade
slice_thickness : float, optional
Thicknesses of the new slices. Minimum is 1.0
Thickness must be integer multiple of the original cube size
Returns
-------
degraded_cube : numpy.ndarray
Data cube degraded to new slice thickness
'''
assert isinstance(slice_thickness, float)
if slice_thickness < 1:
slice_thickness == 1
print "Slice Thickness must be at least 1.0. Returning original cube."
if slice_thickness == 1:
return cube
if cube.shape[0] % slice_thickness != 0:
raise TypeError("Slice thickness must be integer multiple of dimension"
" size % s" % (cube.shape[0]))
slice_thickness = int(slice_thickness)
# Want to average over velocity channels
new_channel_indices = np.arange(0, cube.shape[0] / slice_thickness)
degraded_cube = np.ones(
(cube.shape[0] / slice_thickness, cube.shape[1], cube.shape[2]))
for channel in new_channel_indices:
old_index = int(channel * slice_thickness)
channel = int(channel)
degraded_cube[channel, :, :] = \
np.nanmean(cube[old_index:old_index + slice_thickness], axis=0)
return degraded_cube
|
import numpy as np
from astropy import units as u
from spectral_cube import SpectralCube
from astropy.convolution import Gaussian1DKernel
def spectral_regrid_cube(cube, channel_width):
fwhm_factor = np.sqrt(8 * np.log(2))
current_resolution = np.diff(cube.spectral_axis[:2])[0]
target_resolution = channel_width.to(current_resolution.unit)
diff_factor = np.abs(target_resolution / current_resolution).value
pixel_scale = np.abs(current_resolution)
gaussian_width = ((target_resolution**2 - current_resolution**2)**0.5 /
pixel_scale / fwhm_factor)
kernel = Gaussian1DKernel(gaussian_width)
new_cube = cube.spectral_smooth(kernel)
# Now define the new spectral axis at the new resolution
num_chan = int(np.floor_divide(cube.shape[0], diff_factor))
new_specaxis = np.linspace(cube.spectral_axis.min().value,
cube.spectral_axis.max().value,
num_chan) * current_resolution.unit
# Keep the same order (max to min or min to max)
if current_resolution.value < 0:
new_specaxis = new_specaxis[::-1]
return new_cube.spectral_interpolate(new_specaxis,
suppress_smooth_warning=True)
| Add a corrected spectral regridding function that smooths before interpolating to a new spectral axis | Add a corrected spectral regridding function that smooths before interpolating to a new spectral axis
| Python | mit | e-koch/TurbuStat,Astroua/TurbuStat |
import numpy as np
+ from astropy import units as u
+ from spectral_cube import SpectralCube
+ from astropy.convolution import Gaussian1DKernel
+ def spectral_regrid_cube(cube, channel_width):
- def change_slice_thickness(cube, slice_thickness=1.0):
- '''
- Degrades the velocity resolution of a data cube. This is to avoid
- shot noise by removing velocity fluctuations at small thicknesses.
+ fwhm_factor = np.sqrt(8 * np.log(2))
+ current_resolution = np.diff(cube.spectral_axis[:2])[0]
+ target_resolution = channel_width.to(current_resolution.unit)
+ diff_factor = np.abs(target_resolution / current_resolution).value
+ pixel_scale = np.abs(current_resolution)
- Parameters
- ----------
- cube : numpy.ndarray
- 3D data cube to degrade
- slice_thickness : float, optional
- Thicknesses of the new slices. Minimum is 1.0
- Thickness must be integer multiple of the original cube size
+ gaussian_width = ((target_resolution**2 - current_resolution**2)**0.5 /
+ pixel_scale / fwhm_factor)
+ kernel = Gaussian1DKernel(gaussian_width)
+ new_cube = cube.spectral_smooth(kernel)
- Returns
- -------
- degraded_cube : numpy.ndarray
- Data cube degraded to new slice thickness
- '''
- assert isinstance(slice_thickness, float)
- if slice_thickness < 1:
- slice_thickness == 1
- print "Slice Thickness must be at least 1.0. Returning original cube."
+ # Now define the new spectral axis at the new resolution
+ num_chan = int(np.floor_divide(cube.shape[0], diff_factor))
+ new_specaxis = np.linspace(cube.spectral_axis.min().value,
+ cube.spectral_axis.max().value,
+ num_chan) * current_resolution.unit
+ # Keep the same order (max to min or min to max)
+ if current_resolution.value < 0:
+ new_specaxis = new_specaxis[::-1]
- if slice_thickness == 1:
- return cube
+ return new_cube.spectral_interpolate(new_specaxis,
+ suppress_smooth_warning=True)
- if cube.shape[0] % slice_thickness != 0:
- raise TypeError("Slice thickness must be integer multiple of dimension"
- " size % s" % (cube.shape[0]))
-
- slice_thickness = int(slice_thickness)
-
- # Want to average over velocity channels
- new_channel_indices = np.arange(0, cube.shape[0] / slice_thickness)
- degraded_cube = np.ones(
- (cube.shape[0] / slice_thickness, cube.shape[1], cube.shape[2]))
-
- for channel in new_channel_indices:
- old_index = int(channel * slice_thickness)
- channel = int(channel)
- degraded_cube[channel, :, :] = \
- np.nanmean(cube[old_index:old_index + slice_thickness], axis=0)
-
- return degraded_cube
- | Add a corrected spectral regridding function that smooths before interpolating to a new spectral axis | ## Code Before:
import numpy as np
def change_slice_thickness(cube, slice_thickness=1.0):
'''
Degrades the velocity resolution of a data cube. This is to avoid
shot noise by removing velocity fluctuations at small thicknesses.
Parameters
----------
cube : numpy.ndarray
3D data cube to degrade
slice_thickness : float, optional
Thicknesses of the new slices. Minimum is 1.0
Thickness must be integer multiple of the original cube size
Returns
-------
degraded_cube : numpy.ndarray
Data cube degraded to new slice thickness
'''
assert isinstance(slice_thickness, float)
if slice_thickness < 1:
slice_thickness == 1
print "Slice Thickness must be at least 1.0. Returning original cube."
if slice_thickness == 1:
return cube
if cube.shape[0] % slice_thickness != 0:
raise TypeError("Slice thickness must be integer multiple of dimension"
" size % s" % (cube.shape[0]))
slice_thickness = int(slice_thickness)
# Want to average over velocity channels
new_channel_indices = np.arange(0, cube.shape[0] / slice_thickness)
degraded_cube = np.ones(
(cube.shape[0] / slice_thickness, cube.shape[1], cube.shape[2]))
for channel in new_channel_indices:
old_index = int(channel * slice_thickness)
channel = int(channel)
degraded_cube[channel, :, :] = \
np.nanmean(cube[old_index:old_index + slice_thickness], axis=0)
return degraded_cube
## Instruction:
Add a corrected spectral regridding function that smooths before interpolating to a new spectral axis
## Code After:
import numpy as np
from astropy import units as u
from spectral_cube import SpectralCube
from astropy.convolution import Gaussian1DKernel
def spectral_regrid_cube(cube, channel_width):
fwhm_factor = np.sqrt(8 * np.log(2))
current_resolution = np.diff(cube.spectral_axis[:2])[0]
target_resolution = channel_width.to(current_resolution.unit)
diff_factor = np.abs(target_resolution / current_resolution).value
pixel_scale = np.abs(current_resolution)
gaussian_width = ((target_resolution**2 - current_resolution**2)**0.5 /
pixel_scale / fwhm_factor)
kernel = Gaussian1DKernel(gaussian_width)
new_cube = cube.spectral_smooth(kernel)
# Now define the new spectral axis at the new resolution
num_chan = int(np.floor_divide(cube.shape[0], diff_factor))
new_specaxis = np.linspace(cube.spectral_axis.min().value,
cube.spectral_axis.max().value,
num_chan) * current_resolution.unit
# Keep the same order (max to min or min to max)
if current_resolution.value < 0:
new_specaxis = new_specaxis[::-1]
return new_cube.spectral_interpolate(new_specaxis,
suppress_smooth_warning=True)
|
import numpy as np
+ from astropy import units as u
+ from spectral_cube import SpectralCube
+ from astropy.convolution import Gaussian1DKernel
+ def spectral_regrid_cube(cube, channel_width):
- def change_slice_thickness(cube, slice_thickness=1.0):
- '''
- Degrades the velocity resolution of a data cube. This is to avoid
- shot noise by removing velocity fluctuations at small thicknesses.
+ fwhm_factor = np.sqrt(8 * np.log(2))
+ current_resolution = np.diff(cube.spectral_axis[:2])[0]
+ target_resolution = channel_width.to(current_resolution.unit)
+ diff_factor = np.abs(target_resolution / current_resolution).value
+ pixel_scale = np.abs(current_resolution)
- Parameters
- ----------
- cube : numpy.ndarray
- 3D data cube to degrade
- slice_thickness : float, optional
- Thicknesses of the new slices. Minimum is 1.0
- Thickness must be integer multiple of the original cube size
+ gaussian_width = ((target_resolution**2 - current_resolution**2)**0.5 /
+ pixel_scale / fwhm_factor)
+ kernel = Gaussian1DKernel(gaussian_width)
+ new_cube = cube.spectral_smooth(kernel)
- Returns
- -------
- degraded_cube : numpy.ndarray
- Data cube degraded to new slice thickness
- '''
- assert isinstance(slice_thickness, float)
- if slice_thickness < 1:
- slice_thickness == 1
- print "Slice Thickness must be at least 1.0. Returning original cube."
+ # Now define the new spectral axis at the new resolution
+ num_chan = int(np.floor_divide(cube.shape[0], diff_factor))
+ new_specaxis = np.linspace(cube.spectral_axis.min().value,
+ cube.spectral_axis.max().value,
+ num_chan) * current_resolution.unit
+ # Keep the same order (max to min or min to max)
+ if current_resolution.value < 0:
+ new_specaxis = new_specaxis[::-1]
+ return new_cube.spectral_interpolate(new_specaxis,
+ suppress_smooth_warning=True)
- if slice_thickness == 1:
- return cube
-
- if cube.shape[0] % slice_thickness != 0:
- raise TypeError("Slice thickness must be integer multiple of dimension"
- " size % s" % (cube.shape[0]))
-
- slice_thickness = int(slice_thickness)
-
- # Want to average over velocity channels
- new_channel_indices = np.arange(0, cube.shape[0] / slice_thickness)
- degraded_cube = np.ones(
- (cube.shape[0] / slice_thickness, cube.shape[1], cube.shape[2]))
-
- for channel in new_channel_indices:
- old_index = int(channel * slice_thickness)
- channel = int(channel)
- degraded_cube[channel, :, :] = \
- np.nanmean(cube[old_index:old_index + slice_thickness], axis=0)
-
- return degraded_cube |
cc841cc1020ca4df6f303fbb05e497a7c69c92f0 | akvo/rsr/migrations/0087_auto_20161110_0920.py | akvo/rsr/migrations/0087_auto_20161110_0920.py | from __future__ import unicode_literals
from django.db import migrations
def fix_employment_groups(apps, schema_editor):
# We can't import the Employment or Group model directly as it may be a
# newer version than this migration expects. We use the historical version.
Group = apps.get_model("auth", "Group")
Employment = apps.get_model("rsr", "Employment")
for employment in Employment.objects.filter(group=None):
employment.group = Group.objects.get(name='Users')
employment.save()
class Migration(migrations.Migration):
dependencies = [
('rsr', '0086_auto_20160921_0947'),
]
operations = [
migrations.RunPython(fix_employment_groups),
]
| from __future__ import unicode_literals
from django.db import migrations
def fix_employment_groups(apps, schema_editor):
# We can't import the Employment or Group model directly as it may be a
# newer version than this migration expects. We use the historical version.
Group = apps.get_model("auth", "Group")
Employment = apps.get_model("rsr", "Employment")
for employment in Employment.objects.filter(group=None):
try:
employment.group = Group.objects.get(name='Users')
employment.save()
except Exception as e:
print(e)
class Migration(migrations.Migration):
dependencies = [
('rsr', '0086_auto_20160921_0947'),
]
operations = [
migrations.RunPython(fix_employment_groups),
]
| Fix broken migration with try-except blocks | Fix broken migration with try-except blocks
Duplicate key errors were being caused if an employment similar to the
one being created by the migration already existed.
| Python | agpl-3.0 | akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr | from __future__ import unicode_literals
from django.db import migrations
def fix_employment_groups(apps, schema_editor):
# We can't import the Employment or Group model directly as it may be a
# newer version than this migration expects. We use the historical version.
Group = apps.get_model("auth", "Group")
Employment = apps.get_model("rsr", "Employment")
for employment in Employment.objects.filter(group=None):
+ try:
- employment.group = Group.objects.get(name='Users')
+ employment.group = Group.objects.get(name='Users')
- employment.save()
+ employment.save()
+ except Exception as e:
+ print(e)
class Migration(migrations.Migration):
dependencies = [
('rsr', '0086_auto_20160921_0947'),
]
operations = [
migrations.RunPython(fix_employment_groups),
]
| Fix broken migration with try-except blocks | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def fix_employment_groups(apps, schema_editor):
# We can't import the Employment or Group model directly as it may be a
# newer version than this migration expects. We use the historical version.
Group = apps.get_model("auth", "Group")
Employment = apps.get_model("rsr", "Employment")
for employment in Employment.objects.filter(group=None):
employment.group = Group.objects.get(name='Users')
employment.save()
class Migration(migrations.Migration):
dependencies = [
('rsr', '0086_auto_20160921_0947'),
]
operations = [
migrations.RunPython(fix_employment_groups),
]
## Instruction:
Fix broken migration with try-except blocks
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def fix_employment_groups(apps, schema_editor):
# We can't import the Employment or Group model directly as it may be a
# newer version than this migration expects. We use the historical version.
Group = apps.get_model("auth", "Group")
Employment = apps.get_model("rsr", "Employment")
for employment in Employment.objects.filter(group=None):
try:
employment.group = Group.objects.get(name='Users')
employment.save()
except Exception as e:
print(e)
class Migration(migrations.Migration):
dependencies = [
('rsr', '0086_auto_20160921_0947'),
]
operations = [
migrations.RunPython(fix_employment_groups),
]
| from __future__ import unicode_literals
from django.db import migrations
def fix_employment_groups(apps, schema_editor):
# We can't import the Employment or Group model directly as it may be a
# newer version than this migration expects. We use the historical version.
Group = apps.get_model("auth", "Group")
Employment = apps.get_model("rsr", "Employment")
for employment in Employment.objects.filter(group=None):
+ try:
- employment.group = Group.objects.get(name='Users')
+ employment.group = Group.objects.get(name='Users')
? ++++
- employment.save()
+ employment.save()
? ++++
+ except Exception as e:
+ print(e)
class Migration(migrations.Migration):
dependencies = [
('rsr', '0086_auto_20160921_0947'),
]
operations = [
migrations.RunPython(fix_employment_groups),
] |
5e67e16d17d06a0f4d307a035ca6b62f094995c6 | network/api/serializers.py | network/api/serializers.py | from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.satellite.tle0
def get_tle1(self, obj):
return obj.observation.satellite.tle1
def get_tle2(self, obj):
return obj.observation.satellite.tle2
| from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.tle.tle0
def get_tle1(self, obj):
return obj.observation.tle.tle1
def get_tle2(self, obj):
return obj.observation.tle.tle2
| Adjust API to TLE code changes | Adjust API to TLE code changes
| Python | agpl-3.0 | cshields/satnogs-network,cshields/satnogs-network,cshields/satnogs-network,cshields/satnogs-network | from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
- return obj.observation.satellite.tle0
+ return obj.observation.tle.tle0
def get_tle1(self, obj):
- return obj.observation.satellite.tle1
+ return obj.observation.tle.tle1
def get_tle2(self, obj):
- return obj.observation.satellite.tle2
+ return obj.observation.tle.tle2
| Adjust API to TLE code changes | ## Code Before:
from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.satellite.tle0
def get_tle1(self, obj):
return obj.observation.satellite.tle1
def get_tle2(self, obj):
return obj.observation.satellite.tle2
## Instruction:
Adjust API to TLE code changes
## Code After:
from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
return obj.observation.tle.tle0
def get_tle1(self, obj):
return obj.observation.tle.tle1
def get_tle2(self, obj):
return obj.observation.tle.tle2
| from rest_framework import serializers
from network.base.models import Data
class DataSerializer(serializers.ModelSerializer):
class Meta:
model = Data
fields = ('id', 'start', 'end', 'observation', 'ground_station', 'payload')
read_only_fields = ['id', 'start', 'end', 'observation', 'ground_station']
class JobSerializer(serializers.ModelSerializer):
frequency = serializers.SerializerMethodField()
tle0 = serializers.SerializerMethodField()
tle1 = serializers.SerializerMethodField()
tle2 = serializers.SerializerMethodField()
class Meta:
model = Data
fields = ('id', 'start', 'end', 'ground_station', 'tle0', 'tle1', 'tle2',
'frequency')
def get_frequency(self, obj):
return obj.observation.transmitter.downlink_low
def get_tle0(self, obj):
- return obj.observation.satellite.tle0
? -- - ---
+ return obj.observation.tle.tle0
def get_tle1(self, obj):
- return obj.observation.satellite.tle1
? -- - ---
+ return obj.observation.tle.tle1
def get_tle2(self, obj):
- return obj.observation.satellite.tle2
? -- - ---
+ return obj.observation.tle.tle2 |
c162514291428f26dc78d08393455ff33fe12f12 | requests_test.py | requests_test.py | import requests
from requests.auth import HTTPBasicAuth
import secret
parameters = 'teamstats'
response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
print(response.status_code)
# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)
#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json | import requests
from requests.auth import HTTPBasicAuth
import secret
parameters = 'teamstats'
response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
print(response.status_code)
| Clean up file and remove notes now that parameters in API feed are working | Clean up file and remove notes now that parameters in API feed are working
| Python | mit | prcutler/nflpool,prcutler/nflpool | import requests
from requests.auth import HTTPBasicAuth
import secret
parameters = 'teamstats'
response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
print(response.status_code)
- # Make a get request with the parameters.
- #response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)
-
- #https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
- #{format}?teamstats={team-stats}
- #https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json | Clean up file and remove notes now that parameters in API feed are working | ## Code Before:
import requests
from requests.auth import HTTPBasicAuth
import secret
parameters = 'teamstats'
response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
print(response.status_code)
# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)
#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json
## Instruction:
Clean up file and remove notes now that parameters in API feed are working
## Code After:
import requests
from requests.auth import HTTPBasicAuth
import secret
parameters = 'teamstats'
response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
print(response.status_code)
| import requests
from requests.auth import HTTPBasicAuth
import secret
parameters = 'teamstats'
response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
print(response.status_code)
-
- # Make a get request with the parameters.
- #response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)
-
- #https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
- #{format}?teamstats={team-stats}
- #https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json |
81d9558c5d75671349228b8cde84d7049289d3df | troposphere/settings/__init__.py | troposphere/settings/__init__.py | from troposphere.settings.default import *
from troposphere.settings.local import *
| from troposphere.settings.default import *
try:
from troposphere.settings.local import *
except ImportError:
raise Exception("No local settings module found. Refer to README.md")
 | Add exception for people who don't read the docs | Add exception for people who don't read the docs
| Python | apache-2.0 | CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend | from troposphere.settings.default import *
+ try:
- from troposphere.settings.local import *
+ from troposphere.settings.local import *
+ except ImportError:
+ raise Exception("No local settings module found. Refer to README.md")
 | Add exception for people who don't read the docs | ## Code Before:
from troposphere.settings.default import *
from troposphere.settings.local import *
## Instruction:
Add exception for people who don't read the docs
## Code After:
from troposphere.settings.default import *
try:
from troposphere.settings.local import *
except ImportError:
raise Exception("No local settings module found. Refer to README.md")
| from troposphere.settings.default import *
+ try:
- from troposphere.settings.local import *
+ from troposphere.settings.local import *
? ++++
+ except ImportError:
+ raise Exception("No local settings module found. Refer to README.md") |
3090f80fb75e28d76a2a9f5e25c507d095a695c8 | middleware/python/test_auth_middleware.py | middleware/python/test_auth_middleware.py | from tyk.decorators import *
from gateway import TykGateway as tyk
@CustomKeyCheck
def MyKeyCheck(request, session, metadata, spec):
print("Running MyKeyCheck?")
print("request:", request)
print("session:", session)
print("spec:", spec)
valid_token = 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
request_token = request.get_header('Authorization')
if request_token == valid_token:
print("Token is OK")
session.rate = 1000
session.per = 1
metadata['token'] = "mytoken"
else:
print("Token is WRONG")
request.return_overrides = { 'response_code': 401, 'response_error': 'Not authorized (by the Python middleware)' }
return request, session, metadata
| from tyk.decorators import *
from gateway import TykGateway as tyk
@CustomKeyCheck
def MyKeyCheck(request, session, metadata, spec):
print("Running MyKeyCheck?")
print("request:", request)
print("session:", session)
print("spec:", spec)
valid_token = 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
request_token = request.get_header('Authorization')
print("(python) request_token =", request_token)
if request_token == valid_token:
print("Token is OK")
session.rate = 1000.0
session.per = 1.0
metadata['token'] = "mytoken"
else:
print("Token is WRONG")
request.return_overrides = { 'response_code': 401, 'response_error': 'Not authorized (by the Python middleware)' }
return request, session, metadata
| Use float in session fields | Use float in session fields
| Python | mpl-2.0 | nebolsin/tyk,nebolsin/tyk,mvdan/tyk,lonelycode/tyk,mvdan/tyk,lonelycode/tyk,nebolsin/tyk,nebolsin/tyk,mvdan/tyk,mvdan/tyk,mvdan/tyk,mvdan/tyk,lonelycode/tyk,nebolsin/tyk,mvdan/tyk,nebolsin/tyk,nebolsin/tyk,nebolsin/tyk,mvdan/tyk | from tyk.decorators import *
from gateway import TykGateway as tyk
@CustomKeyCheck
def MyKeyCheck(request, session, metadata, spec):
print("Running MyKeyCheck?")
print("request:", request)
print("session:", session)
print("spec:", spec)
valid_token = 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
request_token = request.get_header('Authorization')
+ print("(python) request_token =", request_token)
+
if request_token == valid_token:
print("Token is OK")
- session.rate = 1000
+ session.rate = 1000.0
- session.per = 1
+ session.per = 1.0
metadata['token'] = "mytoken"
else:
print("Token is WRONG")
request.return_overrides = { 'response_code': 401, 'response_error': 'Not authorized (by the Python middleware)' }
return request, session, metadata
| Use float in session fields | ## Code Before:
from tyk.decorators import *
from gateway import TykGateway as tyk
@CustomKeyCheck
def MyKeyCheck(request, session, metadata, spec):
print("Running MyKeyCheck?")
print("request:", request)
print("session:", session)
print("spec:", spec)
valid_token = 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
request_token = request.get_header('Authorization')
if request_token == valid_token:
print("Token is OK")
session.rate = 1000
session.per = 1
metadata['token'] = "mytoken"
else:
print("Token is WRONG")
request.return_overrides = { 'response_code': 401, 'response_error': 'Not authorized (by the Python middleware)' }
return request, session, metadata
## Instruction:
Use float in session fields
## Code After:
from tyk.decorators import *
from gateway import TykGateway as tyk
@CustomKeyCheck
def MyKeyCheck(request, session, metadata, spec):
print("Running MyKeyCheck?")
print("request:", request)
print("session:", session)
print("spec:", spec)
valid_token = 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
request_token = request.get_header('Authorization')
print("(python) request_token =", request_token)
if request_token == valid_token:
print("Token is OK")
session.rate = 1000.0
session.per = 1.0
metadata['token'] = "mytoken"
else:
print("Token is WRONG")
request.return_overrides = { 'response_code': 401, 'response_error': 'Not authorized (by the Python middleware)' }
return request, session, metadata
| from tyk.decorators import *
from gateway import TykGateway as tyk
@CustomKeyCheck
def MyKeyCheck(request, session, metadata, spec):
print("Running MyKeyCheck?")
print("request:", request)
print("session:", session)
print("spec:", spec)
valid_token = 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
request_token = request.get_header('Authorization')
+ print("(python) request_token =", request_token)
+
if request_token == valid_token:
print("Token is OK")
- session.rate = 1000
+ session.rate = 1000.0
? ++
- session.per = 1
+ session.per = 1.0
? ++
metadata['token'] = "mytoken"
else:
print("Token is WRONG")
request.return_overrides = { 'response_code': 401, 'response_error': 'Not authorized (by the Python middleware)' }
return request, session, metadata |
adfaff320066422734c28759688f75e3f127078c | icekit/plugins/contact_person/models.py | icekit/plugins/contact_person/models.py | import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
| from fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
| Repair 500 viewing contact person | Repair 500 viewing contact person
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | - import os
- from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
- from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
- from icekit.publishing.models import PublishingModel
- from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
-
- from icekit.content_collections.abstract_models import AbstractCollectedContent, \
- TitleSlugMixin, AbstractListingPage
- from icekit.mixins import FluentFieldsMixin
-
-
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
- return "{} ({})".format(self.name, self.title)
+ return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
| Repair 500 viewing contact person | ## Code Before:
import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
## Instruction:
Repair 500 viewing contact person
## Code After:
from fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
| - import os
- from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
- from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
- from icekit.publishing.models import PublishingModel
- from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
-
- from icekit.content_collections.abstract_models import AbstractCollectedContent, \
- TitleSlugMixin, AbstractListingPage
- from icekit.mixins import FluentFieldsMixin
-
-
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
- return "{} ({})".format(self.name, self.title)
+ return u"{} ({})".format(self.name, self.title)
? +
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact) |
abe744c5a099fd988ff3fe5eb1d50cca7a633d74 | var/spack/repos/builtin/packages/parallel-netcdf/package.py | var/spack/repos/builtin/packages/parallel-netcdf/package.py | from spack import *
class ParallelNetcdf(Package):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-mpi=%s" % spec['mpi'].prefix)
make()
make("install")
| from spack import *
class ParallelNetcdf(Package):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0')
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-mpi=%s" % spec['mpi'].prefix)
make()
make("install")
| Add latest version of PnetCDF | Add latest version of PnetCDF
| Python | lgpl-2.1 | mfherbst/spack,EmreAtes/spack,lgarren/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,tmerrick1/spack,iulian787/spack,skosukhin/spack,LLNL/spack,matthiasdiener/spack,iulian787/spack,krafczyk/spack,TheTimmy/spack,mfherbst/spack,LLNL/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,mfherbst/spack,TheTimmy/spack,krafczyk/spack,EmreAtes/spack,iulian787/spack,iulian787/spack,lgarren/spack,LLNL/spack,lgarren/spack,lgarren/spack,matthiasdiener/spack,iulian787/spack,skosukhin/spack,mfherbst/spack,lgarren/spack,matthiasdiener/spack,tmerrick1/spack,matthiasdiener/spack,LLNL/spack,skosukhin/spack,krafczyk/spack,EmreAtes/spack,LLNL/spack,TheTimmy/spack,TheTimmy/spack,skosukhin/spack,EmreAtes/spack,tmerrick1/spack,krafczyk/spack,TheTimmy/spack,krafczyk/spack | from spack import *
class ParallelNetcdf(Package):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
+ version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0')
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-mpi=%s" % spec['mpi'].prefix)
make()
make("install")
| Add latest version of PnetCDF | ## Code Before:
from spack import *
class ParallelNetcdf(Package):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-mpi=%s" % spec['mpi'].prefix)
make()
make("install")
## Instruction:
Add latest version of PnetCDF
## Code After:
from spack import *
class ParallelNetcdf(Package):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0')
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-mpi=%s" % spec['mpi'].prefix)
make()
make("install")
| from spack import *
class ParallelNetcdf(Package):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
+ version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0')
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-mpi=%s" % spec['mpi'].prefix)
make()
make("install") |
1c51c772d4b21eba70cd09429e603f1873b2c13c | examples/demo.py | examples/demo.py |
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
t = pytaf.TAF(taf_str)
d = pytaf.Decoder(t)
print taf_str
print
dec = d.decode_taf()
print dec
|
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
# Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
# Create a decoder object from the TAF object
d = pytaf.Decoder(t)
# Print the raw string for the reference
print(taf_str)
# Decode and print the decoded string
dec = d.decode_taf()
print(dec)
| Update the example script to work with python3. | Update the example script to work with python3.
| Python | mit | dmbaturin/pytaf |
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
+ # Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
+ # Create a decoder object from the TAF object
d = pytaf.Decoder(t)
+ # Print the raw string for the reference
- print taf_str
+ print(taf_str)
- print
+
+ # Decode and print the decoded string
dec = d.decode_taf()
-
- print dec
+ print(dec)
| Update the example script to work with python3. | ## Code Before:
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
t = pytaf.TAF(taf_str)
d = pytaf.Decoder(t)
print taf_str
print
dec = d.decode_taf()
print dec
## Instruction:
Update the example script to work with python3.
## Code After:
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
# Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
# Create a decoder object from the TAF object
d = pytaf.Decoder(t)
# Print the raw string for the reference
print(taf_str)
# Decode and print the decoded string
dec = d.decode_taf()
print(dec)
|
import pytaf
taf_str = """
TAF AMD KDEN 291134Z 2912/3018 32006KT 1/4SM FG OVC001
TEMPO 2914/2915 1SM -BR CLR
FM291500 04006KT P6SM SKC
TEMPO 2915/2917 2SM BR OVC008
FM291900 05007KT P6SM SCT050 BKN090 WS010/13040KT
PROB30 2921/3001 VRB20G30KT -TSRA BKN050CB
FM300100 31007KT P6SM SCT070 BKN120 +FC
FM300500 23006KT P6SM SCT120 $
"""
+ # Create a parsed TAF object from string
t = pytaf.TAF(taf_str)
+ # Create a decoder object from the TAF object
d = pytaf.Decoder(t)
+ # Print the raw string for the reference
- print taf_str
? ^
+ print(taf_str)
? ^ +
- print
+
+ # Decode and print the decoded string
dec = d.decode_taf()
+ print(dec)
- print dec
- |
82c57b4fad49b171cd0833b38867474d6578220c | client/examples/followbot.py | client/examples/followbot.py |
from botchallenge import *
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
|
from botchallenge import *
import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
else:
time.sleep(2)
| Add timer to follow bot | Add timer to follow bot
| Python | mit | Rafiot/botchallenge,Rafiot/botchallenge,Rafiot/botchallenge,Rafiot/botchallenge |
from botchallenge import *
+ import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
+ else:
+ time.sleep(2)
| Add timer to follow bot | ## Code Before:
from botchallenge import *
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
## Instruction:
Add timer to follow bot
## Code After:
from botchallenge import *
import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
else:
time.sleep(2)
|
from botchallenge import *
+ import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
+ else:
+ time.sleep(2)
|
8c6ff33c8a034c2eecf5f2244811c86acf96120a | tools/apollo/list_organisms.py | tools/apollo/list_organisms.py | from __future__ import print_function
import argparse
import json
from webapollo import AssertUser, WAAuth, WebApolloInstance, accessible_organisms, PasswordGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='List all organisms available in an Apollo instance')
WAAuth(parser)
parser.add_argument('email', help='User Email')
args = parser.parse_args()
wa = WebApolloInstance(args.apollo, args.username, args.password)
try:
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
except Exception:
returnData = wa.users.createUser(args.email, args.email, args.email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
all_orgs = wa.organisms.findAllOrganisms()
orgs = accessible_organisms(gx_user, all_orgs)
print(json.dumps(orgs, indent=2))
| from __future__ import print_function
import argparse
import json
from webapollo import AssertUser, WAAuth, WebApolloInstance, accessible_organisms, PasswordGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='List all organisms available in an Apollo instance')
WAAuth(parser)
parser.add_argument('email', help='User Email')
args = parser.parse_args()
wa = WebApolloInstance(args.apollo, args.username, args.password)
try:
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
except Exception:
returnData = wa.users.createUser(args.email, args.email, args.email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
all_orgs = wa.organisms.findAllOrganisms()
try:
orgs = accessible_organisms(gx_user, all_orgs)
except Exception:
orgs = []
print(json.dumps(orgs, indent=2))
| Add try-catch if no organism allowed | Add try-catch if no organism allowed
| Python | mit | galaxy-genome-annotation/galaxy-tools,galaxy-genome-annotation/galaxy-tools | from __future__ import print_function
import argparse
import json
from webapollo import AssertUser, WAAuth, WebApolloInstance, accessible_organisms, PasswordGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='List all organisms available in an Apollo instance')
WAAuth(parser)
parser.add_argument('email', help='User Email')
args = parser.parse_args()
wa = WebApolloInstance(args.apollo, args.username, args.password)
try:
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
except Exception:
returnData = wa.users.createUser(args.email, args.email, args.email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
all_orgs = wa.organisms.findAllOrganisms()
+ try:
- orgs = accessible_organisms(gx_user, all_orgs)
+ orgs = accessible_organisms(gx_user, all_orgs)
+ except Exception:
+ orgs = []
print(json.dumps(orgs, indent=2))
| Add try-catch if no organism allowed | ## Code Before:
from __future__ import print_function
import argparse
import json
from webapollo import AssertUser, WAAuth, WebApolloInstance, accessible_organisms, PasswordGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='List all organisms available in an Apollo instance')
WAAuth(parser)
parser.add_argument('email', help='User Email')
args = parser.parse_args()
wa = WebApolloInstance(args.apollo, args.username, args.password)
try:
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
except Exception:
returnData = wa.users.createUser(args.email, args.email, args.email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
all_orgs = wa.organisms.findAllOrganisms()
orgs = accessible_organisms(gx_user, all_orgs)
print(json.dumps(orgs, indent=2))
## Instruction:
Add try-catch if no organism allowed
## Code After:
from __future__ import print_function
import argparse
import json
from webapollo import AssertUser, WAAuth, WebApolloInstance, accessible_organisms, PasswordGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='List all organisms available in an Apollo instance')
WAAuth(parser)
parser.add_argument('email', help='User Email')
args = parser.parse_args()
wa = WebApolloInstance(args.apollo, args.username, args.password)
try:
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
except Exception:
returnData = wa.users.createUser(args.email, args.email, args.email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
all_orgs = wa.organisms.findAllOrganisms()
try:
orgs = accessible_organisms(gx_user, all_orgs)
except Exception:
orgs = []
print(json.dumps(orgs, indent=2))
| from __future__ import print_function
import argparse
import json
from webapollo import AssertUser, WAAuth, WebApolloInstance, accessible_organisms, PasswordGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='List all organisms available in an Apollo instance')
WAAuth(parser)
parser.add_argument('email', help='User Email')
args = parser.parse_args()
wa = WebApolloInstance(args.apollo, args.username, args.password)
try:
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
except Exception:
returnData = wa.users.createUser(args.email, args.email, args.email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(wa.users.loadUsers(email=args.email))
all_orgs = wa.organisms.findAllOrganisms()
+ try:
- orgs = accessible_organisms(gx_user, all_orgs)
+ orgs = accessible_organisms(gx_user, all_orgs)
? ++++
+ except Exception:
+ orgs = []
print(json.dumps(orgs, indent=2)) |
d68f28581cd3c3f57f7c41adbd65676887a51136 | opps/channels/tests/test_forms.py | opps/channels/tests/test_forms.py |
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
|
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
self.assertEqual(int(form.fields['slug'].widget.attrs['maxlength']), 150)
def test_readonly_slug(self):
"""
Check readonly field slug
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(form.fields['slug'].widget.attrs['readonly'])
form_2 = ChannelAdminForm()
self.assertNotIn('readonly', form_2.fields['slug'].widget.attrs)
| Add test check readonly field slug of channel | Add test check readonly field slug of channel
| Python | mit | jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps |
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
+ self.assertEqual(int(form.fields['slug'].widget.attrs['maxlength']), 150)
+ def test_readonly_slug(self):
+ """
+ Check readonly field slug
+ """
+ form = ChannelAdminForm(instance=self.parent)
+ self.assertTrue(form.fields['slug'].widget.attrs['readonly'])
+ form_2 = ChannelAdminForm()
+ self.assertNotIn('readonly', form_2.fields['slug'].widget.attrs)
+ | Add test check readonly field slug of channel | ## Code Before:
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
## Instruction:
Add test check readonly field slug of channel
## Code After:
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
self.assertEqual(int(form.fields['slug'].widget.attrs['maxlength']), 150)
def test_readonly_slug(self):
"""
Check readonly field slug
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(form.fields['slug'].widget.attrs['readonly'])
form_2 = ChannelAdminForm()
self.assertNotIn('readonly', form_2.fields['slug'].widget.attrs)
|
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
+ self.assertEqual(int(form.fields['slug'].widget.attrs['maxlength']), 150)
+ def test_readonly_slug(self):
+ """
+ Check readonly field slug
+ """
+ form = ChannelAdminForm(instance=self.parent)
+ self.assertTrue(form.fields['slug'].widget.attrs['readonly'])
+
+ form_2 = ChannelAdminForm()
+ self.assertNotIn('readonly', form_2.fields['slug'].widget.attrs) |
fd77039104175a4b5702b46b21a2fa223676ddf4 | bowser/Database.py | bowser/Database.py | import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
json_data = json.loads(data.decode('utf-8'))
return json_data
| import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
if data is None:
raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data
| Raise KeyErrors for missing data in redis | fix: Raise KeyErrors for missing data in redis
| Python | mit | kevinkjt2000/discord-minecraft-server-status | import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
+ if data is None:
+ raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data
| Raise KeyErrors for missing data in redis | ## Code Before:
import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
json_data = json.loads(data.decode('utf-8'))
return json_data
## Instruction:
Raise KeyErrors for missing data in redis
## Code After:
import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
if data is None:
raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data
| import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
+ if data is None:
+ raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data |
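
The Database record above changes fetch_data_of_server_channel to raise KeyError when Redis holds no entry for the channel. A minimal caller sketch (hypothetical code, not part of the dataset record; the import path is assumed from the file name bowser/Database.py and the server/channel strings are made up):

from bowser.Database import Database

db = Database()
try:
    data = db.fetch_data_of_server_channel("some-server", "some-channel")
except KeyError:
    # Nothing stored yet for this server/channel; fall back to a default.
    data = {}
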
bab058be7b830a38d75eebf53170a805e726308c | keyring/util/platform.py | keyring/util/platform.py | import os
import sys
# While we support Python 2.4, use a convoluted technique to import
# platform from the stdlib.
# With Python 2.5 or later, just do "from __future__ import absolute_import"
# and "import platform"
exec('__import__("platform", globals=dict())')
platform = sys.modules['platform']
def _data_root_Windows():
return os.path.join(os.environ['LOCALAPPDATA'], 'Python Keyring')
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring')
# by default, use Unix convention
data_root = globals().get('_data_root_' + platform.system(), _data_root_Linux)
| import os
import sys
# While we support Python 2.4, use a convoluted technique to import
# platform from the stdlib.
# With Python 2.5 or later, just do "from __future__ import absolute_import"
# and "import platform"
exec('__import__("platform", globals=dict())')
platform = sys.modules['platform']
def _data_root_Windows():
try:
root = os.environ['LOCALAPPDATA']
except KeyError:
# Windows XP
root = os.path.join(os.environ['USERPROFILE'], 'Local Settings')
return os.path.join(root, 'Python Keyring')
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring')
# by default, use Unix convention
data_root = globals().get('_data_root_' + platform.system(), _data_root_Linux)
| Fix regression on Windows XP in determining data root | Fix regression on Windows XP in determining data root
| Python | mit | jaraco/keyring | import os
import sys
# While we support Python 2.4, use a convoluted technique to import
# platform from the stdlib.
# With Python 2.5 or later, just do "from __future__ import absolute_import"
# and "import platform"
exec('__import__("platform", globals=dict())')
platform = sys.modules['platform']
def _data_root_Windows():
+ try:
+ root = os.environ['LOCALAPPDATA']
+ except KeyError:
+ # Windows XP
+ root = os.path.join(os.environ['USERPROFILE'], 'Local Settings')
- return os.path.join(os.environ['LOCALAPPDATA'], 'Python Keyring')
+ return os.path.join(root, 'Python Keyring')
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring')
# by default, use Unix convention
data_root = globals().get('_data_root_' + platform.system(), _data_root_Linux)
| Fix regression on Windows XP in determining data root | ## Code Before:
import os
import sys
# While we support Python 2.4, use a convoluted technique to import
# platform from the stdlib.
# With Python 2.5 or later, just do "from __future__ import absolute_import"
# and "import platform"
exec('__import__("platform", globals=dict())')
platform = sys.modules['platform']
def _data_root_Windows():
return os.path.join(os.environ['LOCALAPPDATA'], 'Python Keyring')
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring')
# by default, use Unix convention
data_root = globals().get('_data_root_' + platform.system(), _data_root_Linux)
## Instruction:
Fix regression on Windows XP in determining data root
## Code After:
import os
import sys
# While we support Python 2.4, use a convoluted technique to import
# platform from the stdlib.
# With Python 2.5 or later, just do "from __future__ import absolute_import"
# and "import platform"
exec('__import__("platform", globals=dict())')
platform = sys.modules['platform']
def _data_root_Windows():
try:
root = os.environ['LOCALAPPDATA']
except KeyError:
# Windows XP
root = os.path.join(os.environ['USERPROFILE'], 'Local Settings')
return os.path.join(root, 'Python Keyring')
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring')
# by default, use Unix convention
data_root = globals().get('_data_root_' + platform.system(), _data_root_Linux)
| import os
import sys
# While we support Python 2.4, use a convoluted technique to import
# platform from the stdlib.
# With Python 2.5 or later, just do "from __future__ import absolute_import"
# and "import platform"
exec('__import__("platform", globals=dict())')
platform = sys.modules['platform']
def _data_root_Windows():
+ try:
+ root = os.environ['LOCALAPPDATA']
+ except KeyError:
+ # Windows XP
+ root = os.path.join(os.environ['USERPROFILE'], 'Local Settings')
- return os.path.join(os.environ['LOCALAPPDATA'], 'Python Keyring')
? ------- ^^^^^^^^^^^^^^^^^
+ return os.path.join(root, 'Python Keyring')
? ^^
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring')
# by default, use Unix convention
data_root = globals().get('_data_root_' + platform.system(), _data_root_Linux) |
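
The fix above makes _data_root_Windows tolerate the LOCALAPPDATA variable being absent, as on Windows XP. A small illustrative check (hypothetical, not from the record; the environment values are invented and it assumes the patched module's _data_root_Windows is in scope):

import os

# Simulate a Windows XP environment, where LOCALAPPDATA is not defined.
os.environ.pop('LOCALAPPDATA', None)
os.environ['USERPROFILE'] = r'C:\Documents and Settings\alice'

# With the patched function this resolves to
# C:\Documents and Settings\alice\Local Settings\Python Keyring
print(_data_root_Windows())
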
50bab0199e2d209dc177f5e3b5f193330048e403 | blinktCP.py | blinktCP.py |
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
| Use the Blinkt! library set_all rather than to loop on 8 pixels. | Use the Blinkt! library set_all rather than to loop on 8 pixels.
| Python | mit | dglaude/Blue-Dot-Colour-Picker |
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
- for x in range(blinkt.NUM_PIXELS):
+ # for x in range(blinkt.NUM_PIXELS):
- blinkt.set_pixel(x, r, g, b)
+ # blinkt.set_pixel(x, r, g, b)
+ blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
| Use the Blinkt! library set_all rather than to loop on 8 pixels. | ## Code Before:
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
## Instruction:
Use the Blinkt! library set_all rather than to loop on 8 pixels.
## Code After:
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
- for x in range(blinkt.NUM_PIXELS):
+ # for x in range(blinkt.NUM_PIXELS):
? +
- blinkt.set_pixel(x, r, g, b)
+ # blinkt.set_pixel(x, r, g, b)
? +
+ blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
20d63ba3fa1a9780d4a13c5119ae97a772efb502 | teardown_tests.py | teardown_tests.py |
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"reg.h5",
"reg_sub.h5",
"reg_f_f0.h5",
"reg_wt.h5",
"reg_norm.h5",
"reg_dict.h5",
"reg_post.h5",
"reg_traces.h5",
"reg_rois.h5",
"reg_proj.h5",
"reg_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"reg.h5",
"reg_sub.h5",
"reg_f_f0.h5",
"reg_wt.h5",
"reg_norm.h5",
"reg_dict.h5",
"reg_post.h5",
"reg_traces.h5",
"reg_rois.h5",
"reg_proj.h5",
"reg.zarr",
"reg_sub.zarr",
"reg_f_f0.zarr",
"reg_wt.zarr",
"reg_norm.zarr",
"reg_dict.zarr",
"reg_post.zarr",
"reg_traces.zarr",
"reg_rois.zarr",
"reg_proj.zarr",
"reg_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
| Remove test Zarr files after completion. | Remove test Zarr files after completion.
| Python | apache-2.0 | nanshe-org/nanshe_workflow,DudLab/nanshe_workflow |
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"reg.h5",
"reg_sub.h5",
"reg_f_f0.h5",
"reg_wt.h5",
"reg_norm.h5",
"reg_dict.h5",
"reg_post.h5",
"reg_traces.h5",
"reg_rois.h5",
"reg_proj.h5",
+ "reg.zarr",
+ "reg_sub.zarr",
+ "reg_f_f0.zarr",
+ "reg_wt.zarr",
+ "reg_norm.zarr",
+ "reg_dict.zarr",
+ "reg_post.zarr",
+ "reg_traces.zarr",
+ "reg_rois.zarr",
+ "reg_proj.zarr",
"reg_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
| Remove test Zarr files after completion. | ## Code Before:
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"reg.h5",
"reg_sub.h5",
"reg_f_f0.h5",
"reg_wt.h5",
"reg_norm.h5",
"reg_dict.h5",
"reg_post.h5",
"reg_traces.h5",
"reg_rois.h5",
"reg_proj.h5",
"reg_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
## Instruction:
Remove test Zarr files after completion.
## Code After:
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"reg.h5",
"reg_sub.h5",
"reg_f_f0.h5",
"reg_wt.h5",
"reg_norm.h5",
"reg_dict.h5",
"reg_post.h5",
"reg_traces.h5",
"reg_rois.h5",
"reg_proj.h5",
"reg.zarr",
"reg_sub.zarr",
"reg_f_f0.zarr",
"reg_wt.zarr",
"reg_norm.zarr",
"reg_dict.zarr",
"reg_post.zarr",
"reg_traces.zarr",
"reg_rois.zarr",
"reg_proj.zarr",
"reg_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"reg.h5",
"reg_sub.h5",
"reg_f_f0.h5",
"reg_wt.h5",
"reg_norm.h5",
"reg_dict.h5",
"reg_post.h5",
"reg_traces.h5",
"reg_rois.h5",
"reg_proj.h5",
+ "reg.zarr",
+ "reg_sub.zarr",
+ "reg_f_f0.zarr",
+ "reg_wt.zarr",
+ "reg_norm.zarr",
+ "reg_dict.zarr",
+ "reg_post.zarr",
+ "reg_traces.zarr",
+ "reg_rois.zarr",
+ "reg_proj.zarr",
"reg_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each) |
a885ebda3774f9d81422a96265bde25f6a93e7bf | tasks.py | tasks.py | from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
| from invocations import docs
from invocations.testing import test, integration, watch_tests
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
ns = Collection(test, integration, watch_tests, release, docs)
ns.configure({
'tests': {
'package': 'releases',
},
})
| Use invocations' integration task, also add watch_tests | Use invocations' integration task, also add watch_tests
| Python | bsd-2-clause | bitprophet/releases | from invocations import docs
- from invocations.testing import test
+ from invocations.testing import test, integration, watch_tests
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
- @task(help={
- 'pty': "Whether to run tests under a pseudo-tty",
+ ns = Collection(test, integration, watch_tests, release, docs)
+ ns.configure({
+ 'tests': {
+ 'package': 'releases',
+ },
})
- def integration(pty=True):
- """Runs integration tests."""
- cmd = 'inv test -o --tests=integration'
- run(cmd + ('' if pty else ' --no-pty'), pty=pty)
-
- ns = Collection(test, integration, release, docs)
- | Use invocations' integration task, also add watch_tests | ## Code Before:
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
## Instruction:
Use invocations' integration task, also add watch_tests
## Code After:
from invocations import docs
from invocations.testing import test, integration, watch_tests
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
ns = Collection(test, integration, watch_tests, release, docs)
ns.configure({
'tests': {
'package': 'releases',
},
})
| from invocations import docs
- from invocations.testing import test
+ from invocations.testing import test, integration, watch_tests
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
- @task(help={
- 'pty': "Whether to run tests under a pseudo-tty",
+ ns = Collection(test, integration, watch_tests, release, docs)
+ ns.configure({
+ 'tests': {
+ 'package': 'releases',
+ },
})
- def integration(pty=True):
- """Runs integration tests."""
- cmd = 'inv test -o --tests=integration'
- run(cmd + ('' if pty else ' --no-pty'), pty=pty)
-
-
- ns = Collection(test, integration, release, docs) |
0ae9b232b82285f2fa275b8ffa5dced6b9377b0e | keyring/credentials.py | keyring/credentials.py | import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
| import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
def __eq__(self, other: object) -> bool:
if not isinstance(other, EnvironCredential):
return NotImplemented
return (
self.user_env_var == other.user_env_var
and self.pwd_env_var == other.pwd_env_var
)
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
| Add equality operator to EnvironCredential | Add equality operator to EnvironCredential
Equality operator is useful for testing EnvironCredential
| Python | mit | jaraco/keyring | import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, EnvironCredential):
+ return NotImplemented
+
+ return (
+ self.user_env_var == other.user_env_var
+ and self.pwd_env_var == other.pwd_env_var
+ )
+
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
| Add equality operator to EnvironCredential | ## Code Before:
import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
## Instruction:
Add equality operator to EnvironCredential
## Code After:
import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
def __eq__(self, other: object) -> bool:
if not isinstance(other, EnvironCredential):
return NotImplemented
return (
self.user_env_var == other.user_env_var
and self.pwd_env_var == other.pwd_env_var
)
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
| import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, EnvironCredential):
+ return NotImplemented
+
+ return (
+ self.user_env_var == other.user_env_var
+ and self.pwd_env_var == other.pwd_env_var
+ )
+
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var) |
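
The record above gives EnvironCredential value equality, and its commit message notes this is useful for testing. A tiny hypothetical test sketch (the import path is assumed from the file name keyring/credentials.py and the environment variable names are made up):

from keyring.credentials import EnvironCredential

def test_environ_credential_equality():
    # Credentials built from the same environment variable names compare equal...
    assert EnvironCredential('SVC_USER', 'SVC_PWD') == EnvironCredential('SVC_USER', 'SVC_PWD')
    # ...and differ from credentials bound to other variables or from unrelated objects.
    assert EnvironCredential('SVC_USER', 'SVC_PWD') != EnvironCredential('OTHER_USER', 'OTHER_PWD')
    assert EnvironCredential('SVC_USER', 'SVC_PWD') != object()
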
b1504dac6d33b4f0774cabceeb219653b9b6201f | ui.py | ui.py | from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
| from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
| Change the way we get a clean, blank line before rendering letter bank | Change the way we get a clean, blank line before rendering letter bank
| Python | mit | tml/python-hangman-2017-summer | from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
+ print("\n{}".format(table.table))
- print()
- print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
| Change the way we get a clean, blank line before rendering letter bank | ## Code Before:
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
## Instruction:
Change the way we get a clean, blank line before rendering letter bank
## Code After:
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
| from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
+ print("\n{}".format(table.table))
- print()
- print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='') |
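A quick aside on the record above: printing an empty line and then the table produces exactly the same output as printing one string that starts with a newline, so the rewrite only merges two calls into one. A standalone sketch, using a stand-in string rather than real terminaltables output:
table_text = '+---+\n| a |\n+---+'       # stand-in for table.table
print()                                   # old style: blank line first...
print(table_text)                         # ...then the table
print('\n{}'.format(table_text))          # new style: identical output in a single call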
f8b52162748ccf62db881fad101e6a91ed014bd4 | plugins/Hitman_Codename_47.py | plugins/Hitman_Codename_47.py | import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class HitmanCodename47Plugin(BasePlugin):
Name = "Hitman: Codename 47"
support_os = ["Windows"]
def backup(self, _):
_.add_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
def restore(self, _):
_.restore_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'Hitman Codename 47')):
return True
return False
| import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class HitmanCodename47Plugin(BasePlugin):
Name = "Hitman: Codename 47"
support_os = ["Windows"]
def backup(self, _):
_.add_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
_.add_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def restore(self, _):
_.restore_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
_.restore_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'Hitman Codename 47')):
return True
return False
 | Add backup of config files for Hitman: Codename 47 | Add backup of config files for Hitman: Codename 47
| Python | mit | Pr0Ger/SGSB | import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class HitmanCodename47Plugin(BasePlugin):
Name = "Hitman: Codename 47"
support_os = ["Windows"]
def backup(self, _):
_.add_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
+ _.add_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def restore(self, _):
_.restore_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
+ _.restore_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'Hitman Codename 47')):
return True
return False
 | Add backup of config files for Hitman: Codename 47 | ## Code Before:
import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class HitmanCodename47Plugin(BasePlugin):
Name = "Hitman: Codename 47"
support_os = ["Windows"]
def backup(self, _):
_.add_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
def restore(self, _):
_.restore_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'Hitman Codename 47')):
return True
return False
## Instruction:
Add backup of config files for Hitman: Codename 47
## Code After:
import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class HitmanCodename47Plugin(BasePlugin):
Name = "Hitman: Codename 47"
support_os = ["Windows"]
def backup(self, _):
_.add_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
_.add_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def restore(self, _):
_.restore_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
_.restore_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'Hitman Codename 47')):
return True
return False
| import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class HitmanCodename47Plugin(BasePlugin):
Name = "Hitman: Codename 47"
support_os = ["Windows"]
def backup(self, _):
_.add_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
+ _.add_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def restore(self, _):
_.restore_files('Save', os.path.join(SteamGamesPath, 'Hitman Codename 47'), 'Hitman.sav')
+ _.restore_files('Config', os.path.join(SteamGamesPath, 'Hitman Codename 47'), ['Hitman.cfg', 'hitman.ini'])
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'Hitman Codename 47')):
return True
return False |
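One detail worth flagging in the record above: the save-game call passes a single filename while the new config calls pass a list, so the plugin base class presumably accepts either form. A hypothetical normaliser of the kind such a framework might use internally (not taken from the SGSB code):
def _as_list(files):
    # accept 'Hitman.sav' as well as ['Hitman.cfg', 'hitman.ini']
    return [files] if isinstance(files, str) else list(files)
assert _as_list('Hitman.sav') == ['Hitman.sav']
assert _as_list(['Hitman.cfg', 'hitman.ini']) == ['Hitman.cfg', 'hitman.ini']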
a4c247f5243c8ee637f1507fb9dc0281541af3b1 | pambox/speech/__init__.py | pambox/speech/__init__.py | from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
'Material'
]
| from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
from .experiment import Experiment
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
'Material',
'Experiment'
]
| Add Experiment to the init file of speech module | Add Experiment to the init file of speech module
| Python | bsd-3-clause | achabotl/pambox | from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
+ from .experiment import Experiment
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
- 'Material'
+ 'Material',
+ 'Experiment'
]
| Add Experiment to the init file of speech module | ## Code Before:
from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
'Material'
]
## Instruction:
Add Experiment to the init file of speech module
## Code After:
from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
from .experiment import Experiment
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
'Material',
'Experiment'
]
| from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
+ from .experiment import Experiment
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
- 'Material'
+ 'Material',
? +
+ 'Experiment'
] |
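For context on the record above: `__all__` only governs star imports, while the added `from .experiment import Experiment` line is what exposes the class at package level. An illustrative consumer, assuming the package is importable as pambox:
from pambox.speech import Experiment    # explicit import of the newly exposed class
from pambox.speech import *             # star import now also picks up Experiment via __all__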
6830f29022746838677ecca420aeff190943c5ed | random/__init__.py | random/__init__.py |
"""Nomisma Quantitative Finance random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
|
"""Random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
| Remove remnants of internal project naming in one docstring. | Remove remnants of internal project naming in one docstring.
PiperOrigin-RevId: 263530441
| Python | apache-2.0 | google/tf-quant-finance,google/tf-quant-finance |
- """Nomisma Quantitative Finance random number samplers."""
+ """Random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
| Remove remnants of internal project naming in one docstring. | ## Code Before:
"""Nomisma Quantitative Finance random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
## Instruction:
Remove remnants of internal project naming in one docstring.
## Code After:
"""Random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
|
- """Nomisma Quantitative Finance random number samplers."""
+ """Random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
] |
775cd06b3e99cef9c777a907fc69c5c20380bb75 | raspicam/camera.py | raspicam/camera.py | import logging
# import picamera
# import picamera.array
from abc import ABCMeta, abstractmethod
import cv2
LOG = logging.getLogger(__name__)
class Camera(metaclass=ABCMeta):
@abstractmethod
def frame_generator(self):
raise NotImplementedError('Not yet implemented')
class USBCam(Camera):
def frame_generator(self):
video = cv2.VideoCapture(1)
if not video.isOpened():
raise Exception('Unable to open camera')
while True:
success, image = video.read()
yield image
video.release()
class PiCamera(Camera):
def frame_generator(self):
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = (640, 480)
camera.framerate = 32
while True:
camera.capture(output, 'rgb', use_video_port=True)
yield output.array
output.truncate(0)
| import logging
# import picamera
# import picamera.array
from abc import ABCMeta, abstractmethod
import cv2
LOG = logging.getLogger(__name__)
class Camera(metaclass=ABCMeta):
@abstractmethod
def frame_generator(self):
raise NotImplementedError('Not yet implemented')
class USBCam(Camera):
def __init__(self, index=-1):
self.index = index
def frame_generator(self):
video = cv2.VideoCapture(self.index)
if not video.isOpened():
raise Exception('Unable to open camera')
while True:
success, image = video.read()
yield image
video.release()
class PiCamera(Camera):
def frame_generator(self):
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = (640, 480)
camera.framerate = 32
while True:
camera.capture(output, 'rgb', use_video_port=True)
yield output.array
output.truncate(0)
| Allow the USB index to be modified | Allow the USB index to be modified
| Python | mit | exhuma/raspicam,exhuma/raspicam,exhuma/raspicam | import logging
# import picamera
# import picamera.array
from abc import ABCMeta, abstractmethod
import cv2
LOG = logging.getLogger(__name__)
class Camera(metaclass=ABCMeta):
@abstractmethod
def frame_generator(self):
raise NotImplementedError('Not yet implemented')
class USBCam(Camera):
+ def __init__(self, index=-1):
+ self.index = index
+
def frame_generator(self):
- video = cv2.VideoCapture(1)
+ video = cv2.VideoCapture(self.index)
if not video.isOpened():
raise Exception('Unable to open camera')
while True:
success, image = video.read()
yield image
video.release()
class PiCamera(Camera):
def frame_generator(self):
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = (640, 480)
camera.framerate = 32
while True:
camera.capture(output, 'rgb', use_video_port=True)
yield output.array
output.truncate(0)
| Allow the USB index to be modified | ## Code Before:
import logging
# import picamera
# import picamera.array
from abc import ABCMeta, abstractmethod
import cv2
LOG = logging.getLogger(__name__)
class Camera(metaclass=ABCMeta):
@abstractmethod
def frame_generator(self):
raise NotImplementedError('Not yet implemented')
class USBCam(Camera):
def frame_generator(self):
video = cv2.VideoCapture(1)
if not video.isOpened():
raise Exception('Unable to open camera')
while True:
success, image = video.read()
yield image
video.release()
class PiCamera(Camera):
def frame_generator(self):
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = (640, 480)
camera.framerate = 32
while True:
camera.capture(output, 'rgb', use_video_port=True)
yield output.array
output.truncate(0)
## Instruction:
Allow the USB index to be modified
## Code After:
import logging
# import picamera
# import picamera.array
from abc import ABCMeta, abstractmethod
import cv2
LOG = logging.getLogger(__name__)
class Camera(metaclass=ABCMeta):
@abstractmethod
def frame_generator(self):
raise NotImplementedError('Not yet implemented')
class USBCam(Camera):
def __init__(self, index=-1):
self.index = index
def frame_generator(self):
video = cv2.VideoCapture(self.index)
if not video.isOpened():
raise Exception('Unable to open camera')
while True:
success, image = video.read()
yield image
video.release()
class PiCamera(Camera):
def frame_generator(self):
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = (640, 480)
camera.framerate = 32
while True:
camera.capture(output, 'rgb', use_video_port=True)
yield output.array
output.truncate(0)
| import logging
# import picamera
# import picamera.array
from abc import ABCMeta, abstractmethod
import cv2
LOG = logging.getLogger(__name__)
class Camera(metaclass=ABCMeta):
@abstractmethod
def frame_generator(self):
raise NotImplementedError('Not yet implemented')
class USBCam(Camera):
+ def __init__(self, index=-1):
+ self.index = index
+
def frame_generator(self):
- video = cv2.VideoCapture(1)
? ^
+ video = cv2.VideoCapture(self.index)
? ^^^^^^^^^^
if not video.isOpened():
raise Exception('Unable to open camera')
while True:
success, image = video.read()
yield image
video.release()
class PiCamera(Camera):
def frame_generator(self):
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as output:
camera.resolution = (640, 480)
camera.framerate = 32
while True:
camera.capture(output, 'rgb', use_video_port=True)
yield output.array
output.truncate(0) |
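With the constructor added above, callers can pick a specific capture device instead of the previously hard-coded index 1; in OpenCV an index of -1 opens the first available camera. A brief usage sketch in which the index 0 and the processing step are illustrative:
cam = USBCam(index=0)                  # typically /dev/video0 on Linux
for frame in cam.frame_generator():    # yields frames read from cv2.VideoCapture
    pass                               # process or display each frame here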
d328129a2f2909c1b8769f1edb94746c4a88dd28 | test_project/test_models.py | test_project/test_models.py | from django.db import models
class TestUser0(models.Model):
username = models.CharField()
test_field = models.CharField('My title')
class Meta:
app_label = 'controlcenter'
def foo(self):
return 'original foo value'
foo.short_description = 'original foo label'
def bar(self):
return 'original bar value'
bar.short_description = 'original bar label'
def baz(self):
pass
baz.short_description = ''
def egg(self):
return 'original egg value'
class TestUser1(models.Model):
primary = models.AutoField(primary_key=True)
username = models.CharField()
class Meta:
app_label = 'controlcenter'
| from django.db import models
class TestUser0(models.Model):
username = models.CharField(max_length=255)
test_field = models.CharField('My title', max_length=255)
class Meta:
app_label = 'controlcenter'
def foo(self):
return 'original foo value'
foo.short_description = 'original foo label'
def bar(self):
return 'original bar value'
bar.short_description = 'original bar label'
def baz(self):
pass
baz.short_description = ''
def egg(self):
return 'original egg value'
class TestUser1(models.Model):
primary = models.AutoField(primary_key=True)
username = models.CharField(max_length=255)
class Meta:
app_label = 'controlcenter'
| Add `max_length` to char fields | Add `max_length` to char fields
| Python | bsd-3-clause | byashimov/django-controlcenter,byashimov/django-controlcenter,byashimov/django-controlcenter | from django.db import models
class TestUser0(models.Model):
- username = models.CharField()
+ username = models.CharField(max_length=255)
- test_field = models.CharField('My title')
+ test_field = models.CharField('My title', max_length=255)
class Meta:
app_label = 'controlcenter'
def foo(self):
return 'original foo value'
foo.short_description = 'original foo label'
def bar(self):
return 'original bar value'
bar.short_description = 'original bar label'
def baz(self):
pass
baz.short_description = ''
def egg(self):
return 'original egg value'
class TestUser1(models.Model):
primary = models.AutoField(primary_key=True)
- username = models.CharField()
+ username = models.CharField(max_length=255)
class Meta:
app_label = 'controlcenter'
| Add `max_length` to char fields | ## Code Before:
from django.db import models
class TestUser0(models.Model):
username = models.CharField()
test_field = models.CharField('My title')
class Meta:
app_label = 'controlcenter'
def foo(self):
return 'original foo value'
foo.short_description = 'original foo label'
def bar(self):
return 'original bar value'
bar.short_description = 'original bar label'
def baz(self):
pass
baz.short_description = ''
def egg(self):
return 'original egg value'
class TestUser1(models.Model):
primary = models.AutoField(primary_key=True)
username = models.CharField()
class Meta:
app_label = 'controlcenter'
## Instruction:
Add `max_length` to char fields
## Code After:
from django.db import models
class TestUser0(models.Model):
username = models.CharField(max_length=255)
test_field = models.CharField('My title', max_length=255)
class Meta:
app_label = 'controlcenter'
def foo(self):
return 'original foo value'
foo.short_description = 'original foo label'
def bar(self):
return 'original bar value'
bar.short_description = 'original bar label'
def baz(self):
pass
baz.short_description = ''
def egg(self):
return 'original egg value'
class TestUser1(models.Model):
primary = models.AutoField(primary_key=True)
username = models.CharField(max_length=255)
class Meta:
app_label = 'controlcenter'
| from django.db import models
class TestUser0(models.Model):
- username = models.CharField()
+ username = models.CharField(max_length=255)
? ++++++++++++++
- test_field = models.CharField('My title')
+ test_field = models.CharField('My title', max_length=255)
? ++++++++++++++++
class Meta:
app_label = 'controlcenter'
def foo(self):
return 'original foo value'
foo.short_description = 'original foo label'
def bar(self):
return 'original bar value'
bar.short_description = 'original bar label'
def baz(self):
pass
baz.short_description = ''
def egg(self):
return 'original egg value'
class TestUser1(models.Model):
primary = models.AutoField(primary_key=True)
- username = models.CharField()
+ username = models.CharField(max_length=255)
? ++++++++++++++
class Meta:
app_label = 'controlcenter' |
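The added arguments in the record above are not cosmetic: Django's system checks refuse to start when a CharField lacks max_length (check fields.E120), so the fixture models would otherwise block the whole test suite. A minimal illustration with a hypothetical model, not part of the test project:
from django.db import models
class Example(models.Model):
    broken = models.CharField()                # rejected by the checks: no max_length
    fixed = models.CharField(max_length=255)   # the form the test models use now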
31a9b285a0445c895aeff02b2abbeda12bf7f3d7 | wagtail/admin/tests/pages/test_content_type_use_view.py | wagtail/admin/tests/pages/test_content_type_use_view.py | from django.test import TestCase
from django.urls import reverse
from wagtail.tests.utils import WagtailTestUtils
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
def test_content_type_use(self):
# Get use of event page
response = self.client.get(reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
| from django.test import TestCase
from django.urls import reverse
from django.utils.http import urlencode
from wagtail.tests.testapp.models import EventPage
from wagtail.tests.utils import WagtailTestUtils
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
self.christmas_page = EventPage.objects.get(title="Christmas")
def test_content_type_use(self):
# Get use of event page
request_url = reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage'))
response = self.client.get(request_url)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
# Links to 'delete' etc should include a 'next' URL parameter pointing back here
delete_url = (
reverse('wagtailadmin_pages:delete', args=(self.christmas_page.id,))
+ '?' + urlencode({'next': request_url})
)
self.assertContains(response, delete_url)
| Add test for button URLs including a 'next' parameter | Add test for button URLs including a 'next' parameter
| Python | bsd-3-clause | torchbox/wagtail,FlipperPA/wagtail,gasman/wagtail,gasman/wagtail,mixxorz/wagtail,thenewguy/wagtail,mixxorz/wagtail,torchbox/wagtail,torchbox/wagtail,thenewguy/wagtail,gasman/wagtail,rsalmaso/wagtail,mixxorz/wagtail,wagtail/wagtail,takeflight/wagtail,FlipperPA/wagtail,torchbox/wagtail,zerolab/wagtail,thenewguy/wagtail,takeflight/wagtail,zerolab/wagtail,kaedroho/wagtail,wagtail/wagtail,zerolab/wagtail,thenewguy/wagtail,gasman/wagtail,rsalmaso/wagtail,kaedroho/wagtail,kaedroho/wagtail,thenewguy/wagtail,mixxorz/wagtail,zerolab/wagtail,jnns/wagtail,takeflight/wagtail,rsalmaso/wagtail,gasman/wagtail,FlipperPA/wagtail,wagtail/wagtail,wagtail/wagtail,zerolab/wagtail,rsalmaso/wagtail,kaedroho/wagtail,FlipperPA/wagtail,jnns/wagtail,jnns/wagtail,takeflight/wagtail,kaedroho/wagtail,mixxorz/wagtail,wagtail/wagtail,rsalmaso/wagtail,jnns/wagtail | from django.test import TestCase
from django.urls import reverse
+ from django.utils.http import urlencode
+ from wagtail.tests.testapp.models import EventPage
from wagtail.tests.utils import WagtailTestUtils
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
+ self.christmas_page = EventPage.objects.get(title="Christmas")
def test_content_type_use(self):
# Get use of event page
- response = self.client.get(reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage')))
+ request_url = reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage'))
+ response = self.client.get(request_url)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
+ # Links to 'delete' etc should include a 'next' URL parameter pointing back here
+ delete_url = (
+ reverse('wagtailadmin_pages:delete', args=(self.christmas_page.id,))
+ + '?' + urlencode({'next': request_url})
+ )
+ self.assertContains(response, delete_url)
+ | Add test for button URLs including a 'next' parameter | ## Code Before:
from django.test import TestCase
from django.urls import reverse
from wagtail.tests.utils import WagtailTestUtils
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
def test_content_type_use(self):
# Get use of event page
response = self.client.get(reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
## Instruction:
Add test for button URLs including a 'next' parameter
## Code After:
from django.test import TestCase
from django.urls import reverse
from django.utils.http import urlencode
from wagtail.tests.testapp.models import EventPage
from wagtail.tests.utils import WagtailTestUtils
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
self.christmas_page = EventPage.objects.get(title="Christmas")
def test_content_type_use(self):
# Get use of event page
request_url = reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage'))
response = self.client.get(request_url)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
# Links to 'delete' etc should include a 'next' URL parameter pointing back here
delete_url = (
reverse('wagtailadmin_pages:delete', args=(self.christmas_page.id,))
+ '?' + urlencode({'next': request_url})
)
self.assertContains(response, delete_url)
| from django.test import TestCase
from django.urls import reverse
+ from django.utils.http import urlencode
+ from wagtail.tests.testapp.models import EventPage
from wagtail.tests.utils import WagtailTestUtils
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
+ self.christmas_page = EventPage.objects.get(title="Christmas")
def test_content_type_use(self):
# Get use of event page
- response = self.client.get(reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage')))
? ^^^^^ ---------------- -
+ request_url = reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage'))
? +++ ^^^^^
+ response = self.client.get(request_url)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
+
+ # Links to 'delete' etc should include a 'next' URL parameter pointing back here
+ delete_url = (
+ reverse('wagtailadmin_pages:delete', args=(self.christmas_page.id,))
+ + '?' + urlencode({'next': request_url})
+ )
+ self.assertContains(response, delete_url) |
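For reference on the new assertion above: Django's urlencode percent-encodes the slashes of the return URL, so the delete link the test searches for ends in a query string shaped like the one below (the admin path shown is illustrative, not the exact URL):
from django.utils.http import urlencode
query = urlencode({'next': '/admin/pages/usage/tests/eventpage/'})
# query == 'next=%2Fadmin%2Fpages%2Fusage%2Ftests%2Feventpage%2F'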
f3001e7e72f366fde962bbdd52f38a983d9f7026 | routes/__init__.py | routes/__init__.py | from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
app.router.add_get('/users/{user_name}/{project_name}', project)
| from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
app.router.add_get('/projects/{project_name}', project)
| Rename route '/users/{user_name}/{project_name}' to '/projects/{project_name}' | Rename route '/users/{user_name}/{project_name}' to '/projects/{project_name}'
| Python | agpl-3.0 | wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp | from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
- app.router.add_get('/users/{user_name}/{project_name}', project)
+ app.router.add_get('/projects/{project_name}', project)
| Rename route '/users/{user_name}/{project_name}' to '/projects/{project_name}' | ## Code Before:
from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
app.router.add_get('/users/{user_name}/{project_name}', project)
## Instruction:
Rename route '/users/{user_name}/{project_name}' to '/projects/{project_name}'
## Code After:
from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
app.router.add_get('/projects/{project_name}', project)
| from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
- app.router.add_get('/users/{user_name}/{project_name}', project)
? ^ ---------------
+ app.router.add_get('/projects/{project_name}', project)
? ^^^^^^^
|
fa78c5b5442c904ba3888b858eb2c284f16664ed | pages/urls/page.py | pages/urls/page.py | from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
url(r'', include(router.urls)),
)
| from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
url(r'', include(router.urls)),
]
| Purge unnecessary patterns function from urls | Purge unnecessary patterns function from urls
| Python | bsd-2-clause | incuna/feincms-pages-api | - from django.conf.urls import include, patterns, url
+ from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
- urlpatterns = patterns('',
+ urlpatterns = [
url(r'', include(router.urls)),
- )
+ ]
| Purge unnecessary patterns function from urls | ## Code Before:
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
url(r'', include(router.urls)),
)
## Instruction:
Purge unnecessary patterns function from urls
## Code After:
from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
url(r'', include(router.urls)),
]
| - from django.conf.urls import include, patterns, url
? ----------
+ from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
- urlpatterns = patterns('',
+ urlpatterns = [
url(r'', include(router.urls)),
- )
+ ] |
4647183697170ce22910bd6cde27746297543514 | python3_tools/get_edx_webservices.py | python3_tools/get_edx_webservices.py | import github
from get_repos import *
webservices = []
for repo in expanded_repos_list(orgs):
try:
metadata = get_remote_yaml(repo, 'openedx.yaml')
except github.GithubException:
continue
if 'tags' in metadata and 'webservice' in metadata['tags']:
print("{}".format(repo.html_url))
webservices.append(repo)
| import github
from get_repos import orgs, expanded_repos_list, get_remote_yaml
webservices = []
for repo in expanded_repos_list(orgs):
try:
metadata = get_remote_yaml(repo, 'openedx.yaml')
except github.GithubException:
continue
if 'tags' in metadata and 'webservice' in metadata['tags']:
print("{}".format(repo.html_url))
webservices.append(repo)
| Add tooling to get all of edx's web services. | Add tooling to get all of edx's web services.
| Python | apache-2.0 | edx/repo-tools,edx/repo-tools | import github
- from get_repos import *
+ from get_repos import orgs, expanded_repos_list, get_remote_yaml
webservices = []
for repo in expanded_repos_list(orgs):
try:
metadata = get_remote_yaml(repo, 'openedx.yaml')
except github.GithubException:
continue
if 'tags' in metadata and 'webservice' in metadata['tags']:
print("{}".format(repo.html_url))
webservices.append(repo)
| Add tooling to get all of edx's web services. | ## Code Before:
import github
from get_repos import *
webservices = []
for repo in expanded_repos_list(orgs):
try:
metadata = get_remote_yaml(repo, 'openedx.yaml')
except github.GithubException:
continue
if 'tags' in metadata and 'webservice' in metadata['tags']:
print("{}".format(repo.html_url))
webservices.append(repo)
## Instruction:
Add tooling to get all of edx's web services.
## Code After:
import github
from get_repos import orgs, expanded_repos_list, get_remote_yaml
webservices = []
for repo in expanded_repos_list(orgs):
try:
metadata = get_remote_yaml(repo, 'openedx.yaml')
except github.GithubException:
continue
if 'tags' in metadata and 'webservice' in metadata['tags']:
print("{}".format(repo.html_url))
webservices.append(repo)
| import github
- from get_repos import *
+ from get_repos import orgs, expanded_repos_list, get_remote_yaml
webservices = []
for repo in expanded_repos_list(orgs):
try:
metadata = get_remote_yaml(repo, 'openedx.yaml')
except github.GithubException:
continue
if 'tags' in metadata and 'webservice' in metadata['tags']:
print("{}".format(repo.html_url))
webservices.append(repo)
|
5b7cd2f62bb86658b6fce7503a4fab9238b8faa5 | channelguide/init.py | channelguide/init.py |
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
|
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
| Fix for django trying to rollback connections on request exceptions. | Fix for django trying to rollback connections on request exceptions.
git-svn-id: 98eea730e22c7fb5f8b38c49248ce5c7e9bb5936@525 be7adf91-e322-0410-8f47-e6edb61c52aa
| Python | agpl-3.0 | kmshi/miroguide,kmshi/miroguide,kmshi/miroguide |
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
+ import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
+ # hack for the fact that django tries to rollback its non-existant
+ # connection when requests finish.
+ dispatcher.disconnect(django.db._rollback_on_exception,
+ signal=signals.got_request_exception)
+
+ | Fix for django trying to rollback connections on request exceptions. | ## Code Before:
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
## Instruction:
Fix for django trying to rollback connections on request exceptions.
## Code After:
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
|
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
+ import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
+
+ # hack for the fact that django tries to rollback its non-existant
+ # connection when requests finish.
+ dispatcher.disconnect(django.db._rollback_on_exception,
+ signal=signals.got_request_exception)
+ |
0a2fd079c828a8d2f48f8e2c33574aec2d416f06 | mbuild/lib/atoms/c3.py | mbuild/lib/atoms/c3.py | from __future__ import division
import numpy as np
import mbuild as mb
class C3(mb.Compound):
"""A tri-valent, planar carbon."""
def __init__(self):
super(C3, self).__init__()
self.add(mb.Particle(name='C'))
self.add(mb.Port(anchor=self[0]), 'up')
self['up'].translate(self['up'], np.array([0, 0.07, 0]))
self.add(mb.Port(anchor=self[0]), 'down')
self['down'].translate(np.array([0, 0.07, 0]))
self['down'].spin(np.pi * 2/3, [0, 0, 1])
self.add(mb.Port(anchor=self[0]), 'left')
self['left'].translate(np.array([0, 0.07, 0]))
self['left'].spin(-np.pi * 2/3, [0, 0, 1])
if __name__ == '__main__':
m = C3()
m.visualize(show_ports=True)
| from __future__ import division
import numpy as np
import mbuild as mb
class C3(mb.Compound):
"""A tri-valent, planar carbon."""
def __init__(self):
super(C3, self).__init__()
self.add(mb.Particle(name='C'))
self.add(mb.Port(anchor=self[0]), 'up')
self['up'].translate(np.array([0, 0.07, 0]))
self.add(mb.Port(anchor=self[0]), 'down')
self['down'].translate(np.array([0, 0.07, 0]))
self['down'].spin(np.pi * 2/3, [0, 0, 1])
self.add(mb.Port(anchor=self[0]), 'left')
self['left'].translate(np.array([0, 0.07, 0]))
self['left'].spin(-np.pi * 2/3, [0, 0, 1])
if __name__ == '__main__':
m = C3()
m.visualize(show_ports=True)
| Fix bug in translate call | Fix bug in translate call
| Python | mit | iModels/mbuild,ctk3b/mbuild,tcmoore3/mbuild,summeraz/mbuild,summeraz/mbuild,ctk3b/mbuild,iModels/mbuild,tcmoore3/mbuild | from __future__ import division
import numpy as np
import mbuild as mb
class C3(mb.Compound):
"""A tri-valent, planar carbon."""
def __init__(self):
super(C3, self).__init__()
self.add(mb.Particle(name='C'))
self.add(mb.Port(anchor=self[0]), 'up')
- self['up'].translate(self['up'], np.array([0, 0.07, 0]))
+ self['up'].translate(np.array([0, 0.07, 0]))
self.add(mb.Port(anchor=self[0]), 'down')
self['down'].translate(np.array([0, 0.07, 0]))
self['down'].spin(np.pi * 2/3, [0, 0, 1])
self.add(mb.Port(anchor=self[0]), 'left')
self['left'].translate(np.array([0, 0.07, 0]))
self['left'].spin(-np.pi * 2/3, [0, 0, 1])
if __name__ == '__main__':
m = C3()
m.visualize(show_ports=True)
| Fix bug in translate call | ## Code Before:
from __future__ import division
import numpy as np
import mbuild as mb
class C3(mb.Compound):
"""A tri-valent, planar carbon."""
def __init__(self):
super(C3, self).__init__()
self.add(mb.Particle(name='C'))
self.add(mb.Port(anchor=self[0]), 'up')
self['up'].translate(self['up'], np.array([0, 0.07, 0]))
self.add(mb.Port(anchor=self[0]), 'down')
self['down'].translate(np.array([0, 0.07, 0]))
self['down'].spin(np.pi * 2/3, [0, 0, 1])
self.add(mb.Port(anchor=self[0]), 'left')
self['left'].translate(np.array([0, 0.07, 0]))
self['left'].spin(-np.pi * 2/3, [0, 0, 1])
if __name__ == '__main__':
m = C3()
m.visualize(show_ports=True)
## Instruction:
Fix bug in translate call
## Code After:
from __future__ import division
import numpy as np
import mbuild as mb
class C3(mb.Compound):
"""A tri-valent, planar carbon."""
def __init__(self):
super(C3, self).__init__()
self.add(mb.Particle(name='C'))
self.add(mb.Port(anchor=self[0]), 'up')
self['up'].translate(np.array([0, 0.07, 0]))
self.add(mb.Port(anchor=self[0]), 'down')
self['down'].translate(np.array([0, 0.07, 0]))
self['down'].spin(np.pi * 2/3, [0, 0, 1])
self.add(mb.Port(anchor=self[0]), 'left')
self['left'].translate(np.array([0, 0.07, 0]))
self['left'].spin(-np.pi * 2/3, [0, 0, 1])
if __name__ == '__main__':
m = C3()
m.visualize(show_ports=True)
| from __future__ import division
import numpy as np
import mbuild as mb
class C3(mb.Compound):
"""A tri-valent, planar carbon."""
def __init__(self):
super(C3, self).__init__()
self.add(mb.Particle(name='C'))
self.add(mb.Port(anchor=self[0]), 'up')
- self['up'].translate(self['up'], np.array([0, 0.07, 0]))
? ------------
+ self['up'].translate(np.array([0, 0.07, 0]))
self.add(mb.Port(anchor=self[0]), 'down')
self['down'].translate(np.array([0, 0.07, 0]))
self['down'].spin(np.pi * 2/3, [0, 0, 1])
self.add(mb.Port(anchor=self[0]), 'left')
self['left'].translate(np.array([0, 0.07, 0]))
self['left'].spin(-np.pi * 2/3, [0, 0, 1])
if __name__ == '__main__':
m = C3()
m.visualize(show_ports=True) |
ceadcb80150278ae29fb60b339049f4c840c135d | astroquery/nist/tests/test_nist_remote.py | astroquery/nist/tests/test_nist_remote.py | from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
| from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
| Add missing numpy import, and cleanup the rest | Add missing numpy import, and cleanup the rest
| Python | bsd-3-clause | ceb8/astroquery,imbasimba/astroquery,ceb8/astroquery,imbasimba/astroquery | from __future__ import print_function
+
+ import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
- import requests
- import imp
from ... import nist
-
- imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
| Add missing numpy import, and cleanup the rest | ## Code Before:
from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
## Instruction:
Add missing numpy import, and cleanup the rest
## Code After:
from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
| from __future__ import print_function
+
+ import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
- import requests
- import imp
from ... import nist
-
- imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637') |
b92c75e1145915892723206a77593b9701e608bf | webapp/hello.py | webapp/hello.py | from flask import Flask, render_template, request
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from scipy import misc
import numpy as np
from VGG16_gs_model import *
# sett opp nevralt nettverk:
model = VGG_16()
fpath = '../models/vgg16_sg.h5';
model.load_weights(fpath)
# Sett opp webapp
app = Flask(__name__)
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
# Hent ut prediksjon
img = misc.imread('./img/' + filename)
img = misc.imresize(img, (224, 224))
img = img.transpose()
img = np.expand_dims(img, axis=0).astype(np.uint8)
preds, idxs, classes = predict(model, img)
return 'You are a ' + str(classes[0]) + ' with a confidence of ' + str(preds[0])
return render_template('upload.html')
@app.route("/")
def hello():
return "Hello Nabla and Timini!"
if __name__ == "__main__":
app.run(host = '0.0.0.0' )
| from flask import Flask, render_template, request
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from scipy import misc
import numpy as np
from VGG16_gs_model import *
# sett opp nevralt nettverk:
model = VGG_16()
fpath = '../models/vgg16_sg.h5';
model.load_weights(fpath)
# Sett opp webapp
app = Flask(__name__)
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
# Hent ut prediksjon
img = misc.imread('./img/' + filename)
img = misc.imresize(img, (224, 224))
img = img.transpose()
img = np.expand_dims(img, axis=0).astype(np.uint8)
preds, idxs, classes = predict(model, img)
return 'You are a ' + str(classes[0]) + ' with a confidence of ' + str(preds[0])
return render_template('upload.html')
@app.route("/")
def hello():
return "Hello Nabla and Timini!"
if __name__ == "__main__":
app.run(host = '0.0.0.0', port=5000)
| Add port on flask app | Add port on flask app
| Python | apache-2.0 | jchalvorsen/ConsultantOrNot,jchalvorsen/ConsultantOrNot,jchalvorsen/ConsultantOrNot | from flask import Flask, render_template, request
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from scipy import misc
import numpy as np
from VGG16_gs_model import *
# sett opp nevralt nettverk:
model = VGG_16()
fpath = '../models/vgg16_sg.h5';
model.load_weights(fpath)
# Sett opp webapp
app = Flask(__name__)
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
# Hent ut prediksjon
img = misc.imread('./img/' + filename)
img = misc.imresize(img, (224, 224))
img = img.transpose()
img = np.expand_dims(img, axis=0).astype(np.uint8)
preds, idxs, classes = predict(model, img)
return 'You are a ' + str(classes[0]) + ' with a confidence of ' + str(preds[0])
return render_template('upload.html')
@app.route("/")
def hello():
return "Hello Nabla and Timini!"
if __name__ == "__main__":
- app.run(host = '0.0.0.0' )
+ app.run(host = '0.0.0.0', port=5000)
| Add port on flask app | ## Code Before:
from flask import Flask, render_template, request
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from scipy import misc
import numpy as np
from VGG16_gs_model import *
# sett opp nevralt nettverk:
model = VGG_16()
fpath = '../models/vgg16_sg.h5';
model.load_weights(fpath)
# Sett opp webapp
app = Flask(__name__)
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
# Hent ut prediksjon
img = misc.imread('./img/' + filename)
img = misc.imresize(img, (224, 224))
img = img.transpose()
img = np.expand_dims(img, axis=0).astype(np.uint8)
preds, idxs, classes = predict(model, img)
return 'You are a ' + str(classes[0]) + ' with a confidence of ' + str(preds[0])
return render_template('upload.html')
@app.route("/")
def hello():
return "Hello Nabla and Timini!"
if __name__ == "__main__":
app.run(host = '0.0.0.0' )
## Instruction:
Add port on flask app
## Code After:
from flask import Flask, render_template, request
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from scipy import misc
import numpy as np
from VGG16_gs_model import *
# sett opp nevralt nettverk:
model = VGG_16()
fpath = '../models/vgg16_sg.h5';
model.load_weights(fpath)
# Sett opp webapp
app = Flask(__name__)
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
# Hent ut prediksjon
img = misc.imread('./img/' + filename)
img = misc.imresize(img, (224, 224))
img = img.transpose()
img = np.expand_dims(img, axis=0).astype(np.uint8)
preds, idxs, classes = predict(model, img)
return 'You are a ' + str(classes[0]) + ' with a confidence of ' + str(preds[0])
return render_template('upload.html')
@app.route("/")
def hello():
return "Hello Nabla and Timini!"
if __name__ == "__main__":
app.run(host = '0.0.0.0', port=5000)
| from flask import Flask, render_template, request
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from scipy import misc
import numpy as np
from VGG16_gs_model import *
# set up the neural network:
model = VGG_16()
fpath = '../models/vgg16_sg.h5';
model.load_weights(fpath)
# Set up the web app
app = Flask(__name__)
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
# Fetch the prediction
img = misc.imread('./img/' + filename)
img = misc.imresize(img, (224, 224))
img = img.transpose()
img = np.expand_dims(img, axis=0).astype(np.uint8)
preds, idxs, classes = predict(model, img)
return 'You are a ' + str(classes[0]) + ' with a confidence of ' + str(preds[0])
return render_template('upload.html')
@app.route("/")
def hello():
return "Hello Nabla and Timini!"
if __name__ == "__main__":
- app.run(host = '0.0.0.0' )
+ app.run(host = '0.0.0.0', port=5000)
? + +++++++++
|
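An editorial aside on the record above, not part of the original commit: hard-coding port 5000 works, but the usual Flask-in-Docker refinement is to read the port from the environment with a fallback, so the same image can be remapped without a code change. A minimal sketch (the PORT variable name is an assumption, not something the commit uses):

import os
from flask import Flask

app = Flask(__name__)

if __name__ == "__main__":
    # Bind to all interfaces inside the container; let the environment pick the port.
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 5000)))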
9fba993ea52df48de8d812c1ad0128d48c8ab4cf | classes/room.py | classes/room.py | class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
| class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| Add Error statement to add_occupant method for when max capacity is reached | Add Error statement to add_occupant method for when max capacity is reached
| Python | mit | peterpaints/room-allocator | class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
-
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
+ print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
- return "Room is at full capacity"
+ raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| Add Error statement to add_occupant method for when max capacity is reached | ## Code Before:
class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
## Instruction:
Add Error statement to add_occupant method for when max capacity is reached
## Code After:
class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
-
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
+ print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
- return "Room is at full capacity"
+ raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity") |
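A side note on the pattern in this record (an editorial sketch, not part of the dataset): raising a bare Exception forces callers to catch everything; a dedicated exception class keeps the handling targeted. The RoomFullError name below is hypothetical.

class RoomFullError(Exception):
    """Raised when a room has already reached max_persons occupants."""

def add_occupant(self, person):
    # Same guard as the commit, but with a specific, catchable exception type.
    if len(self.persons) >= self.max_persons:
        raise RoomFullError(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
    self.persons.append(person)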
380f565231997353faa30f77bbe84d0ed6bbf009 | pal/services/__init__.py | pal/services/__init__.py | from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
| from .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
| Make directory service the only service | Make directory service the only service
| Python | bsd-3-clause | Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal | + from .directory_service import DirectoryService
- from .omdb_service import OMDBService
+ # from .omdb_service import OMDBService
ALL_SERVICES = [
- # DirectoryService()
+ DirectoryService()
- OMDBService()
+ # OMDBService()
]
| Make directory service the only service | ## Code Before:
from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
## Instruction:
Make directory service the only service
## Code After:
from .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
| + from .directory_service import DirectoryService
- from .omdb_service import OMDBService
+ # from .omdb_service import OMDBService
? ++
ALL_SERVICES = [
- # DirectoryService()
? --
+ DirectoryService()
- OMDBService()
+ # OMDBService()
? ++
] |
bbe425da10607692c1aace560b1b61b089137704 | frappe/patches/v12_0/rename_events_repeat_on.py | frappe/patches/v12_0/rename_events_repeat_on.py | import frappe
from frappe.utils import get_datetime
def execute():
weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
weekly_events = frappe.get_list("Event", filters={"repeat_this_event": 1, "repeat_on": "Every Week"}, fields=["name", "starts_on"])
frappe.reload_doc("desk", "doctype", "event")
# Initially Daily Events had option to choose days, but now Weekly does, so just changing from Daily -> Weekly does the job
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Day'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Week'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Monthly' WHERE `tabEvent`.repeat_on='Every Month'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Yearly' WHERE `tabEvent`.repeat_on='Every Year'""")
for weekly_event in weekly_events:
# Set WeekDay based on the starts_on so that event can repeat Weekly
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.{0}=1 WHERE `tabEvent`.name='{1}'""".format(weekdays[get_datetime(weekly_event.starts_on).weekday()], weekly_event.name))
| import frappe
from frappe.utils import get_datetime
def execute():
weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
weekly_events = frappe.get_list("Event", filters={"repeat_this_event": 1, "repeat_on": "Every Week"}, fields=["name", "starts_on"])
frappe.reload_doc("desk", "doctype", "event")
# Initially Daily Events had option to choose days, but now Weekly does, so just changing from Daily -> Weekly does the job
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Day'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Week'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Monthly' WHERE `tabEvent`.repeat_on='Every Month'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Yearly' WHERE `tabEvent`.repeat_on='Every Year'""")
for weekly_event in weekly_events:
# Set WeekDay based on the starts_on so that event can repeat Weekly
frappe.db.set_value('Event', weekly_event.name, weekdays[get_datetime(weekly_event.starts_on).weekday()], 1, update_modified=1)
| Convert to SQL to set_value | fix: Convert to SQL to set_value | Python | mit | mhbu50/frappe,saurabh6790/frappe,vjFaLk/frappe,almeidapaulopt/frappe,vjFaLk/frappe,frappe/frappe,almeidapaulopt/frappe,vjFaLk/frappe,mhbu50/frappe,mhbu50/frappe,StrellaGroup/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,adityahase/frappe,adityahase/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,yashodhank/frappe,StrellaGroup/frappe,adityahase/frappe,saurabh6790/frappe,vjFaLk/frappe,adityahase/frappe,frappe/frappe,saurabh6790/frappe,yashodhank/frappe,frappe/frappe,yashodhank/frappe,saurabh6790/frappe | import frappe
from frappe.utils import get_datetime
def execute():
weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
weekly_events = frappe.get_list("Event", filters={"repeat_this_event": 1, "repeat_on": "Every Week"}, fields=["name", "starts_on"])
frappe.reload_doc("desk", "doctype", "event")
# Initially Daily Events had option to choose days, but now Weekly does, so just changing from Daily -> Weekly does the job
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Day'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Week'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Monthly' WHERE `tabEvent`.repeat_on='Every Month'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Yearly' WHERE `tabEvent`.repeat_on='Every Year'""")
for weekly_event in weekly_events:
# Set WeekDay based on the starts_on so that event can repeat Weekly
- frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.{0}=1 WHERE `tabEvent`.name='{1}'""".format(weekdays[get_datetime(weekly_event.starts_on).weekday()], weekly_event.name))
+ frappe.db.set_value('Event', weekly_event.name, weekdays[get_datetime(weekly_event.starts_on).weekday()], 1, update_modified=1)
| Convert to SQL to set_value | ## Code Before:
import frappe
from frappe.utils import get_datetime
def execute():
weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
weekly_events = frappe.get_list("Event", filters={"repeat_this_event": 1, "repeat_on": "Every Week"}, fields=["name", "starts_on"])
frappe.reload_doc("desk", "doctype", "event")
# Initially Daily Events had option to choose days, but now Weekly does, so just changing from Daily -> Weekly does the job
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Day'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Week'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Monthly' WHERE `tabEvent`.repeat_on='Every Month'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Yearly' WHERE `tabEvent`.repeat_on='Every Year'""")
for weekly_event in weekly_events:
# Set WeekDay based on the starts_on so that event can repeat Weekly
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.{0}=1 WHERE `tabEvent`.name='{1}'""".format(weekdays[get_datetime(weekly_event.starts_on).weekday()], weekly_event.name))
## Instruction:
Convert to SQL to set_value
## Code After:
import frappe
from frappe.utils import get_datetime
def execute():
weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
weekly_events = frappe.get_list("Event", filters={"repeat_this_event": 1, "repeat_on": "Every Week"}, fields=["name", "starts_on"])
frappe.reload_doc("desk", "doctype", "event")
# Initially Daily Events had option to choose days, but now Weekly does, so just changing from Daily -> Weekly does the job
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Day'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Week'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Monthly' WHERE `tabEvent`.repeat_on='Every Month'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Yearly' WHERE `tabEvent`.repeat_on='Every Year'""")
for weekly_event in weekly_events:
# Set WeekDay based on the starts_on so that event can repeat Weekly
frappe.db.set_value('Event', weekly_event.name, weekdays[get_datetime(weekly_event.starts_on).weekday()], 1, update_modified=1)
| import frappe
from frappe.utils import get_datetime
def execute():
weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
weekly_events = frappe.get_list("Event", filters={"repeat_this_event": 1, "repeat_on": "Every Week"}, fields=["name", "starts_on"])
frappe.reload_doc("desk", "doctype", "event")
# Initially Daily Events had option to choose days, but now Weekly does, so just changing from Daily -> Weekly does the job
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Day'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Weekly' WHERE `tabEvent`.repeat_on='Every Week'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Monthly' WHERE `tabEvent`.repeat_on='Every Month'""")
frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.repeat_on='Yearly' WHERE `tabEvent`.repeat_on='Every Year'""")
for weekly_event in weekly_events:
# Set WeekDay based on the starts_on so that event can repeat Weekly
- frappe.db.sql("""UPDATE `tabEvent` SET `tabEvent`.{0}=1 WHERE `tabEvent`.name='{1}'""".format(weekdays[get_datetime(weekly_event.starts_on).weekday()], weekly_event.name))
+ frappe.db.set_value('Event', weekly_event.name, weekdays[get_datetime(weekly_event.starts_on).weekday()], 1, update_modified=1) |
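Worth making explicit why this record's change matters (editorial note, not from the commit message): the old statement interpolated weekly_event.name into SQL via str.format, which is an injection risk, while frappe.db.set_value sends values through the driver. Where raw SQL is unavoidable, frappe.db.sql also accepts bound values — a hedged sketch, since the %s placeholder style is assumed from common Frappe usage:

frappe.db.sql(
    """UPDATE `tabEvent` SET `monday`=1 WHERE `name`=%s""",
    (weekly_event.name,),  # passed as a bound value, never string-formatted
)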
134fcbd6e82957ac3abd2eebdc296fd4ccb457e9 | alexandria/api/books.py | alexandria/api/books.py | from . import app, mongo
from alexandria.decorators import *
from flask import request, jsonify, url_for, session
from flask.ext.classy import FlaskView, route
import json
from bson import json_util
class BooksView(FlaskView):
route_prefix = '/api/'
@authenticated
def index(self):
query = mongo.Books.find()
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
return jsonify(books=books)
@authenticated
def genre(self, id):
query = mongo.Books.find({'genres':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
return jsonify(books=books)
@authenticated
def author(self, id):
query = mongo.Books.find({'authors':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
return jsonify(books=books)
BooksView.register(app)
| from . import app, mongo
from alexandria.decorators import *
from flask import request, jsonify, url_for, session
from flask.ext.classy import FlaskView, route
import json
from bson import json_util
class BooksView(FlaskView):
route_prefix = '/api/'
@authenticated
def index(self):
query = mongo.Books.find()
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
book['owner'] = book['owner']['$oid']
return jsonify(books=books)
@authenticated
def genre(self, id):
query = mongo.Books.find({'genres':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
book['owner'] = book['owner']['$oid']
return jsonify(books=books)
@authenticated
def author(self, id):
query = mongo.Books.find({'authors':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
book['owner'] = book['owner']['$oid']
return jsonify(books=books)
BooksView.register(app)
| Set value of 'owner' to the value of the ObjectId | Set value of 'owner' to the value of the ObjectId
| Python | mit | citruspi/Alexandria,citruspi/Alexandria | from . import app, mongo
from alexandria.decorators import *
from flask import request, jsonify, url_for, session
from flask.ext.classy import FlaskView, route
import json
from bson import json_util
class BooksView(FlaskView):
route_prefix = '/api/'
@authenticated
def index(self):
query = mongo.Books.find()
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
+ book['owner'] = book['owner']['$oid']
+
return jsonify(books=books)
@authenticated
def genre(self, id):
query = mongo.Books.find({'genres':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
+
+ book['owner'] = book['owner']['$oid']
return jsonify(books=books)
@authenticated
def author(self, id):
query = mongo.Books.find({'authors':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
+ book['owner'] = book['owner']['$oid']
+
return jsonify(books=books)
BooksView.register(app)
| Set value of 'owner' to the value of the ObjectId | ## Code Before:
from . import app, mongo
from alexandria.decorators import *
from flask import request, jsonify, url_for, session
from flask.ext.classy import FlaskView, route
import json
from bson import json_util
class BooksView(FlaskView):
route_prefix = '/api/'
@authenticated
def index(self):
query = mongo.Books.find()
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
return jsonify(books=books)
@authenticated
def genre(self, id):
query = mongo.Books.find({'genres':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
return jsonify(books=books)
@authenticated
def author(self, id):
query = mongo.Books.find({'authors':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
return jsonify(books=books)
BooksView.register(app)
## Instruction:
Set value of 'owner' to the value of the ObjectId
## Code After:
from . import app, mongo
from alexandria.decorators import *
from flask import request, jsonify, url_for, session
from flask.ext.classy import FlaskView, route
import json
from bson import json_util
class BooksView(FlaskView):
route_prefix = '/api/'
@authenticated
def index(self):
query = mongo.Books.find()
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
book['owner'] = book['owner']['$oid']
return jsonify(books=books)
@authenticated
def genre(self, id):
query = mongo.Books.find({'genres':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
book['owner'] = book['owner']['$oid']
return jsonify(books=books)
@authenticated
def author(self, id):
query = mongo.Books.find({'authors':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
book['owner'] = book['owner']['$oid']
return jsonify(books=books)
BooksView.register(app)
| from . import app, mongo
from alexandria.decorators import *
from flask import request, jsonify, url_for, session
from flask.ext.classy import FlaskView, route
import json
from bson import json_util
class BooksView(FlaskView):
route_prefix = '/api/'
@authenticated
def index(self):
query = mongo.Books.find()
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
+ book['owner'] = book['owner']['$oid']
+
return jsonify(books=books)
@authenticated
def genre(self, id):
query = mongo.Books.find({'genres':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
+
+ book['owner'] = book['owner']['$oid']
return jsonify(books=books)
@authenticated
def author(self, id):
query = mongo.Books.find({'authors':id})
books = json.loads(json_util.dumps(query, default=json_util.default))
for book in books:
book['id'] = book['_id']['$oid']
book.pop('_id')
+ book['owner'] = book['owner']['$oid']
+
return jsonify(books=books)
BooksView.register(app) |
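For readers wondering why this record keeps pulling '$oid' out of each document (editorial note): bson's json_util serializes ObjectId fields as an extended-JSON wrapper object, which the loops then flatten to plain strings. A small illustrative sketch (the hex id is made up):

import json
from bson import ObjectId, json_util

doc = {"_id": ObjectId("5f2b6f0e8f1b2c3d4e5f6a7b")}
plain = json.loads(json_util.dumps(doc))
print(plain)                 # {'_id': {'$oid': '5f2b6f0e8f1b2c3d4e5f6a7b'}}
print(plain["_id"]["$oid"])  # the flat string the view returns as 'id'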
bad8133c6714a25ad764419302f4db0da3f39952 | spec_cleaner/rpminstall.py | spec_cleaner/rpminstall.py |
import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = string.replace(line, '%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
|
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = line.replace('%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
| Fix test failures on py3. | Fix test failures on py3.
| Python | bsd-3-clause | plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner | -
- import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
- line = string.replace(line, '%{makeinstall}', install_command)
+ line = line.replace('%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
| Fix test failures on py3. | ## Code Before:
import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = string.replace(line, '%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
## Instruction:
Fix test failures on py3.
## Code After:
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = line.replace('%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
| -
- import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
- line = string.replace(line, '%{makeinstall}', install_command)
? ^^^ ^ ------
+ line = line.replace('%{makeinstall}', install_command)
? ^ ^
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line |
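A quick gloss on what "Fix test failures on py3" amounts to here (editorial): the function form string.replace(line, old, new) exists only in Python 2's string module; the str method is the version-portable spelling. Minimal sketch:

line = "%{makeinstall} VERBOSE=1"
# str.replace works identically on Python 2 and 3; string.replace() is gone in 3.
line = line.replace("%{makeinstall}", "make DESTDIR=%{buildroot} install %{?_smp_mflags}")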
38746e4f4891f7ad87ce678776be15556d1db449 | gcl/to_json.py | gcl/to_json.py | import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
help='Subnodes to convert. The first selector will be treated as the root of the printed output.')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
sys.stdout.write(json.dumps(plain))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
| import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
def select(dct, path):
for part in path:
if not hasattr(dct, 'keys'):
raise RuntimeError('Value %r cannot be indexed with %r' % (dct, part))
if part not in dct:
raise RuntimeError('Value %r has no key %r' % (dct, part))
dct = dct[part]
return dct
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
help='Select nodes to include in the JSON.')
parser.add_argument('--root', '-r', metavar='PATH', type=str, default='',
help='Use the indicated root path as the root of the output JSON object (like a.b.c but without wildcards)')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
selectors = args.root.split('.') if args.root else []
selected = select(plain, selectors)
sys.stdout.write(json.dumps(selected))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
| Add proper root selector to gcl2json | Add proper root selector to gcl2json
| Python | mit | rix0rrr/gcl | import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
+ def select(dct, path):
+ for part in path:
+ if not hasattr(dct, 'keys'):
+ raise RuntimeError('Value %r cannot be indexed with %r' % (dct, part))
+ if part not in dct:
+ raise RuntimeError('Value %r has no key %r' % (dct, part))
+ dct = dct[part]
+ return dct
+
+
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
- help='Subnodes to convert. The first selector will be treated as the root of the printed output.')
+ help='Select nodes to include in the JSON.')
+ parser.add_argument('--root', '-r', metavar='PATH', type=str, default='',
+ help='Use the indicated root path as the root of the output JSON object (like a.b.c but without wildcards)')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
+
+ selectors = args.root.split('.') if args.root else []
+ selected = select(plain, selectors)
+
- sys.stdout.write(json.dumps(plain))
+ sys.stdout.write(json.dumps(selected))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
| Add proper root selector to gcl2json | ## Code Before:
import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
help='Subnodes to convert. The first selector will be treated as the root of the printed output.')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
sys.stdout.write(json.dumps(plain))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
## Instruction:
Add proper root selector to gcl2json
## Code After:
import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
def select(dct, path):
for part in path:
if not hasattr(dct, 'keys'):
raise RuntimeError('Value %r cannot be indexed with %r' % (dct, part))
if part not in dct:
raise RuntimeError('Value %r has no key %r' % (dct, part))
dct = dct[part]
return dct
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
help='Select nodes to include in the JSON.')
parser.add_argument('--root', '-r', metavar='PATH', type=str, default='',
help='Use the indicated root path as the root of the output JSON object (like a.b.c but without wildcards)')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
selectors = args.root.split('.') if args.root else []
selected = select(plain, selectors)
sys.stdout.write(json.dumps(selected))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
| import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
+ def select(dct, path):
+ for part in path:
+ if not hasattr(dct, 'keys'):
+ raise RuntimeError('Value %r cannot be indexed with %r' % (dct, part))
+ if part not in dct:
+ raise RuntimeError('Value %r has no key %r' % (dct, part))
+ dct = dct[part]
+ return dct
+
+
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
- help='Subnodes to convert. The first selector will be treated as the root of the printed output.')
+ help='Select nodes to include in the JSON.')
+ parser.add_argument('--root', '-r', metavar='PATH', type=str, default='',
+ help='Use the indicated root path as the root of the output JSON object (like a.b.c but without wildcards)')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
+
+ selectors = args.root.split('.') if args.root else []
+ selected = select(plain, selectors)
+
- sys.stdout.write(json.dumps(plain))
? ^ ^^^
+ sys.stdout.write(json.dumps(selected))
? ^^ ^^^^^
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1) |
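A usage sketch for the select helper introduced in this record (editorial example; the model values are invented):

plain = {"app": {"ports": {"http": 8080}}}
print(select(plain, ["app", "ports", "http"]))  # 8080
print(select(plain, []))           # empty --root means no descent: returns the whole dict
select(plain, ["app", "missing"])  # raises RuntimeError: ... has no key 'missing'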
733404ba2eb7218bb4d253cd74fe88107ff75afc | test/test_live_openid_login.py | test/test_live_openid_login.py | import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login():
"""
Tests login to the Stack Exchange OpenID provider.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
# This will raise an error if login fails.
browser.loginSEOpenID(
live_testing.username,
live_testing.password)
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
| import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
| Remove successful OpenID login live test. It's redundant with our message-related live tests. | Remove successful OpenID login live test.
It's redundant with our message-related live tests.
| Python | apache-2.0 | ByteCommander/ChatExchange6,hichris1234/ChatExchange,Charcoal-SE/ChatExchange,hichris1234/ChatExchange,ByteCommander/ChatExchange6,Charcoal-SE/ChatExchange | import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
- def test_openid_login():
- """
- Tests login to the Stack Exchange OpenID provider.
- """
- browser = SEChatBrowser()
-
- # avoid hitting the SE servers too frequently
- time.sleep(2)
-
- # This will raise an error if login fails.
- browser.loginSEOpenID(
- live_testing.username,
- live_testing.password)
-
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
| Remove successful OpenID login live test. It's redundant with our message-related live tests. | ## Code Before:
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login():
"""
Tests login to the Stack Exchange OpenID provider.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
# This will raise an error if login fails.
browser.loginSEOpenID(
live_testing.username,
live_testing.password)
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
## Instruction:
Remove successful OpenID login live test. It's redundant with our message-related live tests.
## Code After:
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
| import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
- def test_openid_login():
- """
- Tests login to the Stack Exchange OpenID provider.
- """
- browser = SEChatBrowser()
-
- # avoid hitting the SE servers too frequently
- time.sleep(2)
-
- # This will raise an error if login fails.
- browser.loginSEOpenID(
- live_testing.username,
- live_testing.password)
-
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password) |
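(Editorial aside) The kept test leans on pytest.raises as a context manager — the standard way to assert that a block raises. A self-contained sketch of the same pattern:

import pytest

def divide(a, b):
    return a / b

def test_divide_by_zero():
    with pytest.raises(ZeroDivisionError):
        divide(1, 0)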
c1dc571faa9bf2ae0e0a580365943806826ced4a | src/adhocracy_spd/adhocracy_spd/workflows/digital_leben.py | src/adhocracy_spd/adhocracy_spd/workflows/digital_leben.py | """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| Make flake8 happy for spd | Make flake8 happy for spd
| Python | agpl-3.0 | liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator | """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
- digital_leben_meta = standard_meta \
+ digital_leben_meta = standard_meta \
- .transform(('states', 'participate', 'acm'),
+ .transform(('states', 'participate', 'acm'),
- {'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
+ {'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
- 'permissions':
+ 'permissions':
- [['create_proposal', None, None, None, 'Allow'], # noqa
+ [['create_proposal', None, None, None, 'Allow'], # noqa
- ['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
+ ['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
- ['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
+ ['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
- ['edit_comment', None, None, 'Allow', None ], # noqa
+ ['edit_comment', None, None, 'Allow', None], # noqa
- ['create_rate', 'Allow', None, None, None ], # noqa
+ ['create_rate', 'Allow', None, None, None], # noqa
- ['edit_rate', None, None, 'Allow', None ], # noqa
+ ['edit_rate', None, None, 'Allow', None], # noqa
- ]})
+ ]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| Make flake8 happy for spd | ## Code Before:
"""Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
## Instruction:
Make flake8 happy for spd
## Code After:
"""Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
- digital_leben_meta = standard_meta \
? -
+ digital_leben_meta = standard_meta \
- .transform(('states', 'participate', 'acm'),
? -
+ .transform(('states', 'participate', 'acm'),
- {'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
? - - ^^
+ {'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
? ^
- 'permissions':
+ 'permissions':
? +
- [['create_proposal', None, None, None, 'Allow'], # noqa
? -
+ [['create_proposal', None, None, None, 'Allow'], # noqa
- ['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
? -
+ ['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
- ['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
? -
+ ['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
- ['edit_comment', None, None, 'Allow', None ], # noqa
? - --
+ ['edit_comment', None, None, 'Allow', None], # noqa
- ['create_rate', 'Allow', None, None, None ], # noqa
? - --
+ ['create_rate', 'Allow', None, None, None], # noqa
- ['edit_rate', None, None, 'Allow', None ], # noqa
? - --
+ ['edit_rate', None, None, 'Allow', None], # noqa
- ]})
? --
+ ]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben') |
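How to read the ACM in this record (editorial note): each 'permissions' row lines up positionally with the 'principals' list, so every cell is that principal's setting for the row's permission. Pairing one row up explicitly:

row = dict(zip(['participant', 'moderator', 'creator', 'initiator'],
               ['Allow', 'Allow', None, 'Allow']))  # the create_comment row
print(row)  # {'participant': 'Allow', 'moderator': 'Allow', 'creator': None, 'initiator': 'Allow'}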
738e4ddd0043c204095767f1f7458db9e6948262 | tensorflow/tools/docker/jupyter_notebook_config.py | tensorflow/tools/docker/jupyter_notebook_config.py | import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
| import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
| Allow disabling password and token auth on jupyter notebooks | Allow disabling password and token auth on jupyter notebooks | Python | apache-2.0 | Intel-tensorflow/tensorflow,renyi533/tensorflow,hsaputra/tensorflow,zasdfgbnm/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,pavelchristof/gomoku-ai,av8ramit/tensorflow,hfp/tensorflow-xsmm,alivecor/tensorflow,Xeralux/tensorflow,seanli9jan/tensorflow,DavidNorman/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Mistobaan/tensorflow,snnn/tensorflow,jalexvig/tensorflow,nightjean/Deep-Learning,arborh/tensorflow,rabipanda/tensorflow,freedomtan/tensorflow,a-doumoulakis/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,manazhao/tf_recsys,alivecor/tensorflow,apark263/tensorflow,frreiss/tensorflow-fred,DavidNorman/tensorflow,jhseu/tensorflow,andrewcmyers/tensorflow,davidzchen/tensorflow,frreiss/tensorflow-fred,yufengg/tensorflow,jhseu/tensorflow,kevin-coder/tensorflow-fork,lukeiwanski/tensorflow,tensorflow/tensorflow,dongjoon-hyun/tensorflow,Bulochkin/tensorflow_pack,maciekcc/tensorflow,hehongliang/tensorflow,jart/tensorflow,renyi533/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,rabipanda/tensorflow,Xeralux/tensorflow,xzturn/tensorflow,apark263/tensorflow,freedomtan/tensorflow,gunan/tensorflow,kobejean/tensorflow,gunan/tensorflow,jart/tensorflow,manipopopo/tensorflow,Bulochkin/tensorflow_pack,gunan/tensorflow,ishay2b/tensorflow,kevin-coder/tensorflow-fork,unsiloai/syntaxnet-ops-hack,caisq/tensorflow,theflofly/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,zycdragonball/tensorflow,hfp/tensorflow-xsmm,guschmue/tensorflow,ychfan/tensorflow,jbedorf/tensorflow,ageron/tensorflow,Intel-tensorflow/tensorflow,AnishShah/tensorflow,tiagofrepereira2012/tensorflow,dendisuhubdy/tensorflow,jalexvig/tensorflow,jhseu/tensorflow,jhseu/tensorflow,meteorcloudy/tensorflow,davidzchen/tensorflow,JingJunYin/tensorflow,frreiss/tensorflow-fred,yanchen036/tensorflow,dendisuhubdy/tensorflow,benoitsteiner/tensorflow,aldian/tensorflow,girving/tensorflow,ychfan/tensorflow,benoitsteiner/tensorflow-xsmm,codrut3/tensorflow,gojira/tensorflow,benoitsteiner/tensorflow,maciekcc/tensorflow,a-doumoulakis/tensorflow,karllessard/tensorflow,raymondxyang/tensorflow,alistairlow/tensorflow,jwlawson/tensorflow,Xeralux/tensorflow,dyoung418/tensorflow,gautam1858/tensorflow,jostep/tensorflow,freedomtan/tensorflow,eaplatanios/tensorflow,girving/tensorflow,eadgarchen/tensorflow,Kongsea/tensorflow,zasdfgbnm/tensorflow,ghchinoy/tensorflow,karllessard/tensorflow,theflofly/tensorflow,aselle/tensorflow,sjperkins/tensorflow,Bismarrck/tensorflow,tornadozou/tensorflow,jbedorf/tensorflow,asimshankar/tensorflow,mavenlin/tensorflow,ravindrapanda/tensorflow,renyi533/tensorflow,lukeiwanski/tensorflow,arborh/tensorflow,chemelnucfin/tensorflow,mavenlin/tensorflow,manazhao/tf_recsys,horance-liu/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,Moriadry/tensorflow,xzturn/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,brchiu/tensorflow,gojira/tensorflow,eadgarchen/tensorflow,av8ramit/tensorflow,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,Mazecreator/tensorflow,jostep/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ville-k/tensorflow,brchiu/tensorflow,hfp/tensorflow-xsmm,unsiloai/syntaxnet-ops-hack,tensorflow/tensorflow-experimental_link_static_libraries_once,nolanliou/tensorflow,kevin-coder/tensorflow-fork,mavenlin/tensorflow,yufengg/tensorflow,nburn42/tensorflow,ville-k/tensorflow,codrut3/tensorflow,a-
doumoulakis/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,allenlavoie/tensorflow,Bismarrck/tensorflow,manipopopo/tensorflow,hsaputra/tensorflow,rabipanda/tensorflow,jhseu/tensorflow,ArtsiomCh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,llhe/tensorflow,gojira/tensorflow,benoitsteiner/tensorflow-opencl,raymondxyang/tensorflow,llhe/tensorflow,seanli9jan/tensorflow,alistairlow/tensorflow,Xeralux/tensorflow,nightjean/Deep-Learning,alsrgv/tensorflow,tensorflow/tensorflow-pywrap_saved_model,benoitsteiner/tensorflow-xsmm,paolodedios/tensorflow,ville-k/tensorflow,ishay2b/tensorflow,meteorcloudy/tensorflow,jwlawson/tensorflow,alivecor/tensorflow,allenlavoie/tensorflow,meteorcloudy/tensorflow,maciekcc/tensorflow,lakshayg/tensorflow,snnn/tensorflow,jwlawson/tensorflow,bowang/tensorflow,alivecor/tensorflow,chemelnucfin/tensorflow,alsrgv/tensorflow,alistairlow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ishay2b/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,ville-k/tensorflow,ville-k/tensorflow,JVillella/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhaux/tensorflow,ychfan/tensorflow,hehongliang/tensorflow,kevin-coder/tensorflow-fork,Xeralux/tensorflow,kobejean/tensorflow,ishay2b/tensorflow,DavidNorman/tensorflow,Bismarrck/tensorflow,alsrgv/tensorflow,alivecor/tensorflow,ArtsiomCh/tensorflow,mdrumond/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,ZhangXinNan/tensorflow,Mistobaan/tensorflow,dongjoon-hyun/tensorflow,nolanliou/tensorflow,benoitsteiner/tensorflow,gojira/tensorflow,paolodedios/tensorflow,ravindrapanda/tensorflow,dongjoon-hyun/tensorflow,nburn42/tensorflow,snnn/tensorflow,cxxgtxy/tensorflow,laszlocsomor/tensorflow,cxxgtxy/tensorflow,Mazecreator/tensorflow,laszlocsomor/tensorflow,kevin-coder/tensorflow-fork,hsaputra/tensorflow,Intel-Corporation/tensorflow,alshedivat/tensorflow,aldian/tensorflow,Mistobaan/tensorflow,gunan/tensorflow,dyoung418/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,dongjoon-hyun/tensorflow,JingJunYin/tensorflow,theflofly/tensorflow,ychfan/tensorflow,dendisuhubdy/tensorflow,gunan/tensorflow,ville-k/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,Moriadry/tensorflow,dongjoon-hyun/tensorflow,mixturemodel-flow/tensorflow,jhaux/tensorflow,theflofly/tensorflow,AnishShah/tensorflow,llhe/tensorflow,jendap/tensorflow,aldian/tensorflow,asimshankar/tensorflow,aam-at/tensorflow,bowang/tensorflow,adamtiger/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,JVillella/tensorflow,ppwwyyxx/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,nburn42/tensorflow,davidzchen/tensorflow,rabipanda/tensorflow,benoitsteiner/tensorflow,drpngx/tensorflow,nolanliou/tensorflow,nolanliou/tensorflow,meteorcloudy/tensorflow,sjperkins/tensorflow,ran5515/DeepDecision,laszlocsomor/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,ZhangXinNan/tensorflow,zasdfgbnm/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,DavidNorman/tensorflow,a-doumoulakis/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,with-git/tensorflow,Mazecreator/tensorflow,hsaputra/tensorflow,gunan/tensorflow,codrut3/tensorflow,yufengg/tensorflow,karllessard/tensorflow,nightjean/Deep-Learning,girving/tensorflow,horance-liu/tensorflow,asimshankar/tensorflow,snnn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,AnishS
hah/tensorflow,sarvex/tensorflow,guschmue/tensorflow,jalexvig/tensorflow,av8ramit/tensorflow,benoitsteiner/tensorflow,xzturn/tensorflow,raymondxyang/tensorflow,yongtang/tensorflow,asimshankar/tensorflow,llhe/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,benoitsteiner/tensorflow-xsmm,hsaputra/tensorflow,aselle/tensorflow,eaplatanios/tensorflow,jendap/tensorflow,Xeralux/tensorflow,ghchinoy/tensorflow,aselle/tensorflow,xodus7/tensorflow,mdrumond/tensorflow,Mazecreator/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow,seanli9jan/tensorflow,gunan/tensorflow,Bulochkin/tensorflow_pack,ppwwyyxx/tensorflow,karllessard/tensorflow,gojira/tensorflow,JVillella/tensorflow,jhaux/tensorflow,alivecor/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,horance-liu/tensorflow,Bismarrck/tensorflow,hfp/tensorflow-xsmm,alistairlow/tensorflow,caisq/tensorflow,AnishShah/tensorflow,Bulochkin/tensorflow_pack,kobejean/tensorflow,eadgarchen/tensorflow,allenlavoie/tensorflow,dancingdan/tensorflow,hsaputra/tensorflow,ghchinoy/tensorflow,with-git/tensorflow,jostep/tensorflow,seanli9jan/tensorflow,manazhao/tf_recsys,yanchen036/tensorflow,hfp/tensorflow-xsmm,yongtang/tensorflow,Moriadry/tensorflow,a-doumoulakis/tensorflow,petewarden/tensorflow,eaplatanios/tensorflow,JingJunYin/tensorflow,zasdfgbnm/tensorflow,with-git/tensorflow,girving/tensorflow,davidzchen/tensorflow,benoitsteiner/tensorflow-xsmm,jalexvig/tensorflow,brchiu/tensorflow,girving/tensorflow,hfp/tensorflow-xsmm,paolodedios/tensorflow,brchiu/tensorflow,renyi533/tensorflow,ppwwyyxx/tensorflow,drpngx/tensorflow,Bismarrck/tensorflow,zycdragonball/tensorflow,annarev/tensorflow,apark263/tensorflow,gunan/tensorflow,petewarden/tensorflow,adamtiger/tensorflow,eadgarchen/tensorflow,nolanliou/tensorflow,cxxgtxy/tensorflow,petewarden/tensorflow,ishay2b/tensorflow,asimshankar/tensorflow,caisq/tensorflow,adit-chandra/tensorflow,dyoung418/tensorflow,alistairlow/tensorflow,tornadozou/tensorflow,dongjoon-hyun/tensorflow,Xeralux/tensorflow,xodus7/tensorflow,tensorflow/tensorflow,maciekcc/tensorflow,paolodedios/tensorflow,hsaputra/tensorflow,drpngx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,kobejean/tensorflow,av8ramit/tensorflow,hsaputra/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhaux/tensorflow,jhaux/tensorflow,Bulochkin/tensorflow_pack,jart/tensorflow,drpngx/tensorflow,dancingdan/tensorflow,DavidNorman/tensorflow,renyi533/tensorflow,apark263/tensorflow,raymondxyang/tensorflow,annarev/tensorflow,gojira/tensorflow,xzturn/tensorflow,jart/tensorflow,jendap/tensorflow,ran5515/DeepDecision,benoitsteiner/tensorflow-opencl,tensorflow/tensorflow-pywrap_saved_model,ArtsiomCh/tensorflow,tornadozou/tensorflow,codrut3/tensorflow,mdrumond/tensorflow,horance-liu/tensorflow,dendisuhubdy/tensorflow,adit-chandra/tensorflow,horance-liu/tensorflow,davidzchen/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,kobejean/tensorflow,andrewcmyers/tensorflow,benoitsteiner/tensorflow-opencl,girving/tensorflow,codrut3/tensorflow,jwlawson/tensorflow,frreiss/tensorflow-fred,jalexvig/tensorflow,dancingdan/tensorflow,alshedivat/tensorflow,sjperkins/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,llhe/tensorflow,sarvex/tensorflow,yongtang/tensorflow,bowang/tensorflow,nburn42/tensorflow,llhe/tensorflow,bowang/tensorflow,eadgarchen/tensorflow,guschmue/tensorflow,caisq/tensorflow,ychfan/tensorflow,eaplatanio
s/tensorflow,yanchen036/tensorflow,chemelnucfin/tensorflow,aldian/tensorflow,tiagofrepereira2012/tensorflow,adit-chandra/tensorflow,Intel-tensorflow/tensorflow,ville-k/tensorflow,benoitsteiner/tensorflow-xsmm,Bulochkin/tensorflow_pack,alsrgv/tensorflow,brchiu/tensorflow,apark263/tensorflow,nburn42/tensorflow,guschmue/tensorflow,kobejean/tensorflow,renyi533/tensorflow,laszlocsomor/tensorflow,horance-liu/tensorflow,meteorcloudy/tensorflow,nightjean/Deep-Learning,a-doumoulakis/tensorflow,benoitsteiner/tensorflow,zasdfgbnm/tensorflow,tornadozou/tensorflow,rabipanda/tensorflow,snnn/tensorflow,unsiloai/syntaxnet-ops-hack,tiagofrepereira2012/tensorflow,karllessard/tensorflow,dancingdan/tensorflow,benoitsteiner/tensorflow-xsmm,apark263/tensorflow,hfp/tensorflow-xsmm,hfp/tensorflow-xsmm,jwlawson/tensorflow,alsrgv/tensorflow,aam-at/tensorflow,ghchinoy/tensorflow,zycdragonball/tensorflow,av8ramit/tensorflow,adamtiger/tensorflow,Xeralux/tensorflow,tiagofrepereira2012/tensorflow,jart/tensorflow,unsiloai/syntaxnet-ops-hack,karllessard/tensorflow,gautam1858/tensorflow,alistairlow/tensorflow,theflofly/tensorflow,nburn42/tensorflow,asimshankar/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,eaplatanios/tensorflow,seanli9jan/tensorflow,dancingdan/tensorflow,dendisuhubdy/tensorflow,aselle/tensorflow,ville-k/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jendap/tensorflow,eadgarchen/tensorflow,unsiloai/syntaxnet-ops-hack,Xeralux/tensorflow,girving/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,ppwwyyxx/tensorflow,girving/tensorflow,jbedorf/tensorflow,tiagofrepereira2012/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,zasdfgbnm/tensorflow,alsrgv/tensorflow,Intel-Corporation/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,drpngx/tensorflow,frreiss/tensorflow-fred,ArtsiomCh/tensorflow,davidzchen/tensorflow,ran5515/DeepDecision,dancingdan/tensorflow,bowang/tensorflow,brchiu/tensorflow,chemelnucfin/tensorflow,dendisuhubdy/tensorflow,frreiss/tensorflow-fred,kobejean/tensorflow,ran5515/DeepDecision,hehongliang/tensorflow,bowang/tensorflow,jart/tensorflow,manipopopo/tensorflow,frreiss/tensorflow-fred,snnn/tensorflow,codrut3/tensorflow,tensorflow/tensorflow,manazhao/tf_recsys,jendap/tensorflow,Intel-tensorflow/tensorflow,Bismarrck/tensorflow,ishay2b/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-xsmm,pavelchristof/gomoku-ai,xodus7/tensorflow,davidzchen/tensorflow,JingJunYin/tensorflow,allenlavoie/tensorflow,benoitsteiner/tensorflow-opencl,jhseu/tensorflow,raymondxyang/tensorflow,llhe/tensorflow,arborh/tensorflow,frreiss/tensorflow-fred,drpngx/tensorflow,seanli9jan/tensorflow,manipopopo/tensorflow,gautam1858/tensorflow,with-git/tensorflow,karllessard/tensorflow,pavelchristof/gomoku-ai,caisq/tensorflow,Bulochkin/tensorflow_pack,eaplatanios/tensorflow,tillahoffmann/tensorflow,ghchinoy/tensorflow,JingJunYin/tensorflow,Mazecreator/tensorflow,manipopopo/tensorflow,Intel-tensorflow/tensorflow,renyi533/tensorflow,Mistobaan/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,Kongsea/tensorflow,hehongliang/tensorflow,jendap/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,girving/tensorflow,jart/tensorflow,manazhao/tf_recsys,gojira/tensorflow,laszlocsomor/tensorflow,snnn/tensorflow,alshedivat/tensorflow,JingJunYin/tensorflow,kevin-coder/tensorflow-fork,Mistobaan/tensorflow,davidzchen/tensorflow,freedomtan/tensorflow,cxxgtxy/tensorflow,chemelnucfin/tensorflo
w,alshedivat/tensorflow,yanchen036/tensorflow,aam-at/tensorflow,sarvex/tensorflow,pavelchristof/gomoku-ai,petewarden/tensorflow,guschmue/tensorflow,asimshankar/tensorflow,andrewcmyers/tensorflow,arborh/tensorflow,alshedivat/tensorflow,xodus7/tensorflow,Kongsea/tensorflow,raymondxyang/tensorflow,jendap/tensorflow,arborh/tensorflow,benoitsteiner/tensorflow,zasdfgbnm/tensorflow,hsaputra/tensorflow,jhaux/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,yanchen036/tensorflow,annarev/tensorflow,zasdfgbnm/tensorflow,benoitsteiner/tensorflow-opencl,Mistobaan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Kongsea/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,jhseu/tensorflow,gunan/tensorflow,horance-liu/tensorflow,Mistobaan/tensorflow,renyi533/tensorflow,rabipanda/tensorflow,sarvex/tensorflow,renyi533/tensorflow,eaplatanios/tensorflow,lukeiwanski/tensorflow,apark263/tensorflow,ravindrapanda/tensorflow,snnn/tensorflow,yongtang/tensorflow,petewarden/tensorflow,manipopopo/tensorflow,ychfan/tensorflow,tillahoffmann/tensorflow,davidzchen/tensorflow,gunan/tensorflow,arborh/tensorflow,manipopopo/tensorflow,rabipanda/tensorflow,ychfan/tensorflow,jwlawson/tensorflow,sjperkins/tensorflow,ychfan/tensorflow,horance-liu/tensorflow,sarvex/tensorflow,lukeiwanski/tensorflow,xzturn/tensorflow,cxxgtxy/tensorflow,ville-k/tensorflow,girving/tensorflow,chemelnucfin/tensorflow,arborh/tensorflow,Bulochkin/tensorflow_pack,JVillella/tensorflow,raymondxyang/tensorflow,lakshayg/tensorflow,rabipanda/tensorflow,Intel-tensorflow/tensorflow,tillahoffmann/tensorflow,jhseu/tensorflow,ageron/tensorflow,nburn42/tensorflow,alshedivat/tensorflow,brchiu/tensorflow,dongjoon-hyun/tensorflow,kobejean/tensorflow,mixturemodel-flow/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,AnishShah/tensorflow,arborh/tensorflow,jalexvig/tensorflow,caisq/tensorflow,jbedorf/tensorflow,lakshayg/tensorflow,alshedivat/tensorflow,AnishShah/tensorflow,aselle/tensorflow,annarev/tensorflow,maciekcc/tensorflow,lakshayg/tensorflow,ageron/tensorflow,sjperkins/tensorflow,apark263/tensorflow,tiagofrepereira2012/tensorflow,rabipanda/tensorflow,tillahoffmann/tensorflow,drpngx/tensorflow,mixturemodel-flow/tensorflow,tensorflow/tensorflow,ville-k/tensorflow,gojira/tensorflow,sjperkins/tensorflow,tillahoffmann/tensorflow,maciekcc/tensorflow,Mistobaan/tensorflow,Moriadry/tensorflow,freedomtan/tensorflow,Intel-Corporation/tensorflow,xodus7/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,nolanliou/tensorflow,meteorcloudy/tensorflow,jalexvig/tensorflow,ravindrapanda/tensorflow,rabipanda/tensorflow,sjperkins/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-xsmm,allenlavoie/tensorflow,allenlavoie/tensorflow,xodus7/tensorflow,ageron/tensorflow,mixturemodel-flow/tensorflow,nolanliou/tensorflow,DavidNorman/tensorflow,caisq/tensorflow,lakshayg/tensorflow,tillahoffmann/tensorflow,nightjean/Deep-Learning,nolanliou/tensorflow,tensorflow/tensorflow,jhaux/tensorflow,jhaux/tensorflow,ageron/tensorflow,zycdragonball/tensorflow,dancingdan/tensorflow,Mazecreator/tensorflow,cxxgtxy/tensorflow,mdrumond/tensorflow,dyoung418/tensorflow,av8ramit/tensorflow,kevin-coder/tensorflow-fork,Mistobaan/tensorflow,dyoung418/tensorflow,tornadozou/tensorflow,tillahoffmann/tensorflow,petewarden/tensorflow,adamtiger/tensorflow,alshedivat/tensorflow,zycdragonball/tensorflow,ZhangXinNan/tensorflow,gojira/tensorflow,guschmue/tensorflow,nightjean/Deep-Learning,aam-at/tensorflow,suiyuan2009/tensorflow,benoitsteiner/tensorflow,ghchinoy/te
nsorflow,sarvex/tensorflow,llhe/tensorflow,jart/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,brchiu/tensorflow,kobejean/tensorflow,dendisuhubdy/tensorflow,yongtang/tensorflow,lukeiwanski/tensorflow,guschmue/tensorflow,ArtsiomCh/tensorflow,ravindrapanda/tensorflow,jhseu/tensorflow,arborh/tensorflow,llhe/tensorflow,alistairlow/tensorflow,yanchen036/tensorflow,ppwwyyxx/tensorflow,jendap/tensorflow,dendisuhubdy/tensorflow,xodus7/tensorflow,dyoung418/tensorflow,xzturn/tensorflow,ravindrapanda/tensorflow,kevin-coder/tensorflow-fork,eadgarchen/tensorflow,ran5515/DeepDecision,arborh/tensorflow,yongtang/tensorflow,brchiu/tensorflow,manipopopo/tensorflow,ravindrapanda/tensorflow,alsrgv/tensorflow,mavenlin/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,meteorcloudy/tensorflow,maciekcc/tensorflow,ran5515/DeepDecision,jwlawson/tensorflow,xzturn/tensorflow,sjperkins/tensorflow,guschmue/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,with-git/tensorflow,yongtang/tensorflow,suiyuan2009/tensorflow,Intel-tensorflow/tensorflow,bowang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,pavelchristof/gomoku-ai,Intel-Corporation/tensorflow,apark263/tensorflow,lukeiwanski/tensorflow,gautam1858/tensorflow,horance-liu/tensorflow,ghchinoy/tensorflow,mavenlin/tensorflow,jhseu/tensorflow,bowang/tensorflow,ghchinoy/tensorflow,aselle/tensorflow,ravindrapanda/tensorflow,adit-chandra/tensorflow,nburn42/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,dendisuhubdy/tensorflow,petewarden/tensorflow,tensorflow/tensorflow,xodus7/tensorflow,raymondxyang/tensorflow,tornadozou/tensorflow,brchiu/tensorflow,llhe/tensorflow,kevin-coder/tensorflow-fork,jwlawson/tensorflow,jbedorf/tensorflow,jhaux/tensorflow,xodus7/tensorflow,cxxgtxy/tensorflow,Kongsea/tensorflow,theflofly/tensorflow,eaplatanios/tensorflow,DavidNorman/tensorflow,mixturemodel-flow/tensorflow,zasdfgbnm/tensorflow,annarev/tensorflow,drpngx/tensorflow,DavidNorman/tensorflow,aselle/tensorflow,aldian/tensorflow,tiagofrepereira2012/tensorflow,aam-at/tensorflow,aselle/tensorflow,sarvex/tensorflow,tillahoffmann/tensorflow,seanli9jan/tensorflow,lakshayg/tensorflow,manazhao/tf_recsys,yufengg/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,eaplatanios/tensorflow,theflofly/tensorflow,alshedivat/tensorflow,eaplatanios/tensorflow,xodus7/tensorflow,jendap/tensorflow,dancingdan/tensorflow,dancingdan/tensorflow,chemelnucfin/tensorflow,nolanliou/tensorflow,alistairlow/tensorflow,kobejean/tensorflow,annarev/tensorflow,unsiloai/syntaxnet-ops-hack,unsiloai/syntaxnet-ops-hack,arborh/tensorflow,lakshayg/tensorflow,meteorcloudy/tensorflow,allenlavoie/tensorflow,petewarden/tensorflow,zycdragonball/tensorflow,ArtsiomCh/tensorflow,jbedorf/tensorflow,yufengg/tensorflow,with-git/tensorflow,DavidNorman/tensorflow,tornadozou/tensorflow,jendap/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,manipopopo/tensorflow,caisq/tensorflow,Bismarrck/tensorflow,suiyuan2009/tensorflow,nburn42/tensorflow,JVillella/tensorflow,ArtsiomCh/tensorflow,mdrumond/tensorflow,zasdfgbnm/tensorflow,jbedorf/tensorflow,andrewcmyers/tensorflow,aldian/tensorflow,hehongliang/tensorflow,paolodedios/tensorflow,Moriadry/tensorflow,JingJunYin/tensorflow,Mazecreator/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,aam-at/tensorflow,jart/tensorflow,yanchen036/tensorfl
ow,suiyuan2009/tensorflow,meteorcloudy/tensorflow,aam-at/tensorflow,yufengg/tensorflow,mixturemodel-flow/tensorflow,apark263/tensorflow,alistairlow/tensorflow,alivecor/tensorflow,Xeralux/tensorflow,mixturemodel-flow/tensorflow,meteorcloudy/tensorflow,paolodedios/tensorflow,dancingdan/tensorflow,Mazecreator/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,jalexvig/tensorflow,allenlavoie/tensorflow,ArtsiomCh/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,gautam1858/tensorflow,jostep/tensorflow,gautam1858/tensorflow,JingJunYin/tensorflow,ppwwyyxx/tensorflow,ZhangXinNan/tensorflow,petewarden/tensorflow,with-git/tensorflow,alsrgv/tensorflow,AnishShah/tensorflow,andrewcmyers/tensorflow,lukeiwanski/tensorflow,AnishShah/tensorflow,ZhangXinNan/tensorflow,adamtiger/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,adit-chandra/tensorflow,mavenlin/tensorflow,lakshayg/tensorflow,ageron/tensorflow,a-doumoulakis/tensorflow,dongjoon-hyun/tensorflow,snnn/tensorflow,Bulochkin/tensorflow_pack,ZhangXinNan/tensorflow,jhseu/tensorflow,AnishShah/tensorflow,horance-liu/tensorflow,tensorflow/tensorflow,yanchen036/tensorflow,sjperkins/tensorflow,dyoung418/tensorflow,ppwwyyxx/tensorflow,xzturn/tensorflow,dongjoon-hyun/tensorflow,jhaux/tensorflow,aldian/tensorflow,Bismarrck/tensorflow,pavelchristof/gomoku-ai,girving/tensorflow,nburn42/tensorflow,Bismarrck/tensorflow,xodus7/tensorflow,mdrumond/tensorflow,paolodedios/tensorflow,xzturn/tensorflow,ran5515/DeepDecision,alistairlow/tensorflow,drpngx/tensorflow,cxxgtxy/tensorflow,snnn/tensorflow,apark263/tensorflow,kobejean/tensorflow,jwlawson/tensorflow,maciekcc/tensorflow,aselle/tensorflow,andrewcmyers/tensorflow,ghchinoy/tensorflow,Mistobaan/tensorflow,benoitsteiner/tensorflow-opencl,chemelnucfin/tensorflow,lukeiwanski/tensorflow,allenlavoie/tensorflow,annarev/tensorflow,Kongsea/tensorflow,adamtiger/tensorflow,ageron/tensorflow,jhseu/tensorflow,andrewcmyers/tensorflow,arborh/tensorflow,Moriadry/tensorflow,dongjoon-hyun/tensorflow,jwlawson/tensorflow,mdrumond/tensorflow,mavenlin/tensorflow,petewarden/tensorflow,aam-at/tensorflow,with-git/tensorflow,eadgarchen/tensorflow,Xeralux/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,jart/tensorflow,codrut3/tensorflow,seanli9jan/tensorflow,hfp/tensorflow-xsmm,pavelchristof/gomoku-ai,ghchinoy/tensorflow,jalexvig/tensorflow,av8ramit/tensorflow,eadgarchen/tensorflow,zasdfgbnm/tensorflow,andrewcmyers/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gojira/tensorflow,benoitsteiner/tensorflow-xsmm,snnn/tensorflow,yongtang/tensorflow,nolanliou/tensorflow,lukeiwanski/tensorflow,chemelnucfin/tensorflow,allenlavoie/tensorflow,Bulochkin/tensorflow_pack,codrut3/tensorflow,Intel-tensorflow/tensorflow,dancingdan/tensorflow,nightjean/Deep-Learning,Kongsea/tensorflow,sjperkins/tensorflow,mavenlin/tensorflow,sarvex/tensorflow,aselle/tensorflow,guschmue/tensorflow,suiyuan2009/tensorflow,pavelchristof/gomoku-ai,Bulochkin/tensorflow_pack,ageron/tensorflow,eaplatanios/tensorflow,lukeiwanski/tensorflow,freedomtan/tensorflow,jwlawson/tensorflow,theflofly/tensorflow,gojira/tensorflow,ravindrapanda/tensorflow,laszlocsomor/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adamtiger/tensorflow,unsiloai/syntaxnet-ops-hack,manipopopo/tensorflow,caisq/tensorflow,benoitsteiner/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,rabipanda/
tensorflow,aselle/tensorflow,Moriadry/tensorflow,laszlocsomor/tensorflow,annarev/tensorflow,codrut3/tensorflow,theflofly/tensorflow,ZhangXinNan/tensorflow,JingJunYin/tensorflow,allenlavoie/tensorflow,asimshankar/tensorflow,theflofly/tensorflow,Mazecreator/tensorflow,hehongliang/tensorflow,kevin-coder/tensorflow-fork,guschmue/tensorflow,alshedivat/tensorflow,Mistobaan/tensorflow,nburn42/tensorflow,aam-at/tensorflow,jostep/tensorflow,ZhangXinNan/tensorflow,Moriadry/tensorflow,suiyuan2009/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-opencl,chemelnucfin/tensorflow,dongjoon-hyun/tensorflow,nightjean/Deep-Learning,manazhao/tf_recsys,tornadozou/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alshedivat/tensorflow,jostep/tensorflow,jostep/tensorflow,JingJunYin/tensorflow,JVillella/tensorflow,laszlocsomor/tensorflow,chemelnucfin/tensorflow,av8ramit/tensorflow,Bulochkin/tensorflow_pack,av8ramit/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,a-doumoulakis/tensorflow,jbedorf/tensorflow,gunan/tensorflow,tiagofrepereira2012/tensorflow,mixturemodel-flow/tensorflow,ychfan/tensorflow,dyoung418/tensorflow,caisq/tensorflow,ishay2b/tensorflow,alivecor/tensorflow,renyi533/tensorflow,yufengg/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,codrut3/tensorflow,mdrumond/tensorflow,jendap/tensorflow,freedomtan/tensorflow,mdrumond/tensorflow,benoitsteiner/tensorflow,hsaputra/tensorflow,Kongsea/tensorflow,ZhangXinNan/tensorflow,jostep/tensorflow,hfp/tensorflow-xsmm,alsrgv/tensorflow,av8ramit/tensorflow,zycdragonball/tensorflow,xzturn/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,JVillella/tensorflow,suiyuan2009/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,av8ramit/tensorflow,aam-at/tensorflow,gautam1858/tensorflow,eadgarchen/tensorflow,hfp/tensorflow-xsmm,jbedorf/tensorflow,adit-chandra/tensorflow | import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
- c.NotebookApp.password = passwd(os.environ['PASSWORD'])
+ password = os.environ['PASSWORD']
+ if password:
+ c.NotebookApp.password = passwd(password)
+ else:
+ c.NotebookApp.password = ''
+ c.NotebookApp.token = ''
del os.environ['PASSWORD']
| Allow disabling password and token auth on jupyter notebooks | ## Code Before:
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
## Instruction:
Allow disabling password and token auth on jupyter notebooks
## Code After:
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
| import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
- c.NotebookApp.password = passwd(os.environ['PASSWORD'])
? -------------- ------- -
+ password = os.environ['PASSWORD']
+ if password:
+ c.NotebookApp.password = passwd(password)
+ else:
+ c.NotebookApp.password = ''
+ c.NotebookApp.token = ''
del os.environ['PASSWORD'] |
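
A minimal usage sketch of the pattern in the record above (the password value is invented for illustration; only IPython's passwd helper, which the config already imports, is assumed): a non-empty PASSWORD yields a hashed notebook password, while an explicitly empty PASSWORD leaves both password and token auth disabled.

import os
from IPython.lib import passwd               # same helper the record imports; hashes a plaintext password

os.environ['PASSWORD'] = 'example-secret'    # illustrative value, not taken from the record
if os.environ['PASSWORD']:
    hashed = passwd(os.environ['PASSWORD'])  # e.g. 'sha1:...'; exact prefix depends on the IPython version
else:
    hashed = ''                              # empty string means no password and no token check
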
06cf113cc45e7eaa8ab63e2791c2f2a0990ac946 | EasyEuler/data.py | EasyEuler/data.py | import json
import os
from jinja2 import Environment, FileSystemLoader
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_PATH, 'data')
TEMPLATE_PATH = os.path.join(BASE_PATH, 'templates')
CONFIG_PATH = os.path.join(BASE_PATH, 'config.json')
templates = Environment(loader=FileSystemLoader(TEMPLATE_PATH))
with open(CONFIG_PATH) as f:
config = json.load(f)
with open('%s/problems.json' % DATA_PATH) as f:
problems = json.load(f)
| import collections
import json
import os
from jinja2 import Environment, FileSystemLoader
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_PATH, 'data')
TEMPLATE_PATH = os.path.join(BASE_PATH, 'templates')
CONFIG_PATH = os.path.join(BASE_PATH, 'config.json')
with open('%s/problems.json' % DATA_PATH) as f:
problems = json.load(f)
class ConfigurationDictionary(collections.MutableMapping):
def __init__(self, config_paths):
self.config = {}
for config_path in config_paths:
if os.path.exists(config_path):
with open(config_path) as f:
self.config = self.update(self.config, json.load(f))
def update(self, config, updates):
for key, value in updates.items():
if isinstance(value, collections.Mapping):
updated = self.update(config.get(key, {}), value)
config[key] = updated
else:
config[key] = value
return config
def __getitem__(self, key):
return self.config[key]
def __setitem__(self, key, value):
self.config[key] = value
def __delitem__(self, key):
del self.config[key]
def __iter__(self):
return iter(self.config)
def __len__(self):
return len(self.config)
home = os.environ.get('HOME')
xdg_config_home = os.environ.get('XDG_CONFIG_HOME',
os.path.join(home, '.config'))
xdg_config_dirs = os.environ.get('XDG_CONFIG_DIRS', '/etc/xdg')
config_dirs = [xdg_config_home] + xdg_config_dirs.split(':')
config_paths = [os.path.join(config_dir, 'EasyEuler/config.json')
for config_dir in config_dirs if os.path.isabs(config_dir)]
template_paths = [os.path.join(config_dir, 'EasyEuler/templates')
for config_dir in config_dirs if os.path.isabs(config_dir)]
config_paths.append(CONFIG_PATH)
template_paths.append(TEMPLATE_PATH)
config = ConfigurationDictionary(reversed(config_paths))
templates = Environment(loader=FileSystemLoader(reversed(template_paths)))
| Add support for XDG spec configuration | Add support for XDG spec configuration
| Python | mit | Encrylize/EasyEuler | + import collections
import json
import os
from jinja2 import Environment, FileSystemLoader
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_PATH, 'data')
TEMPLATE_PATH = os.path.join(BASE_PATH, 'templates')
CONFIG_PATH = os.path.join(BASE_PATH, 'config.json')
- templates = Environment(loader=FileSystemLoader(TEMPLATE_PATH))
-
- with open(CONFIG_PATH) as f:
- config = json.load(f)
-
with open('%s/problems.json' % DATA_PATH) as f:
problems = json.load(f)
+
+ class ConfigurationDictionary(collections.MutableMapping):
+ def __init__(self, config_paths):
+ self.config = {}
+
+ for config_path in config_paths:
+ if os.path.exists(config_path):
+ with open(config_path) as f:
+ self.config = self.update(self.config, json.load(f))
+
+ def update(self, config, updates):
+ for key, value in updates.items():
+ if isinstance(value, collections.Mapping):
+ updated = self.update(config.get(key, {}), value)
+ config[key] = updated
+ else:
+ config[key] = value
+ return config
+
+ def __getitem__(self, key):
+ return self.config[key]
+
+ def __setitem__(self, key, value):
+ self.config[key] = value
+
+ def __delitem__(self, key):
+ del self.config[key]
+
+ def __iter__(self):
+ return iter(self.config)
+
+ def __len__(self):
+ return len(self.config)
+
+
+ home = os.environ.get('HOME')
+ xdg_config_home = os.environ.get('XDG_CONFIG_HOME',
+ os.path.join(home, '.config'))
+ xdg_config_dirs = os.environ.get('XDG_CONFIG_DIRS', '/etc/xdg')
+ config_dirs = [xdg_config_home] + xdg_config_dirs.split(':')
+ config_paths = [os.path.join(config_dir, 'EasyEuler/config.json')
+ for config_dir in config_dirs if os.path.isabs(config_dir)]
+ template_paths = [os.path.join(config_dir, 'EasyEuler/templates')
+ for config_dir in config_dirs if os.path.isabs(config_dir)]
+ config_paths.append(CONFIG_PATH)
+ template_paths.append(TEMPLATE_PATH)
+
+ config = ConfigurationDictionary(reversed(config_paths))
+ templates = Environment(loader=FileSystemLoader(reversed(template_paths)))
+ | Add support for XDG spec configuration | ## Code Before:
import json
import os
from jinja2 import Environment, FileSystemLoader
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_PATH, 'data')
TEMPLATE_PATH = os.path.join(BASE_PATH, 'templates')
CONFIG_PATH = os.path.join(BASE_PATH, 'config.json')
templates = Environment(loader=FileSystemLoader(TEMPLATE_PATH))
with open(CONFIG_PATH) as f:
config = json.load(f)
with open('%s/problems.json' % DATA_PATH) as f:
problems = json.load(f)
## Instruction:
Add support for XDG spec configuration
## Code After:
import collections
import json
import os
from jinja2 import Environment, FileSystemLoader
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_PATH, 'data')
TEMPLATE_PATH = os.path.join(BASE_PATH, 'templates')
CONFIG_PATH = os.path.join(BASE_PATH, 'config.json')
with open('%s/problems.json' % DATA_PATH) as f:
problems = json.load(f)
class ConfigurationDictionary(collections.MutableMapping):
def __init__(self, config_paths):
self.config = {}
for config_path in config_paths:
if os.path.exists(config_path):
with open(config_path) as f:
self.config = self.update(self.config, json.load(f))
def update(self, config, updates):
for key, value in updates.items():
if isinstance(value, collections.Mapping):
updated = self.update(config.get(key, {}), value)
config[key] = updated
else:
config[key] = value
return config
def __getitem__(self, key):
return self.config[key]
def __setitem__(self, key, value):
self.config[key] = value
def __delitem__(self, key):
del self.config[key]
def __iter__(self):
return iter(self.config)
def __len__(self):
return len(self.config)
home = os.environ.get('HOME')
xdg_config_home = os.environ.get('XDG_CONFIG_HOME',
os.path.join(home, '.config'))
xdg_config_dirs = os.environ.get('XDG_CONFIG_DIRS', '/etc/xdg')
config_dirs = [xdg_config_home] + xdg_config_dirs.split(':')
config_paths = [os.path.join(config_dir, 'EasyEuler/config.json')
for config_dir in config_dirs if os.path.isabs(config_dir)]
template_paths = [os.path.join(config_dir, 'EasyEuler/templates')
for config_dir in config_dirs if os.path.isabs(config_dir)]
config_paths.append(CONFIG_PATH)
template_paths.append(TEMPLATE_PATH)
config = ConfigurationDictionary(reversed(config_paths))
templates = Environment(loader=FileSystemLoader(reversed(template_paths)))
| + import collections
import json
import os
from jinja2 import Environment, FileSystemLoader
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_PATH, 'data')
TEMPLATE_PATH = os.path.join(BASE_PATH, 'templates')
CONFIG_PATH = os.path.join(BASE_PATH, 'config.json')
- templates = Environment(loader=FileSystemLoader(TEMPLATE_PATH))
-
- with open(CONFIG_PATH) as f:
- config = json.load(f)
-
with open('%s/problems.json' % DATA_PATH) as f:
problems = json.load(f)
+
+
+ class ConfigurationDictionary(collections.MutableMapping):
+ def __init__(self, config_paths):
+ self.config = {}
+
+ for config_path in config_paths:
+ if os.path.exists(config_path):
+ with open(config_path) as f:
+ self.config = self.update(self.config, json.load(f))
+
+ def update(self, config, updates):
+ for key, value in updates.items():
+ if isinstance(value, collections.Mapping):
+ updated = self.update(config.get(key, {}), value)
+ config[key] = updated
+ else:
+ config[key] = value
+ return config
+
+ def __getitem__(self, key):
+ return self.config[key]
+
+ def __setitem__(self, key, value):
+ self.config[key] = value
+
+ def __delitem__(self, key):
+ del self.config[key]
+
+ def __iter__(self):
+ return iter(self.config)
+
+ def __len__(self):
+ return len(self.config)
+
+
+ home = os.environ.get('HOME')
+ xdg_config_home = os.environ.get('XDG_CONFIG_HOME',
+ os.path.join(home, '.config'))
+ xdg_config_dirs = os.environ.get('XDG_CONFIG_DIRS', '/etc/xdg')
+ config_dirs = [xdg_config_home] + xdg_config_dirs.split(':')
+ config_paths = [os.path.join(config_dir, 'EasyEuler/config.json')
+ for config_dir in config_dirs if os.path.isabs(config_dir)]
+ template_paths = [os.path.join(config_dir, 'EasyEuler/templates')
+ for config_dir in config_dirs if os.path.isabs(config_dir)]
+ config_paths.append(CONFIG_PATH)
+ template_paths.append(TEMPLATE_PATH)
+
+ config = ConfigurationDictionary(reversed(config_paths))
+ templates = Environment(loader=FileSystemLoader(reversed(template_paths))) |
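
The nested merge that ConfigurationDictionary.update() performs in the record above can be seen in isolation with a small worked example (the dictionaries are invented; the real class reads JSON files found under the XDG config directories):

def merge(config, updates):                 # same recursion as ConfigurationDictionary.update()
    for key, value in updates.items():
        if isinstance(value, dict):
            config[key] = merge(config.get(key, {}), value)
        else:
            config[key] = value
    return config

defaults = {'language': 'python', 'editor': {'name': 'vim', 'tabwidth': 4}}
user_cfg = {'editor': {'tabwidth': 2}}      # e.g. from ~/.config/EasyEuler/config.json
print(merge(defaults, user_cfg))
# {'language': 'python', 'editor': {'name': 'vim', 'tabwidth': 2}}
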
697bf0c23786794e35b0b9f72c878bb762d296b9 | benches/cprofile_pyproj.py | benches/cprofile_pyproj.py | import numpy as np
from pyproj import Proj, transform
# London bounding box
N = 51.691874116909894
E = 0.3340155643740321
S = 51.28676016315085
W = -0.5103750689005356
osgb36 = Proj(init='epsg:27700')
wgs84 = Proj(init='epsg:4326')
num_coords = 1000000
lon_ls = np.random.uniform(W, E, [num_coords])
lat_ls = np.random.uniform(S, N, [num_coords])
if __name__ == "__main__":
for x in xrange(50):
transform(wgs84, osgb36, lon_ls, lat_ls)
| import numpy as np
from pyproj import Proj, transform
# London bounding box
N = 51.691874116909894
E = 0.3340155643740321
S = 51.28676016315085
W = -0.5103750689005356
# osgb36 = Proj(init='epsg:27700')
osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb')
wgs84 = Proj(init='epsg:4326')
num_coords = 1000000
lon_ls = np.random.uniform(W, E, [num_coords])
lat_ls = np.random.uniform(S, N, [num_coords])
if __name__ == "__main__":
for x in xrange(50):
transform(wgs84, osgb36, lon_ls, lat_ls)
| Use NTv2 transform for Pyproj | Use NTv2 transform for Pyproj
| Python | mit | urschrei/lonlat_bng,urschrei/rust_bng,urschrei/lonlat_bng,urschrei/rust_bng,urschrei/lonlat_bng | import numpy as np
from pyproj import Proj, transform
# London bounding box
N = 51.691874116909894
E = 0.3340155643740321
S = 51.28676016315085
W = -0.5103750689005356
- osgb36 = Proj(init='epsg:27700')
+ # osgb36 = Proj(init='epsg:27700')
+ osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb')
wgs84 = Proj(init='epsg:4326')
num_coords = 1000000
lon_ls = np.random.uniform(W, E, [num_coords])
lat_ls = np.random.uniform(S, N, [num_coords])
if __name__ == "__main__":
for x in xrange(50):
transform(wgs84, osgb36, lon_ls, lat_ls)
| Use NTv2 transform for Pyproj | ## Code Before:
import numpy as np
from pyproj import Proj, transform
# London bounding box
N = 51.691874116909894
E = 0.3340155643740321
S = 51.28676016315085
W = -0.5103750689005356
osgb36 = Proj(init='epsg:27700')
wgs84 = Proj(init='epsg:4326')
num_coords = 1000000
lon_ls = np.random.uniform(W, E, [num_coords])
lat_ls = np.random.uniform(S, N, [num_coords])
if __name__ == "__main__":
for x in xrange(50):
transform(wgs84, osgb36, lon_ls, lat_ls)
## Instruction:
Use NTv2 transform for Pyproj
## Code After:
import numpy as np
from pyproj import Proj, transform
# London bounding box
N = 51.691874116909894
E = 0.3340155643740321
S = 51.28676016315085
W = -0.5103750689005356
# osgb36 = Proj(init='epsg:27700')
osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb')
wgs84 = Proj(init='epsg:4326')
num_coords = 1000000
lon_ls = np.random.uniform(W, E, [num_coords])
lat_ls = np.random.uniform(S, N, [num_coords])
if __name__ == "__main__":
for x in xrange(50):
transform(wgs84, osgb36, lon_ls, lat_ls)
| import numpy as np
from pyproj import Proj, transform
# London bounding box
N = 51.691874116909894
E = 0.3340155643740321
S = 51.28676016315085
W = -0.5103750689005356
- osgb36 = Proj(init='epsg:27700')
+ # osgb36 = Proj(init='epsg:27700')
? ++
+ osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb')
wgs84 = Proj(init='epsg:4326')
num_coords = 1000000
lon_ls = np.random.uniform(W, E, [num_coords])
lat_ls = np.random.uniform(S, N, [num_coords])
if __name__ == "__main__":
for x in xrange(50):
transform(wgs84, osgb36, lon_ls, lat_ls) |
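
A single-point version of the benchmark above, as a sketch only (the coordinates are illustrative, and it assumes proj can locate the OSTN02_NTv2.gsb grid file on the local install; this is the older pyproj 1.x API that the record uses):

from pyproj import Proj, transform

wgs84 = Proj(init='epsg:4326')
osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb')     # NTv2 grid-shifted OSGB36
easting, northing = transform(wgs84, osgb36, -0.1278, 51.5074)  # lon, lat for a point near London
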
7e1ed9cca3e02488d8d189d22e6fca35c0bec108 | xmantissa/test/test_siteroot.py | xmantissa/test/test_siteroot.py |
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
| Fix boken test - install WebSite before trying to locateChild | Fix boken test - install WebSite before trying to locateChild | Python | mit | twisted/mantissa,twisted/mantissa,twisted/mantissa |
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
+ ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
| Fix boken test - install WebSite before trying to locateChild | ## Code Before:
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
## Instruction:
Fix boken test - install WebSite before trying to locateChild
## Code After:
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
+ ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',)) |
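
The ordering that the fix above enforces, reduced to its core (a sketch only; resolving 'hello' still assumes the PrefixTester plugins from the test were installed on the same store beforehand):

from axiom.store import Store
from xmantissa.website import WebSite

s = Store()              # in-memory Axiom store, as in the test
ws = WebSite(store=s)
ws.installOn(s)          # install first -- locateChild consults powerups registered on the store
res, segs = ws.locateChild(None, ('hello',))
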
4f84482803049b40d7b7da26d9d624a6a63b4820 | core/utils.py | core/utils.py | from django.utils import timezone
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = '{} hour{}'.format(h, 's' if h > 1 else '')
if m > 0 and precision != 'h':
duration += '{}{} minute{}'.format(
'' if duration == '' else ', ', m, 's' if m > 1 else '')
if s > 0 and precision != 'h' and precision != 'm':
duration += '{}{} second{}'.format(
'' if duration == '' else ', ', s, 's' if s > 1 else '')
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
| from django.utils import timezone
from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
| Add translation support to `duration_string` utility | Add translation support to `duration_string` utility
| Python | bsd-2-clause | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy | from django.utils import timezone
+ from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
- duration = '{} hour{}'.format(h, 's' if h > 1 else '')
+ duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
+ 'hours': h
+ }
if m > 0 and precision != 'h':
- duration += '{}{} minute{}'.format(
- '' if duration == '' else ', ', m, 's' if m > 1 else '')
+ if duration != '':
+ duration += ', '
+ duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
+ 'minutes': m
+ }
if s > 0 and precision != 'h' and precision != 'm':
- duration += '{}{} second{}'.format(
- '' if duration == '' else ', ', s, 's' if s > 1 else '')
+ if duration != '':
+ duration += ', '
+ duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
+ 'seconds': s
+ }
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
| Add translation support to `duration_string` utility | ## Code Before:
from django.utils import timezone
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = '{} hour{}'.format(h, 's' if h > 1 else '')
if m > 0 and precision != 'h':
duration += '{}{} minute{}'.format(
'' if duration == '' else ', ', m, 's' if m > 1 else '')
if s > 0 and precision != 'h' and precision != 'm':
duration += '{}{} second{}'.format(
'' if duration == '' else ', ', s, 's' if s > 1 else '')
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
## Instruction:
Add translation support to `duration_string` utility
## Code After:
from django.utils import timezone
from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
| from django.utils import timezone
+ from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
- duration = '{} hour{}'.format(h, 's' if h > 1 else '')
+ duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
+ 'hours': h
+ }
if m > 0 and precision != 'h':
- duration += '{}{} minute{}'.format(
- '' if duration == '' else ', ', m, 's' if m > 1 else '')
+ if duration != '':
+ duration += ', '
+ duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
+ 'minutes': m
+ }
if s > 0 and precision != 'h' and precision != 'm':
- duration += '{}{} second{}'.format(
- '' if duration == '' else ', ', s, 's' if s > 1 else '')
+ if duration != '':
+ duration += ', '
+ duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
+ 'seconds': s
+ }
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s |
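
A worked example of what the updated helper returns (the timedelta values are arbitrary; output assumes an English locale and a configured Django project, since ngettext only changes the strings once translations are active):

from datetime import timedelta
from core.utils import duration_string    # module path as in the record

print(duration_string(timedelta(hours=2, minutes=25, seconds=31)))
# -> "2 hours, 25 minutes, 31 seconds"
print(duration_string(timedelta(hours=1, minutes=1), precision='m'))
# -> "1 hour, 1 minute" (seconds suppressed by the precision argument)
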
bc8e064e41d43a4579c8111f1480b55e660ca186 | pep8ify/fixes/fix_tabs.py | pep8ify/fixes/fix_tabs.py | from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
SPACES = ' ' * 4
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
| from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
| Use globally define number of spaces. | Clean-up: Use globally define number of spaces.
| Python | apache-2.0 | spulec/pep8ify | from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
- SPACES = ' ' * 4
+ from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
| Use globally define number of spaces. | ## Code Before:
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
SPACES = ' ' * 4
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
## Instruction:
Use globally define number of spaces.
## Code After:
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
| from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
- SPACES = ' ' * 4
+ from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed() |
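
What the fixer above does to a source line, shown as a plain string sketch (the input is invented, and SPACES is assumed to stay at four spaces in pep8ify.fixes.utils; the real fixer rewrites lib2to3 tree nodes rather than raw text):

SPACES = ' ' * 4                         # assumed to match the shared constant in .utils
source = "def f():\n\tif True:\n\t\treturn 1\n"
print(source.replace('\t', SPACES))      # tabs expanded to spaces, mirroring the node-level replace
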
b19951bcf2035c9e755ad731e4f5081cf5f0d46f | troposphere/codeguruprofiler.py | troposphere/codeguruprofiler.py |
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
|
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
| Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes | Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
| Python | bsd-2-clause | cloudtools/troposphere,cloudtools/troposphere |
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
+ 'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
| Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes | ## Code Before:
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
## Instruction:
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
## Code After:
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
+ 'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
} |
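
A minimal template sketch using the new property (the Principals payload shape and the ARN are assumptions based on CloudFormation's AgentPermissions structure, not taken from the record):

from troposphere import Template
from troposphere.codeguruprofiler import ProfilingGroup

t = Template()
t.add_resource(ProfilingGroup(
    'MyProfilingGroup',
    ProfilingGroupName='my-app-profiling-group',
    AgentPermissions={'Principals': ['arn:aws:iam::123456789012:role/ExampleAgentRole']},  # hypothetical role ARN
))
print(t.to_json())
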
9c48cd08ee0805cfd9a8115d77da139e8c09d7a9 | plyer/platforms/linux/cpu.py | plyer/platforms/linux/cpu.py | from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
| from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
| Add CPU count for GNU/Linux | Add CPU count for GNU/Linux
| Python | mit | kivy/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer | from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
+ physical = [] # list of CPU ids from kernel
+ # open Linux kernel data file for CPU
+ with open('/proc/cpuinfo', 'rb') as fle:
+ lines = fle.readlines()
+ # go through the lines and obtain CPU core ids
+ for line in lines:
+ line = line.decode('utf-8')
+ if 'core id' not in line:
+ continue
+ cpuid = line.split(':')[1].strip()
+ physical.append(cpuid)
+ # total cores (socket * core per socket)
+ # is the length of unique CPU ids from kernel
+ physical = len(set(physical))
+ cpus['physical'] = physical
+
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
- environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
+
+ environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
| Add CPU count for GNU/Linux | ## Code Before:
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
## Instruction:
Add CPU count for GNU/Linux
## Code After:
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
| from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
+ physical = [] # list of CPU ids from kernel
+ # open Linux kernel data file for CPU
+ with open('/proc/cpuinfo', 'rb') as fle:
+ lines = fle.readlines()
+ # go through the lines and obtain CPU core ids
+ for line in lines:
+ line = line.decode('utf-8')
+ if 'core id' not in line:
+ continue
+ cpuid = line.split(':')[1].strip()
+ physical.append(cpuid)
+ # total cores (socket * core per socket)
+ # is the length of unique CPU ids from kernel
+ physical = len(set(physical))
+ cpus['physical'] = physical
+
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
- environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
+
+ environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU() |
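
The physical-core count added above, as a standalone sketch (requires Linux's /proc/cpuinfo; like the record it keys only on 'core id', so it may undercount on multi-socket machines where core ids repeat per package):

def physical_cores():
    ids = set()
    with open('/proc/cpuinfo') as fle:
        for line in fle:
            if 'core id' in line:
                ids.add(line.split(':')[1].strip())
    return len(ids)

print(physical_cores())
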
fba983fa54691fcde0de93d6519b3906dff3cb32 | sara_flexbe_states/src/sara_flexbe_states/get_distance2D.py | sara_flexbe_states/src/sara_flexbe_states/get_distance2D.py |
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
|
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
| Correct call to super constructor | Correct call to super constructor
| Python | bsd-3-clause | WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors |
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
- super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
+ super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
| Correct call to super constructor | ## Code Before:
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
## Instruction:
Correct call to super constructor
## Code After:
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
|
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
- super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
? ^ ^^^^ ---------
+ super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
? ^ ^^^^^^^
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2)) |
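
The fix in this record hinges on how Python 2-style `super()` works: the first argument must be the class currently being defined, otherwise the call either raises `NameError` (as with the stray `GetNumberFromText` here) or walks the wrong MRO. A minimal sketch of the corrected pattern, with illustrative names rather than the FlexBE classes:

```python
class BaseState(object):
    def __init__(self, outcomes):
        self.outcomes = outcomes

class GetDistanceState(BaseState):
    def __init__(self):
        # The class being defined must be named explicitly in Python 2.
        super(GetDistanceState, self).__init__(outcomes=['done'])
```

For the distance itself, `math.hypot(p2.x - p1.x, p2.y - p1.y)` computes the same value as the `sqrt`/`pow` expression above.
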
d4cfe4c9d5ff680a85c25c144b077d928386811c | onetime/backends.py | onetime/backends.py | from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| Use filter() instead of get() since the key might be invalid | Use filter() instead of get() since the key might be invalid
| Python | agpl-3.0 | ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website | from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
- data = Key.objects.get(key=key)
+ data = Key.objects.filter(key=key)
- if data is None:
+ if len(data) == 0:
- None
+ return None
+ data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| Use filter() instead of get() since the key might be invalid | ## Code Before:
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
## Instruction:
Use filter() instead of get() since the key might be invalid
## Code After:
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
- data = Key.objects.get(key=key)
? ^ ^
+ data = Key.objects.filter(key=key)
? ^^^^ ^
- if data is None:
+ if len(data) == 0:
- None
+ return None
? +++++++
+ data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
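
The rationale in this record is that `Model.objects.get()` raises `Key.DoesNotExist` for a missing key instead of returning `None`. Two equivalent idioms, shown as a sketch that assumes the same `Key` model and, for the first one, a Django version providing `QuerySet.first()` (1.6+):

```python
from onetime.models import Key

def user_for_key(key):
    data = Key.objects.filter(key=key).first()   # None when nothing matches
    if data is not None and data.is_valid():
        return data.user
    return None

def user_for_key_using_get(key):
    try:
        data = Key.objects.get(key=key)           # raises for a missing key
    except Key.DoesNotExist:
        return None
    return data.user if data.is_valid() else None
```
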
|
056b4ae938ab1aacf5e3f48a1e17919a79ff29b7 | scripts/sbatch_cancel.py | scripts/sbatch_cancel.py | import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
| import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(target_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
| Kill multiple chains at once. | Kill multiple chains at once.
| Python | mit | nyu-mll/spinn,nyu-mll/spinn,nyu-mll/spinn | import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
- ### Usage: python sbatch_cancel.py [Name of first running job in chain]
+ ### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
- CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
- to_kill = [CURRENT_JOB]
+ for current_job in sys.argv[1].split():
+ if len(target_job) < 5:
+ continue
+ to_kill = [current_job]
- for line in lines:
+ for line in lines:
- s = line.split()
+ s = line.split()
- if (len(s) > 0) and (to_kill[-1] in s[2]):
+ if (len(s) > 0) and (to_kill[-1] in s[2]):
- to_kill.append(s[1])
+ to_kill.append(s[1])
- subprocess.call(['scancel'] + to_kill)
+ subprocess.call(['scancel'] + to_kill)
- | Kill multiple chains at once. | ## Code Before:
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
## Instruction:
Kill multiple chains at once.
## Code After:
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(target_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
| import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
- ### Usage: python sbatch_cancel.py [Name of first running job in chain]
+ ### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
- CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
- to_kill = [CURRENT_JOB]
+ for current_job in sys.argv[1].split():
+ if len(target_job) < 5:
+ continue
+ to_kill = [current_job]
- for line in lines:
+ for line in lines:
? +
- s = line.split()
+ s = line.split()
? +
- if (len(s) > 0) and (to_kill[-1] in s[2]):
+ if (len(s) > 0) and (to_kill[-1] in s[2]):
? +
- to_kill.append(s[1])
+ to_kill.append(s[1])
? +
- subprocess.call(['scancel'] + to_kill)
+ subprocess.call(['scancel'] + to_kill)
? +
- |
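
A variant of the same chain-cancelling loop that takes each chain's head job as its own argument (`python sbatch_cancel.py jobA jobB ...`) rather than one space-separated string. This is a sketch only, written for Python 2 like the script above, and it reuses the same `squeue` output format and dependency-walking logic without re-validating them:

```python
import getpass
import subprocess
import sys

lines = subprocess.check_output(
    ['squeue', '-u', getpass.getuser(), '-o', '"%.8A %.20E"']).split('\n')
lines.sort()

for current_job in sys.argv[1:]:
    to_kill = [current_job]
    for line in lines:
        s = line.split()
        if len(s) > 2 and to_kill[-1] in s[2]:
            to_kill.append(s[1])
    subprocess.call(['scancel'] + to_kill)
```
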
411decbdb193b28bb3060e02e81bfa29483e85a9 | staticgen_demo/blog/staticgen_views.py | staticgen_demo/blog/staticgen_views.py |
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
| Remove debug code from staticgen views. | Remove debug code from staticgen views.
| Python | bsd-3-clause | mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo |
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
- def _get_paginator(self, url):
- response = self.client.get(url)
- print 'status_code: %s' % response.status_code
- if not response.status_code == 200:
- pass
- else:
- context = {}
- if hasattr(response, 'context_data'):
- context = response.context_data
- elif hasattr(response, 'context'):
- context = response.context
-
- print context
- try:
- return context['paginator'], context['is_paginated']
- except KeyError:
- pass
- return None, False
-
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
| Remove debug code from staticgen views. | ## Code Before:
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
## Instruction:
Remove debug code from staticgen views.
## Code After:
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
- def _get_paginator(self, url):
- response = self.client.get(url)
- print 'status_code: %s' % response.status_code
- if not response.status_code == 200:
- pass
- else:
- context = {}
- if hasattr(response, 'context_data'):
- context = response.context_data
- elif hasattr(response, 'context'):
- context = response.context
-
- print context
- try:
- return context['paginator'], context['is_paginated']
- except KeyError:
- pass
- return None, False
-
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
ed40088b5e913e70c161e8148ab76fdc0b6c5c46 | clt_utils/argparse.py | clt_utils/argparse.py | from datetime import datetime
import argparse
import os
def is_dir(string):
"""
Type check for a valid directory for ArgumentParser.
"""
if not os.path.isdir(string):
msg = '{0} is not a directory'.format(string)
raise argparse.ArgumentTypeError(msg)
return string
def is_file(string):
"""
Type check for a valid file for ArgumentParser.
"""
if not os.path.isfile(string):
msg = u'{0} is not a file'.format(string)
raise argparse.ArgumentTypeError(msg)
return string
def gt_zero(string):
"""
Type check for int > 0 for ArgumentParser.
"""
if not int(string) > 0:
msg = u'limit must be > 0'
raise argparse.ArgumentTypeError(msg)
return int(string)
def isodate(string):
try:
return datetime.strptime(string, '%Y-%m-%d').date()
except ValueError:
msg = u'date input must in the format of yyyy-mm-dd'
raise argparse.ArgumentTypeError(msg)
| from __future__ import absolute_import
from datetime import datetime
import argparse as _argparse
import os
def is_dir(string):
"""
Type check for a valid directory for ArgumentParser.
"""
if not os.path.isdir(string):
msg = '{0} is not a directory'.format(string)
raise _argparse.ArgumentTypeError(msg)
return string
def is_file(string):
"""
Type check for a valid file for ArgumentParser.
"""
if not os.path.isfile(string):
msg = u'{0} is not a file'.format(string)
raise _argparse.ArgumentTypeError(msg)
return string
def gt_zero(string):
"""
Type check for int > 0 for ArgumentParser.
"""
if not int(string) > 0:
msg = u'limit must be > 0'
raise _argparse.ArgumentTypeError(msg)
return int(string)
def isodate(string):
try:
return datetime.strptime(string, '%Y-%m-%d').date()
except ValueError:
msg = u'date input must in the format of yyyy-mm-dd'
raise _argparse.ArgumentTypeError(msg)
| Fix bug with self reference | Fix bug with self reference
| Python | apache-2.0 | 55minutes/clt-utils | + from __future__ import absolute_import
+
from datetime import datetime
- import argparse
+ import argparse as _argparse
import os
def is_dir(string):
"""
Type check for a valid directory for ArgumentParser.
"""
if not os.path.isdir(string):
msg = '{0} is not a directory'.format(string)
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
return string
def is_file(string):
"""
Type check for a valid file for ArgumentParser.
"""
if not os.path.isfile(string):
msg = u'{0} is not a file'.format(string)
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
return string
def gt_zero(string):
"""
Type check for int > 0 for ArgumentParser.
"""
if not int(string) > 0:
msg = u'limit must be > 0'
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
return int(string)
def isodate(string):
try:
return datetime.strptime(string, '%Y-%m-%d').date()
except ValueError:
msg = u'date input must in the format of yyyy-mm-dd'
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
| Fix bug with self reference | ## Code Before:
from datetime import datetime
import argparse
import os
def is_dir(string):
"""
Type check for a valid directory for ArgumentParser.
"""
if not os.path.isdir(string):
msg = '{0} is not a directory'.format(string)
raise argparse.ArgumentTypeError(msg)
return string
def is_file(string):
"""
Type check for a valid file for ArgumentParser.
"""
if not os.path.isfile(string):
msg = u'{0} is not a file'.format(string)
raise argparse.ArgumentTypeError(msg)
return string
def gt_zero(string):
"""
Type check for int > 0 for ArgumentParser.
"""
if not int(string) > 0:
msg = u'limit must be > 0'
raise argparse.ArgumentTypeError(msg)
return int(string)
def isodate(string):
try:
return datetime.strptime(string, '%Y-%m-%d').date()
except ValueError:
msg = u'date input must in the format of yyyy-mm-dd'
raise argparse.ArgumentTypeError(msg)
## Instruction:
Fix bug with self reference
## Code After:
from __future__ import absolute_import
from datetime import datetime
import argparse as _argparse
import os
def is_dir(string):
"""
Type check for a valid directory for ArgumentParser.
"""
if not os.path.isdir(string):
msg = '{0} is not a directory'.format(string)
raise _argparse.ArgumentTypeError(msg)
return string
def is_file(string):
"""
Type check for a valid file for ArgumentParser.
"""
if not os.path.isfile(string):
msg = u'{0} is not a file'.format(string)
raise _argparse.ArgumentTypeError(msg)
return string
def gt_zero(string):
"""
Type check for int > 0 for ArgumentParser.
"""
if not int(string) > 0:
msg = u'limit must be > 0'
raise _argparse.ArgumentTypeError(msg)
return int(string)
def isodate(string):
try:
return datetime.strptime(string, '%Y-%m-%d').date()
except ValueError:
msg = u'date input must in the format of yyyy-mm-dd'
raise _argparse.ArgumentTypeError(msg)
| + from __future__ import absolute_import
+
from datetime import datetime
- import argparse
+ import argparse as _argparse
import os
def is_dir(string):
"""
Type check for a valid directory for ArgumentParser.
"""
if not os.path.isdir(string):
msg = '{0} is not a directory'.format(string)
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
? +
return string
def is_file(string):
"""
Type check for a valid file for ArgumentParser.
"""
if not os.path.isfile(string):
msg = u'{0} is not a file'.format(string)
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
? +
return string
def gt_zero(string):
"""
Type check for int > 0 for ArgumentParser.
"""
if not int(string) > 0:
msg = u'limit must be > 0'
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
? +
return int(string)
def isodate(string):
try:
return datetime.strptime(string, '%Y-%m-%d').date()
except ValueError:
msg = u'date input must in the format of yyyy-mm-dd'
- raise argparse.ArgumentTypeError(msg)
+ raise _argparse.ArgumentTypeError(msg)
? +
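
The underlying problem in this record is name shadowing: under Python 2's implicit relative imports, `import argparse` inside a package module that is itself named `argparse.py` resolves to that module, so attributes such as `ArgumentTypeError` are looked up on the wrong object. `from __future__ import absolute_import` (PEP 328) forces standard-library resolution, and the `_argparse` alias keeps later references unambiguous. A minimal sketch with a hypothetical package layout, not the clt_utils tree:

```python
# mypkg/
#   __init__.py
#   argparse.py   <- this module shadows the stdlib under Python 2
from __future__ import absolute_import   # resolve "argparse" to the stdlib
import argparse as _argparse              # alias avoids further shadowing

def positive_int(value):
    if int(value) <= 0:
        raise _argparse.ArgumentTypeError('value must be > 0')
    return int(value)
```
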
|
b2155e167b559367bc24ba614f51360793951f12 | mythril/support/source_support.py | mythril/support/source_support.py | from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
self.meta = ""
| from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
| Remove meta from source class (belongs to issue not source) | Remove meta from source class (belongs to issue not source)
| Python | mit | b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril | from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
- self.meta = ""
| Remove meta from source class (belongs to issue not source) | ## Code Before:
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
self.meta = ""
## Instruction:
Remove meta from source class (belongs to issue not source)
## Code After:
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
| from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
- self.meta = "" |
c94f7e5f2c838c3fdd007229175da680de256b04 | tests/configurations/nginx/tests_file_size_limit.py | tests/configurations/nginx/tests_file_size_limit.py |
import subprocess
import nose.tools as nt
from tests import TestPortalAndino
class TestFileSizeLimit(TestPortalAndino.TestPortalAndino):
@classmethod
def setUpClass(cls):
super(TestFileSizeLimit, cls).setUpClass()
def test_nginx_configuration_uses_1024_MB_as_file_size_limit(self):
size_line = subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | '
'grep client_max_body_size', shell=True).strip()
print subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf',
shell=True).strip()
nt.assert_true('1024' in size_line)
|
import subprocess
import nose.tools as nt
from tests import TestPortalAndino
class TestFileSizeLimit(TestPortalAndino.TestPortalAndino):
@classmethod
def setUpClass(cls):
super(TestFileSizeLimit, cls).setUpClass()
def test_nginx_configuration_uses_1024_MB_as_file_size_limit(self):
size_line = subprocess.check_output(
'docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | grep client_max_body_size', shell=True)
nt.assert_true('1024' in size_line)
| Revert "Hago un strip del output de subprocess" | Revert "Hago un strip del output de subprocess"
This reverts commit f5f21d78d87be641617a7cb920d0869975175e58.
| Python | mit | datosgobar/portal-andino,datosgobar/portal-andino |
import subprocess
import nose.tools as nt
from tests import TestPortalAndino
class TestFileSizeLimit(TestPortalAndino.TestPortalAndino):
@classmethod
def setUpClass(cls):
super(TestFileSizeLimit, cls).setUpClass()
def test_nginx_configuration_uses_1024_MB_as_file_size_limit(self):
+ size_line = subprocess.check_output(
+ 'docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | grep client_max_body_size', shell=True)
- size_line = subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | '
- 'grep client_max_body_size', shell=True).strip()
- print subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf',
- shell=True).strip()
nt.assert_true('1024' in size_line)
| Revert "Hago un strip del output de subprocess" | ## Code Before:
import subprocess
import nose.tools as nt
from tests import TestPortalAndino
class TestFileSizeLimit(TestPortalAndino.TestPortalAndino):
@classmethod
def setUpClass(cls):
super(TestFileSizeLimit, cls).setUpClass()
def test_nginx_configuration_uses_1024_MB_as_file_size_limit(self):
size_line = subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | '
'grep client_max_body_size', shell=True).strip()
print subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf',
shell=True).strip()
nt.assert_true('1024' in size_line)
## Instruction:
Revert "Hago un strip del output de subprocess"
## Code After:
import subprocess
import nose.tools as nt
from tests import TestPortalAndino
class TestFileSizeLimit(TestPortalAndino.TestPortalAndino):
@classmethod
def setUpClass(cls):
super(TestFileSizeLimit, cls).setUpClass()
def test_nginx_configuration_uses_1024_MB_as_file_size_limit(self):
size_line = subprocess.check_output(
'docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | grep client_max_body_size', shell=True)
nt.assert_true('1024' in size_line)
|
import subprocess
import nose.tools as nt
from tests import TestPortalAndino
class TestFileSizeLimit(TestPortalAndino.TestPortalAndino):
@classmethod
def setUpClass(cls):
super(TestFileSizeLimit, cls).setUpClass()
def test_nginx_configuration_uses_1024_MB_as_file_size_limit(self):
+ size_line = subprocess.check_output(
+ 'docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | grep client_max_body_size', shell=True)
- size_line = subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf | '
- 'grep client_max_body_size', shell=True).strip()
- print subprocess.check_output('docker exec -it andino-nginx cat /etc/nginx/conf.d/default.conf',
- shell=True).strip()
nt.assert_true('1024' in size_line) |
e478a70549164bee7351f01c161a8b0ef6f8c1c8 | dashboard/src/api.py | dashboard/src/api.py | import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server does not sent error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
| """Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
| Add staticmethod annotation + docstrings to module, class, and all public methods | Add staticmethod annotation + docstrings to module, class, and all public methods
| Python | apache-2.0 | jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common | + """Module with class representing common API."""
import requests
import os
class Api:
+ """Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
+ """Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
+ """Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
+ @staticmethod
def add_slash(url):
+ """Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
+ """Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
- error_message = response.json().get(message_key, "Server does not sent error message")
+ """Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
| Add staticmethod annotation + docstrings to module, class, and all public methods | ## Code Before:
import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server does not sent error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
## Instruction:
Add staticmethod annotation + docstrings to module, class, and all public methods
## Code After:
"""Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
| + """Module with class representing common API."""
import requests
import os
class Api:
+ """Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
+ """Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
+ """Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
+ @staticmethod
def add_slash(url):
+ """Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
+ """Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
- error_message = response.json().get(message_key, "Server does not sent error message")
+ """Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message)) |
fbf2a59d9cf25c3d3a041afa839d0d44f6f385a5 | win_unc/internal/utils.py | win_unc/internal/utils.py |
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
|
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
| Return None explicitly instead of implicitly | Return None explicitly instead of implicitly
| Python | mit | CovenantEyes/py_win_unc,nithinphilips/py_win_unc |
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
+ return None
| Return None explicitly instead of implicitly | ## Code Before:
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
## Instruction:
Return None explicitly instead of implicitly
## Code After:
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
|
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
+ return None |
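
A short usage sketch for the `catch()` helper above (not taken from the win_unc test suite), showing what the explicit `return None` gives callers: a raised exception comes back as an object, success comes back as `None`.

```python
err = catch(int, 'not-a-number')
if err is not None:
    print('conversion failed: {0}'.format(err))   # a ValueError instance

assert catch(int, '42') is None                    # no exception -> None
```
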
c6d50c3feed444f8f450c5c140e8470c6897f2bf | societies/models.py | societies/models.py |
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
|
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return self.name
def __repr__(self):
return 'GuitarSociety("{}")'.format(self.name)
| Make the Guitar Society __str__ Method a bit more Logical | Make the Guitar Society __str__ Method a bit more Logical
| Python | bsd-3-clause | chrisguitarguy/GuitarSocieties.org,chrisguitarguy/GuitarSocieties.org |
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
- return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
+ return self.name
+ def __repr__(self):
+ return 'GuitarSociety("{}")'.format(self.name)
+ | Make the Guitar Society __str__ Method a bit more Logical | ## Code Before:
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
## Instruction:
Make the Guitar Society __str__ Method a bit more Logical
## Code After:
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return self.name
def __repr__(self):
return 'GuitarSociety("{}")'.format(self.name)
|
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
+ return self.name
+
+ def __repr__(self):
- return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
? ---------------- -----------
+ return 'GuitarSociety("{}")'.format(self.name) |
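
The split in this record follows the usual Python convention: `__str__` is the human-readable form (what Django templates and admin listings display), while `__repr__` stays unambiguous for debugging. An illustrative, unsaved instance shows the difference:

```python
society = GuitarSociety(name='Example Guitar Society',
                        link='https://example.org')
str(society)    # 'Example Guitar Society'
repr(society)   # 'GuitarSociety("Example Guitar Society")'
```
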
9c6739830ea8ccfbe697bc691de001a42f01f9c6 | serial_protocol/test.py | serial_protocol/test.py | import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
# Write OpCode
ser.write('1')
# Write Left Motor Direction
if (left >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Left Motor Speed
ser.write(bytes(abs(left * 255)))
# Write Right Motor Direction
if (right >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Right Motor Speed
ser.write(bytes(abs(right * 255)))
# Pad message to 9 bytes
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| Write each byte at a time in protocol | Write each byte at a time in protocol
| Python | mit | zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9 | import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
+ # Write OpCode
ser.write('1')
- #ser.write('0' if left >= 0 else '1')
- #ser.write(struct.pack("B", abs(left) * 255))
- #ser.write('0' if right >= 0 else '1')
- #ser.write(struct.pack("B", abs(right) * 255))
- ser.write('0')
+ # Write Left Motor Direction
+ if (left >= 0):
- ser.write(bytes(255))
+ ser.write(bytes(0))
- ser.write('0')
+ else:
- ser.write(bytes(255))
+ ser.write(bytes(1))
+ # Write Left Motor Speed
+ ser.write(bytes(abs(left * 255)))
- ser.write('0')
- ser.write('0')
- ser.write('0')
- ser.write('0')
+ # Write Right Motor Direction
+ if (right >= 0):
+ ser.write(bytes(0))
+ else:
+ ser.write(bytes(1))
+
+ # Write Right Motor Speed
+ ser.write(bytes(abs(right * 255)))
+
+ # Pad message to 9 bytes
+ ser.write(bytes(0))
+ ser.write(bytes(0))
+ ser.write(bytes(0))
+ ser.write(bytes(0))
+
- print('test')
+ print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| Write each byte at a time in protocol | ## Code Before:
import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
## Instruction:
Write each byte at a time in protocol
## Code After:
import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
# Write OpCode
ser.write('1')
# Write Left Motor Direction
if (left >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Left Motor Speed
ser.write(bytes(abs(left * 255)))
# Write Right Motor Direction
if (right >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Right Motor Speed
ser.write(bytes(abs(right * 255)))
# Pad message to 9 bytes
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
+ # Write OpCode
ser.write('1')
- #ser.write('0' if left >= 0 else '1')
- #ser.write(struct.pack("B", abs(left) * 255))
- #ser.write('0' if right >= 0 else '1')
- #ser.write(struct.pack("B", abs(right) * 255))
- ser.write('0')
+ # Write Left Motor Direction
+ if (left >= 0):
- ser.write(bytes(255))
? ^^^
+ ser.write(bytes(0))
? ++ ^
- ser.write('0')
+ else:
- ser.write(bytes(255))
? ^^^
+ ser.write(bytes(1))
? ++ ^
+ # Write Left Motor Speed
+ ser.write(bytes(abs(left * 255)))
- ser.write('0')
- ser.write('0')
- ser.write('0')
- ser.write('0')
+ # Write Right Motor Direction
+ if (right >= 0):
+ ser.write(bytes(0))
+ else:
+ ser.write(bytes(1))
+
+ # Write Right Motor Speed
+ ser.write(bytes(abs(right * 255)))
+
+ # Pad message to 9 bytes
+ ser.write(bytes(0))
+ ser.write(bytes(0))
+ ser.write(bytes(0))
+ ser.write(bytes(0))
+
- print('test')
? ^
+ print('Test')
? ^
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0) |
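
One caveat when adapting the framing above: under Python 2, `bytes` is an alias for `str`, so `bytes(255)` produces the three-character string `'255'` rather than a single `0xFF` byte, and `bytes(abs(left * 255))` a float's string form. If the receiving firmware expects each field as one raw byte, which the fixed-width 9-byte frame suggests, `struct.pack` makes that explicit. A sketch assuming the `ser` connection established above:

```python
import struct

def send_drive(ser, left, right):
    if not (-1.0 <= left <= 1.0 and -1.0 <= right <= 1.0):
        raise ValueError('motor speeds must be within [-1.0, 1.0]')
    frame = struct.pack(
        '9B',
        ord('1'),                    # opcode: drive command
        0 if left >= 0 else 1,       # left motor direction
        int(abs(left) * 255),        # left motor speed, 0-255
        0 if right >= 0 else 1,      # right motor direction
        int(abs(right) * 255),       # right motor speed, 0-255
        0, 0, 0, 0)                  # pad the frame to 9 bytes
    ser.write(frame)
```
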
d37f91f50dd6c0c3202258daca95ee6ee111688f | pyjswidgets/pyjamas/ui/Focus.oldmoz.py | pyjswidgets/pyjamas/ui/Focus.oldmoz.py |
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
ensureFocusHandler()
return createFocusable0()
|
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
return createFocusable0(ensureFocusHandler())
| Fix for IE 11 (Focus) | Fix for IE 11 (Focus)
IE presents itself as mozilla, so it trips on the bug.
| Python | apache-2.0 | gpitel/pyjs,spaceone/pyjs,lancezlin/pyjs,spaceone/pyjs,lancezlin/pyjs,pombredanne/pyjs,pyjs/pyjs,Hasimir/pyjs,gpitel/pyjs,pyjs/pyjs,spaceone/pyjs,pyjs/pyjs,Hasimir/pyjs,lancezlin/pyjs,pyjs/pyjs,pombredanne/pyjs,gpitel/pyjs,Hasimir/pyjs,gpitel/pyjs,spaceone/pyjs,pombredanne/pyjs,lancezlin/pyjs,pombredanne/pyjs,Hasimir/pyjs |
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
+ return createFocusable0(ensureFocusHandler())
- ensureFocusHandler()
- return createFocusable0()
- | Fix for IE 11 (Focus) | ## Code Before:
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
ensureFocusHandler()
return createFocusable0()
## Instruction:
Fix for IE 11 (Focus)
## Code After:
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
return createFocusable0(ensureFocusHandler())
|
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
+ return createFocusable0(ensureFocusHandler())
- ensureFocusHandler()
- return createFocusable0()
- |
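
The commit message in the row above notes that IE 11 identifies itself as Mozilla, which is why it ends up on this oldmoz code path. The sketch below is purely illustrative (it is not the actual pyjamas platform-detection code) and shows how naive user-agent sniffing routes IE 11 to a Mozilla branch.

def pick_focus_impl(user_agent):
    # Illustrative dispatch only: substring sniffing misclassifies IE 11,
    # whose UA string starts with "Mozilla/5.0" and says "like Gecko"
    # instead of "MSIE".
    ua = user_agent.lower()
    if 'msie' in ua:
        return 'ie6'
    if 'mozilla' in ua:
        return 'oldmoz'
    return 'default'

# IE 11 reports something like:
#   "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko"
# so the check above selects 'oldmoz' even though the browser is IE.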
5fe53a31bd7f37f8d9bd4fbe3796c8a0fa85019a | storm/db.py | storm/db.py | import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| import motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| Make sure looking up by id works correctly | Make sure looking up by id works correctly
| Python | mit | ccampbell/storm,liujiantong/storm | import motor
import error
from tornado import gen
+ from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
- def select_one(self, table, **args):
+ def select_one(self, table, **kwargs):
self.connect()
+ if '_id' in kwargs:
+ kwargs['_id'] = ObjectId(kwargs['_id'])
+
- result = yield motor.Op(getattr(self.db, table).find_one, args)
+ result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
- raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
+ raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
- callback = args.get('callback')
+ callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| Make sure looking up by id works correctly | ## Code Before:
import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
## Instruction:
Make sure looking up by id works correctly
## Code After:
import motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| import motor
import error
from tornado import gen
+ from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
- def select_one(self, table, **args):
+ def select_one(self, table, **kwargs):
? ++
self.connect()
+ if '_id' in kwargs:
+ kwargs['_id'] = ObjectId(kwargs['_id'])
+
- result = yield motor.Op(getattr(self.db, table).find_one, args)
+ result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
? ++++
if result is None:
- raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
+ raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
? ++
- callback = args.get('callback')
+ callback = kwargs.get('callback')
? ++
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result) |
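
The change in the row above wraps a string _id in ObjectId before querying, because MongoDB stores document ids as BSON ObjectIds and a plain string filter would not match them. Below is a small standalone sketch of the same conversion; the helper name and the example id are assumptions for illustration only.

from bson.objectid import ObjectId

def normalise_id_filter(kwargs):
    # A raw string '_id' will not match documents keyed by ObjectId,
    # so convert it before handing the filter to find_one().
    if '_id' in kwargs and not isinstance(kwargs['_id'], ObjectId):
        kwargs['_id'] = ObjectId(kwargs['_id'])
    return kwargs

# e.g. normalise_id_filter({'_id': '5fe53a31bd7f37f8d9bd4fbe'})
# -> {'_id': ObjectId('5fe53a31bd7f37f8d9bd4fbe')}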