commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
b4e5a284201d6d25607ff54aedcf6082e8a4d621
|
st2client/st2client/models/reactor.py
|
st2client/st2client/models/reactor.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Rule(core.Resource):
_plural = 'Rules'
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Trigger(core.Resource):
_alias = 'TriggerSpecification'
_display_name = 'TriggerSpecification'
_plural = 'Triggers'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Rule(core.Resource):
_plural = 'Rules'
|
Add Trigger model to client and alias it as TriggerSpecification.
|
Add Trigger model to client and alias it as TriggerSpecification.
|
Python
|
apache-2.0
|
pinterb/st2,peak6/st2,pixelrebel/st2,jtopjian/st2,pixelrebel/st2,alfasin/st2,pinterb/st2,Itxaka/st2,Plexxi/st2,lakshmi-kannan/st2,Itxaka/st2,grengojbo/st2,Plexxi/st2,jtopjian/st2,punalpatel/st2,punalpatel/st2,Plexxi/st2,nzlosh/st2,armab/st2,StackStorm/st2,punalpatel/st2,dennybaa/st2,nzlosh/st2,pixelrebel/st2,peak6/st2,armab/st2,jtopjian/st2,alfasin/st2,emedvedev/st2,tonybaloney/st2,emedvedev/st2,armab/st2,emedvedev/st2,pinterb/st2,dennybaa/st2,tonybaloney/st2,lakshmi-kannan/st2,dennybaa/st2,alfasin/st2,tonybaloney/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,peak6/st2,Itxaka/st2,grengojbo/st2,lakshmi-kannan/st2,grengojbo/st2,StackStorm/st2,StackStorm/st2
|
0903b18d1e4213cb88aa8cfcd0eb473ae54aa40b
|
shop/models/fields.py
|
shop/models/fields.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
from shop.apps import get_tuple_version
try:
if str(connection.vendor) == 'postgresql':
import psycopg2
psycopg2_version = get_tuple_version(psycopg2.__version__[:5])
with connection.cursor() as cursor:
cursor.execute("SELECT version()")
row = cursor.fetchone()[:17]
postgres_version = get_tuple_version(str(row[0][:17].split(' ')[1]))
# To be able to use the Django version of JSONField, it requires to have
# PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4, otherwise some issues could be faced.
if (psycopg2_version) >= (2, 5, 4) and (postgres_version >= (9, 4)):
from django.contrib.postgres.fields import JSONField as _JSONField
else:
raise ImportError
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField as _JSONField
class JSONField(_JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(JSONField, self).deconstruct()
del kwargs['default']
return name, path, args, kwargs
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql':
from django.contrib.postgres.fields import JSONField as _JSONField
else:
from jsonfield.fields import JSONField as _JSONField
class JSONField(_JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(JSONField, self).deconstruct()
del kwargs['default']
return name, path, args, kwargs
|
Fix and simplify the JSONfield wrapper code
|
Fix and simplify the JSONfield wrapper code
|
Python
|
bsd-3-clause
|
jrief/django-shop,awesto/django-shop,nimbis/django-shop,khchine5/django-shop,jrief/django-shop,nimbis/django-shop,awesto/django-shop,nimbis/django-shop,khchine5/django-shop,divio/django-shop,nimbis/django-shop,jrief/django-shop,divio/django-shop,awesto/django-shop,divio/django-shop,khchine5/django-shop,khchine5/django-shop,jrief/django-shop
|
458211091f4408136a4eb6e6a06849d93c3ede8a
|
tests/test_convert.py
|
tests/test_convert.py
|
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector2
from utils import vector_likes, vectors
class V(Vector2): pass
@pytest.mark.parametrize('vector_like', vector_likes(), ids=lambda x: type(x).__name__) # type: ignore
@pytest.mark.parametrize('cls', [Vector2, V]) # type: ignore
def test_convert_class(cls, vector_like):
vector = cls.convert(vector_like)
assert isinstance(vector, cls)
assert vector == vector_like
@given(vector=vectors())
def test_convert_tuple(vector: Vector2):
assert vector == tuple(vector) == (vector.x, vector.y)
|
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector2
from utils import vector_likes, vectors
class V(Vector2): pass
@pytest.mark.parametrize('vector_like', vector_likes(), ids=lambda x: type(x).__name__) # type: ignore
@pytest.mark.parametrize('cls', [Vector2, V]) # type: ignore
def test_convert_class(cls, vector_like):
vector = cls.convert(vector_like)
assert isinstance(vector, cls)
assert vector == vector_like
@given(vector=vectors())
def test_convert_tuple(vector: Vector2):
assert vector == tuple(vector) == (vector.x, vector.y)
@given(vector=vectors())
def test_convert_list(vector: Vector2):
assert vector == list(vector) == [vector.x, vector.y]
|
Add a list conversion test
|
tests/convert: Add a list conversion test
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
bd8901c18a6722660e7af742260ae4b8317a064b
|
youtube/tasks.py
|
youtube/tasks.py
|
import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
Update youtube-dl
"""
cmd = ['pipenv', 'update', 'youtube-dl']
subprocess.call(cmd)
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
|
import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
Update dependencies such as youtube-dl, etc.
"""
subprocess.call(['pipenv', 'update'])
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
|
Update task now updates all dependencies
|
Update task now updates all dependencies
|
Python
|
apache-2.0
|
feihong/chinese-music-processors,feihong/chinese-music-processors
|
532df8a669d7e54125c102ef4821272dc24aab23
|
weasyprint/logger.py
|
weasyprint/logger.py
|
# coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
LOGGER.addHandler(logging.StreamHandler())
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
|
# coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
handler.setFormatter(formatter)
LOGGER.addHandler(handler)
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
|
Add a better default formatter for logs
|
Add a better default formatter for logs
|
Python
|
bsd-3-clause
|
Kozea/WeasyPrint,Kozea/WeasyPrint
|
14b8a2a689414e65efda9b466db430ed09f777d5
|
panoptes_client/utils.py
|
panoptes_client/utils.py
|
from __future__ import absolute_import, division, print_function
from builtins import range
import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
if isinstance(to_batch, set):
to_batch = list(to_batch)
for _batch in [
to_batch[i:i+_batch_size]
for i in range(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
|
from __future__ import absolute_import, division, print_function
from builtins import range
import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
MISSING_POSITIONAL_ERR = 'Required positional argument (pos 1) not found'
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
if len(args) == 0:
raise TypeError(MISSING_POSITIONAL_ERR)
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
if len(args) == 1:
raise TypeError(MISSING_POSITIONAL_ERR)
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
if isinstance(to_batch, set):
to_batch = list(to_batch)
for _batch in [
to_batch[i:i+_batch_size]
for i in range(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
|
Raise TypeError if positional batchable argument is missing
|
Raise TypeError if positional batchable argument is missing
e.g. if it's erroneously been passed as a named argument.
|
Python
|
apache-2.0
|
zooniverse/panoptes-python-client
|
6049a916ea3adfe4ef8a7ae9dbfc918b69907ef4
|
OnionLauncher/main.py
|
OnionLauncher/main.py
|
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
|
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
buttons = {
self.tbAdd: self.addRow,
self.tbRemove: self.removeRow,
self.btnSwitchTor: self.switchTor,
}
self.evAddClick(buttons)
def evAddClick(self, obj_dict):
for obj in obj_dict:
obj.clicked.connect(obj_dict[obj])
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
def switchTor(self):
pass
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
|
Put mouse clicks in its own dictionary
|
Put mouse clicks in its own dictionary
|
Python
|
bsd-2-clause
|
neelchauhan/OnionLauncher
|
8a827d3e86cf2f6b9d36812e7058560ae120d4b2
|
tests/test_watson.py
|
tests/test_watson.py
|
from pywatson.watson import Watson
class TestWatson:
def test_init(self, config):
watson = Watson(url=config['url'], username=config['username'], password=config['password'])
|
from pywatson.answer.answer import Answer
from pywatson.watson import Watson
class TestWatson:
def test_ask_question_basic(self, watson):
answer = watson.ask_question('What is the Labour Code?')
assert type(answer) is Answer
|
Add failing test for ask_question
|
Add failing test for ask_question
|
Python
|
mit
|
sherlocke/pywatson
|
de324cc798da8694bab510efd51de4bfda528df7
|
zinnia/views/entries.py
|
zinnia/views/entries.py
|
"""Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryProtectionMixin, EntryDateDetail):
"""
Detailled view archive view for an Entry
with password and login protections.
"""
|
"""Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.entry_preview import EntryPreviewMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryPreviewMixin,
EntryProtectionMixin,
EntryDateDetail):
"""
Detailled archive view for an Entry with password
and login protections and restricted preview.
"""
|
Implement the EntryPreviewMixin in the EntryDetail view
|
Implement the EntryPreviewMixin in the EntryDetail view
|
Python
|
bsd-3-clause
|
Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,petecummings/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,marctc/django-blog-zinnia,Fantomas42/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,Fantomas42/django-blog-zinnia,1844144/django-blog-zinnia,Fantomas42/django-blog-zinnia,extertioner/django-blog-zinnia,marctc/django-blog-zinnia,aorzh/django-blog-zinnia,marctc/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,ZuluPro/django-blog-zinnia,1844144/django-blog-zinnia,1844144/django-blog-zinnia
|
e93a321e3d137fb21a42d0e0bfd257a537be05d3
|
diy/parerga/config.py
|
diy/parerga/config.py
|
# -*- set coding: utf-8 -*-
import os
# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
|
# -*- set coding: utf-8 -*-
import os
# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
|
Update path vars for the new source location
|
Update path vars for the new source location
|
Python
|
bsd-3-clause
|
nadirs/parerga,nadirs/parerga
|
624d6e4fc5455720badf4315e06f423eb60411ab
|
scripts/init_tree.py
|
scripts/init_tree.py
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
Update to copy new scripts
|
Update to copy new scripts
|
Python
|
bsd-3-clause
|
lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123
|
0338f8c66f14d6dbf43a2583ba17a8ae7d690466
|
apps/survey/urls.py
|
apps/survey/urls.py
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^group_management/$', views.group_management, name='group_management'),
#url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
#url(r'^survey_data/(?P<survey_shortname>.+)/$', views.survey_data, name='survey_data'),
#url(r'intake/$', views.survey_intake, name='survey_intake'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
Modify call to personal surveys
|
Modify call to personal surveys
|
Python
|
agpl-3.0
|
chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork
|
2724b4dd7ed350baeae0a8e0ef53475f40b1208b
|
project_generator/tools/makearmclang.py
|
project_generator/tools/makearmclang.py
|
# Copyright 2020 Chris Reed
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
|
# Copyright 2020 Chris Reed
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
# enable preprocessing linker files for GCC ARM
self.workspace['preprocess_linker_file'] = True
self.workspace['linker_extension'] = '.sct'
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
|
Enable linker preprocessing for armclang.
|
Enable linker preprocessing for armclang.
This should be temporary; for some reason the .sct cpp shebang isn't working for me. Same result in any case.
|
Python
|
apache-2.0
|
project-generator/project_generator
|
9ae5ea3876fae6ef0bc092d87c71d9ea86040cf7
|
InvenTree/company/api.py
|
InvenTree/company/api.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
company_api_urls = [
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
class CompanyDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Company.objects.all()
serializer_class = CompanySerializer
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
company_api_urls = [
url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'),
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
|
Add RUD endpoint for Company
|
Add RUD endpoint for Company
|
Python
|
mit
|
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
|
a8818e2058fdfaec7f283a5115619d42d23b7dde
|
anchorhub/builtin/github/writer.py
|
anchorhub/builtin/github/writer.py
|
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches
def make_github_markdown_writer(opts):
"""
Creates a Writer object used for parsing and writing Markdown files with
a GitHub style anchor transformation
:param opts:
:return: A Writer object designed for parsing, modifying, and writing
AnchorHub tags to converted anchors in Markdown files using GitHub style
anchors
"""
assert hasattr(opts, 'wrapper_regex')
atx = MarkdownATXWriterStrategy(opts)
inline = MarkdownInlineLinkWriterStrategy(opts)
code_block_switch = ghswitches.code_block_switch
strategies = [atx, inline]
switches = [code_block_switch]
return Writer(strategies, switches=switches)
|
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches
def make_github_markdown_writer(opts):
"""
Creates a Writer object used for parsing and writing Markdown files with
a GitHub style anchor transformation
:param opts:
:return: A Writer object designed for parsing, modifying, and writing
AnchorHub tags to converted anchors in Markdown files using GitHub style
anchors
"""
assert hasattr(opts, 'wrapper_regex')
atx = MarkdownATXWriterStrategy(opts)
setext = MarkdownSetextWriterStrategy(opts)
inline = MarkdownInlineLinkWriterStrategy(opts)
code_block_switch = ghswitches.code_block_switch
strategies = [atx, setext, inline]
switches = [code_block_switch]
return Writer(strategies, switches=switches)
|
Use Setext strategy in GitHub built in Writer
|
Use Setext strategy in GitHub built in Writer
|
Python
|
apache-2.0
|
samjabrahams/anchorhub
|
c154d79ba13d95f3240efd9eb4725cf9fc16060f
|
forms.py
|
forms.py
|
from flask_wtf import Form
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(Form):
    # Login form: both fields required (DataRequired rejects empty input).
    # NOTE(review): flask_wtf.Form is deprecated in favor of FlaskForm —
    # consider migrating the base class.
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(FlaskForm):
    # Login form: both fields are mandatory; DataRequired rejects
    # empty/whitespace-only submissions.
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
|
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
|
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
ce95e50b7cb3ef9bbabddb033352aacb96b9237a
|
pywikibot/families/wikivoyage_family.py
|
pywikibot/families/wikivoyage_family.py
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2016
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
# The new wikivoyage family that is hosted at wikimedia
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikivoyage."""

    # Family identifier; also the subdomain suffix (e.g. en.wikivoyage.org).
    name = 'wikivoyage'

    def __init__(self):
        """Constructor."""
        # Language codes for the family's wikis; set before calling the
        # superclass constructor, which consumes this list.
        self.languages_by_size = [
            'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'es', 'he',
            'zh', 'vi', 'sv', 'el', 'ro', 'uk',
        ]
        super(Family, self).__init__()

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
        self.cross_allowed = ['es', 'ru', ]
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2016
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
__version__ = '$Id$'
class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikivoyage."""

    # Family identifier; also the subdomain suffix (e.g. en.wikivoyage.org).
    name = 'wikivoyage'

    def __init__(self):
        """Constructor."""
        # Language codes ordered by article count (per the accompanying
        # commit); set before super().__init__(), which consumes this list.
        self.languages_by_size = [
            'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'fi', 'es',
            'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk',
        ]
        super(Family, self).__init__()

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
        self.cross_allowed = ['es', 'ru', ]
|
Add fi:wikivoyage and sort by current article count
|
Add fi:wikivoyage and sort by current article count
Fix also pycodestyle (former PEP8) E402 problem
Bug: T153470
Change-Id: Id9bc980c7a9cfb21063597a3d5eae11c31d8040c
|
Python
|
mit
|
Darkdadaah/pywikibot-core,magul/pywikibot-core,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,magul/pywikibot-core,happy5214/pywikibot-core,Darkdadaah/pywikibot-core,npdoty/pywikibot,wikimedia/pywikibot-core,PersianWikipedia/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,npdoty/pywikibot,wikimedia/pywikibot-core
|
9cc15bc4a7ed8efb82071fa19e9d1ada8771a87d
|
app/soc/views/helper/decorators.py
|
app/soc/views/helper/decorators.py
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
    """Decorator that insists that exceptions are handled by view.

    Wraps *func* so that a handful of fatal runtime errors are logged and
    converted into plain-text HttpResponses instead of propagating.
    """
    @wraps(func)
    def view_wrapper(*args, **kwds):
        # Generalization: forward every argument untouched.  Naming
        # 'request' explicitly (as before) only constrained the decorated
        # view's signature without being used here.
        try:
            return func(*args, **kwds)
        except DeadlineExceededError:
            logging.exception('DeadlineExceededError')
            return http.HttpResponse('DeadlineExceededError')
        except MemoryError:
            logging.exception('MemoryError')
            return http.HttpResponse('MemoryError')
        except AssertionError:
            logging.exception('AssertionError')
            return http.HttpResponse('AssertionError')
    return view_wrapper
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views decorators.
"""
__authors__ = [
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import logging
from functools import wraps
from google.appengine.runtime import DeadlineExceededError
from django import http
def view(func):
    """Decorator that insists that exceptions are handled by view."""
    def _report(label):
        # Log the active exception with its label and degrade to a
        # plain-text response carrying the same label.
        logging.exception(label)
        return http.HttpResponse(label)

    @wraps(func)
    def _guarded(*args, **kwds):
        try:
            return func(*args, **kwds)
        except DeadlineExceededError:
            return _report('DeadlineExceededError')
        except MemoryError:
            return _report('MemoryError')
        except AssertionError:
            return _report('AssertionError')
    return _guarded
|
Remove not needed request argument in view decorator.
|
Remove not needed request argument in view decorator.
Patch by: Pawel Solyga
Review by: to-be-reviewed
|
Python
|
apache-2.0
|
MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging
|
38216f9d1b875c31b97c80bb9217557e67c92ff3
|
spicedham/backend.py
|
spicedham/backend.py
|
class BaseBackend(object):
    """
    Abstract storage interface that backend plugins implement.
    """

    def __init__(self, config):
        # The base class ignores configuration; concrete backends may use it.
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classifier, key, default=None):
        """
        Return the value stored under the (classifier, key) composite key,
        or *default* when absent.
        """
        raise NotImplementedError()

    def set_key(self, classifier, key, value):
        """
        Store *value* under the (classifier, key) composite key.
        """
        raise NotImplementedError()

    def get_key_list(self, classifier, keys, default=None):
        """
        Bulk lookup: fetch the value for every key in *keys* under
        *classifier*, substituting *default* for missing entries.
        Subclasses can override this to issue more efficient bulk queries.
        """
        return [self.get_key(classifier, single_key, default)
                for single_key in keys]

    def set_key_list(self, classifier, key_value_pairs):
        """
        Bulk store: set every (key, value) pair in *key_value_pairs* under
        *classifier*.  Subclasses can override this for efficiency.
        """
        return [self.set_key(classifier, pair_key, pair_value)
                for pair_key, pair_value in key_value_pairs]
|
class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        # Base class takes no action on configuration.
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classification_type, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classification_type, classifier, keys, default=None):
        """
        Given a list of keys, get all values under *classifier*.
        If a key doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        # Bug fix: previous code referenced itertools.izip/repeat without
        # importing them (NameError at call time); a plain comprehension
        # over *keys* is the intended behavior.
        return [self.get_key(classification_type, classifier, key, default)
                for key in keys]

    def set_key_list(self, classification_type, classifier, key_value_pairs):
        """
        Given a list of (key, value) pairs and a classifier, set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        # Bug fix: same missing-izip problem, plus the old loop unpacked a
        # 2-tuple into three variables, which could never succeed.
        return [self.set_key(classification_type, classifier, key, value)
                for key, value in key_value_pairs]

    def set_key(self, classification_type, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()
|
Add classifier type to the base class
|
Add classifier type to the base class
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
ba2f2d7e53f0ffc58c882d78f1b8bc9a468eb164
|
predicates.py
|
predicates.py
|
class OneOf:
    """Predicate that accepts a candidate only if it is in *members*."""

    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        # Success is signalled by True; failure by an explanatory string.
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        # Bug fix: str.join requires strings, so repr() raised TypeError
        # whenever members were not strings (ints, tuples, ...).  Render
        # each member with repr() first.
        return "one of %s" % ', '.join(map(repr, self.members))
def oneof(*members):
    # Convenience constructor: oneof(1, 2) builds OneOf((1, 2)).
    return OneOf(members)
class InRange:
    """Predicate accepting candidates in the closed interval [start, end]."""

    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        # True when inside the interval (inclusive), else a diagnostic string.
        inside = self.start <= candidate <= self.end
        if inside:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
    # Convenience constructor for the inclusive-range predicate.
    return InRange(start, end)
|
class OneOf:
    """Predicate that accepts only values contained in *members*."""

    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        # Membership test: True on success, human-readable reason otherwise.
        if candidate not in self.members:
            return "%s not in %s" % (candidate, self.members)
        return True

    def __repr__(self):
        rendered = ', '.join(repr(member) for member in self.members)
        return "one of %s" % rendered
def oneof(*members):
    # Convenience constructor: oneof(1, 2) builds OneOf((1, 2)).
    return OneOf(members)
class InRange:
    """Predicate for membership in the closed interval [start, end]."""

    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        # Out-of-range candidates get a diagnostic message, not False.
        if not (self.start <= candidate <= self.end):
            return "%s not between %s and %s" % (candidate, self.start,
                                                 self.end)
        return True

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
    # Convenience constructor for the inclusive-range predicate.
    return InRange(start, end)
|
Fix problem rendering oneof() predicate when the members aren't strings
|
Fix problem rendering oneof() predicate when the members aren't strings
|
Python
|
mit
|
mrozekma/pytypecheck
|
7955e777d6ba3bbbd104bd3916f131ab7fa8f8b5
|
asyncmongo/__init__.py
|
asyncmongo/__init__.py
|
#!/bin/env python
#
# Copyright 2010 bit.ly
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
AsyncMongo is an asynchronous library for accessing mongo
http://github.com/bitly/asyncmongo
"""
try:
import bson
except ImportError:
raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")
# also update in setup.py
version = "1.3"
version_info = (1, 3)
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`"""
from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
DataError, IntegrityError, ProgrammingError, NotSupportedError)
from client import Client
|
#!/bin/env python
#
# Copyright 2010 bit.ly
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
AsyncMongo is an asynchronous library for accessing mongo
http://github.com/bitly/asyncmongo
"""
try:
import bson
except ImportError:
raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver")
# also update in setup.py
version = "1.3"
version_info = (1, 3)
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`"""
TEXT = '{ $meta: "textScore" }'
"""TEXT Index sort order."""
from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError,
DataError, IntegrityError, ProgrammingError, NotSupportedError)
from client import Client
|
Support Sort Order For TEXT Index
|
Support Sort Order For TEXT Index
|
Python
|
apache-2.0
|
RealGeeks/asyncmongo
|
26efd98c88a627f76ebd0865053353eb7a30e3bb
|
.glerbl/repo_conf.py
|
.glerbl/repo_conf.py
|
checks = {
'pre-commit': [
# BEFORE_COMMIT in the root of the working tree can be used as
# reminder to do something before the next commit.
"no_before_commit",
# We only allow ASCII filenames.
"no_non_ascii_filenames",
# We don't allow trailing whitespaces.
"no_trailing_whitespace",
# Python files must conform to PEP8
"python_pep8",
# Python files must not have trailing semicolons
"python_no_trailing_semicolon"
]
}
|
import sys
import os
dirname = os.path.dirname(__file__)
python_path = os.path.join(os.path.dirname(dirname), "selenium_test", "lib")
if "PYTHONPATH" not in os.environ:
os.environ["PYTHONPATH"] = python_path
else:
os.environ["PYTHONPATH"] = python_path + ":" + os.environ["PYTHONPATH"]
checks = {
'pre-commit': [
# BEFORE_COMMIT in the root of the working tree can be used as
# reminder to do something before the next commit.
"no_before_commit",
# We only allow ASCII filenames.
"no_non_ascii_filenames",
# We don't allow trailing whitespaces.
"no_trailing_whitespace",
# Python files must conform to PEP8
"python_pep8",
# Python files must not have trailing semicolons
"python_no_trailing_semicolon"
]
}
|
Modify PYTHONPATH so that pylint is able to find wedutil.
|
Modify PYTHONPATH so that pylint is able to find wedutil.
|
Python
|
mpl-2.0
|
mangalam-research/wed,slattery/wed,lddubeau/wed,slattery/wed,mangalam-research/wed,slattery/wed,mangalam-research/wed,lddubeau/wed,mangalam-research/wed,lddubeau/wed,lddubeau/wed
|
7608d0e89781f70fcb49e7dc3ee5cd57a094f18c
|
rx/__init__.py
|
rx/__init__.py
|
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
"Future" : Future
}
|
from threading import Lock
from .observable import Observable
from .anonymousobservable import AnonymousObservable
from .observer import Observer
from . import checkedobserver
from . import linq
from . import backpressure
try:
from asyncio import Future
except ImportError:
Future = None
# Rx configuration dictionary
config = {
"Future" : Future,
"Lock" : Lock
}
|
Make it possible to set custom Lock
|
Make it possible to set custom Lock
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY,dbrattli/RxPY
|
0aa61fb32df9ae3ef9c465f4b246edf04897cd14
|
staticfiles/views.py
|
staticfiles/views.py
|
"""
Views and functions for serving static files. These are only to be used during
development, and SHOULD NOT be used in a production setting.
"""
from django.views.static import serve as django_serve
from staticfiles.resolvers import resolve
def serve(request, path, show_indexes=False):
    """
    Serve static files from locations inferred from INSTALLED_APPS and
    STATICFILES_DIRS.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'staticfiles.views.serve')

    in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
    like to serve a basic index of the directory. This index view will use the
    template hardcoded below, but if you'd like to override it, you can create
    a template called ``static/directory_index``.
    """
    # NOTE(review): if resolve(path) returns a falsy value for an unknown
    # path, django_serve receives an empty document_root — confirm whether an
    # explicit 404 would be more appropriate here.
    return django_serve(request, path='', document_root=resolve(path),
                        show_indexes=show_indexes)
|
"""
Views and functions for serving static files. These are only to be used during
development, and SHOULD NOT be used in a production setting.
"""
from django import http
from django.views.static import serve as django_serve
from staticfiles.resolvers import resolve
def serve(request, path, show_indexes=False):
    """
    Serve static files from locations inferred from INSTALLED_APPS and
    STATICFILES_DIRS.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'staticfiles.views.serve')

    in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd
    like to serve a basic index of the directory. This index view will use the
    template hardcoded below, but if you'd like to override it, you can create
    a template called ``static/directory_index``.
    """
    document_root = resolve(path)
    # Unknown paths resolve to a falsy value; surface that as a 404 rather
    # than letting django_serve fail obscurely on an empty document root.
    if not document_root:
        raise http.Http404('%r could not be resolved to a static file.' % path)
    return django_serve(request, path='', document_root=document_root,
                        show_indexes=show_indexes)
|
Make the staticfiles serve view raise a 404 for paths which could not be resolved.
|
Make the staticfiles serve view raise a 404 for paths which could not be resolved.
|
Python
|
bsd-3-clause
|
tusbar/django-staticfiles,jezdez-archive/django-staticfiles,tusbar/django-staticfiles
|
e640ed3770cd3c3dbab90866a77449d17a633704
|
wcsaxes/wcs_utils.py
|
wcsaxes/wcs_utils.py
|
# Adapted from Astropy core package until 1.0 is released
#
# Copyright (c) 2011-2014, Astropy Developers
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the Astropy Team nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numpy as np
|
import numpy as np
|
Remove old LICENSE that was there for astropy-ported code
|
Remove old LICENSE that was there for astropy-ported code
|
Python
|
bsd-3-clause
|
stargaser/astropy,stargaser/astropy,saimn/astropy,astropy/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,pllim/astropy,mhvk/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,bsipocz/astropy,tbabej/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,larrybradley/astropy,larrybradley/astropy,joergdietrich/astropy,saimn/astropy,joergdietrich/astropy,bsipocz/astropy,tbabej/astropy,kelle/astropy,MSeifert04/astropy,larrybradley/astropy,bsipocz/astropy,astropy/astropy,joergdietrich/astropy,kelle/astropy,kelle/astropy,stargaser/astropy,astropy/astropy,lpsinger/astropy,pllim/astropy,lpsinger/astropy,tbabej/astropy,mhvk/astropy,MSeifert04/astropy,funbaker/astropy,MSeifert04/astropy,mhvk/astropy,saimn/astropy,StuartLittlefair/astropy,astropy/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,astropy/astropy,AustereCuriosity/astropy,pllim/astropy,larrybradley/astropy,larrybradley/astropy,funbaker/astropy,DougBurke/astropy,stargaser/astropy,mhvk/astropy,kelle/astropy,lpsinger/astropy,tbabej/astropy,dhomeier/astropy,pllim/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,funbaker/astropy,dhomeier/astropy,StuartLittlefair/astropy,tbabej/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,funbaker/astropy,pllim/astropy,AustereCuriosity/astropy,dhomeier/astropy,DougBurke/astropy,saimn/astropy,joergdietrich/astropy,bsipocz/astropy,kelle/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,lpsinger/astropy,dhomeier/astropy,AustereCuriosity/astropy
|
979c56f882178ce49194850bd9e78c9dea4692dd
|
chardet/__init__.py
|
chardet/__init__.py
|
######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION
def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    # Normalise input: only bytes/bytearray are accepted; bytes are copied
    # into a mutable bytearray before feeding the detector.
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    # Feed the whole buffer at once, finalise, then read the result dict.
    detector = UniversalDetector()
    detector.feed(byte_str)
    detector.close()
    return detector.result
|
######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION
def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    # Reject anything that is not a byte sequence up front.
    if not isinstance(byte_str, (bytes, bytearray)):
        raise TypeError('Expected object of type bytes or bytearray, got: '
                        '{0}'.format(type(byte_str)))
    # Promote immutable bytes to a bytearray for the detector.
    if not isinstance(byte_str, bytearray):
        byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    # close() finalises detection and returns the result dict directly.
    return detector.close()
|
Remove unnecessary line from detect
|
Remove unnecessary line from detect
|
Python
|
lgpl-2.1
|
ddboline/chardet,chardet/chardet,chardet/chardet,ddboline/chardet
|
e0989ff4c2292d0f2d053065bfa71124a3705559
|
jarn/mkrelease/colors.py
|
jarn/mkrelease/colors.py
|
import os
import functools
import blessed
def color(func):
    """Wrap *func* so coloring is skipped when JARN_NO_COLOR=1.

    Bug fix: functools.wraps(func) was previously called on its own line and
    its result discarded, so the wrapper never inherited the wrapped
    callable's metadata.  Apply it as a decorator, copying only the
    attributes that actually exist (blessed formatter objects have no
    __name__).
    """
    assignments = [attr for attr in functools.WRAPPER_ASSIGNMENTS
                   if hasattr(func, attr)]

    @functools.wraps(func, assignments)
    def wrapper(string):
        # JARN_NO_COLOR=1 disables coloring; return the text unchanged.
        if os.environ.get('JARN_NO_COLOR') == '1':
            return string
        return func(string)
    return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
import os
import functools
import blessed
def color(func):
    """Wrap *func* so coloring is skipped when JARN_NO_COLOR=1."""
    # blessed's formatter callables carry no __name__; copy '__name__' only
    # when the wrapped object actually provides it.
    copied = [attr for attr in functools.WRAPPER_ASSIGNMENTS
              if attr != '__name__' or hasattr(func, '__name__')]

    @functools.wraps(func, copied)
    def wrapper(string):
        # The environment switch disables coloring entirely.
        return string if os.environ.get('JARN_NO_COLOR') == '1' else func(string)

    return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
Fix wrapping in color decorator.
|
Fix wrapping in color decorator.
|
Python
|
bsd-2-clause
|
Jarn/jarn.mkrelease
|
9ddc63eb0e1e3612ac4a1ea5b95e405ca0915b52
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="sysops-api",
version="1.0",
description="LinkedIn Redis / Cfengine API",
author = "Mike Svoboda",
author_email = "msvoboda@linkedin.com",
py_modules=['CacheExtractor', 'RedisFinder'],
data_files=[('/usr/local/bin', ['./scripts/extract_sysops_cache.py']),
('/usr/local/bin', ['./scripts/extract_sysops_api_to_disk.py']),
('/usr/local/bin', ['./scripts/extract_sysctl_live_vs_persistant_entries.py']),
('/usr/local/bin', ['./scripts/extract_user_account_access.py']),
('/usr/local/bin', ['./scripts/extract_user_sudo_privileges.py'])],
package_dir={'': 'src'},
packages = ['seco'],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="sysops-api",
version="1.0",
description="LinkedIn Redis / Cfengine API",
author="Mike Svoboda",
author_email="msvoboda@linkedin.com",
py_modules=['CacheExtractor', 'RedisFinder'],
scripts=['scripts/extract_sysops_cache.py',
'scripts/extract_sysops_api_to_disk.py',
'scripts/extract_sysctl_live_vs_persistant_entries.py',
'scripts/extract_user_account_access.py',
'scripts/extract_user_sudo_privileges.py'],
package_dir={'': 'src'},
packages=['seco'],
)
|
Install scripts properly rather than as datafiles
|
Install scripts properly rather than as datafiles
- also fix whitespace
|
Python
|
apache-2.0
|
linkedin/sysops-api,linkedin/sysops-api,slietz/sysops-api,slietz/sysops-api
|
2727fccdb3672e1c7b28e4ba94ec743b53298f26
|
src/main.py
|
src/main.py
|
'''
Created on Aug 12, 2017
@author: Aditya
This is the main file and will import other modules/codes written for python tkinter demonstration
'''
import program1 as p1
import program2 as p2
import program3 as p3
import program4 as p4
import program5 as p5
import program6 as p6
import program7 as p7
import program8 as p8
import program9 as p9
import program10 as p10
import program11 as p11
import program12 as p12
def main():
p1.sayhello()
p2.HelloAppLaunch()
p3.GreetingAppLaunch()
p4.launchButtonApp()
p5.launchButton2App()
p6.launchEntryApp()
p7.launchSimpleCalenderApp()
p8.ControlledPorgressApp()
p9.DisplayAppLaunch()
p10.launchTopLevelApp()
p11.launchPanedWindowApp()
p12.launchNoteBookApp()
if __name__ == '__main__':main()
|
'''
Created on Aug 12, 2017
@author: Aditya
This is the main file and will import other modules/codes written for python tkinter demonstration
'''
import program1 as p1
import program2 as p2
import program3 as p3
import program4 as p4
import program5 as p5
import program6 as p6
import program7 as p7
import program8 as p8
import program9 as p9
import program10 as p10
import program11 as p11
import program12 as p12
import program13 as p13
def main():
p1.sayhello()
p2.HelloAppLaunch()
p3.GreetingAppLaunch()
p4.launchButtonApp()
p5.launchButton2App()
p6.launchEntryApp()
p7.launchSimpleCalenderApp()
p8.ControlledPorgressApp()
p9.DisplayAppLaunch()
p10.launchTopLevelApp()
p11.launchPanedWindowApp()
p12.launchNoteBookApp()
p13.launchApp()
if __name__ == '__main__':main()
|
Include Text App in Main
|
Include Text App in Main
|
Python
|
mit
|
deshadi/python-gui-demos
|
e3a2e65199c3d0db9576a25dc039f66e094171b6
|
src/passgen.py
|
src/passgen.py
|
import string
import random
import argparse
def passgen(length=8):
    """Generate a strong password with *length* characters"""
    # Draw uniformly from letters and digits using the OS entropy source.
    alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(length))
def main():
    # Command-line entry point: print one or more random passwords.
    parser = argparse.ArgumentParser("Generate strong random password.")
    # Mandatory positional length, in characters.
    parser.add_argument("length",
                        help="the number of characters to generate",
                        type=int)
    # NOTE(review): --number has no default, so omitting -n leaves
    # args.number as None and range(None) raises TypeError — confirm
    # whether a default was intended.
    parser.add_argument("-n", "--number",
                        help="how many passwords to generate",
                        type=int)
    args = parser.parse_args()
    for _ in range(args.number):
        print passgen(args.length)
|
import string
import random
import argparse
def passgen(length=12):
    """Generate a strong password with *length* characters"""
    # Uniform choice over letters and digits, backed by os.urandom via
    # SystemRandom so results are suitable for secrets.
    chars = string.ascii_uppercase + string.ascii_lowercase + string.digits
    secure = random.SystemRandom()
    return ''.join(secure.choice(chars) for _ in range(length))
def main():
    # Command-line entry point: print args.number random passwords of
    # args.length characters each (both optional, with sane defaults).
    parser = argparse.ArgumentParser("Generate strong random password.")
    parser.add_argument("-l", "--length",
                        help="the number of characters to generate",
                        type=int, default=12)
    parser.add_argument("-n", "--number",
                        help="how many passwords to generate",
                        type=int, default=10)
    args = parser.parse_args()
    for _ in range(args.number):
        print passgen(args.length)
|
Make length optional. Set up defaults.
|
Make length optional. Set up defaults.
|
Python
|
mit
|
soslan/passgen
|
1e327401d9c020bb7941b20ff51890ad1729973d
|
tests.py
|
tests.py
|
import pytest
from django.contrib.auth import get_user_model
from seleniumlogin import force_login
pytestmark = [pytest.mark.django_db(transaction=True)]
def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
selenium.get('{}/test/login_required/'.format(live_server.url))
assert 'fail' in selenium.page_source
def test_authenticated_user_can_access_blank_login_page(selenium, live_server):
User = get_user_model()
user = User.objects.create_user(username='selenium', password='password')
force_login(user, selenium, live_server.url)
selenium.get('{}/test/login_required/'.format(live_server.url))
assert 'success' in selenium.page_source
|
import pytest
from django.contrib.auth import get_user_model
from seleniumlogin import force_login
pytestmark = [pytest.mark.django_db(transaction=True)]
def test_non_authenticated_user_cannot_access_test_page(selenium, live_server):
selenium.get('{}/test/login_required/'.format(live_server.url))
assert 'fail' in selenium.page_source
def test_authenticated_user_can_access_test_page(selenium, live_server):
User = get_user_model()
user = User.objects.create_user(username='selenium', password='password')
force_login(user, selenium, live_server.url)
selenium.get('{}/test/login_required/'.format(live_server.url))
assert 'success' in selenium.page_source
|
Rename test. The test tries to access a test page, not a blank page
|
Rename test. The test tries to access a test page, not a blank page
|
Python
|
mit
|
feffe/django-selenium-login,feffe/django-selenium-login
|
741545dcf58fdfaf882d797d3ce4f7607ca0dad4
|
kobo/client/commands/cmd_resubmit_tasks.py
|
kobo/client/commands/cmd_resubmit_tasks.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand
class Resubmit_Tasks(ClientCommand):
    """resubmit failed tasks"""
    enabled = True

    def options(self):
        # CLI: one or more task ids, optional --force flag.
        self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
        self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")

    def run(self, *args, **kwargs):
        if len(args) == 0:
            self.parser.error("At least one task id must be specified.")
        # Credentials are popped so they are not forwarded to the hub call.
        username = kwargs.pop("username", None)
        password = kwargs.pop("password", None)
        tasks = args
        self.set_hub(username, password)
        resubmitted_tasks = []
        failed = False
        for task_id in tasks:
            try:
                # NOTE(review): kwargs.pop("force", ...) inside the loop
                # removes the key on the first iteration, so --force only
                # applies to the first task — confirm this is intended.
                resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
                resubmitted_tasks.append(resubmitted_id)
            except Exception as ex:
                # Remember the failure but keep resubmitting the rest.
                failed = True
                print(ex)
        # Watch successfully resubmitted tasks until they finish.
        TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
        if failed:
            sys.exit(1)
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
from kobo.client.task_watcher import TaskWatcher
from kobo.client import ClientCommand
class Resubmit_Tasks(ClientCommand):
"""resubmit failed tasks"""
enabled = True
def options(self):
self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name
self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.")
self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.")
def run(self, *args, **kwargs):
if len(args) == 0:
self.parser.error("At least one task id must be specified.")
username = kwargs.pop("username", None)
password = kwargs.pop("password", None)
tasks = args
self.set_hub(username, password)
resubmitted_tasks = []
failed = False
for task_id in tasks:
try:
resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False))
resubmitted_tasks.append(resubmitted_id)
except Exception as ex:
failed = True
print(ex)
if not kwargs.get('nowait'):
TaskWatcher.watch_tasks(self.hub, resubmitted_tasks)
if failed:
sys.exit(1)
|
Add --nowait option to resubmit-tasks cmd
|
Add --nowait option to resubmit-tasks cmd
In some use cases, waiting till the tasks finish is undesirable. Nowait
option should be provided.
|
Python
|
lgpl-2.1
|
release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo
|
8e7a92bce03ca472bc78bb9df5e2c9cf063c29b7
|
temba/campaigns/tasks.py
|
temba/campaigns/tasks.py
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
try:
push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
Use correct field to get org from
|
Use correct field to get org from
|
Python
|
agpl-3.0
|
harrissoerja/rapidpro,pulilab/rapidpro,pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,harrissoerja/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,Thapelo-Tsotetsi/rapidpro,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,praekelt/rapidpro,harrissoerja/rapidpro,praekelt/rapidpro,reyrodrigues/EU-SMS,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,ewheeler/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,praekelt/rapidpro
|
1e2086b868861034d89138349c4da909f380f19e
|
feedback/views.py
|
feedback/views.py
|
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
fields = '__all__'
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
Make feedback compatible with DRF >3.3.0
|
Make feedback compatible with DRF >3.3.0
|
Python
|
mit
|
City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel
|
90bdcad66a6f29c9e3d731b5b09b0a2ba477ae2f
|
tviit/urls.py
|
tviit/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^', views.IndexView.as_view(), name='tviit_index'),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='tviit_index'),
url(r'create/$', views.create_tviit, name="create_tviit"),
]
|
Create url-patterns for tviit creation
|
Create url-patterns for tviit creation
|
Python
|
mit
|
DeWaster/Tviserrys,DeWaster/Tviserrys
|
881222a49c6b3e8792adf5754c61992bd12c7b28
|
tests/test_conduction.py
|
tests/test_conduction.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test Mongo Conduction."""
import logging
import pymongo
from mockupdb import go
from pymongo.errors import OperationFailure
from conduction.server import get_mockup, main_loop
from tests import unittest # unittest2 on Python 2.6.
class ConductionTest(unittest.TestCase):
def setUp(self):
self.mockup = get_mockup(releases={}, env=None,
port=None, verbose=False)
# Quiet.
logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL)
self.mockup.run()
self.loop_future = go(main_loop, self.mockup)
# Cleanups are LIFO: Stop the server, wait for the loop to exit.
self.addCleanup(self.loop_future)
self.addCleanup(self.mockup.stop)
self.conduction = pymongo.MongoClient(self.mockup.uri).test
def test_bad_command_name(self):
with self.assertRaises(OperationFailure):
self.conduction.command('foo')
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test Mongo Conduction."""
import logging
import pymongo
from mockupdb import go
from pymongo.errors import OperationFailure
from conduction.server import get_mockup, main_loop
from tests import unittest # unittest2 on Python 2.6.
class ConductionTest(unittest.TestCase):
def setUp(self):
self.mockup = get_mockup(releases={}, env=None,
port=None, verbose=False)
# Quiet.
logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL)
self.mockup.run()
self.loop_future = go(main_loop, self.mockup)
# Cleanups are LIFO: Stop the server, wait for the loop to exit.
self.addCleanup(self.loop_future)
self.addCleanup(self.mockup.stop)
# Any database name will do.
self.conduction = pymongo.MongoClient(self.mockup.uri).conduction
def test_root_uri(self):
reply = self.conduction.command('get', '/')
self.assertIn('links', reply)
self.assertIn('service', reply)
def test_bad_command_name(self):
with self.assertRaises(OperationFailure) as context:
self.conduction.command('foo')
self.assertIn('unrecognized: {"foo": 1}',
str(context.exception))
def test_server_id_404(self):
with self.assertRaises(OperationFailure) as context:
self.conduction.command({'post': '/v1/servers/'})
self.assertIn('404 Not Found', str(context.exception))
if __name__ == '__main__':
unittest.main()
|
Test root URI and 404s.
|
Test root URI and 404s.
|
Python
|
apache-2.0
|
ajdavis/mongo-conduction
|
e3b1a323921b8331d7fd84c013e80a89a5b21bde
|
haproxy_status.py
|
haproxy_status.py
|
#!/usr/bin/env python
from BaseHTTPServer import BaseHTTPRequestHandler
from helpers.etcd import Etcd
from helpers.postgresql import Postgresql
import sys, yaml, socket
f = open(sys.argv[1], "r")
config = yaml.load(f.read())
f.close()
etcd = Etcd(config["etcd"])
postgresql = Postgresql(config["postgresql"])
class StatusHandler(BaseHTTPRequestHandler):
def do_GET(self):
return self.do_ANY()
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
if postgresql.name == etcd.current_leader()["hostname"]:
self.send_response(200)
else:
self.send_response(503)
self.end_headers()
self.wfile.write('\r\n')
return
try:
from BaseHTTPServer import HTTPServer
host, port = config["haproxy_status"]["listen"].split(":")
server = HTTPServer((host, int(port)), StatusHandler)
print 'listening on %s:%s' % (host, port)
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
|
#!/usr/bin/env python
from BaseHTTPServer import BaseHTTPRequestHandler
from helpers.etcd import Etcd
from helpers.postgresql import Postgresql
import sys, yaml, socket
f = open(sys.argv[1], "r")
config = yaml.load(f.read())
f.close()
etcd = Etcd(config["etcd"])
postgresql = Postgresql(config["postgresql"])
class StatusHandler(BaseHTTPRequestHandler):
def do_GET(self):
return self.do_ANY()
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
leader = etcd.current_leader()
is_leader = leader != None and postgresql.name == leader["hostname"]
if ((self.path == "/" or self.path == "/master") and is_leader) or (self.path == "/replica" and not is_leader):
self.send_response(200)
else:
self.send_response(503)
self.end_headers()
self.wfile.write('\r\n')
return
try:
from BaseHTTPServer import HTTPServer
host, port = config["haproxy_status"]["listen"].split(":")
server = HTTPServer((host, int(port)), StatusHandler)
print 'listening on %s:%s' % (host, port)
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
|
Add the ability to query for the replica status of a PG instance
|
Add the ability to query for the replica status of a PG instance
|
Python
|
mit
|
Tapjoy/governor
|
d0191c43c784b229ce104700989dfb91c67ec490
|
helper/windows.py
|
helper/windows.py
|
"""
Windows platform support for running the application as a detached process.
"""
import multiprocessing
import subprocess
import sys
DETACHED_PROCESS = 8
class Daemon(object):
def __init__(self, controller, user=None, group=None,
pid_file=None, prevent_core=None, exception_log=None):
"""Daemonize the controller, optionally passing in the user and group
to run as, a pid file, if core dumps should be prevented and a path to
write out exception logs to.
:param helper.Controller controller: The controller to daaemonize & run
:param str user: Optional username to run as
:param str group: Optional group to run as
:param str pid_file: Optional path to the pidfile to run
:param bool prevent_core: Don't make any core files
:param str exception_log: Optional exception log path
"""
args = [sys.executable]
args.extend(sys.argv)
self.pid = subprocess.Popen(args,
creationflags=DETACHED_PROCESS,
shell=True).pid
|
"""
Windows platform support for running the application as a detached process.
"""
import subprocess
import sys
DETACHED_PROCESS = 8
class Daemon(object):
"""Daemonize the helper application, putting it in a forked background
process.
"""
def __init__(self, controller):
raise NotImplementedError
#args = [sys.executable]
#args.extend(sys.argv)
#self.pid = subprocess.Popen(args,
# creationflags=DETACHED_PROCESS,
# shell=True).pid
|
Raise a NotImplementedError for Windows
|
Raise a NotImplementedError for Windows
|
Python
|
bsd-3-clause
|
gmr/helper,dave-shawley/helper,gmr/helper
|
b8350e91d7bd1e3a775ed230820c96a180a2ad02
|
tests/test_solver.py
|
tests/test_solver.py
|
from tinyik import Link, Joint, FKSolver
from .utils import x, y, z, theta, approx_eq
def test_forward_kinematics():
fk = FKSolver([
Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])
])
assert all(fk.solve([0., 0.]) == [2., 0., 0.])
assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
|
from tinyik import Link, Joint, FKSolver, CCDFKSolver, CCDIKSolver
from .utils import x, y, z, theta, approx_eq
components = [Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])]
predicted = [2., 0., 0.]
def test_fk():
fk = FKSolver(components)
assert all(fk.solve([0., 0.]) == predicted)
assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
def test_ccd_fk():
fk = CCDFKSolver(components)
assert all(fk.solve([0., 0.]) == predicted)
assert approx_eq(fk.solve([theta, theta]), [x, y, -z])
assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
def test_ccd_ik():
fk = CCDFKSolver(components)
ik = CCDIKSolver(fk)
assert approx_eq(ik.solve([0., 0.], [x, y, -z]), [theta, theta])
assert approx_eq(ik.solve([0., 0.], [x, -y, z]), [-theta, -theta])
|
Add tests for CCD IK solver
|
Add tests for CCD IK solver
|
Python
|
mit
|
lanius/tinyik
|
2f63f134d2c9aa67044eb176a3f81857279f107d
|
troposphere/utils.py
|
troposphere/utils.py
|
import time
def get_events(conn, stackname):
"""Get the events in batches and return in chronological order"""
next = None
event_list = []
while 1:
events = conn.describe_stack_events(stackname, next)
event_list.append(events)
if events.next_token is None:
break
next = events.next_token
time.sleep(1)
return reversed(sum(event_list, []))
def tail(conn, stack_name):
"""Show and then tail the event log"""
def tail_print(e):
print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id))
# First dump the full list of events in chronological order and keep
# track of the events we've seen already
seen = set()
initial_events = get_events(conn, stack_name)
for e in initial_events:
tail_print(e)
seen.add(e.event_id)
# Now keep looping through and dump the new events
while 1:
events = get_events(conn, stack_name)
for e in events:
if e.event_id not in seen:
tail_print(e)
seen.add(e.event_id)
time.sleep(5)
|
import time
def _tail_print(e):
print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id))
def get_events(conn, stackname):
"""Get the events in batches and return in chronological order"""
next = None
event_list = []
while 1:
events = conn.describe_stack_events(stackname, next)
event_list.append(events)
if events.next_token is None:
break
next = events.next_token
time.sleep(1)
return reversed(sum(event_list, []))
def tail(conn, stack_name, log_func=_tail_print, sleep_time=5):
"""Show and then tail the event log"""
# First dump the full list of events in chronological order and keep
# track of the events we've seen already
seen = set()
initial_events = get_events(conn, stack_name)
for e in initial_events:
log_func(e)
seen.add(e.event_id)
# Now keep looping through and dump the new events
while 1:
events = get_events(conn, stack_name)
for e in events:
if e.event_id not in seen:
log_func(e)
seen.add(e.event_id)
time.sleep(sleep_time)
|
Support a custom logging function and sleep time within tail
|
Support a custom logging function and sleep time within tail
|
Python
|
bsd-2-clause
|
mhahn/troposphere
|
35594a4f8c549d507c7d7030141ae511aed57c09
|
workflowmax/__init__.py
|
workflowmax/__init__.py
|
from .api import WorkflowMax # noqa
__version__ = "0.1.0"
|
from .api import WorkflowMax # noqa
from .credentials import Credentials # noqa
__version__ = "0.1.0"
|
Add Credentials to root namespace
|
Add Credentials to root namespace
|
Python
|
bsd-3-clause
|
ABASystems/pyworkflowmax
|
ab5aac0c9b0e075901c4cd8dd5d134e79f0e0110
|
brasileirao/spiders/results_spider.py
|
brasileirao/spiders/results_spider.py
|
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib
class ResultsSpider(scrapy.Spider):
name = "results"
start_urls = [
'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
]
def parse(self, response):
actual_round = 0
for rodada in response.css('.rodadas .confrontos li'):
actual_round += 1
for game in rodada.css(".confronto"):
home_team = game.css(".partida .time1")
away_team = game.css(".partida .time2")
item = BrasileiraoItem()
item['rodada'] = actual_round
item['home_team'] = home_team.css("abbr::attr(title)").extract_first().encode('utf8')
item['away_team'] = away_team.css("abbr::attr(title)").extract_first().encode('utf8')
item['home_score'] = home_team.css(".gols::text").extract_first()
item['away_score'] = away_team.css(".gols::text").extract_first()
item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
id = item['home_team'] + item['away_team']
item['id'] = hashlib.md5(id).hexdigest()
yield item
|
# -*- coding: utf-8 -*-
import scrapy
import scrapy.selector
from brasileirao.items import BrasileiraoItem
import hashlib
class ResultsSpider(scrapy.Spider):
name = "results"
start_urls = [
'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/',
]
def parse(self, response):
actual_round = 0
for rodada in response.css('.rodadas .confrontos li'):
actual_round += 1
for game in rodada.css(".confronto"):
home_team = game.css(".partida .time1")
away_team = game.css(".partida .time2")
item = BrasileiraoItem()
item['rodada'] = actual_round
item['home_team'] = home_team.css("abbr::attr(title)").extract_first()
item['away_team'] = away_team.css("abbr::attr(title)").extract_first()
item['home_score'] = home_team.css(".gols::text").extract_first()
item['away_score'] = away_team.css(".gols::text").extract_first()
item['date'] = game.css(".info-partida time::attr(datetime)").extract_first()
id = item['home_team'] + item['away_team']
item['id'] = hashlib.md5(id).hexdigest()
yield item
|
Set utf-8 as default encoding.
|
Set utf-8 as default encoding.
|
Python
|
mit
|
pghilardi/live-football-client
|
a3c1822dd2942de4b6bf5cac14039e6789babf85
|
wafer/pages/admin.py
|
wafer/pages/admin.py
|
from django.contrib import admin
from wafer.pages.models import File, Page
class PageAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'slug', 'get_people_display_names', 'get_in_schedule')
admin.site.register(Page, PageAdmin)
admin.site.register(File)
|
from django.contrib import admin
from wafer.pages.models import File, Page
from reversion.admin import VersionAdmin
class PageAdmin(VersionAdmin, admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'slug', 'get_people_display_names', 'get_in_schedule')
admin.site.register(Page, PageAdmin)
admin.site.register(File)
|
Add reversion support to Pages
|
Add reversion support to Pages
|
Python
|
isc
|
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
|
d30605d82d5f04e8478c785f1bb5086066e50878
|
awx/wsgi.py
|
awx/wsgi.py
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
Fix import error by calling prepare_env first
|
Fix import error by calling prepare_env first
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx
|
4076fb322814848d802d1f925d163e90b3d629a9
|
selenium_testcase/testcases/forms.py
|
selenium_testcase/testcases/forms.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
(By.XPATH, '//form/*',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
def set_input(self, field, value, **kwargs):
input = self.find_element(
self.input_search_list, field, **kwargs)
input.clear()
input.send_keys(value)
return input
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from selenium.webdriver.common.by import By
from .utils import wait_for
class FormTestMixin:
# default search element
form_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
(By.XPATH, '//form[@action="{}"]',),
(By.XPATH, '//form[@name="{}"]',),
(By.XPATH, '//form',),
)
@wait_for
def get_form(self, *args, **kwargs):
""" Return form element or None. """
return self.find_element(
self.form_search_list, *args, **kwargs)
input_search_list = (
(By.ID, '{}',),
(By.NAME, '{}',),
)
@wait_for
def get_input(self, field, **kwargs):
""" Return matching input field. """
return self.find_element(
self.input_search_list, field, **kwargs)
def set_input(self, field, value, **kwargs):
""" Clear the field and enter value. """
element = self.get_input(field, **kwargs)
element.clear()
element.send_keys(value)
return element
|
Split get_input from set_input in FormTestMixin.
|
Split get_input from set_input in FormTestMixin.
In order to reduce side-effects, this commit moves the @wait_for to
a get_input method and set_input operates immediately.
|
Python
|
bsd-3-clause
|
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
|
149a8091333766068cac445db770ea73055d8647
|
simuvex/procedures/stubs/UserHook.py
|
simuvex/procedures/stubs/UserHook.py
|
import simuvex
class UserHook(simuvex.SimProcedure):
NO_RET = True
# pylint: disable=arguments-differ
def run(self, user_func=None, user_kwargs=None, default_return_addr=None):
result = user_func(self.state, **user_kwargs)
if result is None:
self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook')
else:
for state in result:
self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
|
import simuvex
class UserHook(simuvex.SimProcedure):
NO_RET = True
# pylint: disable=arguments-differ
def run(self, user_func=None, user_kwargs=None, default_return_addr=None, length=None):
result = user_func(self.state, **user_kwargs)
if result is None:
self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook')
else:
for state in result:
self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
|
Make the userhook take the length arg b/c why not
|
Make the userhook take the length arg b/c why not
|
Python
|
bsd-2-clause
|
axt/angr,schieb/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/simuvex,chubbymaggie/angr,chubbymaggie/simuvex,f-prettyland/angr,axt/angr,angr/angr,f-prettyland/angr,tyb0807/angr,schieb/angr,axt/angr,chubbymaggie/angr,f-prettyland/angr,iamahuman/angr,tyb0807/angr,iamahuman/angr,angr/angr,iamahuman/angr,angr/angr,schieb/angr,chubbymaggie/simuvex,angr/simuvex
|
8528beef5d10355af07f641b4987df3cd64a7b0f
|
sprockets/mixins/metrics/__init__.py
|
sprockets/mixins/metrics/__init__.py
|
from .influxdb import InfluxDBMixin
from .statsd import StatsdMixin
version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
|
try:
from .influxdb import InfluxDBMixin
from .statsd import StatsdMixin
except ImportError as error:
def InfluxDBMixin(*args, **kwargs):
raise error
def StatsdMixin(*args, **kwargs):
raise error
version_info = (1, 0, 0)
__version__ = '.'.join(str(v) for v in version_info)
__all__ = ['InfluxDBMixin', 'StatsdMixin']
|
Make it safe to import __version__.
|
Make it safe to import __version__.
|
Python
|
bsd-3-clause
|
sprockets/sprockets.mixins.metrics
|
afa6687c317191b77949ba246f3dcc0909c435f5
|
organizer/urls/tag.py
|
organizer/urls/tag.py
|
from django.conf.urls import url
from ..models import Tag
from ..utils import DetailView
from ..views import (
TagCreate, TagDelete, TagList, TagPageList,
TagUpdate)
urlpatterns = [
url(r'^$',
TagList.as_view(),
name='organizer_tag_list'),
url(r'^create/$',
TagCreate.as_view(),
name='organizer_tag_create'),
url(r'^(?P<page_number>\d+)/$',
TagPageList.as_view(),
name='organizer_tag_page'),
url(r'^(?P<slug>[\w\-]+)/$',
DetailView.as_view(
context_object_name='tag',
model=Tag,
template_name=(
'organizer/tag_detail.html')),
name='organizer_tag_detail'),
url(r'^(?P<slug>[\w-]+)/delete/$',
TagDelete.as_view(),
name='organizer_tag_delete'),
url(r'^(?P<slug>[\w\-]+)/update/$',
TagUpdate.as_view(),
name='organizer_tag_update'),
]
|
from django.conf.urls import url
from ..views import (
TagCreate, TagDelete, TagDetail, TagList,
TagPageList, TagUpdate)
urlpatterns = [
url(r'^$',
TagList.as_view(),
name='organizer_tag_list'),
url(r'^create/$',
TagCreate.as_view(),
name='organizer_tag_create'),
url(r'^(?P<page_number>\d+)/$',
TagPageList.as_view(),
name='organizer_tag_page'),
url(r'^(?P<slug>[\w\-]+)/$',
TagDetail.as_view(),
name='organizer_tag_detail'),
url(r'^(?P<slug>[\w-]+)/delete/$',
TagDelete.as_view(),
name='organizer_tag_delete'),
url(r'^(?P<slug>[\w\-]+)/update/$',
TagUpdate.as_view(),
name='organizer_tag_update'),
]
|
Revert to Tag Detail URL pattern.
|
Ch17: Revert to Tag Detail URL pattern.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
7b1d520278b8fe33b68103d26f9aa7bb945f6791
|
cryptography/hazmat/backends/__init__.py
|
cryptography/hazmat/backends/__init__.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cryptography.hazmat.backends import openssl
from cryptography.hazmat.bindings.commoncrypto.binding import (
Binding as CommonCryptoBinding
)
_ALL_BACKENDS = [openssl.backend]
if CommonCryptoBinding.is_available():
from cryptography.hazmat.backends import commoncrypto
_ALL_BACKENDS.append(commoncrypto.backend)
def default_backend():
return openssl.backend
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cryptography.hazmat.backends import openssl
from cryptography.hazmat.backends.multibackend import MultiBackend
from cryptography.hazmat.bindings.commoncrypto.binding import (
Binding as CommonCryptoBinding
)
_ALL_BACKENDS = [openssl.backend]
if CommonCryptoBinding.is_available():
from cryptography.hazmat.backends import commoncrypto
_ALL_BACKENDS.append(commoncrypto.backend)
_default_backend = MultiBackend(_ALL_BACKENDS)
def default_backend():
return _default_backend
|
Make the default backend be a multi-backend
|
Make the default backend be a multi-backend
|
Python
|
bsd-3-clause
|
bwhmather/cryptography,Ayrx/cryptography,bwhmather/cryptography,Lukasa/cryptography,Ayrx/cryptography,bwhmather/cryptography,kimvais/cryptography,skeuomorf/cryptography,dstufft/cryptography,kimvais/cryptography,Lukasa/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Lukasa/cryptography,sholsapp/cryptography,Hasimir/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,Hasimir/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,kimvais/cryptography,sholsapp/cryptography,bwhmather/cryptography
|
0f3b413b269f8b95b6f8073ba39d11f156ae632c
|
zwebtest.py
|
zwebtest.py
|
""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine
Copyright 2003 Paul Scott-Murphy, 2014 William McBrine
This module provides a unit test suite for the Multicast DNS
Service Discovery for Python module.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
USA
"""
__author__ = 'Paul Scott-Murphy'
__maintainer__ = 'William McBrine <wmcbrine@gmail.com>'
__version__ = '0.14-wmcbrine'
__license__ = 'LGPL'
from zeroconf import *
import socket
desc = {'path':'/~paulsm/'}
info = ServiceInfo("_http._tcp.local.", "Paul's Test Web Site._http._tcp.local.", socket.inet_aton("10.0.1.2"), 80, 0, 0, desc, "ash-2.local.")
r = Zeroconf()
print "Registration of a service..."
r.registerService(info)
print "Waiting..."
|
from zeroconf import *
import socket
desc = {'path': '/~paulsm/'}
info = ServiceInfo("_http._tcp.local.",
"Paul's Test Web Site._http._tcp.local.",
socket.inet_aton("10.0.1.2"), 80, 0, 0,
desc, "ash-2.local.")
r = Zeroconf()
print "Registration of a service..."
r.registerService(info)
raw_input("Waiting (press Enter to exit)...")
print "Unregistering..."
r.unregisterService(info)
r.close()
|
Allow graceful exit from announcement test.
|
Allow graceful exit from announcement test.
|
Python
|
lgpl-2.1
|
basilfx/python-zeroconf,daid/python-zeroconf,jstasiak/python-zeroconf,gbiddison/python-zeroconf,giupo/python-zeroconf,AndreaCensi/python-zeroconf,nameoftherose/python-zeroconf,balloob/python-zeroconf,wmcbrine/pyzeroconf,decabyte/python-zeroconf,jantman/python-zeroconf
|
5957999c52f939691cbe6b8dd5aa929980a24501
|
tests/unit/test_start.py
|
tests/unit/test_start.py
|
import pytest
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
|
from iwant_bot import start
def test_add():
assert start.add_numbers(0, 0) == 0
assert start.add_numbers(1, 1) == 2
|
Remove the unused pytest import
|
Remove the unused pytest import
|
Python
|
mit
|
kiwicom/iwant-bot
|
f5d4da9fa71dbb59a9459e376fde8840037bf39a
|
account_banking_sepa_credit_transfer/__init__.py
|
account_banking_sepa_credit_transfer/__init__.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# SEPA Credit Transfer module for OpenERP
# Copyright (C) 2010-2013 Akretion (http://www.akretion.com)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import wizard
from . import models
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# SEPA Credit Transfer module for OpenERP
# Copyright (C) 2010-2013 Akretion (http://www.akretion.com)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import wizard
|
Remove import models from init in sepa_credit_transfer
|
Remove import models from init in sepa_credit_transfer
|
Python
|
agpl-3.0
|
open-synergy/bank-payment,sergio-incaser/bank-payment,hbrunn/bank-payment,sergio-teruel/bank-payment,ndtran/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,sergiocorato/bank-payment,damdam-s/bank-payment,CompassionCH/bank-payment,CompassionCH/bank-payment,incaser/bank-payment,Antiun/bank-payment,sergio-teruel/bank-payment,damdam-s/bank-payment,syci/bank-payment,sergio-incaser/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,Antiun/bank-payment,sergiocorato/bank-payment,ndtran/bank-payment,acsone/bank-payment,syci/bank-payment,diagramsoftware/bank-payment
|
f1a1272bebcc4edf9063c75d3fe29fdcb9e277eb
|
rml/unitconversion.py
|
rml/unitconversion.py
|
import numpy as np
class UnitConversion():
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UnitConversion():
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class PPConversion():
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
pass
|
Add PPChipInterpolator unit conversion class
|
Add PPChipInterpolator unit conversion class
|
Python
|
apache-2.0
|
willrogers/pml,razvanvasile/RML,willrogers/pml
|
00e4663940ed1d22e768b3de3d1c645c8649aecc
|
src/WhiteLibrary/keywords/items/textbox.py
|
src/WhiteLibrary/keywords/items/textbox.py
|
from TestStack.White.UIItems import TextBox
from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword
class TextBoxKeywords(LibraryComponent):
@keyword
def input_text_to_textbox(self, locator, input):
"""
Writes text to a textbox.
``locator`` is the locator of the text box.
``input`` is the text to write.
"""
textBox = self.state._get_typed_item_by_locator(TextBox, locator)
textBox.Text = input
@keyword
def verify_text_in_textbox(self, locator, expected):
"""
Verifies text in a text box.
``locator`` is the locator of the text box.
``expected`` is the expected text of the text box.
"""
textbox = self.state._get_typed_item_by_locator(TextBox, locator)
self.state._verify_value(expected, textbox.Text)
@keyword
def get_text_from_textbox(self, locator):
"""
Gets text from text box.
``locator`` is the locator of the text box.
"""
textbox = self.state._get_typed_item_by_locator(TextBox, locator)
return textbox.Text
|
from TestStack.White.UIItems import TextBox
from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword
class TextBoxKeywords(LibraryComponent):
@keyword
def input_text_to_textbox(self, locator, input_value):
"""
Writes text to a textbox.
``locator`` is the locator of the text box.
``input_value`` is the text to write.
"""
textBox = self.state._get_typed_item_by_locator(TextBox, locator)
textBox.Text = input_value
@keyword
def verify_text_in_textbox(self, locator, expected):
"""
Verifies text in a text box.
``locator`` is the locator of the text box.
``expected`` is the expected text of the text box.
"""
textbox = self.state._get_typed_item_by_locator(TextBox, locator)
self.state._verify_value(expected, textbox.Text)
@keyword
def get_text_from_textbox(self, locator):
"""
Gets text from text box.
``locator`` is the locator of the text box.
"""
textbox = self.state._get_typed_item_by_locator(TextBox, locator)
return textbox.Text
|
Change to better argument name
|
Change to better argument name
|
Python
|
apache-2.0
|
Omenia/robotframework-whitelibrary,Omenia/robotframework-whitelibrary
|
39dbbac659e9ae9c1bbad8a979cc99ef6eafaeff
|
models.py
|
models.py
|
#!/usr/bin/env python
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)
class FoodMenu(db.Model):
id = db.Column(db.Integer, primary_key=True)
result = db.Column(db.Text)
def __init__(self, result):
self.result = result
def __repr__(self):
return self.result
class FoodServices(db.Model):
id = db.Column(db.Integer, primary_key=True)
result = db.Column(db.Text)
def __init__(self, result):
self.result = result
def __repr__(self):
return self.result
|
#!/usr/bin/env python
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)
class FoodMenu(db.Model):
id = db.Column(db.Integer, primary_key=True)
result = db.Column(db.Text)
def __init__(self, result):
self.result = result
def __repr__(self):
return "<FoodMenu('%s')>" % (self.result)
class FoodServices(db.Model):
id = db.Column(db.Integer, primary_key=True)
result = db.Column(db.Text)
def __init__(self, result):
self.result = result
def __repr__(self):
return "<FoodServices('%s')>" % (self.result)
|
Include class name in model representations
|
Include class name in model representations
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
e16c65ec8c774cc27f9f7aa43e88521c3854b6b7
|
ella/imports/management/commands/fetchimports.py
|
ella/imports/management/commands/fetchimports.py
|
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
help = 'Fetch all registered imports'
def handle(self, *test_labels, **options):
from ella.imports.models import fetch_all
fetch_all()
|
from django.core.management.base import NoArgsCommand
from optparse import make_option
import sys
class Command(NoArgsCommand):
help = 'Fetch all registered imports'
def handle(self, *test_labels, **options):
from ella.imports.models import fetch_all
errors = fetch_all()
if errors:
sys.exit(errors)
|
Return exit code (count of errors)
|
Return exit code (count of errors)
git-svn-id: 6ce22b13eace8fe533dbb322c2bb0986ea4cd3e6@520 2d143e24-0a30-0410-89d7-a2e95868dc81
|
Python
|
bsd-3-clause
|
MichalMaM/ella,MichalMaM/ella,WhiskeyMedia/ella,whalerock/ella,ella/ella,whalerock/ella,WhiskeyMedia/ella,petrlosa/ella,petrlosa/ella,whalerock/ella
|
45c400e02fbeb5b455e27fef81e47e45f274eaec
|
core/forms.py
|
core/forms.py
|
from django import forms
class GameForm(forms.Form):
amount = forms.IntegerField()
def __init__(self, *args, **kwargs):
super(GameForm, self).__init__(*args, **kwargs)
for name, field in self.fields.items():
if isinstance(field, forms.IntegerField):
self.fields[name].widget.input_type = "number"
if field.required:
self.fields[name].widget.attrs["required"] = ""
|
from django import forms
class GameForm(forms.Form):
amount = forms.IntegerField(initial=100)
def __init__(self, *args, **kwargs):
super(GameForm, self).__init__(*args, **kwargs)
for name, field in self.fields.items():
if isinstance(field, forms.IntegerField):
self.fields[name].widget.input_type = "number"
if field.required:
self.fields[name].widget.attrs["required"] = ""
|
Add a default bet amount.
|
Add a default bet amount.
|
Python
|
bsd-2-clause
|
stephenmcd/gamblor,stephenmcd/gamblor
|
fcc571d2f4c35ac8f0e94e51e6ac94a0c051062d
|
src/rinoh/__init__.py
|
src/rinoh/__init__.py
|
# This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
"""rinohtype
"""
import os
import sys
from importlib import import_module
from .version import __version__, __release_date__
if sys.version_info < (3, 3):
print('rinohtype requires Python 3.3 or higher')
sys.exit(1)
CORE_MODULES = ['annotation', 'color', 'dimension', 'document', 'draw', 'float',
'flowable', 'highlight', 'index', 'inline', 'layout', 'number',
'paper', 'paragraph', 'reference', 'structure', 'style',
'table', 'text']
__all__ = CORE_MODULES + ['font', 'frontend', 'backend', 'styleds', 'styles']
DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
module = import_module('.' + name, __name__)
module_dict, module_all = module.__dict__, module.__all__
globals().update({name: module_dict[name] for name in module_all})
__all__ += module_all
|
# This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
"""rinohtype
"""
import os
import sys
from importlib import import_module
from .version import __version__, __release_date__
if sys.version_info < (3, 3):
print('rinohtype requires Python 3.3 or higher')
sys.exit(1)
CORE_MODULES = ['annotation', 'attribute', 'color', 'dimension', 'document',
'draw', 'element', 'float', 'flowable', 'highlight', 'index',
'inline', 'layout', 'number', 'paper', 'paragraph',
'reference', 'structure', 'style', 'table', 'template', 'text']
__all__ = CORE_MODULES + ['font', 'fonts', 'frontend', 'backend', 'resource',
'styleds', 'styles', 'stylesheets', 'templates',
'strings', 'language']
DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
# create proxies for the core classes/constants at the top level for easy access
for name in CORE_MODULES:
module = import_module('.' + name, __name__)
module_dict, module_all = module.__dict__, module.__all__
globals().update({name: module_dict[name] for name in module_all})
__all__ += module_all
|
Update the top-level rinoh package
|
Update the top-level rinoh package
Make all symbols and modules relevant to users available directly
from the rinoh package.
|
Python
|
agpl-3.0
|
brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype
|
f9293d838a21f495ea9b56cbe0f6f75533360aed
|
pyinfra/api/config.py
|
pyinfra/api/config.py
|
import six
from pyinfra import logger
class Config(object):
'''
The default/base configuration options for a pyinfra deploy.
'''
state = None
# % of hosts which have to fail for all operations to stop
FAIL_PERCENT = None
# Seconds to timeout SSH connections
CONNECT_TIMEOUT = 10
# Temporary directory (on the remote side) to use for caching any files/downloads
TEMP_DIR = '/tmp'
# Gevent pool size (defaults to #of target hosts)
PARALLEL = None
# Specify a minimum required pyinfra version for a deploy
MIN_PYINFRA_VERSION = None
# All these can be overridden inside individual operation calls:
# Switch to this user (from ssh_user) using su before executing operations
SU_USER = None
USE_SU_LOGIN = False
# Use sudo and optional user
SUDO = False
SUDO_USER = None
PRESERVE_SUDO_ENV = False
USE_SUDO_LOGIN = False
USE_SUDO_PASSWORD = False
# Only show errors, but don't count as failure
IGNORE_ERRORS = False
# Shell to use to execute commands
SHELL = None
def __init__(self, **kwargs):
# Always apply some env
env = kwargs.pop('ENV', {})
self.ENV = env
# Replace TIMEOUT -> CONNECT_TIMEOUT
if 'TIMEOUT' in kwargs:
logger.warning((
'Config.TIMEOUT is deprecated, '
'please use Config.CONNECT_TIMEOUT instead'
))
kwargs['CONNECT_TIMEOUT'] = kwargs.pop('TIMEOUT')
# Apply kwargs
for key, value in six.iteritems(kwargs):
setattr(self, key, value)
|
import six
class Config(object):
'''
The default/base configuration options for a pyinfra deploy.
'''
state = None
# % of hosts which have to fail for all operations to stop
FAIL_PERCENT = None
# Seconds to timeout SSH connections
CONNECT_TIMEOUT = 10
# Temporary directory (on the remote side) to use for caching any files/downloads
TEMP_DIR = '/tmp'
# Gevent pool size (defaults to #of target hosts)
PARALLEL = None
# Specify a minimum required pyinfra version for a deploy
MIN_PYINFRA_VERSION = None
# All these can be overridden inside individual operation calls:
# Switch to this user (from ssh_user) using su before executing operations
SU_USER = None
USE_SU_LOGIN = False
# Use sudo and optional user
SUDO = False
SUDO_USER = None
PRESERVE_SUDO_ENV = False
USE_SUDO_LOGIN = False
USE_SUDO_PASSWORD = False
# Only show errors, but don't count as failure
IGNORE_ERRORS = False
# Shell to use to execute commands
SHELL = None
def __init__(self, **kwargs):
# Always apply some env
env = kwargs.pop('ENV', {})
self.ENV = env
# Apply kwargs
for key, value in six.iteritems(kwargs):
setattr(self, key, value)
|
Remove support for deprecated `Config.TIMEOUT`.
|
Remove support for deprecated `Config.TIMEOUT`.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
94596f036270f8958afd84eb9788ce2b15f5cbd4
|
registration/admin.py
|
registration/admin.py
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
|
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
|
Python
|
bsd-3-clause
|
rafaduran/django-pluggable-registration,rbarrois/django-registration,maraujop/django-registration,thedod/django-registration-hg-mirror,CoatedMoose/django-registration,AndrewLvov/django-registration,AndrewLvov/django-registration,aptivate/django-registration,fedenko/django-registration,CoatedMoose/django-registration,christang/django-registration-1.5,siddharthsarda/django-registration,QPmedia/django-registration,christang/django-registration-1.5,newvem/django-registration,rbarrois/django-registration,QPmedia/django-registration,pelletier/django-registration-81,fedenko/django-registration,newvem/django-registration,aptivate/django-registration
|
22f3d6d6fdc3e5f07ead782828b406c9a27d0199
|
UDPSender.py
|
UDPSender.py
|
from can import Listener
import socket
class UDPSender(Listener):
dataConvert = {"0x600": {"String":"RPM:",
"Slot":0,
"Conversion":1},
"0x601": {"String":"OIL:",
"Slot":2,
"Conversion":(1/81.92)}}
def __init__(self, IP="10.0.0.4", PORT=5555):
self.ip = IP
self.port = PORT
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def on_message_received(self, msg):
udpMessage = self.can_to_udp_message(msg)
if udpMessage:
self.sock.sendto(udpMessage.encode(), (self.ip, self.port))
def can_to_udp_message(self, msg):
hexId = msg.arbritation_id
if self.dataConvert.get(hexId):
dataId = self.dataConvert[hexId]["String"]
dataSlot = self.dataConvert[hexId]["Slot"]
dataConversion = self.dataConvert[hexID]["Conversion"]
data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
udpMessage = dataId + data
return udpMessage
else:
return None
def __del__(self):
self.sock.close()
|
from can import Listener
from socket import socket
class UDPSender(Listener):
dataConvert = {"0x600": {"String":"RPM:",
"Slot":0,
"Conversion":1},
"0x601": {"String":"OIL:",
"Slot":2,
"Conversion":(1/81.92)}}
def __init__(self, IP="10.0.0.4", PORT=5555):
self.ip = IP
self.port = PORT
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def on_message_received(self, msg):
udpMessage = self.can_to_udp_message(msg)
if udpMessage:
self.sock.sendto(udpMessage.encode(), (self.ip, self.port))
def can_to_udp_message(self, msg):
hexId = msg.arbritation_id
if self.dataConvert.get(hexId):
dataId = self.dataConvert[hexId]["String"]
dataSlot = self.dataConvert[hexId]["Slot"]
dataConversion = self.dataConvert[hexID]["Conversion"]
data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion
udpMessage = dataId + data
return udpMessage
else:
return None
def __del__(self):
self.sock.close()
|
Change of import of libraries.
|
Change of import of libraries.
Tried to fix issue displayed below.
[root@alarm BeagleDash]# python3.3 CANtoUDP.py
Traceback (most recent call last):
File "CANtoUDP.py", line 10, in <module>
listeners = [csv, UDPSender()]
TypeError: 'module' object is not callable
Exception AttributeError: "'super' object has no attribute '__del__'" in
<bound method CSVWriter.__del__ of <can.CAN.CSVWriter object at
0xb6867730>> ignored
|
Python
|
mit
|
TAURacing/BeagleDash
|
1da2c0e00d43c4fb9a7039e98401d333d387a057
|
saleor/search/views.py
|
saleor/search/views.py
|
from __future__ import unicode_literals
from django.core.paginator import Paginator, InvalidPage
from django.conf import settings
from django.http import Http404
from django.shortcuts import render
from .forms import SearchForm
from ..product.utils import products_with_details
def paginate_results(results, get_data, paginate_by=25):
paginator = Paginator(results, paginate_by)
page_number = get_data.get('page', 1)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404('No such page!')
return page
def search(request):
form = SearchForm(data=request.GET or None)
if form.is_valid():
visible_products = products_with_details(request.user)
results = form.search(model_or_queryset=visible_products)
page = paginate_results(results, request.GET, settings.PAGINATE_BY)
else:
page = form.no_query_found()
query = form.cleaned_data['q']
ctx = {
'query': query,
'results': page,
'query_string': '?q=%s' % query}
return render(request, 'search/results.html', ctx)
|
from __future__ import unicode_literals
from django.core.paginator import Paginator, InvalidPage
from django.conf import settings
from django.http import Http404
from django.shortcuts import render
from .forms import SearchForm
from ..product.utils import products_with_details
def paginate_results(results, get_data, paginate_by=25):
paginator = Paginator(results, paginate_by)
page_number = get_data.get('page', 1)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404('No such page!')
return page
def search(request):
form = SearchForm(data=request.GET or None)
if form.is_valid():
visible_products = products_with_details(request.user)
results = form.search(model_or_queryset=visible_products)
page = paginate_results(results, request.GET, settings.PAGINATE_BY)
else:
page = []
query = form.cleaned_data.get('q', '')
ctx = {
'query': query,
'results': page,
'query_string': '?q=%s' % query}
return render(request, 'search/results.html', ctx)
|
Fix empty search results logic
|
Fix empty search results logic
|
Python
|
bsd-3-clause
|
mociepka/saleor,jreigel/saleor,itbabu/saleor,maferelo/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,tfroehlich82/saleor,jreigel/saleor,KenMutemi/saleor,itbabu/saleor,car3oon/saleor,maferelo/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,itbabu/saleor,mociepka/saleor,car3oon/saleor,UITools/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,tfroehlich82/saleor,tfroehlich82/saleor,jreigel/saleor,UITools/saleor
|
6c9b0b0c7e78524ea889f8a89c2eba8acb57f782
|
gaphor/ui/iconname.py
|
gaphor/ui/iconname.py
|
"""
With `get_icon_name` you can retrieve an icon name
for a UML model element.
"""
from gaphor import UML
import re
from functools import singledispatch
TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")
def to_kebab_case(s):
return TO_KEBAB.sub("\\1-\\2", s).lower()
@singledispatch
def get_icon_name(element):
"""
Get an icon name for a UML model element.
"""
return "gaphor-" + to_kebab_case(element.__class__.__name__)
@get_icon_name.register(UML.Class)
def get_name_for_class(element):
if element.extension:
return "gaphor-metaclass"
else:
return "gaphor-class"
@get_icon_name.register(UML.Property)
def get_name_for_property(element):
if element.association:
return "gaphor-association-end"
else:
return "gaphor-property"
|
"""
With `get_icon_name` you can retrieve an icon name
for a UML model element.
"""
from gaphor import UML
import re
from functools import singledispatch
TO_KEBAB = re.compile(r"([a-z])([A-Z]+)")
def to_kebab_case(s):
return TO_KEBAB.sub("\\1-\\2", s).lower()
@singledispatch
def get_icon_name(element):
"""
Get an icon name for a UML model element.
"""
return "gaphor-" + to_kebab_case(element.__class__.__name__)
@get_icon_name.register(UML.Class)
def get_name_for_class(element):
if isinstance(element, UML.Stereotype):
return "gaphor-stereotype"
elif element.extension:
return "gaphor-metaclass"
else:
return "gaphor-class"
@get_icon_name.register(UML.Property)
def get_name_for_property(element):
if element.association:
return "gaphor-association-end"
else:
return "gaphor-property"
|
Fix stereotype icon in namespace view
|
Fix stereotype icon in namespace view
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
9a33ac3f563ad657129d64cb591f08f9fd2a00a2
|
tests/test_command.py
|
tests/test_command.py
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
|
Add command test for '--version' option
|
Add command test for '--version' option
Check output that is program name, "version" and version number.
|
Python
|
apache-2.0
|
ma8ma/yanico
|
9d23940c430a4f95ec11b33362141ec2ffc3f533
|
src/tempel/models.py
|
src/tempel/models.py
|
from datetime import datetime, timedelta
from django.db import models
from django.conf import settings
from tempel import utils
def default_edit_expires():
return datetime.now() + timedelta(seconds=60*settings.TEMPEL_EDIT_AGE)
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(default=datetime.now)
active = models.BooleanField(default=True)
edit_token = models.CharField(max_length=8, default=utils.create_token, null=True)
edit_expires = models.DateTimeField(default=default_edit_expires, null=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
from datetime import datetime, timedelta
from django.db import models
from django.conf import settings
from tempel import utils
def default_edit_expires():
return datetime.now() + timedelta(seconds=60*settings.TEMPEL_EDIT_AGE)
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(default=datetime.now)
active = models.BooleanField(default=True)
edit_token = models.CharField(max_length=8, default=utils.create_token, null=True)
edit_expires = models.DateTimeField(default=default_edit_expires, null=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def done_editable(self):
self.edit_token = None
self.save()
def is_editable(self, token):
time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now()
token_ok = token == self.edit_token
return time_ok and time_ok
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
Add is_editable and done_editable functions to Entry
|
Add is_editable and done_editable functions to Entry
|
Python
|
agpl-3.0
|
fajran/tempel
|
02ca3946662fd996f77c30d9e61d8fc8d9243de7
|
trac/upgrades/db20.py
|
trac/upgrades/db20.py
|
from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
except TracError: # no repository available
pass
|
from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
except TracError: # no repository available
youngest = ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
|
Make db upgrade step 20 more robust.
|
Make db upgrade step 20 more robust.
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@5815 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
exocad/exotrac,dokipen/trac,moreati/trac-gitsvn,exocad/exotrac,dokipen/trac,dafrito/trac-mirror,dafrito/trac-mirror,moreati/trac-gitsvn,dafrito/trac-mirror,dafrito/trac-mirror,exocad/exotrac,dokipen/trac,moreati/trac-gitsvn,exocad/exotrac,moreati/trac-gitsvn
|
ceb75d6f58ab16e3afdf3c7b00de539012d790d5
|
djangopeoplenet/manage.py
|
djangopeoplenet/manage.py
|
#!/usr/bin/env python
import sys
paths = (
'/home/simon/sites/djangopeople.net',
'/home/simon/sites/djangopeople.net/djangopeoplenet',
'/home/simon/sites/djangopeople.net/djangopeoplenet/djangopeople/lib',
)
for path in paths:
if not path in sys.path:
sys.path.insert(0, path)
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
|
#!/usr/bin/env python
import sys, os
root = os.path.dirname(__file__)
paths = (
os.path.join(root),
os.path.join(root, "djangopeople", "lib"),
)
for path in paths:
if not path in sys.path:
sys.path.insert(0, path)
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
|
Make the lib imports work on other computers than Simon's
|
Make the lib imports work on other computers than Simon's
Signed-off-by: Simon Willison <088e16a1019277b15d58faf0541e11910eb756f6@simonwillison.net>
|
Python
|
mit
|
brutasse/djangopeople,django/djangopeople,polinom/djangopeople,brutasse/djangopeople,polinom/djangopeople,brutasse/djangopeople,polinom/djangopeople,django/djangopeople,polinom/djangopeople,django/djangopeople,brutasse/djangopeople
|
bb34b21ebd2378f944498708ac4f13d16aa61aa1
|
src/mist/io/tests/api/features/steps/backends.py
|
src/mist/io/tests/api/features/steps/backends.py
|
from behave import *
@given(u'"{text}" backend added')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
|
from behave import *
@given(u'"{text}" backend added through api')
def given_backend(context, text):
backends = context.client.list_backends()
for backend in backends:
if text in backend['title']:
return
@when(u'I list backends')
def list_backends(context):
context.backends = context.client.list_backends()
|
Rename Behave steps for api tests
|
Rename Behave steps for api tests
|
Python
|
agpl-3.0
|
johnnyWalnut/mist.io,DimensionDataCBUSydney/mist.io,zBMNForks/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,munkiat/mist.io,kelonye/mist.io,kelonye/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,johnnyWalnut/mist.io,zBMNForks/mist.io,DimensionDataCBUSydney/mist.io,DimensionDataCBUSydney/mist.io,munkiat/mist.io,zBMNForks/mist.io,munkiat/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,kelonye/mist.io
|
6f42f03f950e4c3967eb1efd7feb9364c9fbaf1f
|
google.py
|
google.py
|
import os
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
sentry = Sentry(app)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")
@app.route("/")
def index():
if not google.authorized:
return redirect(url_for("google.login"))
resp = google.get("/plus/v1/people/me")
assert resp.ok, resp.text
return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])
if __name__ == "__main__":
app.run()
|
import os
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
sentry = Sentry(app)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")
app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID")
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET")
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")
@app.route("/")
def index():
if not google.authorized:
return redirect(url_for("google.login"))
resp = google.get("/oauth2/v1/userinfo")
assert resp.ok, resp.text
return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"])
if __name__ == "__main__":
app.run()
|
Use userinfo URI for user profile info
|
Use userinfo URI for user profile info
|
Python
|
mit
|
singingwolfboy/flask-dance-google
|
2bb8ee6ae30e233f28ea0ae0fb01c0e4a1f8d9f1
|
tests/functional/test_warning.py
|
tests/functional/test_warning.py
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
import textwrap
import pytest
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
Sort imports for the greater good
|
Sort imports for the greater good
|
Python
|
mit
|
xavfernandez/pip,sbidoul/pip,pypa/pip,rouge8/pip,rouge8/pip,pfmoore/pip,xavfernandez/pip,sbidoul/pip,pypa/pip,pradyunsg/pip,xavfernandez/pip,pradyunsg/pip,pfmoore/pip,rouge8/pip
|
ad73789f74106a2d6014a2f737578494d2d21fbf
|
virtool/api/processes.py
|
virtool/api/processes.py
|
import virtool.http.routes
import virtool.utils
from virtool.api.utils import json_response
routes = virtool.http.routes.Routes()
@routes.get("/api/processes")
async def find(req):
db = req.app["db"]
documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]
return json_response(documents)
@routes.get("/api/processes/{process_id}")
async def get(req):
db = req.app["db"]
process_id = req.match_info["process_id"]
document = await db.processes.find_one(process_id)
return json_response(virtool.utils.base_processor(document))
@routes.get("/api/processes/software_update")
async def get_software_update(req):
db = req.app["db"]
document = await db.processes.find_one({"type": "software_update"})
return json_response(virtool.utils.base_processor(document))
@routes.get("/api/processes/hmm_install")
async def get_hmm_install(req):
db = req.app["db"]
document = await db.processes.find_one({"type": "hmm_install"})
return json_response(virtool.utils.base_processor(document))
|
import virtool.http.routes
import virtool.utils
from virtool.api.utils import json_response
routes = virtool.http.routes.Routes()
@routes.get("/api/processes")
async def find(req):
db = req.app["db"]
documents = [virtool.utils.base_processor(d) async for d in db.processes.find()]
return json_response(documents)
@routes.get("/api/processes/{process_id}")
async def get(req):
db = req.app["db"]
process_id = req.match_info["process_id"]
document = await db.processes.find_one(process_id)
return json_response(virtool.utils.base_processor(document))
|
Remove specific process API GET endpoints
|
Remove specific process API GET endpoints
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
|
77ad68b04b66feb47116999cf79892f6630d9601
|
thefuck/rules/ln_no_hard_link.py
|
thefuck/rules/ln_no_hard_link.py
|
"""Suggest creating symbolic link if hard link is not allowed.
Example:
> ln barDir barLink
ln: ‘barDir’: hard link not allowed for directory
--> ln -s barDir barLink
"""
import re
from thefuck.specific.sudo import sudo_support
@sudo_support
def match(command):
return (command.stderr.endswith("hard link not allowed for directory") and
command.script.startswith("ln "))
@sudo_support
def get_new_command(command):
return re.sub(r'^ln ', 'ln -s ', command.script)
|
# -*- coding: utf-8 -*-
"""Suggest creating symbolic link if hard link is not allowed.
Example:
> ln barDir barLink
ln: ‘barDir’: hard link not allowed for directory
--> ln -s barDir barLink
"""
import re
from thefuck.specific.sudo import sudo_support
@sudo_support
def match(command):
return (command.stderr.endswith("hard link not allowed for directory") and
command.script.startswith("ln "))
@sudo_support
def get_new_command(command):
return re.sub(r'^ln ', 'ln -s ', command.script)
|
Fix encoding error in source file example
|
Fix encoding error in source file example
|
Python
|
mit
|
lawrencebenson/thefuck,mlk/thefuck,nvbn/thefuck,PLNech/thefuck,scorphus/thefuck,nvbn/thefuck,SimenB/thefuck,Clpsplug/thefuck,SimenB/thefuck,mlk/thefuck,scorphus/thefuck,lawrencebenson/thefuck,Clpsplug/thefuck,PLNech/thefuck
|
d7f3ea41bc3d252d786a339fc34337f01e1cc3eb
|
django_dbq/migrations/0001_initial.py
|
django_dbq/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
try:
from django.db.models import UUIDField
except ImportError:
from django_dbq.fields import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import uuid
from django.db.models import UUIDField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)),
('created', models.DateTimeField(db_index=True, auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=100)),
('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])),
('next_task', models.CharField(max_length=100, blank=True)),
('workspace', jsonfield.fields.JSONField(null=True)),
('queue_name', models.CharField(db_index=True, max_length=20, default='default')),
],
options={
'ordering': ['-created'],
},
),
]
|
Remove reference to old UUIDfield in django migration
|
Remove reference to old UUIDfield in django migration
|
Python
|
bsd-2-clause
|
dabapps/django-db-queue
|
a5130e32bffa1dbc4d83f349fc3653b690154d71
|
vumi/workers/vas2nets/workers.py
|
vumi/workers/vas2nets/workers.py
|
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*-
# -*- encoding: utf-8 -*-
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*-
# -*- encoding: utf-8 -*-
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
'transport_keyword': data['transport_keyword'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
Add keyword to echo worker.
|
Add keyword to echo worker.
|
Python
|
bsd-3-clause
|
TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi
|
bbf22dc68202d81a8c7e94fbb8e61d819d808115
|
wisely_project/pledges/models.py
|
wisely_project/pledges/models.py
|
from django.utils import timezone
from django.db import models
from users.models import Course, BaseModel, User
class Pledge(BaseModel):
user = models.ForeignKey(User)
course = models.ForeignKey(Course)
money = models.DecimalField(max_digits=8, decimal_places=2)
pledge_date = models.DateTimeField('date pledged', default=timezone.now())
complete_date = models.DateTimeField('date completed', null=True)
is_active = models.BooleanField(default=True)
is_complete = models.BooleanField(default=False)
class Follower(BaseModel):
pledge = models.ForeignKey(Pledge)
email = models.EmailField(default='', blank=True)
|
from django.utils import timezone
from django.db import models
from users.models import Course, BaseModel, UserProfile
class Pledge(BaseModel):
user = models.ForeignKey(UserProfile)
course = models.ForeignKey(Course)
money = models.DecimalField(max_digits=8, decimal_places=2)
pledge_date = models.DateTimeField('date pledged', default=timezone.now())
complete_date = models.DateTimeField('date completed', null=True)
is_active = models.BooleanField(default=True)
is_complete = models.BooleanField(default=False)
class Follower(BaseModel):
pledge = models.ForeignKey(Pledge)
email = models.EmailField(default='', blank=True)
|
Make pledge foreignkey to userprofile
|
Make pledge foreignkey to userprofile
|
Python
|
mit
|
TejasM/wisely,TejasM/wisely,TejasM/wisely
|
8eca7b30865e4d02fd440f55ad3215dee6fab8a1
|
gee_asset_manager/batch_remover.py
|
gee_asset_manager/batch_remover.py
|
import fnmatch
import logging
import sys
import ee
def delete(asset_path):
root = asset_path[:asset_path.rfind('/')]
all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
filtered_names = fnmatch.filter(all_assets_names, asset_path)
if not filtered_names:
logging.warning('Nothing to remove. Exiting.')
sys.exit(1)
else:
for path in filtered_names:
__delete_recursive(path)
logging.info('Collection %s removed', path)
def __delete_recursive(asset_path):
info = ee.data.getInfo(asset_path)
if not info:
logging.warning('Nothing to delete.')
sys.exit(1)
elif info['type'] == 'Image':
pass
elif info['type'] == 'Folder':
items_in_destination = ee.data.getList({'id': asset_path})
for item in items_in_destination:
logging.info('Removing items in %s folder', item['id'])
delete(item['id'])
else:
items_in_destination = ee.data.getList({'id': asset_path})
for item in items_in_destination:
ee.data.deleteAsset(item['id'])
ee.data.deleteAsset(asset_path)
|
import fnmatch
import logging
import sys
import ee
def delete(asset_path):
root_idx = asset_path.rfind('/')
if root_idx == -1:
logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow')
sys.exit(1)
root = asset_path[:root_idx]
all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
filtered_names = fnmatch.filter(all_assets_names, asset_path)
if not filtered_names:
logging.warning('Nothing to remove. Exiting.')
sys.exit(1)
else:
for path in filtered_names:
__delete_recursive(path)
logging.info('Collection %s removed', path)
def __delete_recursive(asset_path):
info = ee.data.getInfo(asset_path)
if not info:
logging.warning('Nothing to delete.')
sys.exit(1)
elif info['type'] == 'Image':
pass
elif info['type'] == 'Folder':
items_in_destination = ee.data.getList({'id': asset_path})
for item in items_in_destination:
logging.info('Removing items in %s folder', item['id'])
delete(item['id'])
else:
items_in_destination = ee.data.getList({'id': asset_path})
for item in items_in_destination:
ee.data.deleteAsset(item['id'])
ee.data.deleteAsset(asset_path)
|
Add warning when removing an asset without full path
|
Add warning when removing an asset without full path
|
Python
|
apache-2.0
|
tracek/gee_asset_manager
|
18a874f312a57b4b9b7a5ce5cf9857585f0f0fef
|
truffe2/app/utils.py
|
truffe2/app/utils.py
|
def add_current_unit(request):
"""Template context processor to add current unit"""
return {'CURRENT_UNIT': get_current_unit(request)}
def get_current_unit(request):
"""Return the current unit"""
from units.models import Unit
current_unit_pk = request.session.get('current_unit_pk', 1)
try:
current_unit = Unit.objects.get(pk=current_unit_pk)
except Unit.DoesNotExist:
current_unit = Unit.objects.get(pk=1)
return current_unit
def update_current_unit(request, unit_pk):
"""Update the current unit"""
request.session['current_unit_pk'] = unit_pk
|
from django.conf import settings
def add_current_unit(request):
"""Template context processor to add current unit"""
return {'CURRENT_UNIT': get_current_unit(request)}
def get_current_unit(request):
"""Return the current unit"""
from units.models import Unit
current_unit_pk = request.session.get('current_unit_pk', 1)
try:
current_unit = Unit.objects.get(pk=current_unit_pk)
except Unit.DoesNotExist:
try:
current_unit = Unit.objects.get(pk=settings.ROOT_UNIT_PK)
except:
current_unit = None
return current_unit
def update_current_unit(request, unit_pk):
"""Update the current unit"""
request.session['current_unit_pk'] = unit_pk
|
Fix error if no units
|
Fix error if no units
|
Python
|
bsd-2-clause
|
agepoly/truffe2,ArcaniteSolutions/truffe2,ArcaniteSolutions/truffe2,agepoly/truffe2,agepoly/truffe2,ArcaniteSolutions/truffe2,agepoly/truffe2,ArcaniteSolutions/truffe2
|
0a1358f27db3abb04032fac1b8a3da09d846d23e
|
oauth_provider/utils.py
|
oauth_provider/utils.py
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts Authorization header in HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
|
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
|
Python
|
bsd-3-clause
|
e-loue/django-oauth-plus
|
1fa0eb2c792b3cc89d27b322c80548f022b7fbb9
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework.exceptions import APIException
from rest_framework import status
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
Modify exception handler to cover multiple data types i.e. dict and list and handle when more than one error returned
|
Modify exception handler to cover multiple data types i.e. dict and list and handle when more than one error returned
|
Python
|
apache-2.0
|
monikagrabowska/osf.io,hmoco/osf.io,asanfilippo7/osf.io,njantrania/osf.io,sloria/osf.io,MerlinZhang/osf.io,acshi/osf.io,mluke93/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,ckc6cz/osf.io,GageGaskins/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,chennan47/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,mluo613/osf.io,acshi/osf.io,samanehsan/osf.io,baylee-d/osf.io,cosenal/osf.io,samchrisinger/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,arpitar/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,KAsante95/osf.io,sbt9uc/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,saradbowman/osf.io,abought/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,rdhyee/osf.io,mattclark/osf.io,adlius/osf.io,alexschiller/osf.io,arpitar/osf.io,kch8qx/osf.io,caseyrollins/osf.io,petermalcolm/osf.io,cslzchen/osf.io,pattisdr/osf.io,aaxelb/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,petermalcolm/osf.io,danielneis/osf.io,arpitar/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,abought/osf.io,icereval/osf.io,MerlinZhang/osf.io,sloria/osf.io,cosenal/osf.io,sbt9uc/osf.io,aaxelb/osf.io,mfraezz/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,Nesiehr/osf.io,erinspace/osf.io,GageGaskins/osf.io,caneruguz/osf.io,RomanZWang/osf.io,njantrania/osf.io,felliott/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,felliott/osf.io,laurenrevere/osf.io,erinspace/osf.io,crcresearch/osf.io,crcresearch/osf.io,wearpants/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,emetsger/osf.io,billyhunt/osf.io,kch8qx/osf.io,Ghalko/osf.io,adlius/osf.io,aaxelb/osf.io,zachjanicki/osf.io,zamattiac/osf.io,caneruguz/osf.io,SSJohns/osf.io,caseyrollins/osf.io,mluo613/osf.io,samchrisinger/osf.io,cosenal/osf.io,abought/osf.io,kwierman/osf.io,rdhyee/osf.io,danielneis/osf.io,SSJohns/osf.io,wearpants/osf.io,sbt9uc/osf.io,haoyuchen1992/osf.io,emetsger/osf.io,Nesiehr/osf.io,pattisdr/osf.io,chrisseto/osf.io,samanehsan/osf.io,KAsante95/osf.io,binoculars/osf.io,brandonPurvis/osf.io,mluo613/osf.i
o,binoculars/osf.io,wearpants/osf.io,ckc6cz/osf.io,jnayak1/osf.io,RomanZWang/osf.io,caseyrygt/osf.io,chrisseto/osf.io,adlius/osf.io,TomHeatwole/osf.io,emetsger/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,samanehsan/osf.io,mluke93/osf.io,chennan47/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,acshi/osf.io,mluo613/osf.io,amyshi188/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,sbt9uc/osf.io,brianjgeiger/osf.io,jmcarp/osf.io,amyshi188/osf.io,cwisecarver/osf.io,binoculars/osf.io,samchrisinger/osf.io,jnayak1/osf.io,billyhunt/osf.io,alexschiller/osf.io,acshi/osf.io,petermalcolm/osf.io,cslzchen/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,kch8qx/osf.io,cwisecarver/osf.io,jnayak1/osf.io,abought/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,alexschiller/osf.io,emetsger/osf.io,doublebits/osf.io,ticklemepierce/osf.io,mattclark/osf.io,danielneis/osf.io,ckc6cz/osf.io,mfraezz/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,chennan47/osf.io,doublebits/osf.io,kwierman/osf.io,RomanZWang/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,jmcarp/osf.io,kch8qx/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,zamattiac/osf.io,leb2dg/osf.io,sloria/osf.io,amyshi188/osf.io,jmcarp/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,hmoco/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,erinspace/osf.io,mluke93/osf.io,hmoco/osf.io,GageGaskins/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,alexschiller/osf.io,MerlinZhang/osf.io,arpitar/osf.io,Ghalko/osf.io,Nesiehr/osf.io,mfraezz/osf.io,mluo613/osf.io,leb2dg/osf.io,zachjanicki/osf.io,mattclark/osf.io,GageGaskins/osf.io,KAsante95/osf.io,mfraezz/osf.io,icereval/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,wearpants/osf.io,felliott/osf.io,danielneis/osf.io,brandonPurvis/osf.io,njantrania/osf.io,KA
sante95/osf.io,alexschiller/osf.io,SSJohns/osf.io,zachjanicki/osf.io,SSJohns/osf.io,baylee-d/osf.io,doublebits/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,jmcarp/osf.io,rdhyee/osf.io,kch8qx/osf.io,billyhunt/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,baylee-d/osf.io,felliott/osf.io,samanehsan/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,cosenal/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,laurenrevere/osf.io,mluke93/osf.io,chrisseto/osf.io,Ghalko/osf.io,acshi/osf.io,monikagrabowska/osf.io,njantrania/osf.io,zamattiac/osf.io,caneruguz/osf.io,billyhunt/osf.io,Ghalko/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,icereval/osf.io
|
4c1bf1757baa5beec50377724961c528f5985864
|
ptest/screencapturer.py
|
ptest/screencapturer.py
|
import threading
import traceback
import plogger
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
try:
screen_shot = active_browser.get_screenshot_as_png()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc()))
return
current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot
else:
pass # todo: take screen shot for desktop
|
import threading
import traceback
import StringIO
import plogger
try:
from PIL import ImageGrab
except ImportError:
PIL_installed = False
else:
PIL_installed = True
try:
import wx
except ImportError:
wxpython_installed = False
else:
wxpython_installed = True
__author__ = 'karl.gong'
def take_screen_shot():
current_thread = threading.currentThread()
active_browser = current_thread.get_property("browser")
if active_browser is not None:
while True:
try:
active_browser.switch_to.alert.dismiss()
except Exception:
break
def capture_screen():
return active_browser.get_screenshot_as_png()
elif PIL_installed:
def capture_screen():
output = StringIO.StringIO()
ImageGrab.grab().save(output, format="png")
return output.getvalue()
elif wxpython_installed:
def capture_screen():
app = wx.App(False)
screen = wx.ScreenDC()
width, height = screen.GetSize()
bmp = wx.EmptyBitmap(width, height)
mem = wx.MemoryDC(bmp)
mem.Blit(0, 0, width, height, screen, 0, 0)
output = StringIO.StringIO()
bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG)
return output.getvalue()
else:
return
try:
current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen()
except Exception as e:
plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
|
Support capture screenshot for no-selenium test
|
Support capture screenshot for no-selenium test
|
Python
|
apache-2.0
|
KarlGong/ptest,KarlGong/ptest
|
c82f0f10ea8b96377ebed8a6859ff3cd8ed4cd3f
|
python/turbodbc/exceptions.py
|
python/turbodbc/exceptions.py
|
from __future__ import absolute_import
from functools import wraps
from exceptions import StandardError
from turbodbc_intern import Error as InternError
class Error(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
def translate_exceptions(f):
@wraps(f)
def wrapper(*args, **kwds):
try:
return f(*args, **kwds)
except InternError as e:
raise DatabaseError(str(e))
return wrapper
|
from __future__ import absolute_import
from functools import wraps
from turbodbc_intern import Error as InternError
# Python 2/3 compatibility
try:
from exceptions import StandardError as _BaseError
except ImportError:
_BaseError = Exception
class Error(_BaseError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
def translate_exceptions(f):
@wraps(f)
def wrapper(*args, **kwds):
try:
return f(*args, **kwds)
except InternError as e:
raise DatabaseError(str(e))
return wrapper
|
Fix Python 2/3 exception base class compatibility
|
Fix Python 2/3 exception base class compatibility
|
Python
|
mit
|
blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc
|
80f1ee23f85aee9a54e0c6cae7a30dddbe96541b
|
scorecard/tests/test_views.py
|
scorecard/tests/test_views.py
|
import json
from infrastructure.models import FinancialYear
from django.test import (
TransactionTestCase,
Client,
override_settings,
)
from . import (
import_data,
)
from .resources import (
GeographyResource,
MunicipalityProfileResource,
MedianGroupResource,
RatingCountGroupResource,
)
@override_settings(
SITE_ID=2,
STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage",
)
class GeographyDetailViewTestCase(TransactionTestCase):
serialized_rollback = True
def test_context(self):
# Import sample data
import_data(
GeographyResource,
"views/scorecard_geography.csv",
)
import_data(
MunicipalityProfileResource,
"views/municipality_profile.csv",
)
import_data(
MedianGroupResource,
"views/median_group.csv",
)
import_data(
RatingCountGroupResource,
"views/rating_count_group.csv",
)
fy = FinancialYear.objects.create(budget_year="2019/2020")
# Make request
client = Client()
response = client.get("/profiles/municipality-CPT-city-of-cape-town/")
context = response.context
page_data = json.loads(context["page_data_json"])
# Test for amount types
self.assertIsInstance(page_data["amount_types_v1"], dict)
# Test for cube names
self.assertIsInstance(page_data["cube_names"], dict)
# Test for municipality category descriptions
self.assertIsInstance(page_data["municipal_category_descriptions"], dict)
|
import json
from django.test import (
TransactionTestCase,
Client,
override_settings,
)
@override_settings(
SITE_ID=2,
STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage",
)
class GeographyDetailViewTestCase(TransactionTestCase):
serialized_rollback = True
fixtures = ["seeddata", "demo-data", "compiled_profile"]
def test_context(self):
# Make request
client = Client()
response = client.get("/profiles/municipality-BUF-buffalo-city/")
context = response.context
page_data = json.loads(context["page_data_json"])
# Test for amount types
self.assertIsInstance(page_data["amount_types_v1"], dict)
# Test for cube names
self.assertIsInstance(page_data["cube_names"], dict)
# Test for municipality category descriptions
self.assertIsInstance(page_data["municipal_category_descriptions"], dict)
|
Use new fixtures for geography views test
|
Use new fixtures for geography views test
|
Python
|
mit
|
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
|
4666849791cad70ae1bb907a2dcc35ccfc0b7de4
|
backend/populate_dimkarakostas.py
|
backend/populate_dimkarakostas.py
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
from string import ascii_lowercase
import django
import os
import string
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength,
alignmentalphabet=string.ascii_uppercase
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
Update dimkarakostas population with alignmentalphabet
|
Update dimkarakostas population with alignmentalphabet
|
Python
|
mit
|
esarafianou/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dionyziz/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture
|
8abdce9c60c9d2ead839e0065d35128ec16a82a1
|
chatterbot/__main__.py
|
chatterbot/__main__.py
|
import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
import nltk.data
print('\n'.join(nltk.data.path))
|
import sys
if __name__ == '__main__':
import chatterbot
if '--version' in sys.argv:
print(chatterbot.__version__)
if 'list_nltk_data' in sys.argv:
import os
import nltk.data
data_directories = []
# Find each data directory in the NLTK path that has content
for path in nltk.data.path:
if os.path.exists(path):
if os.listdir(path):
data_directories.append(path)
print(os.linesep.join(data_directories))
|
Add commad line utility to find NLTK data
|
Add commad line utility to find NLTK data
|
Python
|
bsd-3-clause
|
gunthercox/ChatterBot,vkosuri/ChatterBot
|
210c7b7fb421a7c083b9d292370b15c0ece17fa7
|
source/bark/__init__.py
|
source/bark/__init__.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .handler.distribute import Distribute
#: Top level handler responsible for relaying all logs to other handlers.
handle = Distribute()
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .handler.distribute import Distribute
#: Top level handler responsible for relaying all logs to other handlers.
handler = Distribute()
handlers = handler.handlers
handle = handler.handle
|
Correct handler reference variable name and add convenient accessors.
|
Correct handler reference variable name and add convenient accessors.
|
Python
|
apache-2.0
|
4degrees/mill,4degrees/sawmill
|
696a79069ad1db1caee4d6da0c3c48dbd79f9157
|
sqliteschema/_logger.py
|
sqliteschema/_logger.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import logbook
import pytablewriter
import simplesqlite
logger = logbook.Logger("sqliteschema")
logger.disable()
def set_logger(is_enable):
pytablewriter.set_logger(is_enable=is_enable)
simplesqlite.set_logger(is_enable=is_enable)
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
pytablewriter.set_log_level(log_level)
simplesqlite.set_log_level(log_level)
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import logbook
import pytablewriter
import simplesqlite
logger = logbook.Logger("sqliteschema")
logger.disable()
def set_logger(is_enable):
if is_enable != logger.disabled:
return
if is_enable:
logger.enable()
else:
logger.disable()
pytablewriter.set_logger(is_enable=is_enable)
simplesqlite.set_logger(is_enable=is_enable)
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
pytablewriter.set_log_level(log_level)
simplesqlite.set_log_level(log_level)
|
Modify to avoid excessive logger initialization
|
Modify to avoid excessive logger initialization
|
Python
|
mit
|
thombashi/sqliteschema
|
1c78dfa0e0d1905910476b4052e42de287a70b74
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
import sys
import string
def main():
"""
Executes the tests. Requires the CherryPy live server to be installed.
"""
command = "python manage.py test"
options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver"
apps = []
if len(sys.argv) > 1:
apps = sys.argv[1:]
os.system(command + " " + string.join(apps, " ") + " " + options)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
import os
import sys
import string
def main():
"""
Executes the tests. Requires the CherryPy live server to be installed.
"""
command = "python manage.py test"
options = "--exe --with-selenium --with-selenium-fixtures --with-cherrypyliveserver --noinput"
apps = []
if len(sys.argv) > 1:
apps = sys.argv[1:]
os.system(command + " " + string.join(apps, " ") + " " + options)
if __name__ == "__main__":
main()
|
Update to the run tests script to force database deletion if the test database exists.
|
Update to the run tests script to force database deletion if the test database exists.
|
Python
|
mit
|
jtakayama/makahiki-draft,jtakayama/ics691-setupbooster,csdl/makahiki,yongwen/makahiki,yongwen/makahiki,jtakayama/makahiki-draft,yongwen/makahiki,justinslee/Wai-Not-Makahiki,csdl/makahiki,jtakayama/makahiki-draft,yongwen/makahiki,csdl/makahiki,csdl/makahiki,jtakayama/makahiki-draft,jtakayama/ics691-setupbooster,jtakayama/ics691-setupbooster
|
20124d599c6305889315847c15329c02efdd2b8c
|
migrations/versions/0313_email_access_validated_at.py
|
migrations/versions/0313_email_access_validated_at.py
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
WHERE
auth_type = 'sms_auth'
""")
op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
""")
op.alter_column('users', 'email_access_validated_at', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
""")
op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
AND
email_access_validated_at IS NOT NULL
""")
op.alter_column('users', 'email_access_validated_at', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
Make sure email_access_validated_at is not null after being populated
|
Make sure email_access_validated_at is not null after being populated
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
78b2978c3e0e56c4c75a3a6b532e02c995ca69ed
|
openedx/core/djangoapps/user_api/permissions/views.py
|
openedx/core/djangoapps/user_api/permissions/views.py
|
"""
NOTE: this API is WIP and has not yet been approved. Do not use this API
without talking to Christina or Andy.
For more information, see:
https://openedx.atlassian.net/wiki/display/TNL/User+API
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import permissions
from django.db import transaction
from django.utils.translation import ugettext as _
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from openedx.core.lib.api.permissions import IsUserInUrlOrStaff
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
|
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from ..errors import UserNotFound, UserNotAuthorized
class PermissionsView(APIView):
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
parser_classes = (MergePatchParser,)
def get(self, request):
"""
GET /api/user/v1/
"""
try:
is_staff = request.user.is_staff
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(is_staff)
|
Remove unused import and redundant comment
|
Remove unused import and redundant comment
|
Python
|
agpl-3.0
|
mbareta/edx-platform-ft,mbareta/edx-platform-ft,mbareta/edx-platform-ft,mbareta/edx-platform-ft
|
cadee051a462de765bab59ac42d6b372fa49c033
|
examples/logfile.py
|
examples/logfile.py
|
"""
Output an Eliot message to a log file using the threaded log writer.
"""
from __future__ import unicode_literals, print_function
from twisted.internet.task import react
from eliot.logwriter import ThreadedFileWriter
from eliot import Message, Logger, addDestination
_logger = Logger()
def main(reactor):
print("Logging to example-eliot.log...")
logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor)
addDestination(logWriter)
# Manually start the service. Normally we'd register ThreadedFileWriter
# with the usual Twisted Service/Application infrastructure.
logWriter.startService()
# Log a message:
Message.new(value="hello", another=1).write(_logger)
# Manually stop the service.
done = logWriter.stopService()
return done
if __name__ == '__main__':
react(main, [])
|
"""
Output an Eliot message to a log file using the threaded log writer.
"""
from __future__ import unicode_literals, print_function
from twisted.internet.task import react
from eliot.logwriter import ThreadedFileWriter
from eliot import Message, Logger
_logger = Logger()
def main(reactor):
print("Logging to example-eliot.log...")
logWriter = ThreadedFileWriter(open("example-eliot.log", "ab"), reactor)
# Manually start the service, which will add it as a
# destination. Normally we'd register ThreadedFileWriter with the usual
# Twisted Service/Application infrastructure.
logWriter.startService()
# Log a message:
Message.new(value="hello", another=1).write(_logger)
# Manually stop the service.
done = logWriter.stopService()
return done
if __name__ == '__main__':
react(main, [])
|
Fix bug where the service was added as a destination one time too many.
|
Fix bug where the service was added as a destination one time too many.
|
Python
|
apache-2.0
|
iffy/eliot,ClusterHQ/eliot,ScatterHQ/eliot,ScatterHQ/eliot,ScatterHQ/eliot
|
9f10dbdabe61ed841c0def319f021a4735f39217
|
src/sct/templates/__init__.py
|
src/sct/templates/__init__.py
|
# -*- coding: utf-8 -*-
'''
Copyright 2014 Universitatea de Vest din Timișoara
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author: Marian Neagul <marian@info.uvt.ro>
@contact: marian@info.uvt.ro
@copyright: 2014 Universitatea de Vest din Timișoara
'''
|
# -*- coding: utf-8 -*-
"""
Copyright 2014 Universitatea de Vest din Timișoara
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author: Marian Neagul <marian@info.uvt.ro>
@contact: marian@info.uvt.ro
@copyright: 2014 Universitatea de Vest din Timișoara
"""
from sct.templates.hadoop import HadoopServer, HadoopWorker
TEMPLATES = {
'hadoop-server': {
'max-node-count': 1,
'cloudinit': HadoopServer
},
'hadoop-worker': {
'max-node-count': None,
'cloudinit': HadoopWorker
}
}
def get_available_templates():
return TEMPLATES.keys()
def get_template(name):
if name not in TEMPLATES:
raise NameError("No such template %s" % name)
else:
return TEMPLATES.get(name)
|
Add provisional (needs to be replaced with pkg_resources entry point discovery) template registry
|
Add provisional (needs to be replaced with pkg_resources entry point discovery) template registry
|
Python
|
apache-2.0
|
mneagul/scape-cloud-toolkit,mneagul/scape-cloud-toolkit,mneagul/scape-cloud-toolkit
|
0534c1cdeb92503a90ef309dee6edddb45234bf7
|
comrade/users/urls.py
|
comrade/users/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
# LH #269 - ideally this wouldn't be hard coded
{'post_reset_redirect':'/accounts/password/forgot/done/'},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
# LH #269
{'post_reset_redirect':'/accounts/password/reset/done/'},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('users:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('users:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
Resolve old Django 1.1 bug in URLs to keep it DRY.
|
Resolve old Django 1.1 bug in URLs to keep it DRY.
|
Python
|
mit
|
bueda/django-comrade
|
e9e4c622ff667e475986e1544ec78b0604b8a511
|
girder_worker/tasks.py
|
girder_worker/tasks.py
|
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
def run(tasks, *pargs, **kwargs):
jobInfo = kwargs.pop('jobInfo', {})
retval = 0
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
import core
from girder_worker.utils import JobStatus
from .app import app
def _cleanup(*args, **kwargs):
core.events.trigger('cleanup')
@app.task(name='girder_worker.run', bind=True, after_return=_cleanup)
def run(task, *pargs, **kwargs):
kwargs['_job_manager'] = task.job_manager \
if hasattr(task, 'job_manager') else None
kwargs['status'] = JobStatus.RUNNING
return core.run(*pargs, **kwargs)
@app.task(name='girder_worker.convert')
def convert(*pargs, **kwargs):
return core.convert(*pargs, **kwargs)
@app.task(name='girder_worker.validators')
def validators(*pargs, **kwargs):
_type, _format = pargs
nodes = []
for (node, data) in core.format.conv_graph.nodes(data=True):
if ((_type is None) or (_type == node.type)) and \
((_format is None) or (_format == node.format)):
nodes.append({'type': node.type,
'format': node.format,
'validator': data})
return nodes
|
Fix typo from bad conflict resolution during merge
|
Fix typo from bad conflict resolution during merge
|
Python
|
apache-2.0
|
girder/girder_worker,girder/girder_worker,girder/girder_worker
|
0a4922dba3367a747d7460b5c1b59c49c67f3026
|
hcalendar/hcalendar.py
|
hcalendar/hcalendar.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = list(map(vCalendar, self._cals))
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup, 'html.parser')
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = list(map(vCalendar, self._cals))
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
|
Add missing parser argument to BeautifulSoup instance
|
Add missing parser argument to BeautifulSoup instance
|
Python
|
mit
|
mback2k/python-hcalendar
|
9a32f922e6d5ec6e5bd22eccbe3dceaef7bbd7dc
|
tailor/tests/utils/charformat_test.py
|
tailor/tests/utils/charformat_test.py
|
import unittest
from tailor.utils import charformat
class MyTestCase(unittest.TestCase):
def is_upper_camel_case_test_upper_camel_case_name(self):
self.assertTrue(charformat.is_upper_camel_case('HelloWorld'))
def is_upper_camel_case_test_lower_camel_case_name(self):
self.assertFalse(charformat.is_upper_camel_case('helloWorld'))
def is_upper_camel_case_test_blank_name(self):
self.assertFalse(charformat.is_upper_camel_case(''))
def is_upper_camel_case_test_snake_case_name(self):
self.assertFalse(charformat.is_upper_camel_case('Hello_World'))
def is_upper_camel_case_test_numeric_name(self):
self.assertFalse(charformat.is_upper_camel_case('1ello_world'))
if __name__ == '__main__':
unittest.main()
|
import unittest
from tailor.utils import charformat
class MyTestCase(unittest.TestCase):
def is_upper_camel_case_test_upper_camel_case_name(self):
self.assertTrue(charformat.is_upper_camel_case('HelloWorld'))
def is_upper_camel_case_test_lower_camel_case_name(self):
self.assertFalse(charformat.is_upper_camel_case('helloWorld'))
def is_upper_camel_case_test_blank_name(self):
self.assertFalse(charformat.is_upper_camel_case(''))
def is_upper_camel_case_test_snake_case_name(self):
self.assertFalse(charformat.is_upper_camel_case('Hello_World'))
def is_upper_camel_case_test_numeric_name(self):
self.assertFalse(charformat.is_upper_camel_case('1ello_world'))
def is_upper_camel_case_test_special_character_name(self):
self.assertFalse(charformat.is_upper_camel_case('!ello_world'))
if __name__ == '__main__':
unittest.main()
|
Add special character name test case
|
Add special character name test case
|
Python
|
mit
|
sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor
|
fd48211548c8c2d5daec0994155ddb7e8d226882
|
tests/test_anki_sync.py
|
tests/test_anki_sync.py
|
import pytest
import os
import rememberberry
from rememberscript import RememberMachine, FileStorage
from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story
@pytest.mark.asyncio
@tmp_data_path('/tmp/data/', delete=True)
async def test_anki_account():
storage = FileStorage()
m, storage = get_isolated_story('login_anki', storage)
await assert_replies(m.reply(''), 'What is your Anki username?')
await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password')
await assert_replies(m.reply('jkdhskjhgdksjhg'),
'Authentication with ankiweb failed, try again?',
'What is your Anki username?')
await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password')
await assert_replies(m.reply('ankitest'),
'Authentication worked, now I\'ll try to sync your account',
'Syncing anki database',
'Syncing media files (this may take a while)',
'Syncing done',
'Great, you\'re all synced up!',
'enter init')
|
import pytest
import os
import rememberberry
from rememberscript import RememberMachine, FileStorage
from rememberberry.testing import tmp_data_path, assert_replies, get_isolated_story
@pytest.mark.asyncio
@tmp_data_path('/tmp/data/', delete=True)
async def test_anki_account():
storage = FileStorage()
storage['username'] = 'alice'
m, storage = get_isolated_story('login_anki', storage)
await assert_replies(m.reply(''), 'What is your Anki username?')
await assert_replies(m.reply('ajshdkajhsdkajshd'), 'And now the password')
await assert_replies(m.reply('jkdhskjhgdksjhg'),
'Authentication with ankiweb failed, try again?',
'What is your Anki username?')
await assert_replies(m.reply('ankitest8080@gmail.com'), 'And now the password')
await assert_replies(m.reply('ankitest'),
'Authentication worked, now I\'ll try to sync your account',
'Syncing anki database',
'Syncing media files (this may take a while)',
'Syncing done',
'Great, you\'re all synced up!',
'enter init')
|
Fix missing username in test
|
Fix missing username in test
|
Python
|
agpl-3.0
|
rememberberry/rememberberry-server,rememberberry/rememberberry-server
|
2c38fea1434f8591957c2707359412151c4b6c43
|
tests/test_timezones.py
|
tests/test_timezones.py
|
import unittest
import datetime
from garage.timezones import TimeZone
class TimeZoneTest(unittest.TestCase):
def test_time_zone(self):
utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC)
cst = utc.astimezone(TimeZone.CST)
print('xxx', utc, cst)
self.assertEqual(2000, cst.year)
self.assertEqual(1, cst.month)
self.assertEqual(2, cst.day)
self.assertEqual(11, cst.hour)
self.assertEqual(4, cst.minute)
self.assertEqual(0, cst.second)
self.assertEqual(0, cst.microsecond)
if __name__ == '__main__':
unittest.main()
|
import unittest
import datetime
from garage.timezones import TimeZone
class TimeZoneTest(unittest.TestCase):
def test_time_zone(self):
utc = datetime.datetime(2000, 1, 2, 3, 4, 0, 0, TimeZone.UTC)
cst = utc.astimezone(TimeZone.CST)
self.assertEqual(2000, cst.year)
self.assertEqual(1, cst.month)
self.assertEqual(2, cst.day)
self.assertEqual(11, cst.hour)
self.assertEqual(4, cst.minute)
self.assertEqual(0, cst.second)
self.assertEqual(0, cst.microsecond)
if __name__ == '__main__':
unittest.main()
|
Remove print in unit test
|
Remove print in unit test
|
Python
|
mit
|
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
|
b1d3a0c79a52ca1987ea08a546213e1135539927
|
tools/bots/ddc_tests.py
|
tools/bots/ddc_tests.py
|
#!/usr/bin/env python
#
# Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import os.path
import shutil
import sys
import subprocess
import bot
import bot_utils
utils = bot_utils.GetUtils()
BUILD_OS = utils.GuessOS()
(bot_name, _) = bot.GetBotName()
CHANNEL = bot_utils.GetChannelFromName(bot_name)
if __name__ == '__main__':
with utils.ChangedWorkingDirectory('pkg/dev_compiler'):
dart_exe = utils.CheckedInSdkExecutable()
# These two calls mirror pkg/dev_compiler/tool/test.sh.
bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test'])
bot.RunProcess([dart_exe, 'test/all_tests.dart'])
# These mirror pkg/dev_compiler/tool/browser_test.sh.
bot.RunProcess(['npm', 'install'])
bot.RunProcess(['npm', 'test'])
|
#!/usr/bin/env python
#
# Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import os.path
import shutil
import sys
import subprocess
import bot
import bot_utils
utils = bot_utils.GetUtils()
BUILD_OS = utils.GuessOS()
(bot_name, _) = bot.GetBotName()
CHANNEL = bot_utils.GetChannelFromName(bot_name)
if __name__ == '__main__':
with utils.ChangedWorkingDirectory('pkg/dev_compiler'):
dart_exe = utils.CheckedInSdkExecutable()
# These two calls mirror pkg/dev_compiler/tool/test.sh.
bot.RunProcess([dart_exe, 'tool/build_pkgs.dart', 'test'])
bot.RunProcess([dart_exe, 'test/all_tests.dart'])
# These mirror pkg/dev_compiler/tool/browser_test.sh.
bot.RunProcess(['npm', 'install'])
bot.RunProcess(['npm', 'test'], {'CHROME_BIN': 'chrome'})
|
Set CHROME_BIN on DDC bot
|
Set CHROME_BIN on DDC bot
Noticed the Linux bot is failing on this:
https://build.chromium.org/p/client.dart.fyi/builders/ddc-linux-release-be/builds/1724/steps/ddc%20tests/logs/stdio
R=whesse@google.com
Review-Url: https://codereview.chromium.org/2640093002 .
|
Python
|
bsd-3-clause
|
dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk
|
c143bc14be8d486d313056c0d1313e03ac438284
|
examples/ex_aps_parser.py
|
examples/ex_aps_parser.py
|
from __future__ import print_function
import os
import glob
import pyingest.parsers.aps as aps
import pyingest.parsers.arxiv as arxiv
import pyingest.serializers.classic
import traceback
import json
import xmltodict
from datetime import datetime
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
with open(input_list,'rU') as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.','/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ",fnord)
with open(f, 'rU') as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
outputfp = open('aps.tag', 'a')
serializer.write(document, outputfp)
outputfp.close()
#except:
# print "ERROR!\n%s\n"%f
# traceback.print_exc()
# pass
else:
print("not found :( ", fnord)
|
from __future__ import print_function
import os
import glob
import pyingest.parsers.aps as aps
import pyingest.parsers.arxiv as arxiv
import pyingest.serializers.classic
import traceback
import json
import xmltodict
from datetime import datetime
import sys
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
if sys.version_info > (3,):
open_mode = 'r'
else:
open_mode = 'rU'
with open(input_list, open_mode) as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.', '/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ", fnord)
with open(f, open_mode) as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
outputfp = open('aps.tag', 'a')
serializer.write(document, outputfp)
outputfp.close()
#except:
# print "ERROR!\n%s\n"%f
# traceback.print_exc()
# pass
else:
print("not found :( ", fnord)
|
Use open mode syntax on example file
|
Use open mode syntax on example file
|
Python
|
mit
|
adsabs/adsabs-pyingest,adsabs/adsabs-pyingest,adsabs/adsabs-pyingest
|
bfd166e9679e6fa06e694fd5e587fcf10186d79b
|
vx_intro.py
|
vx_intro.py
|
import vx
import math
import os
import sys
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = sys.argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
sys.path.append(os.path.expanduser('~/.python'))
import rc
|
import vx
import math
import os
import sys
_tick_functions = []
def _register_tick_function(f, front=False):
if front:
_tick_functions.insert(0, f)
else:
_tick_functions.append(f)
def _tick():
for f in _tick_functions:
f()
vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = sys.argv[1:]
import utils
import scheduler
import keybindings
import windows
import prompt
def _default_start():
if len(vx.files) == 0:
win = vx.window(vx.rows, vx.cols, 0, 0)
win.blank()
win.focus()
else:
d = math.floor(vx.rows / (len(vx.files)))
y = 0
for f in vx.files:
win = vx.window(d, vx.cols, y, 0)
win.attach_file(f)
y += d
win.focus()
vx.default_start = _default_start
sys.path.append(os.path.expanduser('~/.python'))
try:
import rc
except ImportError:
pass # just means there was no ~/.python/rc module
|
Fix a crash if there is no ~/.python/rc.py
|
Fix a crash if there is no ~/.python/rc.py
|
Python
|
mit
|
philipdexter/vx,philipdexter/vx
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.