commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
75f01ff3be060e033b24d141b0ca824cb7f81c22
|
tests/twisted/avahi/test-register.py
|
tests/twisted/avahi/test-register.py
|
from saluttest import exec_test
import avahitest
from avahitest import AvahiListener
import time
def test(q, bus, conn):
a = AvahiListener(q)
a.listen_for_service("_presence._tcp")
conn.Connect()
q.expect('service-added',
name='test-register@' + avahitest.get_host_name())
if __name__ == '__main__':
exec_test(test, { "published-name": "test-register" })
|
from saluttest import exec_test
import avahitest
from avahitest import AvahiListener
from avahitest import txt_get_key
from avahi import txt_array_to_string_array
import time
PUBLISHED_NAME="test-register"
FIRST_NAME="lastname"
LAST_NAME="lastname"
def test(q, bus, conn):
a = AvahiListener(q)
a.listen_for_service("_presence._tcp")
conn.Connect()
e = q.expect('service-added',
name=PUBLISHED_NAME + "@" + avahitest.get_host_name())
service = a.resolver_for_service(e)
e = q.expect('service-resolved', service = service)
for (key, val) in { "1st": FIRST_NAME,
"last": LAST_NAME,
"status": "avail",
"txtvers": "1" }.iteritems():
v = txt_get_key(e.txt, key)
assert v == val, (key, val, v)
if __name__ == '__main__':
exec_test(test, { "published-name": PUBLISHED_NAME,
"first-name": FIRST_NAME,
"last-name": LAST_NAME })
|
Test that the service is register with the correct txt record
|
Test that the service is register with the correct txt record
|
Python
|
lgpl-2.1
|
freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut
|
f898d1cc96fe66a097def29552f3774f3509be83
|
insultgenerator/words.py
|
insultgenerator/words.py
|
import pkg_resources
import random
_insulting_adjectives = []
def _load_wordlists():
global _insulting_adjectives
insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt")
_insulting_adjectives = insulting_adjective_list.decode().split('\n')
def get_insulting_adjective():
return _insulting_adjectives[0]
return random.choice(_insulting_adjectives)
_load_wordlists()
|
import pkg_resources
import random
_insulting_adjectives = []
def _load_wordlists():
global _insulting_adjectives
insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt")
_insulting_adjectives = insulting_adjective_list.decode().split('\n')
def get_insulting_adjective():
return random.choice(_insulting_adjectives)
_load_wordlists()
|
Revert "Adding test failure to ensure that CI is functioning correctly"
|
Revert "Adding test failure to ensure that CI is functioning correctly"
This reverts commit 754be81c1ccc385d8e7b418460271966d7db2361.
|
Python
|
mit
|
tr00st/insult_generator
|
6fbe58692005e5c8b7a9c4f4e98984ae86d347a2
|
pinax/messages/context_processors.py
|
pinax/messages/context_processors.py
|
from .models import Thread
def user_messages(request):
c = {}
if request.user.is_authenticated():
c["inbox_count"] = Thread.inbox(request.user).count()
return c
|
from .models import Thread
def user_messages(request):
c = {}
if request.user.is_authenticated():
c["inbox_threads"] = Thread.inbox(request.user)
c["unread_threads"] = Thread.unread(request.user)
return c
|
Return querysets in context processor to be more useful
|
Return querysets in context processor to be more useful
|
Python
|
mit
|
eldarion/user_messages,pinax/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages,eldarion/user_messages,arthur-wsw/pinax-messages
|
c63a1c2bc92267ac2b5ffc52c7189942d034c37b
|
src/dashboard/src/installer/views.py
|
src/dashboard/src/installer/views.py
|
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render
from django.http import HttpResponse
def welcome(request):
return render(request, 'installer/welcome.html', locals())
|
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.models import User
from django.shortcuts import render
from django.http import HttpResponse
def welcome(request):
return render(request, 'installer/welcome.html', locals())
def create_superuser(request):
pass
# username
# password
# email
# User.objects.create_superuser(username, email, password)
|
Define skeleton for the function that will create the superuser
|
Define skeleton for the function that will create the superuser
Autoconverted from SVN (revision:2929)
|
Python
|
agpl-3.0
|
artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history
|
c555c53290c8894c80dc7991081dd5d7591fda8c
|
helpers/run_feeds.py
|
helpers/run_feeds.py
|
from core.feed import Feed
import core.config.celeryimports
if __name__ == '__main__':
all_feeds = Feed.objects()
for n in all_feeds:
print "Testing: {}".format(n)
n.update()
|
import sys
from core.feed import Feed
import core.config.celeryimports
if __name__ == '__main__':
if len(sys.argv) == 1:
all_feeds = Feed.objects()
elif len(sys.argv) >= 2:
all_feeds = [Feed.objects.get(name=sys.argv[1])]
print all_feeds
for n in all_feeds:
print "Testing: {}".format(n)
n.update()
|
Add argument to run single feed
|
Add argument to run single feed
|
Python
|
apache-2.0
|
yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti
|
12254ea15b1f761ad63095ed7244f347d42e4c85
|
file_encryptor/__init__.py
|
file_encryptor/__init__.py
|
from file_encryptor import (convergence, key_generators)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from file_encryptor import (convergence, key_generators)
__version__ = '0.2.0'
|
Add copyright, license and version information.
|
Add copyright, license and version information.
|
Python
|
mit
|
Storj/file-encryptor
|
864dac9b2586891f62700e3170421617aca48a88
|
deployment/config.py
|
deployment/config.py
|
class Azure:
resource_group = "MajavaShakki"
location = "northeurope"
cosmosdb_name = f"{resource_group}mongo".lower()
plan_name = f"{resource_group}Plan"
site_name = f"{resource_group}Site"
class Mongo:
database_name = "Majavashakki"
collection_throughput = 500
collections = ["gamemodels", "sessions", "users"]
|
class Azure:
resource_group = "MajavaShakki"
location = "northeurope"
cosmosdb_name = f"{resource_group}mongo".lower()
plan_name = f"{resource_group}Plan"
site_name = f"{resource_group}Site"
class Mongo:
database_name = "Majavashakki"
collection_throughput = 500
system_indexes_collection = "undefined" # https://github.com/Automattic/mongoose/issues/6989
collections = ["gamemodels", "sessions", "users", system_indexes_collection]
|
Configure throughput for 'undefined' collection
|
Configure throughput for 'undefined' collection
https://github.com/Automattic/mongoose/issues/6989
https://jira.mongodb.org/browse/NODE-1662
|
Python
|
mit
|
Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki
|
3eb3cc047f2f5a358066eac8f806580089d70df2
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
)
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
author='sc4reful',
url = 'https://github.com/sc4reful/dscsrf',
keywords = ['security', 'flask', 'website', 'csrf'],
download_url = 'https://github.com/sc4reful/dscsrf/tarball/1.0',
)
|
Prepare for tagging for PyPI
|
Prepare for tagging for PyPI
|
Python
|
mit
|
wkoathp/dscsrf
|
5a92773a1d9c40e745026ca318ae21bfce2d4fb6
|
flaskext/cache/backends.py
|
flaskext/cache/backends.py
|
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache,
GAEMemcachedCache, FileSystemCache)
def null(app, args, kwargs):
return NullCache()
def simple(app, args, kwargs):
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return SimpleCache(*args, **kwargs)
def memcached(app, args, kwargs):
args.append(app.config['CACHE_MEMCACHED_SERVERS'])
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return MemcachedCache(*args, **kwargs)
def gaememcached(app, args, kwargs):
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return GAEMemcachedCache(*args, **kwargs)
def filesystem(app, args, kwargs):
args.append(app.config['CACHE_DIR'])
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return FileSystemCache(*args, **kwargs)
# RedisCache is supported since Werkzeug 0.7.
try:
from werkzeug.contrib.cache import RedisCache
except ImportError:
pass
else:
def redis(app, args, kwargs):
kwargs.update(dict(
host=app.config.get('CACHE_REDIS_HOST', 'localhost'),
port=app.config.get('CACHE_REDIS_PORT', 6379),
password=app.config.get('CACHE_REDIS_PASSWORD', None)
))
return RedisCache(*args, **kwargs)
|
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache,
GAEMemcachedCache, FileSystemCache)
def null(app, args, kwargs):
return NullCache()
def simple(app, args, kwargs):
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return SimpleCache(*args, **kwargs)
def memcached(app, args, kwargs):
args.append(app.config['CACHE_MEMCACHED_SERVERS'])
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return MemcachedCache(*args, **kwargs)
def gaememcached(app, args, kwargs):
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return GAEMemcachedCache(*args, **kwargs)
def filesystem(app, args, kwargs):
args.append(app.config['CACHE_DIR'])
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return FileSystemCache(*args, **kwargs)
# RedisCache is supported since Werkzeug 0.7.
try:
from werkzeug.contrib.cache import RedisCache
except ImportError:
pass
else:
def redis(app, args, kwargs):
kwargs.update(dict(
host=app.config.get('CACHE_REDIS_HOST', 'localhost'),
port=app.config.get('CACHE_REDIS_PORT', 6379),
))
password = app.config.get('CACHE_REDIS_PASSWORD')
if password:
kwargs['password'] = password
return RedisCache(*args, **kwargs)
|
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
|
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
|
Python
|
bsd-3-clause
|
kazeeki/mezmorize,kazeeki/mezmorize,j-fuentes/flask-cache,ordbogen/flask-cache,j-fuentes/flask-cache,thadeusb/flask-cache,alexey-sveshnikov/flask-cache,ordbogen/flask-cache,alexey-sveshnikov/flask-cache,thadeusb/flask-cache,gerasim13/flask-cache,gerasim13/flask-cache
|
c9c0104456ef7d5dcda29db67788112a8435945b
|
scripts/createDataModel.py
|
scripts/createDataModel.py
|
# script :: creating a datamodel that fits mahout from ratings.dat
ratings_dat = open('../data/movielens-1m/ratings.dat', 'r')
ratings_csv = open('../data/movielens-1m/ratings_without_timestamp.txt', 'w')
for line in ratings_dat:
arr = line.split('::')
new_line = ','.join(arr[:3])+'\n';
ratings_csv.write(new_line)
ratings_dat.close()
ratings_csv.close()
|
#!/usr/bin/env python
# script :: creating a datamodel that fits mahout from ratings.dat
ratings_dat = open('../data/movielens-1m/users.dat', 'r')
ratings_csv = open('../data/movielens-1m/users.txt', 'w')
for line in ratings_dat:
arr = line.split('::')
new_line = '\t'.join(arr)
ratings_csv.write(new_line)
ratings_dat.close()
ratings_csv.close()
|
Convert data delimiter from :: to tab character.
|
Convert data delimiter from :: to tab character.
|
Python
|
mit
|
monsendag/goldfish,ntnu-smartmedia/goldfish,ntnu-smartmedia/goldfish,monsendag/goldfish,ntnu-smartmedia/goldfish,monsendag/goldfish
|
a71bd3f953b0363df82d1e44b0d6df6cbe4d449b
|
vocab.py
|
vocab.py
|
import fire
import json
import sys
from source import VocabularyCom
from airtable import Airtable
class CLI:
class source:
"""Import word lists from various sources"""
def vocabulary_com(self, list_url, pretty=False):
result = VocabularyCom().collect(list_url)
if pretty:
print json.dumps(result, indent=4, sort_keys=True)
else:
json.dump(result, sys.stdout)
class airtable:
"""Sync lists to Airtable"""
def load(self, list_url, endpoint, key):
airtable = Airtable(endpoint, key)
words = VocabularyCom().collect(list_url)
airtable.load(words)
print 'List loaded to Airtable.'
def load_from_stdin(self, endpoint, key):
words = json.load(sys.stdin)
airtable = Airtable(endpoint, key)
airtable.load(words)
print 'List loaded to Airtable.'
if __name__ == '__main__':
fire.Fire(CLI)
|
import fire
import json
import sys
from source import VocabularyCom
from airtable import Airtable
class CLI:
class source:
"""Import word lists from various sources"""
def vocabulary_com(self, list_url, pretty=False):
result = VocabularyCom().collect(list_url)
if pretty:
print json.dumps(result, indent=4, sort_keys=True)
else:
json.dump(result, sys.stdout)
class airtable:
"""Sync lists to Airtable"""
def load(self, list_url, endpoint, key):
words = VocabularyCom().collect(list_url)
self._load(words, endpoint, key)
def load_from_stdin(self, endpoint, key):
words = json.load(sys.stdin)
self._load(words, endpoint, key)
def _load(self, words, endpoint, key):
airtable = Airtable(endpoint, key)
airtable.load(words)
print 'Loaded %d terms to Airtable.' % len(words)
if __name__ == '__main__':
fire.Fire(CLI)
|
Print number of terms loaded to Airtable.
|
Print number of terms loaded to Airtable.
|
Python
|
mit
|
zqureshi/vocab
|
c354d130cb542c2a5d57e519ce49175daa597e9c
|
froide/accesstoken/apps.py
|
froide/accesstoken/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccessTokenConfig(AppConfig):
name = 'froide.accesstoken'
verbose_name = _('Secret Access Token')
def ready(self):
from froide.account import account_canceled
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import AccessToken
if user is None:
return
AccessToken.objects.filter(user=user).delete()
|
import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccessTokenConfig(AppConfig):
name = 'froide.accesstoken'
verbose_name = _('Secret Access Token')
def ready(self):
from froide.account import account_canceled
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import AccessToken
if user is None:
return
AccessToken.objects.filter(user=user).delete()
def export_user_data(user):
from .models import AccessToken
access_tokens = (
AccessToken.objects.filter(user=user)
)
if access_tokens:
yield ('access_tokens.json', json.dumps([
{
'purpose': a.purpose,
'timestamp': a.timestamp.isoformat(),
}
for a in access_tokens]).encode('utf-8')
)
|
Add user data export for accesstokens
|
Add user data export for accesstokens
|
Python
|
mit
|
fin/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide
|
9ad1929ee16a805acb9e8fbc57312466fdb1770e
|
cnxepub/tests/scripts/test_collated_single_html.py
|
cnxepub/tests/scripts/test_collated_single_html.py
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2016, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import mimetypes
import os.path
import tempfile
import unittest
try:
from unittest import mock
except ImportError:
import mock
from lxml import etree
from ...html_parsers import HTML_DOCUMENT_NAMESPACES
from ...testing import TEST_DATA_DIR, captured_output
class CollatedSingleHTMLTestCase(unittest.TestCase):
maxDiff = None
@property
def target(self):
from ...scripts.collated_single_html.main import main
return main
@property
def path_to_xhtml(self):
return os.path.join(TEST_DATA_DIR, 'desserts-single-page.xhtml')
def test_valid(self):
return_code = self.target([self.path_to_xhtml])
self.assertEqual(return_code, 0)
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2016, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import io
import mimetypes
import os.path
import sys
import tempfile
import unittest
from lxml import etree
from ...html_parsers import HTML_DOCUMENT_NAMESPACES
from ...testing import TEST_DATA_DIR, captured_output
class CollatedSingleHTMLTestCase(unittest.TestCase):
maxDiff = None
@property
def target(self):
from ...scripts.collated_single_html.main import main
return main
@property
def path_to_xhtml(self):
return os.path.join(TEST_DATA_DIR, 'desserts-single-page.xhtml')
def test_valid(self):
return_code = self.target([self.path_to_xhtml])
self.assertEqual(return_code, 0)
def test_valid_with_tree(self):
# Capture stdout
orig_stdout = sys.stdout
self.addCleanup(setattr, sys, 'stdout', orig_stdout)
stdout = sys.stdout = io.BytesIO()
return_code = self.target([self.path_to_xhtml, '-d'])
self.assertEqual(return_code, 0)
stdout.seek(0)
self.assertIn('Fruity', stdout.read())
|
Add a test for the validate-collated tree output
|
Add a test for the validate-collated tree output
|
Python
|
agpl-3.0
|
Connexions/cnx-epub,Connexions/cnx-epub,Connexions/cnx-epub
|
eba354cfaa96754b814daeb7fa453e538b07a879
|
krcurrency/utils.py
|
krcurrency/utils.py
|
""":mod:`krcurrency.utils` --- Helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from bs4 import BeautifulSoup as BS
import requests
__all__ = 'request',
def request(url, encoding='utf-8', parselib='lxml'):
"""url๋ก ์์ฒญํ ํ ๋๋ ค๋ฐ์ ๊ฐ์ BeautifulSoup ๊ฐ์ฒด๋ก ๋ณํํด์ ๋ฐํํฉ๋๋ค.
"""
r = requests.get(url)
if r.status_code != 200:
return None
soup = None
try:
soup = BS(r.text, parselib)
except Exception as e:
pass
return soup
|
""":mod:`krcurrency.utils` --- Helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from bs4 import BeautifulSoup as BS
import requests
__all__ = 'request', 'tofloat',
def request(url, encoding='utf-8', parselib='lxml'):
"""url๋ก ์์ฒญํ ํ ๋๋ ค๋ฐ์ ๊ฐ์ BeautifulSoup ๊ฐ์ฒด๋ก ๋ณํํด์ ๋ฐํํฉ๋๋ค.
"""
r = requests.get(url)
if r.status_code != 200:
return None
soup = None
try:
soup = BS(r.text, parselib)
except Exception as e:
pass
return soup
def tofloat(text):
transformed = None
try:
text = text.replace(',', '')
transformed = float(text)
except:
pass
return transformed
|
Add tofloat function that transforms from any float-based string into float
|
Add tofloat function that transforms from any float-based string into float
|
Python
|
mit
|
ssut/py-krcurrency
|
c1da1e8d15990efa7b30de241e3604bc824792dc
|
py101/introduction/__init__.py
|
py101/introduction/__init__.py
|
""""
Introduction Adventure
Author: igui
"""
import codecs
import io
import sys
import unittest
from story.adventures import AdventureVerificationError, BaseAdventure
from story.translation import gettext as _
class TestOutput(unittest.TestCase):
"Introduction Adventure test"
def __init__(self, sourcefile):
"Inits the test"
super(TestOutput, self).__init__()
self.sourcefile = sourcefile
def setUp(self):
self.__old_stdout = sys.stdout
sys.stdout = self.__mockstdout = io.StringIO()
def tearDown(self):
sys.stdout = self.__old_stdout
self.__mockstdout.close()
@staticmethod
def mock_print(stringy):
"Mock function"
pass
def runTest(self):
"Makes a simple test of the output"
raw_program = codecs.open(self.sourcefile).read()
code = compile(raw_program, self.sourcefile, 'exec', optimize=0)
exec(code)
self.assertEqual(
self.__mockstdout.getvalue().lower().strip(),
'hello world',
"Should have printed 'Hello World'"
)
class Adventure(BaseAdventure):
"Introduction Adventure"
title = _('Introduction')
@classmethod
def test(cls, sourcefile):
"Test against the provided file"
suite = unittest.TestSuite()
suite.addTest(TestOutput(sourcefile))
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
raise AdventureVerificationError()
|
""""
Introduction Adventure
Author: Ignacio Avas (iavas@sophilabs.com)
"""
import codecs
import io
import sys
import unittest
from story.adventures import AdventureVerificationError, BaseAdventure
from story.translation import gettext as _
class TestOutput(unittest.TestCase):
"Introduction Adventure test"
def __init__(self, sourcefile):
"Inits the test"
super(TestOutput, self).__init__()
self.sourcefile = sourcefile
def setUp(self):
self.__old_stdout = sys.stdout
sys.stdout = self.__mockstdout = io.StringIO()
def tearDown(self):
sys.stdout = self.__old_stdout
self.__mockstdout.close()
@staticmethod
def mock_print(stringy):
"Mock function"
pass
def runTest(self):
"Makes a simple test of the output"
raw_program = codecs.open(self.sourcefile).read()
code = compile(raw_program, self.sourcefile, 'exec', optimize=0)
exec(code)
self.assertEqual(
self.__mockstdout.getvalue().lower().strip(),
'hello world',
"Should have printed 'Hello World'"
)
class Adventure(BaseAdventure):
"Introduction Adventure"
title = _('Introduction')
@classmethod
def test(cls, sourcefile):
"Test against the provided file"
suite = unittest.TestSuite()
suite.addTest(TestOutput(sourcefile))
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
raise AdventureVerificationError()
|
Correct Author string in module
|
Correct Author string in module
|
Python
|
mit
|
sophilabs/py101
|
2651ddf1946ec489195ec9c3fb23e00e5735c79c
|
sites/cozylan/extension.py
|
sites/cozylan/extension.py
|
"""
Site-specific code extension
"""
from __future__ import annotations
from typing import Any
from flask import g
from byceps.services.seating import seat_service
from byceps.services.ticketing import ticket_service
def template_context_processor() -> dict[str, Any]:
"""Extend template context."""
if g.party_id is None:
return {}
sale_stats = ticket_service.get_ticket_sale_stats(g.party_id)
seat_utilization = seat_service.get_seat_utilization(g.party_id)
return {
'ticket_sale_stats': sale_stats,
'seat_utilization': seat_utilization,
}
|
"""
Site-specific code extension
"""
from __future__ import annotations
from typing import Any
from flask import g
from byceps.services.seating import seat_service
from byceps.services.ticketing import ticket_service
def template_context_processor() -> dict[str, Any]:
"""Extend template context."""
context = {}
if g.party_id is not None:
sale_stats = ticket_service.get_ticket_sale_stats(g.party_id)
seat_utilization = seat_service.get_seat_utilization(g.party_id)
context['ticket_sale_stats'] = sale_stats
context['seat_utilization'] = seat_utilization
return context
|
Restructure context assembly for CozyLAN site
|
Restructure context assembly for CozyLAN site
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
624ce97b011100cc1aac9446c7f1c8a97eae5f34
|
workshops/migrations/0040_add_country_to_online_events.py
|
workshops/migrations/0040_add_country_to_online_events.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
# Oceanic Pole of Inaccessibility coordinates:
# https://en.wikipedia.org/wiki/Pole_of_inaccessibility#Oceanic_pole_of_inaccessibility
latitude = -48.876667
longitude = -123.393333
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3', latitude=latitude, longitude=longitude,
venue='Internet')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
Migrate online events to the Pole of Inaccessibility lat/long
|
Migrate online events to the Pole of Inaccessibility lat/long
...and 'internet' as a venue.
|
Python
|
mit
|
pbanaszkiewicz/amy,vahtras/amy,swcarpentry/amy,swcarpentry/amy,wking/swc-amy,vahtras/amy,wking/swc-amy,wking/swc-amy,vahtras/amy,pbanaszkiewicz/amy,wking/swc-amy,pbanaszkiewicz/amy,swcarpentry/amy
|
9ad98b4bbed0c67f25576187996e7e1d534f6a90
|
mammoth/__init__.py
|
mammoth/__init__.py
|
from .results import Result
from . import docx, conversion, style_reader
def convert_to_html(fileobj):
return docx.read(fileobj).bind(lambda document:
conversion.convert_document_element_to_html(document, styles=_create_default_styles())
)
def _create_default_styles():
lines = filter(None, map(lambda line: line.strip(), _default_styles.split("\n")))
return map(style_reader.read_style, lines)
_default_styles = """
p:unordered-list(1) => ul > li:fresh
"""
|
from .results import Result
from . import docx, conversion, style_reader
def convert_to_html(fileobj):
return docx.read(fileobj).bind(lambda document:
conversion.convert_document_element_to_html(document, styles=_create_default_styles())
)
def _create_default_styles():
lines = filter(None, map(lambda line: line.strip(), _default_styles.split("\n")))
return map(style_reader.read_style, lines)
_default_styles = """
p.Heading1 => h1:fresh
p.Heading2 => h2:fresh
p.Heading3 => h3:fresh
p.Heading4 => h4:fresh
p:unordered-list(1) => ul > li:fresh
p:unordered-list(2) => ul|ol > li > ul > li:fresh
p:unordered-list(3) => ul|ol > li > ul|ol > li > ul > li:fresh
p:unordered-list(4) => ul|ol > li > ul|ol > li > ul|ol > li > ul > li:fresh
p:unordered-list(5) => ul|ol > li > ul|ol > li > ul|ol > li > ul|ol > li > ul > li:fresh
p:ordered-list(1) => ol > li:fresh
p:ordered-list(2) => ul|ol > li > ol > li:fresh
p:ordered-list(3) => ul|ol > li > ul|ol > li > ol > li:fresh
p:ordered-list(4) => ul|ol > li > ul|ol > li > ul|ol > li > ol > li:fresh
p:ordered-list(5) => ul|ol > li > ul|ol > li > ul|ol > li > ul|ol > li > ol > li:fresh
"""
|
Add full list of default styles
|
Add full list of default styles
|
Python
|
bsd-2-clause
|
mwilliamson/python-mammoth,JoshBarr/python-mammoth
|
afd6b5b29b60c59689e0a1be38a0483a7e4db312
|
miniraf/__init__.py
|
miniraf/__init__.py
|
import argparse
import astropy.io.fits as fits
import numpy as np
import calc
import combine
if __name__=="__main__":
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
args = argparser.parse_args()
print(args)
args.func(args)
|
import argparse
import calc
import combine
from combine import stack_fits_data
from calc import load_fits_data
def _argparse():
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
return argparser.parse_args()
def main():
args = _argparse()
args.func(args)
if __name__=="__main__":
main()
|
Create main() entry point for final script
|
Create main() entry point for final script
Signed-off-by: Lizhou Sha <d6acb26e253550574bc1141efa0eb5e6de15daeb@mit.edu>
|
Python
|
mit
|
vulpicastor/miniraf
|
60bb1425e94e15b59a05b485113cc68ed0146ac8
|
nbtutor/__init__.py
|
nbtutor/__init__.py
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
Create solutions directory if it does not exist
|
Create solutions directory if it does not exist
|
Python
|
bsd-2-clause
|
jorisvandenbossche/nbtutor,jorisvandenbossche/nbtutor
|
09195f50e328d3aee4cc60f0702d8605ea520eb3
|
tests/sentry/utils/models/tests.py
|
tests/sentry/utils/models/tests.py
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
|
Add missing assertion in test
|
Add missing assertion in test
|
Python
|
bsd-3-clause
|
NickPresta/sentry,jokey2k/sentry,1tush/sentry,zenefits/sentry,SilentCircle/sentry,wujuguang/sentry,ifduyue/sentry,Kryz/sentry,JamesMura/sentry,Natim/sentry,NickPresta/sentry,BuildingLink/sentry,rdio/sentry,BuildingLink/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,JackDanger/sentry,SilentCircle/sentry,ifduyue/sentry,beeftornado/sentry,hongliang5623/sentry,nicholasserra/sentry,NickPresta/sentry,camilonova/sentry,fotinakis/sentry,jokey2k/sentry,fuziontech/sentry,BuildingLink/sentry,korealerts1/sentry,looker/sentry,wong2/sentry,ifduyue/sentry,BayanGroup/sentry,gencer/sentry,beni55/sentry,mvaled/sentry,SilentCircle/sentry,wujuguang/sentry,imankulov/sentry,jean/sentry,JTCunning/sentry,1tush/sentry,looker/sentry,songyi199111/sentry,zenefits/sentry,jean/sentry,1tush/sentry,ngonzalvez/sentry,TedaLIEz/sentry,daevaorn/sentry,drcapulet/sentry,NickPresta/sentry,BayanGroup/sentry,BuildingLink/sentry,JackDanger/sentry,camilonova/sentry,vperron/sentry,beeftornado/sentry,gencer/sentry,kevinastone/sentry,nicholasserra/sentry,mitsuhiko/sentry,Natim/sentry,looker/sentry,mvaled/sentry,camilonova/sentry,ewdurbin/sentry,korealerts1/sentry,songyi199111/sentry,Kryz/sentry,alexm92/sentry,Natim/sentry,daevaorn/sentry,argonemyth/sentry,ifduyue/sentry,rdio/sentry,kevinastone/sentry,gencer/sentry,jean/sentry,daevaorn/sentry,argonemyth/sentry,ewdurbin/sentry,imankulov/sentry,wujuguang/sentry,fotinakis/sentry,argonemyth/sentry,imankulov/sentry,TedaLIEz/sentry,vperron/sentry,drcapulet/sentry,songyi199111/sentry,BayanGroup/sentry,daevaorn/sentry,kevinlondon/sentry,gg7/sentry,beni55/sentry,gencer/sentry,vperron/sentry,rdio/sentry,pauloschilling/sentry,mvaled/sentry,mvaled/sentry,llonchj/sentry,boneyao/sentry,Kryz/sentry,gg7/sentry,BuildingLink/sentry,jean/sentry,looker/sentry,pauloschilling/sentry,jean/sentry,mitsuhiko/sentry,zenefits/sentry,zenefits/sentry,alexm92/sentry,pauloschilling/sentry,alexm92/sentry,hongliang5623/sentry,mvaled/sentry,wong2/sentry,JTCunning/sentry,beeftornado/sentr
y,beni55/sentry,JamesMura/sentry,jokey2k/sentry,fuziontech/sentry,korealerts1/sentry,JamesMura/sentry,JTCunning/sentry,llonchj/sentry,JamesMura/sentry,JackDanger/sentry,drcapulet/sentry,fuziontech/sentry,kevinlondon/sentry,gg7/sentry,zenefits/sentry,boneyao/sentry,felixbuenemann/sentry,SilentCircle/sentry,kevinastone/sentry,TedaLIEz/sentry,looker/sentry,rdio/sentry,felixbuenemann/sentry,boneyao/sentry,ewdurbin/sentry,gencer/sentry,ngonzalvez/sentry,ifduyue/sentry,felixbuenemann/sentry,fotinakis/sentry,fotinakis/sentry,nicholasserra/sentry,hongliang5623/sentry,llonchj/sentry,wong2/sentry,kevinlondon/sentry
|
57d49b185d1daf0e6a27e0daee8960c2816615cc
|
alg_kruskal_minimum_spanning_tree.py
|
alg_kruskal_minimum_spanning_tree.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def kruskal():
"""Kruskal's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): TBD.
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def kruskal():
"""Kruskal's algorithm for minimum spanning tree
in weighted graph.
Time complexity for graph G(V, E):
O(|E|+|V|+|E|log(|V|)) = O(|E|log(|V|^2)) = O(|E|log(|V|)).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
|
Revise doc string and add time complexity
|
Revise doc string and add time complexity
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
c1dc5494c461677e15be52576c55585742ad4a7a
|
bluebottle/bb_follow/migrations/0003_auto_20180530_1621.py
|
bluebottle/bb_follow/migrations/0003_auto_20180530_1621.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-05-30 14:21
from __future__ import unicode_literals
from django.db import migrations
def fix_followers(apps, schema_editor):
Donation = apps.get_model('donations', 'Donation')
Follow = apps.get_model('bb_follow', 'Follow')
ContentType = apps.get_model('contenttypes', 'ContentType')
for donation in Donation.objects.\
filter(order__status__in=['success', 'pending']).\
exclude(order__order_type='recurring').all():
user = donation.order.user
followed_object = donation.project
content_type = ContentType.objects.get_for_model(followed_object)
# A Follow object should link the project to the user, not the
# donation and the user
if user and followed_object and user != followed_object.owner:
if not Follow.objects.filter(user=user, object_id=followed_object.id, content_type=content_type).count():
Follow.objects.create(user=user, object_id=followed_object.id, content_type=content_type)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('bb_follow', '0002_follow_user'),
]
operations = [
migrations.RunPython(fix_followers, dummy)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-05-30 14:21
from __future__ import unicode_literals
from django.db import migrations
def fix_followers(apps, schema_editor):
Donation = apps.get_model('donations', 'Donation')
Follow = apps.get_model('bb_follow', 'Follow')
ContentType = apps.get_model('contenttypes', 'ContentType')
for donation in Donation.objects.\
filter(order__status__in=['success', 'pending']).\
exclude(order__order_type='recurring').all():
user = donation.order.user
followed_object = donation.project
content_type = ContentType.objects.get_for_model(followed_object)
# A Follow object should link the project to the user, not the
# donation and the user
if user and followed_object and user != followed_object.owner:
if not Follow.objects.filter(user=user, object_id=followed_object.id, content_type=content_type).count():
Follow.objects.create(user=user, object_id=followed_object.id, content_type=content_type)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('bb_follow', '0002_follow_user'),
('donations', '0008_auto_20170927_1021')
]
operations = [
migrations.RunPython(fix_followers, dummy)
]
|
Add donation migration dependancy in bb_follow
|
Add donation migration dependancy in bb_follow
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
1c5a4afa06f56ca8fd7c36b633b7f73d259f1281
|
lib/filesystem/__init__.py
|
lib/filesystem/__init__.py
|
import os
__author__ = 'mfliri'
def create_directory(output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
|
import os
def create_directory(output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
|
Remove author notice and add newline at end of file
|
Remove author notice and add newline at end of file
|
Python
|
mit
|
alphagov/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer
|
96a6b929d80bd5ad8a7bf5d09955b3e45e5bbe56
|
test/test_Spectrum.py
|
test/test_Spectrum.py
|
#!/usr/bin/env python
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
from hypothesis import given
import hypothesis.strategies as st
@given(st.lists(st.floats()), st.lists(st.floats()), st.booleans())
def test_spectrum_assigns_hypothesis_data(y, x, z):
spec = Spectrum.Spectrum(y, x, z)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
calib_val = 0
spec = Spectrum.Spectrum(y, x, calibrated=calib_val)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == calib_val
|
#!/usr/bin/env python
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
from hypothesis import given
import hypothesis.strategies as st
@given(st.lists(st.floats()), st.lists(st.floats()), st.booleans())
def test_spectrum_assigns_hypothesis_data(y, x, z):
spec = Spectrum.Spectrum(y, x, z)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
calib_val = 0
spec = Spectrum.Spectrum(y, x, calibrated=calib_val)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == calib_val
@given(st.lists(st.floats()), st.lists(st.floats()), st.booleans(), st.floats(), st.floats())
def test_wav_select(y, x, calib, wav_min, wav_max):
# Create specturm
spec = Spectrum.Spectrum(y, xaxis=x, calibrated=calib)
# Select wavelength values
spec.wav_select(wav_min, wav_max)
# All values in selected spectrum should be less than the max and greater than the min value.
if isinstance(spec.xaxis, list):
assert all([xval >= wav_min for xval in spec.xaxis])
assert all([xval <= wav_max for xval in spec.xaxis])
else:
assert all(spec.xaxis >= wav_min)
assert all(spec.xaxis <= wav_max)
##Also need to test asignment!
# spec2 = spec.wav_selector()
|
Test property of wavelength selection
|
Test property of wavelength selection
That afterwards the values are all above and below the min and max
values used.
|
Python
|
mit
|
jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload
|
c61e595098cd4b03828a81db98fb1e2b91b2eec0
|
anna/model/utils.py
|
anna/model/utils.py
|
import tensorflow as tf
def rnn_cell(num_units, dropout, mode, residual=False, name=None, reuse=None):
dropout = dropout if mode == tf.contrib.learn.ModeKeys.TRAIN else 0.0
cell = tf.nn.rnn_cell.GRUCell(num_units, name=name, reuse=reuse)
if dropout > 0.0:
keep_prop = (1.0 - dropout)
cell = tf.nn.rnn_cell.DropoutWrapper(
cell=cell,
input_keep_prob=keep_prop,
output_keep_prob=keep_prop,
state_keep_prob=keep_prop
)
if residual:
cell = tf.nn.rnn_cell.ResidualWrapper(cell)
return cell
|
import tensorflow as tf
def rnn_cell(num_units, dropout, mode, residual=False, name=None, reuse=None):
dropout = dropout if mode == tf.contrib.learn.ModeKeys.TRAIN else 0.0
cell = tf.nn.rnn_cell.GRUCell(num_units, name=name, reuse=reuse)
if dropout > 0.0:
keep_prop = (1.0 - dropout)
cell = tf.nn.rnn_cell.DropoutWrapper(
cell=cell,
input_keep_prob=keep_prop,
)
if residual:
cell = tf.nn.rnn_cell.ResidualWrapper(cell)
return cell
|
Remove dropout from output/state in rnn cells
|
Remove dropout from output/state in rnn cells
|
Python
|
mit
|
jpbottaro/anna
|
66946f72d243f1836df0dbd8917f204011ec1701
|
hs_core/autocomplete_light_registry.py
|
hs_core/autocomplete_light_registry.py
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = ""
if choice.first_name:
label += choice.first_name
if choice.last_name:
if choice.first_name:
label += " "
label += choice.last_name
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = " ".join([choice.first_name or "", choice.userprofile.middle_name or "", choice.last_name or ""])
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
Add middle name display to autocomplete widget
|
Add middle name display to autocomplete widget
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
f5d36900f7b0503a60a526fd70b57ecb91625fa0
|
armstrong/core/arm_sections/views.py
|
armstrong/core/arm_sections/views.py
|
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self):
return self.get_section()
def get_section(self):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return self.get_section(queryset=queryset)
def get_section(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
return get_object_or_404(queryset, full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
Handle queryset argument to get_object
|
Handle queryset argument to get_object
|
Python
|
apache-2.0
|
texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections
|
7e766747dbda4548b63b278e062335c8a10fe008
|
src/vimapt/library/vimapt/data_format/yaml.py
|
src/vimapt/library/vimapt/data_format/yaml.py
|
from pureyaml import dump as dumps
from pureyaml import load as loads
__all__ = ['dumps', 'loads']
|
from __future__ import absolute_import
import functools
from yaml import dump, Dumper, load, Loader
dumps = functools.partial(dump, Dumper=Dumper)
loads = functools.partial(load, Loader=Loader)
__all__ = ['dumps', 'loads']
|
Use PyYAML as YAML's loader and dumper
|
Use PyYAML as YAML's loader and dumper
|
Python
|
mit
|
howl-anderson/vimapt,howl-anderson/vimapt
|
f600ec497a6ff20c4cd8c983e27482fc77ab4deb
|
moksha/api/hub/consumer.py
|
moksha/api/hub/consumer.py
|
"""
Consumers
=========
A `Consumer` is a simple consumer of messages. Based on a given `routing_key`,
your consumer's :meth:`consume` method will be called with the message.
Example consumers:
-tapping into a koji build, and sending a notification?
- hook into a given RSS feed and save data in a DB?
Adding a new consumer
---------------------
Adding a new Consumer to Moksha is as easy as adding it to the `[moksha.consumer]`
entry-point in your `setup.py` file::
[moksha.consumer]
myconsumer = myproject.module:MyConsumer
"""
class Consumer(object):
queue = None
def consume(self, message):
raise NotImplementedError
|
# This file is part of Moksha.
#
# Moksha is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Moksha is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Moksha. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2008, Red Hat, Inc.
# Authors: Luke Macken <lmacken@redhat.com>
class Consumer(object):
""" A message consumer """
topic = None
def consume(self, message):
raise NotImplementedError
|
Update our message Consumer api to consume a `topic`, not a `queue`.
|
Update our message Consumer api to consume a `topic`, not a `queue`.
|
Python
|
apache-2.0
|
lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,lmacken/moksha,pombredanne/moksha,mokshaproject/moksha,ralphbean/moksha,mokshaproject/moksha,mokshaproject/moksha,ralphbean/moksha,ralphbean/moksha,pombredanne/moksha,lmacken/moksha
|
357a445021bd459cc0196269033ea181594a1456
|
UliEngineering/Physics/NTC.py
|
UliEngineering/Physics/NTC.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Utilities regarding NTC thermistors
See http://www.vishay.com/docs/29053/ntcintro.pdf for details
"""
from UliEngineering.Physics.Temperature import zero_point_celsius, normalize_temperature
from UliEngineering.EngineerIO import normalize_numeric
from UliEngineering.Units import Unit
import numpy as np
__all__ = ["ntc_resistance"]
def ntc_resistance(r25, b25, t) -> Unit("โฆ"):
"""
Compute the NTC resistance by temperature and NTC parameters
Parameters
----------
r25 : float or EngineerIO string
The NTC resistance at 25ยฐC, sometimes also called "nominal resistance"
b25: float or EngineerIO string
The NTC b-constant (e.g. b25/50, b25/85 or b25/100)
t : temperature
The temperature. Will be interpreted using normalize_temperature()
"""
# Normalize inputs
r25 = normalize_numeric(r25)
b25 = normalize_numeric(b25)
t = normalize_temperature(t) # t is now in Kelvins
# Compute resistance
return r25 * np.exp(b25 * (1./t - 1./(25. + zero_point_celsius)))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Utilities regarding NTC thermistors
See http://www.vishay.com/docs/29053/ntcintro.pdf for details
"""
from UliEngineering.Physics.Temperature import normalize_temperature
from UliEngineering.EngineerIO import normalize_numeric
from UliEngineering.Units import Unit
import numpy as np
from scipy.constants import zero_Celsius
__all__ = ["ntc_resistance"]
def ntc_resistance(r25, b25, t) -> Unit("โฆ"):
"""
Compute the NTC resistance by temperature and NTC parameters
Parameters
----------
r25 : float or EngineerIO string
The NTC resistance at 25ยฐC, sometimes also called "nominal resistance"
b25: float or EngineerIO string
The NTC b-constant (e.g. b25/50, b25/85 or b25/100)
t : temperature
The temperature. Will be interpreted using normalize_temperature()
"""
# Normalize inputs
r25 = normalize_numeric(r25)
b25 = normalize_numeric(b25)
t = normalize_temperature(t) # t is now in Kelvins
# Compute resistance
return r25 * np.exp(b25 * (1./t - 1./(25. + zero_Celsius)))
|
Fix build error due to replacing zero_point_celsius by scipy equivalent
|
Fix build error due to replacing zero_point_celsius by scipy equivalent
|
Python
|
apache-2.0
|
ulikoehler/UliEngineering
|
cddb0ae5c9c2d96c5902943f8b341ab2b698235f
|
paveldedik/forms.py
|
paveldedik/forms.py
|
# -*- coding: utf-8 -*-
from flask.ext.mongoengine.wtf import model_form
from paveldedik.models import User, Post
post_args = {
'title': {'label': u'Title'},
'leading': {'label': u'Leading'},
'content': {'label': u'Content'},
}
UserForm = model_form(User)
PostForm = model_form(Post, field_args=post_args)
|
# -*- coding: utf-8 -*-
from flask.ext.mongoengine.wtf import model_form
from paveldedik.models import User, Post
#: Model the user form. Additional field arguments can be included using
#: the key-word argument ``field_args``. For more information about using
#: WTForms follow `this link<http://flask.pocoo.org/snippets/60/>`_.
UserForm = model_form(User)
#: Model the post form. The attribute ``post_is`` must be excluded so that
#: the field is not required during form validation and it is not rewritten
#: when calling `populate_obj` on the :class:`models.Post` instance.
PostForm = model_form(Post, exclude=['post_id'])
|
Exclude post_id from the wtform.
|
Exclude post_id from the wtform.
|
Python
|
mit
|
paveldedik/blog,paveldedik/blog
|
bbff08c49df269ad24851d4264fd3f1dbd141358
|
test/contrib/test_securetransport.py
|
test/contrib/test_securetransport.py
|
# -*- coding: utf-8 -*-
import contextlib
import socket
import ssl
import pytest
try:
from urllib3.contrib.securetransport import WrappedSocket
except ImportError:
pass
def setup_module():
try:
from urllib3.contrib.securetransport import inject_into_urllib3
inject_into_urllib3()
except ImportError as e:
pytest.skip('Could not import SecureTransport: %r' % e)
def teardown_module():
try:
from urllib3.contrib.securetransport import extract_from_urllib3
extract_from_urllib3()
except ImportError:
pass
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1 # noqa: F401
from ..with_dummyserver.test_socketlevel import ( # noqa: F401
TestSNI, TestSocketClosing, TestClientCerts
)
def test_no_crash_with_empty_trust_bundle():
with contextlib.closing(socket.socket()) as s:
ws = WrappedSocket(s)
with pytest.raises(ssl.SSLError):
ws._custom_validate(True, b"")
|
# -*- coding: utf-8 -*-
import contextlib
import socket
import ssl
import pytest
try:
from urllib3.contrib.securetransport import WrappedSocket
except ImportError:
pass
def setup_module():
try:
from urllib3.contrib.securetransport import inject_into_urllib3
inject_into_urllib3()
except ImportError as e:
pytest.skip('Could not import SecureTransport: %r' % e)
def teardown_module():
try:
from urllib3.contrib.securetransport import extract_from_urllib3
extract_from_urllib3()
except ImportError:
pass
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1 # noqa: F401
from ..with_dummyserver.test_socketlevel import ( # noqa: F401
TestSNI, TestSocketClosing, TestClientCerts
)
def test_no_crash_with_empty_trust_bundle():
with contextlib.closing(socket.socket()) as s:
ws = WrappedSocket(s)
with pytest.raises(ssl.SSLError):
ws._custom_validate(True, b"")
|
Fix whitespace issue in SecureTransport test
|
Fix whitespace issue in SecureTransport test
|
Python
|
mit
|
urllib3/urllib3,sigmavirus24/urllib3,sigmavirus24/urllib3,urllib3/urllib3
|
141b46d1f5178df7e110aee7b2b50ce6f5b44b7a
|
contrib/python-copper/t/test_wsgi.py
|
contrib/python-copper/t/test_wsgi.py
|
# -*- coding: utf-8 -*-
from copper.wsgi_support import wsgi
def test_http_handler(copper_client, copper_http_client):
def application(environ, start_response):
message = 'Hello, %s!' % (environ['PATH_INFO'],)
start_response('200 OK', [
('Content-Type', 'text/plain; charset=UTF-8'),
('Content-Length', '%d' % len(message)),
])
return [message]
with copper_client.publish('http:/hello/', wsgi(application)):
result = copper_http_client.open('copper:///hello/world').read()
assert result == 'Hello, /hello/world!'
result = copper_http_client.open('copper:///hello/foobar').read()
assert result == 'Hello, /hello/foobar!'
res = copper_http_client.open('copper:///hello')
assert res.code == 404
res = copper_http_client.open('copper:///foobar')
assert res.code == 404
|
# -*- coding: utf-8 -*-
from copper.wsgi_support import wsgi
def test_http_handler(copper_client, copper_http_client):
def application(environ, start_response):
message = 'Hello, %s!' % (environ['PATH_INFO'],)
start_response('200 OK', [
('Content-Type', 'text/plain; charset=UTF-8'),
('Content-Length', '%d' % len(message)),
])
return [message]
with copper_client.publish('http:hello', wsgi(application)):
result = copper_http_client.open('copper:///hello/world').read()
assert result == 'Hello, /world!'
result = copper_http_client.open('copper:///hello/foobar').read()
assert result == 'Hello, /foobar!'
res = copper_http_client.open('copper:///hello')
assert res.code == 404
res = copper_http_client.open('copper:///foobar')
assert res.code == 404
|
Fix python tests re: http routing
|
Fix python tests re: http routing
|
Python
|
mit
|
snaury/copper,snaury/copper,snaury/copper
|
4bc55a6b1bdef357acd24e6aba34a57f689e9da0
|
bokeh/command/subcommands/__init__.py
|
bokeh/command/subcommands/__init__.py
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
return results
all = _collect()
del _collect
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
results = sorted(results, key=lambda attr: attr.name)
return results
all = _collect()
del _collect
|
Sort subcommands.all so the tested results are deterministic
|
Sort subcommands.all so the tested results are deterministic
|
Python
|
bsd-3-clause
|
phobson/bokeh,clairetang6/bokeh,aiguofer/bokeh,jakirkham/bokeh,msarahan/bokeh,mindriot101/bokeh,philippjfr/bokeh,schoolie/bokeh,stonebig/bokeh,azjps/bokeh,percyfal/bokeh,bokeh/bokeh,draperjames/bokeh,percyfal/bokeh,draperjames/bokeh,msarahan/bokeh,ptitjano/bokeh,msarahan/bokeh,quasiben/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,bokeh/bokeh,justacec/bokeh,dennisobrien/bokeh,ericmjl/bokeh,philippjfr/bokeh,dennisobrien/bokeh,aavanian/bokeh,azjps/bokeh,clairetang6/bokeh,ptitjano/bokeh,ericmjl/bokeh,azjps/bokeh,timsnyder/bokeh,timsnyder/bokeh,rs2/bokeh,mindriot101/bokeh,clairetang6/bokeh,jakirkham/bokeh,DuCorey/bokeh,bokeh/bokeh,azjps/bokeh,mindriot101/bokeh,aavanian/bokeh,aavanian/bokeh,azjps/bokeh,bokeh/bokeh,clairetang6/bokeh,aavanian/bokeh,philippjfr/bokeh,justacec/bokeh,aiguofer/bokeh,mindriot101/bokeh,schoolie/bokeh,draperjames/bokeh,phobson/bokeh,DuCorey/bokeh,aiguofer/bokeh,philippjfr/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,rs2/bokeh,stonebig/bokeh,ericmjl/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,jakirkham/bokeh,DuCorey/bokeh,jakirkham/bokeh,ptitjano/bokeh,quasiben/bokeh,percyfal/bokeh,percyfal/bokeh,justacec/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,rs2/bokeh,phobson/bokeh,justacec/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,schoolie/bokeh,phobson/bokeh,rs2/bokeh,bokeh/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,ptitjano/bokeh,phobson/bokeh,dennisobrien/bokeh,philippjfr/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,msarahan/bokeh,aiguofer/bokeh,stonebig/bokeh,DuCorey/bokeh,draperjames/bokeh,jakirkham/bokeh,aavanian/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,timsnyder/bokeh,draperjames/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh
|
4fce2955ce76c1f886b2a234fe9d0c576843fefd
|
Dice.py
|
Dice.py
|
import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
class DiceBag(object):
def __init__(self):
self.dice = []
self.dice_roll = []
def add_die_obj(self, die_obj):
self.dice.append(die_obj)
def remove_die(self, die_obj):
self.dice.remove(die_obj)
def remove_die_index(self, index):
del self.dice[index]
def add_die_notation(self, standard_die_notation):
lst_notation = standard_die_notation.split("d")
i = 0
while (i<int(lst_notation[0])):
die1 = Die(int(lst_notation[1]))
self.dice.append(die1)
i = i +1
def roll_all(self):
for obj in self.dice:
obj.roll_die()
self.dice_roll.append(obj.get_die_face())
def get_dice_roll(self):
return self.dice_roll
|
import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
class DiceBag(object):
def __init__(self):
self.dice = []
self.dice_roll = []
def add_die_obj(self, die_obj):
self.dice.append(die_obj)
def remove_die(self, die_obj):
self.dice.remove(die_obj)
def remove_die_index(self, index):
del self.dice[index]
def add_die_notation(self, standard_die_notation):
lst_notation = standard_die_notation.split("d")
i = 0
while (i<int(lst_notation[0])):
die1 = Die(int(lst_notation[1]))
self.dice.append(die1)
i = i +1
def roll_all(self):
for obj in self.dice:
obj.roll_die()
self.dice_roll.append(obj.get_die_face())
def hold_all(self, held):
for obj in self.dice:
obj.change_held(held)
def get_dice_roll(self):
return self.dice_roll
|
Add hold all function to dicebag
|
Add hold all function to dicebag
|
Python
|
mit
|
achyutreddy24/DiceGame
|
628de346d3cf22342bf09e9ad3337a4408ed5662
|
properties/files.py
|
properties/files.py
|
from __future__ import absolute_import, unicode_literals, print_function, division
from builtins import open
from future import standard_library
standard_library.install_aliases()
import six
import json, numpy as np, os, io
from .base import Property
from . import exceptions
class File(Property):
mode = 'r' #: mode for opening the file.
def validator(self, instance, value):
if hasattr(value, 'read'):
prev = getattr(self, '_p_' + self.name, None)
if prev is not None and value is not prev:
prev.close()
return value
if isinstance(value, six.string_types) and os.path.isfile(value):
return open(value, self.mode)
raise ValueError('The value for "%s" must be an open file or a string.'%self.name)
class Image(File):
def validator(self, instance, value):
import png
if getattr(value, '__valid__', False):
return value
reader = png.Reader(value)
reader.validate_signature()
output = io.BytesIO()
output.name = 'texture.png'
output.__valid__ = True
if hasattr(value, 'read'):
fp = value
fp.seek(0)
else:
fp = open(value, 'rb')
output.write(fp.read())
output.seek(0)
fp.close()
return output
|
from __future__ import absolute_import, print_function, division
from builtins import open
from future import standard_library
standard_library.install_aliases()
import six
import json, numpy as np, os, io
from .base import Property
from . import exceptions
class File(Property):
mode = 'r' #: mode for opening the file.
def validator(self, instance, value):
if hasattr(value, 'read'):
prev = getattr(self, '_p_' + self.name, None)
if prev is not None and value is not prev:
prev.close()
return value
if isinstance(value, six.string_types) and os.path.isfile(value):
return open(value, self.mode)
raise ValueError('The value for "%s" must be an open file or a string.'%self.name)
class Image(File):
def validator(self, instance, value):
import png
if getattr(value, '__valid__', False):
return value
if hasattr(value, 'read'):
png.Reader(value).validate_signature()
else:
with open(value, 'rb') as v:
png.Reader(v).validate_signature()
output = io.BytesIO()
output.name = 'texture.png'
output.__valid__ = True
if hasattr(value, 'read'):
fp = value
fp.seek(0)
else:
fp = open(value, 'rb')
output.write(fp.read())
output.seek(0)
fp.close()
return output
|
Fix png for python 2/3 compatibility
|
Fix png for python 2/3 compatibility
|
Python
|
mit
|
aranzgeo/properties,3ptscience/properties
|
7fcccea5d7fdfb823d17f1db56f5ece42ef2fd8b
|
tools/bundle.py
|
tools/bundle.py
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
Stop using os.path.join, because Visual Studio can actually handle forward slash style paths, and the os.path method was creating mixed \\ and / style paths, b0rking everything.
|
Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.
|
Python
|
apache-2.0
|
kans/zirgo,kans/zirgo,kans/zirgo
|
dfd6793f16d0128b3d143d0f1ebc196bb79505c2
|
chnnlsdmo/chnnlsdmo/models.py
|
chnnlsdmo/chnnlsdmo/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Voter(models.Model):
'''
Models someone who may vote
'''
user = models.OneToOneField(User)
def __str__(self):
return self.user.username
class Flag(models.Model):
'''
Models a flag which may be voted on
'''
name = models.CharField(max_length=200)
designer = models.CharField(max_length=200)
image_url = models.URLField(max_length=1024)
def __str__(self):
return self.name
class Vote(models.Model):
'''
Models a single vote cast by a `Voter` for a `Flag`
'''
flag = models.ForeignKey(Flag, on_delete=models.CASCADE)
voter = models.ForeignKey(Voter, on_delete=models.CASCADE)
|
from django.db import models
from django.contrib.auth.models import User
class Voter(models.Model):
'''
Models someone who may vote
'''
user = models.OneToOneField(User)
def __str__(self):
return self.user.username
class Flag(models.Model):
'''
Models a flag which may be voted on
'''
name = models.CharField(max_length=200)
designer = models.CharField(max_length=200)
image_url = models.URLField(max_length=1024)
def __str__(self):
return self.name
class Vote(models.Model):
'''
Models a single vote cast by a `Voter` for a `Flag`
'''
flag = models.ForeignKey(Flag, on_delete=models.CASCADE)
voter = models.ForeignKey(Voter, on_delete=models.CASCADE)
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return "{0} voted for {1} at {2}".format( self.voter.user.username,
self.flag.name,
self.created )
|
Add date/time created timestamp to Vote model
|
Add date/time created timestamp to Vote model
|
Python
|
bsd-3-clause
|
shearichard/django-channels-demo,shearichard/django-channels-demo,shearichard/django-channels-demo
|
34d7a7ea41843ef4761804e973ec9ded1bb2a03b
|
cla_backend/apps/cla_butler/management/commands/reverthousekeeping.py
|
cla_backend/apps/cla_butler/management/commands/reverthousekeeping.py
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from cla_eventlog.models import Log
from cla_provider.models import Feedback
from complaints.models import Complaint
from diagnosis.models import DiagnosisTraversal
from legalaid.models import (
Case, EligibilityCheck, CaseNotesHistory, Person, Income, Savings,
Deductions, PersonalDetails, ThirdPartyDetails, AdaptationDetails,
CaseKnowledgebaseAssignment, EODDetails, EODDetailsCategory, Property
)
from timer.models import Timer
from ...qs_to_file import QuerysetToFile
MODELS = [
Deductions,
Income,
Savings,
Person,
AdaptationDetails,
PersonalDetails,
ThirdPartyDetails,
EligibilityCheck,
Property,
DiagnosisTraversal,
Case,
EODDetails,
EODDetailsCategory,
Complaint,
CaseKnowledgebaseAssignment,
Timer,
Feedback,
CaseNotesHistory,
Log,
LogEntry,
]
class Command(BaseCommand):
help = 'Attempts to re-load data that was deleted in the housekeeping'
def add_arguments(self, parser):
parser.add_argument('directory', nargs=1, type=str)
def handle(self, *args, **options):
d = args[0]
path = os.path.join(settings.TEMP_DIR, d)
filewriter = QuerysetToFile(path)
for model in MODELS:
self.stdout.write(model.__name__)
filewriter.load(model)
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from cla_eventlog.models import Log
from cla_provider.models import Feedback
from complaints.models import Complaint
from diagnosis.models import DiagnosisTraversal
from legalaid.models import (
Case, EligibilityCheck, CaseNotesHistory, Person, Income, Savings,
Deductions, PersonalDetails, ThirdPartyDetails, AdaptationDetails,
CaseKnowledgebaseAssignment, EODDetails, EODDetailsCategory, Property
)
from timer.models import Timer
from ...qs_to_file import QuerysetToFile
MODELS = [
Deductions,
Income,
Savings,
Person,
AdaptationDetails,
PersonalDetails,
ThirdPartyDetails,
EligibilityCheck,
Property,
DiagnosisTraversal,
Case,
EODDetails,
EODDetailsCategory,
Complaint,
CaseKnowledgebaseAssignment,
Timer,
Feedback,
CaseNotesHistory,
Log,
LogEntry,
]
class Command(BaseCommand):
help = 'Attempts to re-load data that was deleted in the housekeeping'
def add_arguments(self, parser):
parser.add_argument('directory', nargs=1)
def handle(self, *args, **options):
path = os.path.join(settings.TEMP_DIR, args[0])
filewriter = QuerysetToFile(path)
for model in MODELS:
self.stdout.write(model.__name__)
filewriter.load(model)
|
Refactor args in manage task
|
Refactor args in manage task
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
f7777c858baf049af83bd39168d0640e4dedf29c
|
main.py
|
main.py
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
print(os.environ)
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
body = os.environ["WELCOME_MESSAGE"].format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=body, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
raise Exception(str(os.environ))
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
body = os.environ["WELCOME_MESSAGE"].format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=body, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
Change debug message to exception
|
Change debug message to exception
|
Python
|
mit
|
ollien/Slack-Welcome-Bot
|
f5cc3275a11c809bb6f5ab097414d0a5ccda2341
|
main.py
|
main.py
|
def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
print("%s, %s" % (website, url))
if __name__ == '__main__':
main()
|
def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
if ".com" not in url:
print("Invalid url")
exit()
print("%s, %s" % (website, url))
if __name__ == '__main__':
main()
|
Check for .com in url
|
Check for .com in url
|
Python
|
mit
|
Alex-Gurung/ScrapeTheNews
|
e3e98b0533460837c4ea2eac67c4281eb0ba0012
|
test/requests/parametrized_test.py
|
test/requests/parametrized_test.py
|
import logging
import unittest
from wqflask import app
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = Elasticsearch([self.es_url])
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"test@user.com"}}})
|
import logging
import unittest
from wqflask import app
from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = get_elasticsearch_connection()
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
from time import sleep
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"test@user.com"}}})
sleep(1)
|
Use existing code. Delay after delete.
|
Use existing code. Delay after delete.
* Use existing code to get the elasticsearch connection. This should
prevent tests from failing in case the way connections to
elasticsearch are made change.
* Delay a while after deleting to allow elasticsearch to re-index the
data, thus preventing subtle bugs in the test.
|
Python
|
agpl-3.0
|
DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2
|
cdb4fa00328f3bc5852b9cae799d4d3ed99f1280
|
pyramid_authsanity/util.py
|
pyramid_authsanity/util.py
|
from pyramid.interfaces import (
ISessionFactory,
)
from .interfaces import (
IAuthService,
IAuthSourceService,
)
def int_or_none(x):
return int(x) if x is not None else x
def kw_from_settings(settings, from_prefix='authsanity.'):
return dict((k.replace(from_prefix, ''), v) for (k, v) in settings.items() if k.startswith(from_prefix))
def add_vary_callback(vary_by):
def vary_add(request, response):
vary = set(response.vary if response.vary is not None else [])
vary |= set(vary_by)
response.vary = list(vary)
return vary_add
def _find_services(request):
sourcesvc = request.find_service(IAuthSourceService)
authsvc = request.find_service(IAuthService)
return (sourcesvc, authsvc)
def _session_registered(request):
registry = request.registry
factory = registry.queryUtility(ISessionFactory)
return (False if factory is None else True)
|
from pyramid.interfaces import (
ISessionFactory,
)
from .interfaces import (
IAuthService,
IAuthSourceService,
)
def int_or_none(x):
return int(x) if x is not None else x
def kw_from_settings(settings, from_prefix='authsanity.'):
return { k.replace(from_prefix, ''): v for k, v in settings.items() if k.startswith(from_prefix) }
def add_vary_callback(vary_by):
def vary_add(request, response):
vary = set(response.vary if response.vary is not None else [])
vary |= set(vary_by)
response.vary = list(vary)
return vary_add
def _find_services(request):
sourcesvc = request.find_service(IAuthSourceService)
authsvc = request.find_service(IAuthService)
return (sourcesvc, authsvc)
def _session_registered(request):
registry = request.registry
factory = registry.queryUtility(ISessionFactory)
return (False if factory is None else True)
|
Revert "Py 2.6 support is back"
|
Revert "Py 2.6 support is back"
This reverts commit 463c1ab6a7f5a7909b967e0dfa0320a77e166b95.
|
Python
|
isc
|
usingnamespace/pyramid_authsanity
|
977c8cc25c3978931e0d908589232db1bcac5b3f
|
fitizen/body_weight_workout/views.py
|
fitizen/body_weight_workout/views.py
|
# from datetime import datetime
from django.views.generic import RedirectView
from django.core.urlresolvers import reverse_lazy
from .models import BodyWeightWorkout
from braces import views
# Create your views here.
class CreateWorkout(
views.LoginRequiredMixin,
views.MessageMixin,
RedirectView
):
url = reverse_lazy('home')
login_url = reverse_lazy('login')
def get(self, request, *args, **kwargs):
user = request.user
workout = BodyWeightWorkout(user=user)
workout.save()
self.messages.success("New workout created!")
return super(CreateWorkout, self).get(request, *args, **kwargs)
|
from datetime import datetime
from django.utils import timezone
from django.shortcuts import redirect
from django.views.generic import View
from django.core.urlresolvers import reverse_lazy
from .models import BodyWeightWorkout
from braces import views
# Create your views here.
class CreateWorkout(
views.LoginRequiredMixin,
views.MessageMixin,
View
):
url = reverse_lazy('home')
login_url = reverse_lazy('login')
def get(self, request, *args, **kwargs):
now = timezone.now()
recent_workout = list(BodyWeightWorkout.objects.filter(user=request.user.id).datetimes('created', 'day', order='DESC')[:1])
difference = (now - recent_workout[0])
# check to see if they already worked out today
if difference.days == 0:
self.messages.success("You already worked out today!")
return redirect('home')
else:
user = request.user
workout = BodyWeightWorkout(user=user)
workout.save()
self.messages.success("New workout created!")
return redirect('home')
|
Create Workout now checks to see if you worked out once today already, if so tells user they already worked out on that day. fixed error where redirect would not re-instantiate get request to createview
|
Create Workout now checks to see if you worked out once today already, if so tells user they already worked out on that day. fixed error where redirect would not re-instantiate get request to createview
|
Python
|
mit
|
johnshiver/fitizen,johnshiver/fitizen
|
6567120249b82477bcf0ef82554b057f93618e7e
|
tools/gyp/find_mac_gcc_version.py
|
tools/gyp/find_mac_gcc_version.py
|
#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
|
#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
|
Revert "Use clang on mac if XCode >= 4.5"
|
Revert "Use clang on mac if XCode >= 4.5"
We cannot build v8 after this change because clang reports a warning in
v8/src/parser.cc about an unused field (and we turn warnings into errors).
We can enable this change again after we update to a new v8 version (this seems
to be fixed in v3.17).
Review URL: https://codereview.chromium.org//14477015
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@21984 260f80e4-7a28-3924-810f-c04153c831b5
|
Python
|
bsd-3-clause
|
dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk
|
8218b398731e8d9093a91de9bb127e2e933fa6db
|
json_editor/admin.py
|
json_editor/admin.py
|
import json
import copy
from django import forms
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(forms.Widget):
template_name = 'django_json_editor/django_json_editor.html'
def __init__(self, schema, collapsed=True):
super().__init__()
self._schema = schema
self._collapsed = collapsed
def render(self, name, value, attrs=None):
if callable(self._schema):
schema = self._schema(self)
else:
schema = copy.copy(self._schema)
schema['title'] = ' '
schema['options'] = {'collapsed': int(self._collapsed)}
context = {
'name': name,
'schema': schema,
'data': json.loads(value),
}
return mark_safe(render_to_string(self.template_name, context))
class Media:
css = {'all': (
'django_json_editor/bootstrap/css/bootstrap.min.css',
'django_json_editor/fontawesome/css/font-awesome.min.css',
'django_json_editor/style.css',
)}
js = (
'django_json_editor/jquery/jquery.min.js',
'django_json_editor/bootstrap/js/bootstrap.min.js',
'django_json_editor/jsoneditor/jsoneditor.min.js',
)
|
import json
import copy
from django import forms
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(forms.Widget):
template_name = 'django_json_editor/django_json_editor.html'
def __init__(self, schema, collapsed=True):
super().__init__()
self._schema = schema
self._collapsed = collapsed
def render(self, name, value, attrs=None):
if callable(self._schema):
schema = self._schema(self)
else:
schema = copy.copy(self._schema)
schema['title'] = ' '
schema['options'] = {'collapsed': int(self._collapsed)}
context = {
'name': name,
'schema': schema,
'data': value,
}
return mark_safe(render_to_string(self.template_name, context))
class Media:
css = {'all': (
'django_json_editor/bootstrap/css/bootstrap.min.css',
'django_json_editor/fontawesome/css/font-awesome.min.css',
'django_json_editor/style.css',
)}
js = (
'django_json_editor/jquery/jquery.min.js',
'django_json_editor/bootstrap/js/bootstrap.min.js',
'django_json_editor/jsoneditor/jsoneditor.min.js',
)
|
Load value from json field as string.
|
Load value from json field as string.
|
Python
|
mit
|
abogushov/django-admin-json-editor,abogushov/django-admin-json-editor
|
414f6e9174b8c7b88866319af19a5e36fcec643d
|
kk/admin/__init__.py
|
kk/admin/__init__.py
|
from django.contrib import admin
from kk.models import Hearing, Label, Introduction, Scenario, Comment
admin.site.register(Label)
admin.site.register(Hearing)
admin.site.register(Introduction)
admin.site.register(Scenario)
admin.site.register(Comment)
|
from django.contrib import admin
from kk import models
### Inlines
class IntroductionInline(admin.StackedInline):
model = models.Introduction
extra = 0
exclude = ["id"]
class ScenarioInline(admin.StackedInline):
model = models.Scenario
extra = 0
exclude = ["id"]
class HearingImageInline(admin.StackedInline):
model = models.HearingImage
extra = 0
class IntroductionImageInline(admin.StackedInline):
model = models.IntroductionImage
extra = 0
class ScenarioImageInline(admin.StackedInline):
model = models.ScenarioImage
extra = 0
### Admins
class HearingAdmin(admin.ModelAdmin):
inlines = [HearingImageInline, IntroductionInline, ScenarioInline]
class IntroductionAdmin(admin.ModelAdmin):
inlines = [IntroductionImageInline]
class ScenarioAdmin(admin.ModelAdmin):
inlines = [ScenarioImageInline]
### Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
admin.site.register(models.Introduction, IntroductionAdmin)
admin.site.register(models.Scenario, ScenarioAdmin)
|
Make the admin a little bit more palatable
|
Make the admin a little bit more palatable
Refs #25
|
Python
|
mit
|
stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,vikoivun/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi
|
b0029cffae96e25611d7387e699774de4d9682d3
|
corehq/apps/es/tests/utils.py
|
corehq/apps/es/tests/utils.py
|
import json
from nose.plugins.attrib import attr
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
|
import json
from nose.plugins.attrib import attr
from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
@nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
|
Mark es_test decorator as nottest
|
Mark es_test decorator as nottest
Second try...
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
913ae38e48591000195166a93e18e96a82d1d222
|
lily/messaging/email/migrations/0013_fix_multple_default_templates.py
|
lily/messaging/email/migrations/0013_fix_multple_default_templates.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def fix_multiple_default_templates(apps, schema_editor):
# Some users have more than 1 default template.
# This shouldn't be possible, make sure is will be just 1.
User = apps.get_model('users', 'LilyUser')
DefaultEmailTemplate = apps.get_model('email', 'DefaultEmailTemplate')
print('\nFixing default template for the following users:')
for user in User.objects.all():
templates = DefaultEmailTemplate.objects.filter(user=user.pk).order_by('id')
if templates.count() > 1:
# User has more than one default template.
# Best guess would be that the user prefers the last set template to be the default.
# So remove all except the last one.
template_to_keep = templates.last()
templates.exclude(id=template_to_keep.id).delete()
print('%d:\t%s' % (user.pk, user.email))
class Migration(migrations.Migration):
dependencies = [
('email', '0012_auto_20160715_1423'),
]
operations = [
migrations.RunPython(fix_multiple_default_templates),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def fix_multiple_default_templates(apps, schema_editor):
# Some users have more than 1 default template.
# This shouldn't be possible, make sure is will be just 1.
User = apps.get_model('users', 'LilyUser')
DefaultEmailTemplate = apps.get_model('email', 'DefaultEmailTemplate')
for user in User.objects.all():
templates = DefaultEmailTemplate.objects.filter(user=user.pk).order_by('id')
if templates.count() > 1:
# User has more than one default template.
# Best guess would be that the user prefers the last set template to be the default.
# So remove all except the last one.
template_to_keep = templates.last()
templates.exclude(id=template_to_keep.id).delete()
class Migration(migrations.Migration):
dependencies = [
('email', '0012_auto_20160715_1423'),
]
operations = [
migrations.RunPython(fix_multiple_default_templates),
]
|
Remove print statements, not usefull anymore.
|
Remove print statements, not usefull anymore.
|
Python
|
agpl-3.0
|
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
|
517e22b331f63e80cb344e257789463627b44508
|
utilities/rename-random-number.py
|
utilities/rename-random-number.py
|
''' rename files in local directory with random integer names.
windows screen saver isn't very good at randomizing fotos shown.
Change file names regularly to provide more variety
'''
import os
import re
import random
random.seed()
new_names = set()
original_files = []
for entry in os.listdir():
if os.path.isfile(entry):
if re.match(".*jpg", entry):
original_files.append(entry)
for counter in range(0, len(original_files)):
new_value = random.randint(0,100000)
while new_value in new_names:
new_value = random.randint(0,100000)
new_names.add(new_value)
for of in original_files:
nf = str(new_names.pop()).zfill(6) + ".jpg"
try:
os.rename(of, nf)
except Exception as e:
print("{}: {}".format(of, e))
|
#! python3
''' rename files in local directory with random integer names.
windows screen saver isn't very good at randomizing fotos shown.
Change file names regularly to provide more variety
'''
import os
import re
import random
import time
random.seed()
new_names = set()
original_files = []
for entry in os.listdir():
if os.path.isfile(entry):
if re.match(".*jpg", entry):
original_files.append(entry)
for counter in range(0, len(original_files)):
new_value = random.randint(0,1000000000)
# Make sure the new names are unique
# -- note this is only the new set, the new name
# may still duplicate an old name. The set is
# to minimize this chance
while new_value in new_names:
new_value = random.randint(0,1000000000)
new_names.add(new_value)
for of in original_files:
nf = str(new_names.pop()).zfill(10) + ".jpg"
try:
os.rename(of, nf)
except Exception as e:
print("{}: {}".format(of, e))
time.sleep(5)
|
Increase namespace, sleep before cmd window closes
|
Increase namespace, sleep before cmd window closes
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
69ae2f5b825ae6a404d78120b60727b59dbbcbac
|
xos/model_policies/model_policy_ControllerSlice.py
|
xos/model_policies/model_policy_ControllerSlice.py
|
def handle(controller_slice):
from core.models import ControllerSlice, Slice
try:
my_status_code = int(controller_slice.backend_status[0])
try:
his_status_code = int(controller_slice.slice.backend_status[0])
except:
his_status_code = 0
if (my_status_code not in [0,his_status_code]):
controller_slice.slice.backend_status = controller_slice.backend_status
controller_slice.slice.save(update_fields = ['backend_status'])
except Exception,e:
print str(e)
pass
|
def handle(controller_slice):
from core.models import ControllerSlice, Slice
try:
my_status_code = int(controller_slice.backend_status[0])
try:
his_status_code = int(controller_slice.slice.backend_status[0])
except:
his_status_code = 0
fields = []
if (my_status_code not in [0,his_status_code]):
controller_slice.slice.backend_status = controller_slice.backend_status
fields+=['backend_status']
if (controller_slice.backend_register != controller_slice.slice.backend_register):
controller_slice.slice.backend_register = controller_slice.backend_register
fields+=['backend_register']
controller_slice.slice.save(update_fields = fields)
except Exception,e:
print str(e)
pass
|
Copy backend_register from ControllerSlice to Slice
|
Copy backend_register from ControllerSlice to Slice
|
Python
|
apache-2.0
|
jermowery/xos,xmaruto/mcord,xmaruto/mcord,jermowery/xos,cboling/xos,jermowery/xos,cboling/xos,cboling/xos,jermowery/xos,xmaruto/mcord,xmaruto/mcord,cboling/xos,cboling/xos
|
5d6d2a02963cadd9b0a5c148fb6906fa63148052
|
booster_bdd/features/environment.py
|
booster_bdd/features/environment.py
|
"""Module with code to be run before and after certain events during the testing."""
import os
from src.support import helpers
def before_all(_context):
"""Perform the setup before the first event."""
if not helpers.is_user_logged_in():
username = os.getenv("OSIO_USERNAME")
password = os.getenv("OSIO_PASSWORD")
assert username != ""
assert password != ""
print("Loggin user {} in...".format(username))
helpers.login_user(username, password)
|
"""Module with code to be run before and after certain events during the testing."""
import os
from src.support import helpers
def before_all(_context):
"""Perform the setup before the first event."""
if not helpers.is_user_logged_in():
username = os.getenv("OSIO_USERNAME")
password = os.getenv("OSIO_PASSWORD")
assert username is not None
assert password is not None
assert username != ""
assert password != ""
print("Loggin user {} in...".format(username))
helpers.login_user(username, password)
|
Check for env. variable existence
|
Check for env. variable existence
|
Python
|
apache-2.0
|
ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test
|
08113ee79785f394a1c5244cdb87bef9f7fc5ff3
|
catplot/__init__.py
|
catplot/__init__.py
|
__all__ = ['en_profile']
__version__ = '0.1.0'
|
__all__ = ['en_profile', 'functions', 'chem_parser']
__version__ = '0.1.0'
|
Add more modules to __all__
|
Add more modules to __all__
|
Python
|
mit
|
PytLab/catplot
|
338a6e8da75a5b950949638b1a810510419450e9
|
scripts/state_and_transition.py
|
scripts/state_and_transition.py
|
#!/usr/bin/env python
#
# Copyright 2017 Robot Garden, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# States and Transitions
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
|
#!/usr/bin/env python
#
# Copyright 2017 Robot Garden, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# States and Transitions
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
cleared_cone = ()
|
Add new state for driving away from cone
|
Add new state for driving away from cone
|
Python
|
apache-2.0
|
ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan
|
7f5392d2581e789917b8ba5352d821277d5de8ab
|
numpy/typing/_scalars.py
|
numpy/typing/_scalars.py
|
from typing import Union, Tuple, Any
import numpy as np
# NOTE: `_StrLike` and `_BytesLike` are pointless, as `np.str_` and `np.bytes_`
# are already subclasses of their builtin counterpart
_CharLike = Union[str, bytes]
_BoolLike = Union[bool, np.bool_]
_IntLike = Union[int, np.integer]
_FloatLike = Union[_IntLike, float, np.floating]
_ComplexLike = Union[_FloatLike, complex, np.complexfloating]
_NumberLike = Union[int, float, complex, np.number, np.bool_]
_ScalarLike = Union[
int,
float,
complex,
str,
bytes,
np.generic,
]
# `_VoidLike` is technically not a scalar, but it's close enough
_VoidLike = Union[Tuple[Any, ...], np.void]
|
from typing import Union, Tuple, Any
import numpy as np
# NOTE: `_StrLike` and `_BytesLike` are pointless, as `np.str_` and `np.bytes_`
# are already subclasses of their builtin counterpart
_CharLike = Union[str, bytes]
# The 6 `<X>Like` type-aliases below represent all scalars that can be
# coerced into `<X>` (with the casting rule `same_kind`)
_BoolLike = Union[bool, np.bool_]
_IntLike = Union[_BoolLike, int, np.integer]
_FloatLike = Union[_IntLike, float, np.floating]
_ComplexLike = Union[_FloatLike, complex, np.complexfloating]
_NumberLike = Union[int, float, complex, np.number, np.bool_]
_ScalarLike = Union[
int,
float,
complex,
str,
bytes,
np.generic,
]
# `_VoidLike` is technically not a scalar, but it's close enough
_VoidLike = Union[Tuple[Any, ...], np.void]
|
Add `_BoolLike` to the union defining `_IntLike`
|
ENH: Add `_BoolLike` to the union defining `_IntLike`
|
Python
|
bsd-3-clause
|
seberg/numpy,pdebuyl/numpy,mhvk/numpy,pbrod/numpy,madphysicist/numpy,endolith/numpy,mattip/numpy,simongibbons/numpy,numpy/numpy,endolith/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,madphysicist/numpy,mhvk/numpy,jakirkham/numpy,pdebuyl/numpy,simongibbons/numpy,charris/numpy,simongibbons/numpy,anntzer/numpy,charris/numpy,numpy/numpy,mhvk/numpy,anntzer/numpy,jakirkham/numpy,anntzer/numpy,mattip/numpy,charris/numpy,madphysicist/numpy,pbrod/numpy,pbrod/numpy,jakirkham/numpy,endolith/numpy,seberg/numpy,pbrod/numpy,mhvk/numpy,jakirkham/numpy,rgommers/numpy,charris/numpy,mattip/numpy,rgommers/numpy,simongibbons/numpy,seberg/numpy,numpy/numpy,seberg/numpy,pbrod/numpy,pdebuyl/numpy,mhvk/numpy,numpy/numpy,pdebuyl/numpy,rgommers/numpy,endolith/numpy,madphysicist/numpy,madphysicist/numpy,simongibbons/numpy,anntzer/numpy
|
239e759eed720f884e492e47b82e64f25fdc215f
|
core/views.py
|
core/views.py
|
# views.py
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
# Search
search_query = request.GET.get("q", None)
if search_query:
search_results = Page.objects.live().search(search_query)
# Log the query so Wagtail can suggest promoted results
Query.get(search_query).add_hit()
else:
search_results = Page.objects.none()
# Render template
return render(
request,
"core/search_results.html",
{
"search_query": search_query,
"search_results": search_results,
},
)
|
# views.py
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
# Search
search_query = request.GET.get("q", None)
page = request.GET.get("page", 1)
if search_query:
search_results = Page.objects.live().search(search_query)
# Log the query so Wagtail can suggest promoted results
Query.get(search_query).add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
# Render template
return render(
request,
"core/search_results.html",
{
"search_query": search_query,
"search_results": search_results,
},
)
|
Add pagination to the reinstated search view
|
Add pagination to the reinstated search view
|
Python
|
mit
|
springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail
|
f3d3c0ce81ba8717f5839b502e57d75ebbc1f6e7
|
meetuppizza/views.py
|
meetuppizza/views.py
|
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import render, redirect
from meetuppizza.forms import RegistrationForm
from meetup.models import Meetup
from meetup.services.meetup_service import MeetupService
def index(request):
meetups = Meetup.objects.all()
meetup_presenters = []
for meetup in meetups:
service = MeetupService(meetup)
meetup_presenters.append(service.get_decorated_meetup())
return render(request, 'index.html', {"meetups": meetup_presenters})
|
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import render, redirect
from meetuppizza.forms import RegistrationForm
from meetup.models import Meetup
from meetup.services.meetup_service import MeetupService
def index(request):
meetups = Meetup.objects.all()
meetup_presenters = [MeetupService(meetup).get_decorated_meetup() for meetup in meetups]
return render(request, 'index.html', {"meetups": meetup_presenters})
|
Use list comprehension to generate MeetupPresentor list in index view
|
Use list comprehension to generate MeetupPresentor list in index view
|
Python
|
mit
|
nicole-a-tesla/meetup.pizza,nicole-a-tesla/meetup.pizza
|
c6ef5bcac4d5daddac97ff30ff18645249928ac0
|
nap/engine.py
|
nap/engine.py
|
import json
try:
import msgpack
except ImportError:
pass
from decimal import Decimal
from datetime import date, datetime, time
class Engine(object):
# The list of content types we match
CONTENT_TYPES = []
def dumps(self, data): # pragma: no cover
'''How to serialiser an object'''
raise NotImplementedError
def loads(self, data): # pragma: no cover
'''How to deserialise a string'''
raise NotImplementedError
class JsonEngine(Engine):
CONTENT_TYPES = ['application/json',]
def dumps(self, data):
return json.dumps(data)
def loads(self, data):
return json.loads(data)
class MsgPackEngine(Engine):
CONTENT_TYPES = ['application/x-msgpack',]
def dumps(self, data):
return msgpack.dumps(data)
def loads(self, data):
return msgpack.loads(data)
|
import json
class Engine(object):
# The list of content types we match
CONTENT_TYPES = []
def dumps(self, data): # pragma: no cover
'''How to serialiser an object'''
raise NotImplementedError
def loads(self, data): # pragma: no cover
'''How to deserialise a string'''
raise NotImplementedError
class JsonEngine(Engine):
CONTENT_TYPES = ['application/json',]
def dumps(self, data):
return json.dumps(data)
def loads(self, data):
return json.loads(data)
try:
import msgpack
except ImportError:
pass
else:
class MsgPackEngine(Engine):
CONTENT_TYPES = ['application/x-msgpack',]
def dumps(self, data):
return msgpack.dumps(data)
def loads(self, data):
return msgpack.loads(data)
|
Remove unused imports Only define MsgPackEngine if we can import MsgPack
|
Remove unused imports
Only define MsgPackEngine if we can import MsgPack
|
Python
|
bsd-3-clause
|
MarkusH/django-nap,limbera/django-nap
|
ed69ace7f6065ec1b3dd2f2de3a0d5b56ac28366
|
climatemaps/data.py
|
climatemaps/data.py
|
import numpy
def import_climate_data():
ncols = 720
nrows = 360
digits = 5
with open('./data/cloud/ccld6190.dat') as filein:
lines = filein.readlines()
line_n = 0
grid_size = 0.50
xmin = 0.25
xmax = 360.25
ymin = -89.75
ymax = 90.25
lonrange = numpy.arange(xmin, xmax, grid_size)
latrange = numpy.arange(ymin, ymax, grid_size)
Z = numpy.zeros((int(latrange.shape[0]), int(lonrange.shape[0])))
print(len(lonrange))
print(len(latrange))
i = 0
for line in lines:
line_n += 1
if line_n < 3: # skip header
continue
if i >= nrows: # read one month
break
value = ''
values = []
counter = 1
j = 0
for char in line:
value += char
if counter % digits == 0:
Z[i][j] = float(value)
values.append(value)
value = ''
j += 1
counter += 1
i += 1
return latrange, lonrange, Z
|
import numpy
def import_climate_data():
ncols = 720
nrows = 360
digits = 5
monthnr = 3
with open('./data/cloud/ccld6190.dat', 'r') as filein:
lines = filein.readlines()
line_n = 0
grid_size = 0.50
xmin = 0.25
xmax = 360.25
ymin = -89.75
ymax = 90.25
lonrange = numpy.arange(xmin, xmax, grid_size)
latrange = numpy.arange(ymin, ymax, grid_size)
Z = numpy.zeros((int(latrange.shape[0]), int(lonrange.shape[0])))
print(len(lonrange))
print(len(latrange))
i = 0
rown = 0
for line in lines:
line_n += 1
if line_n < 3: # skip header
continue
if rown < (monthnr-1)*nrows or rown >= monthnr*nrows: # read one month
rown += 1
continue
value = ''
counter = 1
j = 0
for char in line:
value += char
if counter % digits == 0:
value = float(value)
if value < 0:
value = numpy.nan
Z[i][j] = value
value = ''
j += 1
counter += 1
i += 1
rown += 1
return latrange, lonrange, Z
|
Create argument to select month to import
|
Create argument to select month to import
|
Python
|
mit
|
bartromgens/climatemaps,bartromgens/climatemaps,bartromgens/climatemaps
|
c15174d9bd7728dd5d397e6de09291853e65ed4d
|
scripts/test_deployment.py
|
scripts/test_deployment.py
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"key": "iw", "lines": ["test", "deployment"]}
response = requests.post(f"{url}/api/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/api/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/api/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/api/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
Update tests for new API routes
|
Update tests for new API routes
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
057d7a95031ba8c51ae10ea1b742534fcb5e82a3
|
bidb/keys/tasks.py
|
bidb/keys/tasks.py
|
import celery
import subprocess
from bidb.utils.tempfile import TemporaryDirectory
from bidb.utils.subprocess import check_output2
from .models import Key
@celery.task(soft_time_limit=60)
def update_or_create_key(uid):
with TemporaryDirectory() as homedir:
try:
check_output2((
'gpg',
'--homedir', homedir,
'--keyserver', 'http://p80.pool.sks-keyservers.net/',
'--recv-keys', uid,
))
except subprocess.CalledProcessError as exc:
print "E: {}: {}".format(exc, exc.output)
return None, False
data = check_output2((
'gpg',
'--homedir', homedir,
'--with-colons',
'--fixed-list-mode',
'--fingerprint',
uid,
))
for line in data.splitlines():
if line.startswith('uid:'):
name = line.split(':')[9]
break
else:
raise ValueError("Could not parse name from key: {}".format(data))
return Key.objects.update_or_create(uid=uid, defaults={
'name': name,
})
@celery.task()
def refresh_all():
for x in Key.objects.all():
update_or_create_key.delay(x.uid)
|
import celery
import subprocess
from bidb.utils.tempfile import TemporaryDirectory
from bidb.utils.subprocess import check_output2
from .models import Key
@celery.task(soft_time_limit=60)
def update_or_create_key(uid):
with TemporaryDirectory() as homedir:
try:
check_output2((
'gpg',
'--homedir', homedir,
'--keyserver', 'pgpkeys.mit.edu',
'--recv-keys', uid,
))
except subprocess.CalledProcessError as exc:
print "E: {}: {}".format(exc, exc.output)
return None, False
data = check_output2((
'gpg',
'--homedir', homedir,
'--with-colons',
'--fixed-list-mode',
'--fingerprint',
uid,
))
for line in data.splitlines():
if line.startswith('uid:'):
name = line.split(':')[9]
break
else:
raise ValueError("Could not parse name from key: {}".format(data))
return Key.objects.update_or_create(uid=uid, defaults={
'name': name,
})
@celery.task()
def refresh_all():
for x in Key.objects.all():
update_or_create_key.delay(x.uid)
|
Use pgpkeys.mit.edu as our keyserver; seems to work.
|
Use pgpkeys.mit.edu as our keyserver; seems to work.
|
Python
|
agpl-3.0
|
lamby/buildinfo.debian.net,lamby/buildinfo.debian.net
|
63f40971f8bc4858b32b41595d14315d2261169f
|
proselint/checks/garner/mondegreens.py
|
proselint/checks/garner/mondegreens.py
|
# -*- coding: utf-8 -*-
"""Mondegreens.
---
layout: post
source: Garner's Modern American Usage
source_url: http://amzn.to/15wF76r
title: mondegreens
date: 2014-06-10 12:31:19
categories: writing
---
Points out preferred form.
"""
from tools import memoize, preferred_forms_check
@memoize
def check(text):
"""Suggest the preferred forms."""
err = "garner.mondegreens"
msg = "'{}' is the preferred form."
list = [
["a girl with colitis goes by", "a girl with kaleidascope eyes"],
["a partridge in a pear tree", "a part-red gingerbread tree"],
["attorney and not a republic", "attorney and notary public"],
["beck and call", "beckon call"],
["for all intents and purposes", "for all intensive purposes"],
["laid him on the green", "Lady Mondegreen"],
["Olive, the other reindeer", "all of the other reindeer"],
["to the manner born", "to the manor born"],
]
return preferred_forms_check(text, list, err, msg)
|
# -*- coding: utf-8 -*-
"""Mondegreens.
---
layout: post
source: Garner's Modern American Usage
source_url: http://amzn.to/15wF76r
title: mondegreens
date: 2014-06-10 12:31:19
categories: writing
---
Points out preferred form.
"""
from tools import memoize, preferred_forms_check
@memoize
def check(text):
"""Suggest the preferred forms."""
err = "garner.mondegreens"
msg = "'{}' is the preferred form."
list = [
["a girl with kaleidascope eyes", "a girl with colitis goes by"],
["a partridge in a pear tree", "a part-red gingerbread tree"],
["attorney and not a republic", "attorney and notary public"],
["beck and call", "beckon call"],
["for all intents and purposes", "for all intensive purposes"],
["laid him on the green", "Lady Mondegreen"],
["Olive, the other reindeer", "all of the other reindeer"],
["to the manner born", "to the manor born"],
]
return preferred_forms_check(text, list, err, msg)
|
Fix bug in mondegreen rule
|
Fix bug in mondegreen rule
(The correct versions should all be in the left column.)
|
Python
|
bsd-3-clause
|
jstewmon/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
abaa882aaa1b7e251d989d60391bd2e06801c2a2
|
py/desiUtil/install/most_recent_tag.py
|
py/desiUtil/install/most_recent_tag.py
|
# License information goes here
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
# License information goes here
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from distutils.version import StrictVersion as V
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0],
key=lambda x: V(x))[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
Add more careful version checks
|
Add more careful version checks
|
Python
|
bsd-3-clause
|
desihub/desiutil,desihub/desiutil
|
f60fe11653d71f278aa04e71a522a89fc86c284a
|
bse/api.py
|
bse/api.py
|
'''
Main interface to BSE functionality
'''
from . import io
def get_basis_set(name):
'''Reads a json basis set file given only the name
The path to the basis set file is taken to be the 'data' directory
in this project
'''
return io.read_table_basis_by_name(name)
def get_metadata(keys=None, key_filter=None):
if key_filter:
raise RuntimeError("key_filter not implemented")
avail_names = io.get_available_names()
metadata = {}
for n in avail_names:
bs = io.read_table_basis_by_name(n)
common_name = bs['basisSetName']
defined_elements = list(bs['basisSetElements'].keys())
function_types = set()
for e in bs['basisSetElements'].values():
for s in e['elementElectronShells']:
function_types.add(s['shellFunctionType'])
metadata[common_name] = {
'mangled_name': n,
'elements': defined_elements,
'functiontypes': list(function_types),
}
return metadata
|
'''
Main interface to BSE functionality
'''
from . import io
def get_basis_set(name):
'''Reads a json basis set file given only the name
The path to the basis set file is taken to be the 'data' directory
in this project
'''
return io.read_table_basis_by_name(name)
def get_metadata(keys=None, key_filter=None):
if key_filter:
raise RuntimeError("key_filter not implemented")
avail_names = io.get_available_names()
metadata = {}
for n in avail_names:
bs = io.read_table_basis_by_name(n)
displayname = bs['basisSetName']
defined_elements = list(bs['basisSetElements'].keys())
function_types = set()
for e in bs['basisSetElements'].values():
for s in e['elementElectronShells']:
function_types.add(s['shellFunctionType'])
metadata[n] = {
'displayname': displayname,
'elements': defined_elements,
'functiontypes': list(function_types),
}
return metadata
|
Switch which name is used as a metadata key
|
Switch which name is used as a metadata key
|
Python
|
bsd-3-clause
|
MOLSSI-BSE/basis_set_exchange
|
8d46e411b2e7091fc54c676665905da8ec6906f3
|
controllers/dotd.py
|
controllers/dotd.py
|
def form():
db.raw_log.uuid.default = uuid_generator()
db.raw_log.date.default = dbdate()
#don't display form items that are part of table, but not facing end user
db.raw_log.uuid.readable = db.raw_log.uuid.writable = False
db.raw_log.date.readable = db.raw_log.date.writable = False
form = SQLFORM(db.raw_log, showid=False, formstyle='divs').process()
if form.accepted:
redirect(URL('dotd', 'parsed', args=db.raw_log.uuid.default))
return dict(form=form)
def parsed():
if request.args:
uuid = request.args[0]
rows = db(db.raw_log.uuid==uuid).select()
if len(rows) == 0:
redirect(URL('form'))
for row in rows:
experience, obtained_items, proc_items, found_items, log_file, max_hit, hit_list=parser(row.data)
# hit_list=parser(row.data)
return locals()
else:
redirect(URL('form'))
|
def form():
db.raw_log.uuid.default = uuid_generator()
db.raw_log.date.default = dbdate()
#don't display form items that are part of table, but not facing end user
db.raw_log.uuid.readable = db.raw_log.uuid.writable = False
db.raw_log.date.readable = db.raw_log.date.writable = False
if form.accepted:
redirect(URL('dotd', 'parsed', args=db.raw_log.uuid.default))
return dict(form=form)
def parsed():
if request.args:
uuid = request.args[0]
rows = db(db.raw_log.uuid==uuid).select()
if len(rows) == 0:
redirect(URL('form'))
for row in rows:
experience, obtained_items, proc_items, found_items, log_file, max_hit, hit_list=parser(row.data)
# hit_list=parser(row.data)
return locals()
else:
redirect(URL('form'))
|
Remove selection of all raw_log rows, since it was used for debugging purposes only
|
Remove selection of all raw_log rows, since it was used for debugging
purposes only
|
Python
|
mit
|
tsunam/dotd_parser,tsunam/dotd_parser,tsunam/dotd_parser,tsunam/dotd_parser
|
47b52333a74aeeb0ec2d7184455f70aa07633e62
|
createGlyphsPDF.py
|
createGlyphsPDF.py
|
# Some configuration
page_format = 'A4'
newPage(page_format)
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
print 'Registered', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
def getProportionRatio(self):
print self.glyph.width
for g in CurrentFont():
glyph = RegisterGlyph(g)
|
# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
my_selection = CurrentFont() # May also be CurrentFont.selection or else
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
print 'Registered', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
def getProportionRatio(self):
print self.glyph.width
def createPage(self):
newPage(page_format)
for g in my_selection:
glyph = RegisterGlyph(g)
glyph.createPage()
|
Create Page for every glyph
|
Create Page for every glyph
|
Python
|
mit
|
AlphabetType/DrawBot-Scripts
|
10aaa22cbcbb844a4393ac9eae526c3e50c121ab
|
src/ggrc/migrations/versions/20131209164454_49c670c7d705_add_private_column_t.py
|
src/ggrc/migrations/versions/20131209164454_49c670c7d705_add_private_column_t.py
|
"""Add private column to programs table.
Revision ID: 49c670c7d705
Revises: a3afeab3302
Create Date: 2013-12-09 16:44:54.222398
"""
# revision identifiers, used by Alembic.
revision = '49c670c7d705'
down_revision = 'a3afeab3302'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'programs',
sa.Column('private', sa.Boolean(), default=False, nullable=False),
)
def downgrade():
op.drop_column('programs', 'private')
|
"""Add private column to programs table.
Revision ID: 49c670c7d705
Revises: a3afeab3302
Create Date: 2013-12-09 16:44:54.222398
"""
# revision identifiers, used by Alembic.
revision = '49c670c7d705'
down_revision = 'a3afeab3302'
from alembic import op
from sqlalchemy.sql import table, column
import sqlalchemy as sa
def upgrade():
op.add_column(
'programs',
sa.Column('private', sa.Boolean(), default=False, nullable=False),
)
programs_table = table('programs',
column('id', sa.Integer),
column('context_id', sa.Integer),
column('private', sa.Boolean),
)
op.execute(programs_table.update().values(private=True)\
.where(programs_table.c.context_id != None))
def downgrade():
op.drop_column('programs', 'private')
|
Make sure to properly set private for existing private programs.
|
Make sure to properly set private for existing private programs.
|
Python
|
apache-2.0
|
hyperNURb/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,vladan-m/ggrc-core
|
63af9aa63dac1b3601ab5bfee5fd29b5e3602389
|
bonfiremanager/models.py
|
bonfiremanager/models.py
|
from django.db import models
class Event(models.Model):
name = models.CharField(max_length=1024, unique=True)
slug = models.SlugField(max_length=1024)
def __str__(self):
return self.name
class TimeSlot(models.Model):
event = models.ForeignKey(Event)
bookable = models.BooleanField(default=True)
end = models.DateTimeField()
name = models.CharField(max_length=1024)
start = models.DateTimeField()
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Room(models.Model):
event = models.ForeignKey(Event)
directions = models.TextField()
name = models.CharField(max_length=1024)
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Talk(models.Model):
room = models.ForeignKey(Room, null=True, blank=True)
description = models.TextField()
slug = models.SlugField(max_length=1024)
timeslot = models.IntegerField(default=0)
title = models.CharField(max_length=1024, unique=True)
def __str__(self):
return "{0} in {1}".format(self.title, self.room)
|
from django.db import models
class Event(models.Model):
name = models.CharField(max_length=1024, unique=True)
slug = models.SlugField(max_length=1024)
def __str__(self):
return self.name
class TimeSlot(models.Model):
event = models.ForeignKey(Event)
bookable = models.BooleanField(default=True)
end = models.DateTimeField()
name = models.CharField(max_length=1024)
start = models.DateTimeField()
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Room(models.Model):
event = models.ForeignKey(Event)
directions = models.TextField()
name = models.CharField(max_length=1024)
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Talk(models.Model):
room = models.ForeignKey(Room, null=True, blank=True)
timeslot = models.ForeignKey(TimeSlot, null=True, blank=True)
description = models.TextField()
slug = models.SlugField(max_length=1024)
title = models.CharField(max_length=1024, unique=True)
def __str__(self):
return "{0} in {1}".format(self.title, self.room)
|
Make timeslot a FK on talk model
|
Make timeslot a FK on talk model
|
Python
|
agpl-3.0
|
yamatt/bonfiremanager
|
2fb0678363479c790e5a63de8b92a19de3ac2359
|
src/Camera.py
|
src/Camera.py
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
from traitsui.api import View, Label
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
# Default configuration panel
view = View(Label('No settings to configure'))
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
Add default view for camera
|
Add default view for camera
|
Python
|
mit
|
ptomato/Beams
|
0a05f423ad591454a25c515d811556d10e5fc99f
|
Browser.py
|
Browser.py
|
from Zeroconf import *
import socket
class MyListener(object):
def __init__(self):
self.r = Zeroconf()
pass
def removeService(self, zeroconf, type, name):
print "Service", name, "removed"
def addService(self, zeroconf, type, name):
print "Service", name, "added"
print "Type is", type
info = self.r.getServiceInfo(type, name)
print "Address is", str(socket.inet_ntoa(info.getAddress()))
print "Port is", info.getPort()
print "Weight is", info.getWeight()
print "Priority is", info.getPriority()
print "Server is", info.getServer()
print "Text is", info.getText()
print "Properties are", info.getProperties()
if __name__ == '__main__':
print "Multicast DNS Service Discovery for Python Browser test"
r = Zeroconf()
print "1. Testing browsing for a service..."
type = "_http._tcp.local."
listener = MyListener()
browser = ServiceBrowser(r, type, listener)
|
from Zeroconf import *
import socket
class MyListener(object):
def __init__(self):
self.r = Zeroconf()
pass
def removeService(self, zeroconf, type, name):
print "Service", name, "removed"
def addService(self, zeroconf, type, name):
print "Service", name, "added"
print "Type is", type
info = self.r.getServiceInfo(type, name)
if info:
print "Address is", str(socket.inet_ntoa(info.getAddress()))
print "Port is", info.getPort()
print "Weight is", info.getWeight()
print "Priority is", info.getPriority()
print "Server is", info.getServer()
print "Text is", info.getText()
print "Properties are", info.getProperties()
if __name__ == '__main__':
print "Multicast DNS Service Discovery for Python Browser test"
r = Zeroconf()
print "1. Testing browsing for a service..."
type = "_http._tcp.local."
listener = MyListener()
browser = ServiceBrowser(r, type, listener)
|
Allow for the failure of getServiceInfo(). Not sure why it's happening, though.
|
Allow for the failure of getServiceInfo(). Not sure why it's happening,
though.
|
Python
|
lgpl-2.1
|
jantman/python-zeroconf,decabyte/python-zeroconf,nameoftherose/python-zeroconf,balloob/python-zeroconf,AndreaCensi/python-zeroconf,giupo/python-zeroconf,jstasiak/python-zeroconf,wmcbrine/pyzeroconf,basilfx/python-zeroconf,daid/python-zeroconf,gbiddison/python-zeroconf
|
e155d7b96c5b834f4c062b93cbd564a5317905f1
|
tools/po2js.py
|
tools/po2js.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
|
Add the language code to the translated file
|
Add the language code to the translated file
|
Python
|
apache-2.0
|
operasoftware/dragonfly,operasoftware/dragonfly,operasoftware/dragonfly,operasoftware/dragonfly
|
afc658c6ae125042182976dd95af68881865a2da
|
handoverservice/handover_api/views.py
|
handoverservice/handover_api/views.py
|
from handover_api.models import User, Handover, Draft
from rest_framework import viewsets
from serializers import UserSerializer, HandoverSerializer, DraftSerializer
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
queryset = User.objects.all()
serializer_class = UserSerializer
class HandoverViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows handovers to be viewed or edited.
"""
queryset = Handover.objects.all()
serializer_class = HandoverSerializer
class DraftViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows drafts to be viewed or edited.
"""
queryset = Draft.objects.all()
serializer_class = DraftSerializer
|
from rest_framework import viewsets
from handover_api.models import User, Handover, Draft
from handover_api.serializers import UserSerializer, HandoverSerializer, DraftSerializer
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
queryset = User.objects.all()
serializer_class = UserSerializer
class HandoverViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows handovers to be viewed or edited.
"""
queryset = Handover.objects.all()
serializer_class = HandoverSerializer
class DraftViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows drafts to be viewed or edited.
"""
queryset = Draft.objects.all()
serializer_class = DraftSerializer
|
Update import of serializers for python3 compatibility
|
Update import of serializers for python3 compatibility
|
Python
|
mit
|
Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService
|
a50edf34659acb63f1fa6dda5494812fa1c4ff7d
|
models/ras_pathway/run_ras_pathway.py
|
models/ras_pathway/run_ras_pathway.py
|
import sys
import pickle
from indra import reach
from indra.assemblers import GraphAssembler
if len(sys.argv) < 2:
process_type = 'text'
else:
process_type = sys.argv[1]
if process_type == 'text':
txt = open('ras_pathway.txt', 'rt').read()
rp = reach.process_text(txt, offline=True)
st = rp.statements
elif process_type == 'json':
rp = reach.process_json_file('reach_output.json')
st = rp.statements
else:
st = pickle.load(open('statements.pkl', 'rb'))
for s in st:
print '%s\t%s' % (s, s.evidence[0].text)
graphpr = {'rankdir': 'TD'}
nodepr = {'fontsize': 12, 'shape': 'plaintext', 'margin': '0,0', 'pad': 0}
ga = GraphAssembler(st, graph_properties=graphpr, node_properties=nodepr)
ga.make_model()
ga.save_dot('ras_pathway.dot')
ga.save_pdf('ras_pathway.pdf')
|
import sys
import pickle
from indra import trips
from indra import reach
from indra.assemblers import GraphAssembler
def process_reach(txt, reread):
if reread:
rp = reach.process_text(txt, offline=True)
st = rp.statements
else:
rp = reach.process_json_file('reach_output.json')
st = rp.statements
for s in st:
print('%s\t%s' % (s, s.evidence[0].text))
return st
def process_trips(txt, reread):
if reread:
tp = trips.process_text(txt)
st = tp.statements
else:
tp = trips.process_xml(open('trips_output.xml', 'r').read())
st = tp.statements
for s in st:
print('%s\t%s' % (s, s.evidence[0].text))
return st
def draw_graph(stmts):
graphpr = {'rankdir': 'TD'}
nodepr = {'fontsize': 12, 'shape': 'plaintext', 'margin': '0,0', 'pad': 0}
ga = GraphAssembler(st, graph_properties=graphpr, node_properties=nodepr)
ga.make_model()
ga.save_dot('ras_pathway.dot')
ga.save_pdf('ras_pathway.pdf')
if __name__ == '__main__':
reread = True
txt = open('ras_pathway.txt', 'rt').read()
print('-----')
print(txt)
print('-----')
if len(sys.argv) < 2:
print('Reader not specified')
sys.exit()
reader = sys.argv[1]
if reader == 'reach':
print('Using REACH')
stmts = process_reach(txt, reread)
elif reader == 'trips':
print('Using TRIPS')
stmts = process_trips(txt, reread)
|
Add TRIPS reading option to RAS pathway map
|
Add TRIPS reading option to RAS pathway map
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,bgyori/indra,bgyori/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,bgyori/indra
|
c7825a2ec9be702b05c58118249fe13e7e231ecb
|
cheroot/test/conftest.py
|
cheroot/test/conftest.py
|
import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
yield from cheroot_server(cheroot.wsgi.Server)
@pytest.fixture(scope='module')
def native_server():
yield from cheroot_server(cheroot.server.HTTPServer)
|
import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
|
Drop `yield from` to keep compat w/ 2.7
|
Drop `yield from` to keep compat w/ 2.7
|
Python
|
bsd-3-clause
|
cherrypy/cheroot
|
2e3045ed1009a60fe6e236387cae68ddf63bb9b5
|
distarray/core/tests/test_distributed_array_protocol.py
|
distarray/core/tests/test_distributed_array_protocol.py
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_export_buffer(self):
"""See if we actually export a buffer."""
export_data = self.larr.__distarray__()
memoryview(export_data['buffer'])
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Test if `__distarray__()['buffer']` returns a buffer.
|
Test if `__distarray__()['buffer']` returns a buffer.
|
Python
|
bsd-3-clause
|
enthought/distarray,enthought/distarray,RaoUmer/distarray,RaoUmer/distarray
|
52443c468a446638171f45b080dcf62f73e62866
|
src/wirecloud_fiware/tests/selenium.py
|
src/wirecloud_fiware/tests/selenium.py
|
from wirecloudcommons.test import WirecloudSeleniumTestCase
__test__ = False
class FiWareSeleniumTestCase(WirecloudSeleniumTestCase):
tags = ('current',)
def test_add_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
def test_delete_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
self.delete_marketplace('fiware')
|
from wirecloudcommons.test import WirecloudSeleniumTestCase
__test__ = False
class FiWareSeleniumTestCase(WirecloudSeleniumTestCase):
def test_add_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
def test_delete_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
self.delete_marketplace('fiware')
|
Remove 'current' tag from FiWareSeleniumTestCase
|
Remove 'current' tag from FiWareSeleniumTestCase
|
Python
|
agpl-3.0
|
rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud
|
6fe2e1dfbce465fee8a12475b3bfcda3ea10594e
|
staticgen_demo/blog/staticgen_views.py
|
staticgen_demo/blog/staticgen_views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print response.status_code
print response.__dict__
if not response.status_code == 200:
pass
else:
try:
return response.context['paginator'], response.context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], response.context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
Add print statements to debug BlogPostListView
|
Add print statements to debug BlogPostListView
|
Python
|
bsd-3-clause
|
mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo
|
abfe0538769145ac83031062ce3b22d2622f18bf
|
opwen_email_server/utils/temporary.py
|
opwen_email_server/utils/temporary.py
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> str:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
from typing import Generator
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> Generator[str, None, None]:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
Fix type annotation for context manager
|
Fix type annotation for context manager
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
d154cd852bdb02743e9752179559a91b9f1a7f8c
|
example/tests/unit/test_renderer_class_methods.py
|
example/tests/unit/test_renderer_class_methods.py
|
from django.contrib.auth import get_user_model
from rest_framework_json_api import serializers
from rest_framework_json_api.renderers import JSONRenderer
class ResourceSerializer(serializers.ModelSerializer):
class Meta:
fields = ('username',)
model = get_user_model()
def test_build_json_resource_obj():
resource = {
'pk': 1,
'username': 'Alice',
}
serializer = ResourceSerializer(data={'username': 'Alice'})
serializer.is_valid()
resource_instance = serializer.save()
output = {
'type': 'user',
'id': '1',
'attributes': {
'username': 'Alice'
},
}
assert JSONRenderer.build_json_resource_obj(
serializer.fields, resource, resource_instance, 'user') == output
def test_extract_attributes():
fields = {
'id': serializers.Field(),
'username': serializers.Field(),
'deleted': serializers.ReadOnlyField(),
}
resource = {'id': 1, 'deleted': None, 'username': 'jerel'}
expected = {
'username': 'jerel',
'deleted': None
}
assert sorted(JSONRenderer.extract_attributes(fields, resource)) == sorted(expected), 'Regular fields should be extracted'
assert sorted(JSONRenderer.extract_attributes(fields, {})) == sorted(
{'username': ''}), 'Should not extract read_only fields on empty serializer'
|
from django.contrib.auth import get_user_model
from rest_framework_json_api import serializers
from rest_framework_json_api.renderers import JSONRenderer
pytestmark = pytest.mark.django_db
class ResourceSerializer(serializers.ModelSerializer):
class Meta:
fields = ('username',)
model = get_user_model()
def test_build_json_resource_obj():
resource = {
'pk': 1,
'username': 'Alice',
}
serializer = ResourceSerializer(data={'username': 'Alice'})
serializer.is_valid()
resource_instance = serializer.save()
output = {
'type': 'user',
'id': '1',
'attributes': {
'username': 'Alice'
},
}
assert JSONRenderer.build_json_resource_obj(
serializer.fields, resource, resource_instance, 'user') == output
def test_extract_attributes():
fields = {
'id': serializers.Field(),
'username': serializers.Field(),
'deleted': serializers.ReadOnlyField(),
}
resource = {'id': 1, 'deleted': None, 'username': 'jerel'}
expected = {
'username': 'jerel',
'deleted': None
}
assert sorted(JSONRenderer.extract_attributes(fields, resource)) == sorted(expected), 'Regular fields should be extracted'
assert sorted(JSONRenderer.extract_attributes(fields, {})) == sorted(
{'username': ''}), 'Should not extract read_only fields on empty serializer'
|
Fix for Database access not allowed, use the "django_db" mark to enable it.
|
Fix for Database access not allowed, use the "django_db" mark to enable it.
|
Python
|
bsd-2-clause
|
django-json-api/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api,schtibe/django-rest-framework-json-api,pombredanne/django-rest-framework-json-api,scottfisk/django-rest-framework-json-api,Instawork/django-rest-framework-json-api,leo-naeka/rest_framework_ember,django-json-api/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,django-json-api/rest_framework_ember
|
d32ec29dfae5a3ea354266dfda0438d9c69398e3
|
daiquiri/wordpress/utils.py
|
daiquiri/wordpress/utils.py
|
from django.conf import settings
from .tasks import (
update_wordpress_user as update_wordpress_user_task,
update_wordpress_role as update_wordpress_role_task
)
def update_wordpress_user(user):
if not settings.ASYNC:
update_wordpress_user_task.apply((user.username, user.email, user.first_name, user.last_name), throw=True)
else:
update_wordpress_user_task.apply_async((user.username, user.email, user.first_name, user.last_name))
def update_wordpress_role(user):
if user.is_superuser:
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_admin').exists():
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_editor').exists():
wordpress_role = 'editor'
else:
wordpress_role = 'subscriber'
if not settings.ASYNC:
update_wordpress_role_task.apply((user.username, wordpress_role), throw=True)
else:
update_wordpress_role_task.apply_async((user.username, wordpress_role))
|
import random
import string
from django.conf import settings
from .tasks import (
update_wordpress_user as update_wordpress_user_task,
update_wordpress_role as update_wordpress_role_task
)
def update_wordpress_user(user):
if user.email:
email = user.email
else:
random_string = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
email = random_string + '@example.com'
if not settings.ASYNC:
update_wordpress_user_task.apply((user.username, email, user.first_name, user.last_name), throw=True)
else:
update_wordpress_user_task.apply_async((user.username, email, user.first_name, user.last_name))
def update_wordpress_role(user):
if user.is_superuser:
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_admin').exists():
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_editor').exists():
wordpress_role = 'editor'
else:
wordpress_role = 'subscriber'
if not settings.ASYNC:
update_wordpress_role_task.apply((user.username, wordpress_role), throw=True)
else:
update_wordpress_role_task.apply_async((user.username, wordpress_role))
|
Fix update_wordpress_user for missing email
|
Fix update_wordpress_user for missing email
|
Python
|
apache-2.0
|
aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri
|
f6518a7bd554c87b4dcb68d1ca618babcf278c63
|
tests/extmod/machine1.py
|
tests/extmod/machine1.py
|
# test machine module
import machine
import uctypes
print(machine.mem8)
buf = bytearray(8)
addr = uctypes.addressof(buf)
machine.mem8[addr] = 123
print(machine.mem8[addr])
machine.mem16[addr] = 12345
print(machine.mem16[addr])
machine.mem32[addr] = 123456789
print(machine.mem32[addr])
try:
machine.mem16[1]
except ValueError:
print("ValueError")
try:
machine.mem16[1] = 1
except ValueError:
print("ValueError")
try:
del machine.mem8[0]
except TypeError:
print("TypeError")
|
# test machine module
try:
import machine
except ImportError:
print("SKIP")
import sys
sys.exit()
import uctypes
print(machine.mem8)
buf = bytearray(8)
addr = uctypes.addressof(buf)
machine.mem8[addr] = 123
print(machine.mem8[addr])
machine.mem16[addr] = 12345
print(machine.mem16[addr])
machine.mem32[addr] = 123456789
print(machine.mem32[addr])
try:
machine.mem16[1]
except ValueError:
print("ValueError")
try:
machine.mem16[1] = 1
except ValueError:
print("ValueError")
try:
del machine.mem8[0]
except TypeError:
print("TypeError")
|
Check that machine module exists and print SKIP if it doesn't.
|
tests: Check that machine module exists and print SKIP if it doesn't.
|
Python
|
mit
|
vitiral/micropython,lowRISC/micropython,adafruit/circuitpython,danicampora/micropython,ryannathans/micropython,pfalcon/micropython,mgyenik/micropython,adafruit/micropython,blazewicz/micropython,vitiral/micropython,chrisdearman/micropython,dxxb/micropython,dinau/micropython,tdautc19841202/micropython,utopiaprince/micropython,jmarcelino/pycom-micropython,tdautc19841202/micropython,noahchense/micropython,lowRISC/micropython,danicampora/micropython,neilh10/micropython,ahotam/micropython,vriera/micropython,kerneltask/micropython,mgyenik/micropython,selste/micropython,redbear/micropython,alex-robbins/micropython,adamkh/micropython,adafruit/micropython,danicampora/micropython,matthewelse/micropython,vitiral/micropython,cloudformdesign/micropython,mpalomer/micropython,torwag/micropython,cloudformdesign/micropython,galenhz/micropython,Timmenem/micropython,ganshun666/micropython,hiway/micropython,ganshun666/micropython,MrSurly/micropython,MrSurly/micropython-esp32,AriZuu/micropython,pramasoul/micropython,ryannathans/micropython,emfcamp/micropython,alex-robbins/micropython,ericsnowcurrently/micropython,omtinez/micropython,skybird6672/micropython,rubencabrera/micropython,swegener/micropython,feilongfl/micropython,turbinenreiter/micropython,puuu/micropython,noahwilliamsson/micropython,emfcamp/micropython,cloudformdesign/micropython,MrSurly/micropython,supergis/micropython,neilh10/micropython,ChuckM/micropython,ChuckM/micropython,bvernoux/micropython,utopiaprince/micropython,blazewicz/micropython,blmorris/micropython,xhat/micropython,hosaka/micropython,jlillest/micropython,martinribelotta/micropython,PappaPeppar/micropython,suda/micropython,mpalomer/micropython,kerneltask/micropython,xyb/micropython,alex-march/micropython,ryannathans/micropython,dinau/micropython,utopiaprince/micropython,martinribelotta/micropython,henriknelson/micropython,dhylands/micropython,noahwilliamsson/micropython,Timmenem/micropython,HenrikSolver/micropython,dxxb/micropython,MrSurly/micropython,pozetronin
c/micropython,toolmacher/micropython,lbattraw/micropython,noahchense/micropython,skybird6672/micropython,infinnovation/micropython,dinau/micropython,ruffy91/micropython,turbinenreiter/micropython,Peetz0r/micropython-esp32,misterdanb/micropython,HenrikSolver/micropython,TDAbboud/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,dhylands/micropython,drrk/micropython,mpalomer/micropython,lbattraw/micropython,toolmacher/micropython,tralamazza/micropython,tdautc19841202/micropython,kerneltask/micropython,ruffy91/micropython,chrisdearman/micropython,adafruit/circuitpython,pozetroninc/micropython,emfcamp/micropython,mianos/micropython,supergis/micropython,firstval/micropython,heisewangluo/micropython,redbear/micropython,blmorris/micropython,kostyll/micropython,micropython/micropython-esp32,torwag/micropython,kostyll/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,tobbad/micropython,cnoviello/micropython,noahwilliamsson/micropython,tuc-osg/micropython,micropython/micropython-esp32,pramasoul/micropython,skybird6672/micropython,torwag/micropython,blmorris/micropython,praemdonck/micropython,omtinez/micropython,hiway/micropython,micropython/micropython-esp32,xyb/micropython,mianos/micropython,pozetroninc/micropython,suda/micropython,ruffy91/micropython,martinribelotta/micropython,infinnovation/micropython,deshipu/micropython,matthewelse/micropython,heisewangluo/micropython,supergis/micropython,dhylands/micropython,adafruit/circuitpython,mpalomer/micropython,PappaPeppar/micropython,bvernoux/micropython,alex-march/micropython,mianos/micropython,EcmaXp/micropython,selste/micropython,martinribelotta/micropython,dxxb/micropython,feilongfl/micropython,galenhz/micropython,micropython/micropython-esp32,MrSurly/micropython-esp32,EcmaXp/micropython,tobbad/micropython,alex-robbins/micropython,ceramos/micropython,puuu/micropython,PappaPeppar/micropython,xyb/micropython,trezor/micropython,dhylands/micropython,selste/micropython,oopy/micropython,xuxiaoxin/micro
python,neilh10/micropython,infinnovation/micropython,adafruit/circuitpython,stonegithubs/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,torwag/micropython,skybird6672/micropython,tralamazza/micropython,matthewelse/micropython,hiway/micropython,ryannathans/micropython,tobbad/micropython,xhat/micropython,slzatz/micropython,noahchense/micropython,ceramos/micropython,deshipu/micropython,mianos/micropython,mhoffma/micropython,orionrobots/micropython,toolmacher/micropython,kerneltask/micropython,pfalcon/micropython,pozetroninc/micropython,misterdanb/micropython,galenhz/micropython,omtinez/micropython,jlillest/micropython,supergis/micropython,feilongfl/micropython,neilh10/micropython,Timmenem/micropython,mhoffma/micropython,PappaPeppar/micropython,hosaka/micropython,henriknelson/micropython,noahchense/micropython,martinribelotta/micropython,ChuckM/micropython,HenrikSolver/micropython,suda/micropython,ChuckM/micropython,rubencabrera/micropython,Peetz0r/micropython-esp32,deshipu/micropython,matthewelse/micropython,puuu/micropython,misterdanb/micropython,dxxb/micropython,PappaPeppar/micropython,danicampora/micropython,selste/micropython,pramasoul/micropython,tdautc19841202/micropython,turbinenreiter/micropython,firstval/micropython,blazewicz/micropython,HenrikSolver/micropython,ericsnowcurrently/micropython,noahchense/micropython,alex-robbins/micropython,deshipu/micropython,cnoviello/micropython,orionrobots/micropython,cnoviello/micropython,lowRISC/micropython,tuc-osg/micropython,torwag/micropython,xuxiaoxin/micropython,henriknelson/micropython,trezor/micropython,praemdonck/micropython,deshipu/micropython,jlillest/micropython,pramasoul/micropython,TDAbboud/micropython,vriera/micropython,matthewelse/micropython,HenrikSolver/micropython,xhat/micropython,pozetroninc/micropython,tralamazza/micropython,orionrobots/micropython,jmarcelino/pycom-micropython,adamkh/micropython,dmazzella/micropython,turbinenreiter/micropython,chrisdearman/micropython,TDAbboud/micropython,
drrk/micropython,xhat/micropython,blazewicz/micropython,ChuckM/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython-esp32,stonegithubs/micropython,galenhz/micropython,orionrobots/micropython,ryannathans/micropython,hosaka/micropython,lbattraw/micropython,firstval/micropython,ernesto-g/micropython,AriZuu/micropython,xyb/micropython,feilongfl/micropython,mianos/micropython,EcmaXp/micropython,noahwilliamsson/micropython,bvernoux/micropython,infinnovation/micropython,redbear/micropython,slzatz/micropython,slzatz/micropython,ahotam/micropython,dmazzella/micropython,ericsnowcurrently/micropython,pfalcon/micropython,redbear/micropython,feilongfl/micropython,misterdanb/micropython,vriera/micropython,lbattraw/micropython,ceramos/micropython,alex-robbins/micropython,henriknelson/micropython,AriZuu/micropython,hiway/micropython,lowRISC/micropython,adafruit/micropython,ericsnowcurrently/micropython,alex-march/micropython,dinau/micropython,skybird6672/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,Peetz0r/micropython-esp32,trezor/micropython,MrSurly/micropython,dmazzella/micropython,ernesto-g/micropython,danicampora/micropython,omtinez/micropython,xuxiaoxin/micropython,bvernoux/micropython,praemdonck/micropython,kostyll/micropython,ruffy91/micropython,ruffy91/micropython,rubencabrera/micropython,adamkh/micropython,oopy/micropython,alex-march/micropython,utopiaprince/micropython,AriZuu/micropython,pfalcon/micropython,lbattraw/micropython,utopiaprince/micropython,ahotam/micropython,suda/micropython,heisewangluo/micropython,tobbad/micropython,MrSurly/micropython-esp32,suda/micropython,adafruit/micropython,hiway/micropython,cwyark/micropython,cnoviello/micropython,xhat/micropython,drrk/micropython,TDAbboud/micropython,ganshun666/micropython,adamkh/micropython,ahotam/micropython,oopy/micropython,jlillest/micropython,swegener/micropython,rubencabrera/micropython,tralamazza/micropython,tuc-osg/micropython,infinnovation/micropython,s
wegener/micropython,blmorris/micropython,omtinez/micropython,trezor/micropython,stonegithubs/micropython,adafruit/circuitpython,pramasoul/micropython,misterdanb/micropython,hosaka/micropython,ganshun666/micropython,matthewelse/micropython,ceramos/micropython,mhoffma/micropython,emfcamp/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,orionrobots/micropython,pfalcon/micropython,cloudformdesign/micropython,mgyenik/micropython,bvernoux/micropython,firstval/micropython,redbear/micropython,dinau/micropython,cwyark/micropython,MrSurly/micropython,galenhz/micropython,henriknelson/micropython,emfcamp/micropython,alex-march/micropython,firstval/micropython,mgyenik/micropython,jmarcelino/pycom-micropython,ericsnowcurrently/micropython,rubencabrera/micropython,oopy/micropython,cwyark/micropython,mgyenik/micropython,ganshun666/micropython,TDAbboud/micropython,mhoffma/micropython,noahwilliamsson/micropython,adamkh/micropython,xuxiaoxin/micropython,chrisdearman/micropython,turbinenreiter/micropython,heisewangluo/micropython,puuu/micropython,cwyark/micropython,drrk/micropython,puuu/micropython,kostyll/micropython,selste/micropython,oopy/micropython,praemdonck/micropython,toolmacher/micropython,cnoviello/micropython,slzatz/micropython,swegener/micropython,blmorris/micropython,drrk/micropython,EcmaXp/micropython,mhoffma/micropython,ceramos/micropython,ahotam/micropython,stonegithubs/micropython,SHA2017-badge/micropython-esp32,vriera/micropython,SHA2017-badge/micropython-esp32,stonegithubs/micropython,dhylands/micropython,jlillest/micropython,tuc-osg/micropython,xyb/micropython,heisewangluo/micropython,xuxiaoxin/micropython,AriZuu/micropython,hosaka/micropython,dxxb/micropython,cwyark/micropython,kostyll/micropython,swegener/micropython,slzatz/micropython,dmazzella/micropython,Timmenem/micropython,vriera/micropython,tdautc19841202/micropython,chrisdearman/micropython,supergis/micropython,tobbad/micropython,blazewicz/micropython,ernesto-g/micropython,ernesto-g/micro
python,mpalomer/micropython,toolmacher/micropython,lowRISC/micropython,vitiral/micropython,ernesto-g/micropython,EcmaXp/micropython,praemdonck/micropython,jmarcelino/pycom-micropython,adafruit/micropython,trezor/micropython,cloudformdesign/micropython,neilh10/micropython,vitiral/micropython
|
ddd4a0d1ba607f49f75f9516c378159f1204d9fb
|
readthedocs/rtd_tests/tests/test_search_json_parsing.py
|
readthedocs/rtd_tests/tests/test_search_json_parsing.py
|
import os
from django.test import TestCase
from search.parse_json import process_file

base_dir = os.path.dirname(os.path.dirname(__file__))


class TestHacks(TestCase):
    def test_h2_parsing(self):
        """The first parsed section is the slumber client; all sections hold <h2> content."""
        fjson_path = os.path.join(base_dir, 'files/api.fjson')
        data = process_file(fjson_path)
        sections = data['sections']
        self.assertEqual(sections[0]['id'], 'a-basic-api-client-using-slumber')
        # Only capture h2's
        for section in sections:
            self.assertEqual(section['content'][:5], '\n<h2>')
|
import os
from django.test import TestCase
from search.parse_json import process_file

base_dir = os.path.dirname(os.path.dirname(__file__))


class TestHacks(TestCase):
    def test_h2_parsing(self):
        """After the leading h1 section, every captured section holds <h2> content."""
        fjson_path = os.path.join(base_dir, 'files/api.fjson')
        data = process_file(fjson_path)
        sections = data['sections']
        self.assertEqual(sections[1]['id'], 'a-basic-api-client-using-slumber')
        # Only capture h2's after the first section
        for section in sections[1:]:
            self.assertEqual(section['content'][:5], '\n<h2>')
|
Fix tests now that we have H1 capturing
|
Fix tests now that we have H1 capturing
|
Python
|
mit
|
wanghaven/readthedocs.org,wijerasa/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,d0ugal/readthedocs.org,takluyver/readthedocs.org,wanghaven/readthedocs.org,emawind84/readthedocs.org,KamranMackey/readthedocs.org,attakei/readthedocs-oauth,agjohnson/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,dirn/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,michaelmcandrew/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,safwanrahman/readthedocs.org,GovReady/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,takluyver/readthedocs.org,stevepiercy/readthedocs.org,atsuyim/readthedocs.org,espdev/readthedocs.org,techtonik/readthedocs.org,davidfischer/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,royalwang/readthedocs.org,pombredanne/readthedocs.org,atsuyim/readthedocs.org,soulshake/readthedocs.org,SteveViss/readthedocs.org,rtfd/readthedocs.org,SteveViss/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,kdkeyser/readthedocs.org,KamranMackey/readthedocs.org,kenshinthebattosai/readthedocs.org,singingwolfboy/readthedocs.org,mrshoki/readthedocs.org,laplaceliu/readthedocs.org,kenwang76/readthedocs.org,emawind84/readthedocs.org,wijerasa/readthedocs.org,kenshinthebattosai/readthedocs.org,atsuyim/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,mhils/readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,LukasBoersma/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,fujita-shintaro/readthedocs.org,sils1297/readthedocs.org,safwanrahman/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/re
adthedocs.org,titiushko/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,emawind84/readthedocs.org,Tazer/readthedocs.org,hach-que/readthedocs.org,techtonik/readthedocs.org,jerel/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,pombredanne/readthedocs.org,kenwang76/readthedocs.org,attakei/readthedocs-oauth,mrshoki/readthedocs.org,stevepiercy/readthedocs.org,jerel/readthedocs.org,laplaceliu/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,singingwolfboy/readthedocs.org,GovReady/readthedocs.org,kenshinthebattosai/readthedocs.org,royalwang/readthedocs.org,titiushko/readthedocs.org,VishvajitP/readthedocs.org,agjohnson/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,sils1297/readthedocs.org,raven47git/readthedocs.org,takluyver/readthedocs.org,d0ugal/readthedocs.org,espdev/readthedocs.org,singingwolfboy/readthedocs.org,michaelmcandrew/readthedocs.org,emawind84/readthedocs.org,royalwang/readthedocs.org,takluyver/readthedocs.org,agjohnson/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,cgourlay/readthedocs.org,agjohnson/readthedocs.org,CedarLogic/readthedocs.org,gjtorikian/readthedocs.org,kenwang76/readthedocs.org,fujita-shintaro/readthedocs.org,sid-kap/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,VishvajitP/readthedocs.org,LukasBoersma/readthedocs.org,dirn/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,tddv/readthedocs.org,kenshinthebattosai/readthedocs.org,dirn/readthedocs.org,Carreau/readthedocs.org,gjtorikian/readthedocs.org,nikolas/readthedocs.org,nikolas/readthedocs.org,sid-kap/readthedocs.org,hach-que/readthedocs.org,Carreau/readthedoc
s.org,cgourlay/readthedocs.org,soulshake/readthedocs.org,espdev/readthedocs.org,sid-kap/readthedocs.org,singingwolfboy/readthedocs.org,attakei/readthedocs-oauth,clarkperkins/readthedocs.org,asampat3090/readthedocs.org,davidfischer/readthedocs.org,gjtorikian/readthedocs.org,attakei/readthedocs-oauth,sunnyzwh/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,Tazer/readthedocs.org,kdkeyser/readthedocs.org,davidfischer/readthedocs.org,laplaceliu/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,istresearch/readthedocs.org,raven47git/readthedocs.org,nikolas/readthedocs.org,kdkeyser/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,VishvajitP/readthedocs.org,clarkperkins/readthedocs.org,d0ugal/readthedocs.org,sils1297/readthedocs.org,Tazer/readthedocs.org,tddv/readthedocs.org,mhils/readthedocs.org,KamranMackey/readthedocs.org,sunnyzwh/readthedocs.org,LukasBoersma/readthedocs.org,hach-que/readthedocs.org,dirn/readthedocs.org,raven47git/readthedocs.org
|
135a97a58a95c04d2635fff68d2c080413f1d804
|
tests/test_conditions.py
|
tests/test_conditions.py
|
import json
import unittest

import awacs.aws as aws
import awacs.s3 as s3


class TestConditions(unittest.TestCase):
    def test_for_all_values(self):
        """A ForAllValues:StringLike condition serializes to the expected JSON."""
        condition = aws.Condition(
            aws.ForAllValuesStringLike(
                "dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
            )
        )
        statement = aws.Statement(
            Action=[s3.ListBucket],
            Effect=aws.Allow,
            Resource=[s3.ARN("myBucket")],
            Condition=condition,
        )
        policy = aws.PolicyDocument(Statement=[statement])
        expected = {
            u"Statement": [
                {
                    u"Action": [u"s3:ListBucket"],
                    u"Condition": {
                        u"ForAllValues:StringLike": {
                            u"dynamodb:requestedAttributes": [
                                u"PostDateTime",
                                u"Message",
                                u"Tags",
                            ]
                        }
                    },
                    u"Effect": u"Allow",
                    u"Resource": [u"arn:aws:s3:::myBucket"],
                }
            ]
        }
        self.assertEqual(expected, json.loads(policy.to_json()))
|
import json
import unittest

import awacs.aws as aws
import awacs.s3 as s3


class TestConditions(unittest.TestCase):
    def test_for_all_values(self):
        """A ForAllValues:StringLike condition serializes to the expected JSON."""
        condition = aws.Condition(
            aws.ForAllValuesStringLike(
                "dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
            )
        )
        statement = aws.Statement(
            Action=[s3.ListBucket],
            Effect=aws.Allow,
            Resource=[s3.ARN("myBucket")],
            Condition=condition,
        )
        policy = aws.PolicyDocument(Statement=[statement])
        expected = {
            "Statement": [
                {
                    "Action": ["s3:ListBucket"],
                    "Condition": {
                        "ForAllValues:StringLike": {
                            "dynamodb:requestedAttributes": [
                                "PostDateTime",
                                "Message",
                                "Tags",
                            ]
                        }
                    },
                    "Effect": "Allow",
                    "Resource": ["arn:aws:s3:::myBucket"],
                }
            ]
        }
        self.assertEqual(expected, json.loads(policy.to_json()))
|
Remove 'u' prefix from strings
|
Remove 'u' prefix from strings
|
Python
|
bsd-2-clause
|
cloudtools/awacs
|
07cffdaa6e131c4f02c570de3925d6238656fc87
|
tests/test_invocation.py
|
tests/test_invocation.py
|
import sys
import subprocess
import re


def test_runpy_invoke():
    """
    Ensure honcho can also be invoked using runpy (python -m)
    """
    # Use the exact interpreter running this test so the right env is exercised.
    honcho_cmd = [sys.executable, '-m', 'honcho', 'version']
    version_output = subprocess.check_output(honcho_cmd, universal_newlines=True)
    assert re.match(r'honcho \d\.\d\.\d.*\n', version_output)
|
import sys
import subprocess
import re

import pytest


@pytest.mark.skipif(sys.version_info < (2, 7), reason="check_output not available")
def test_runpy_invoke():
    """
    Ensure honcho can also be invoked using runpy (python -m)
    """
    # Use the exact interpreter running this test so the right env is exercised.
    honcho_cmd = [sys.executable, '-m', 'honcho', 'version']
    version_output = subprocess.check_output(honcho_cmd, universal_newlines=True)
    assert re.match(r'honcho \d\.\d\.\d.*\n', version_output)
|
Disable test on Python 2.6.
|
Disable test on Python 2.6.
|
Python
|
mit
|
nickstenning/honcho,nickstenning/honcho
|
f052666502ef0108d991940ca713ebc0c5d0c036
|
MyBot.py
|
MyBot.py
|
# Halite starter bot: each frame, issue a random move for every site we own.
from hlt import *
from networking import *
# Handshake with the game engine: learn our player id and the starting map.
myID, gameMap = getInit()
sendInit("MyPythonBot")
# Main game loop: one iteration per frame; runs until the engine ends the game.
while True:
    moves = []
    gameMap = getFrame()  # refresh map state for this frame
    for y in range(gameMap.height):
        for x in range(gameMap.width):
            location = Location(x, y)
            # Only sites owned by us may be given orders.
            if gameMap.getSite(location).owner == myID:
                # random/DIRECTIONS come from the star imports above — TODO confirm
                moves.append(Move(location, random.choice(DIRECTIONS)))
    sendFrame(moves)
|
from hlt import *
from networking import *
# Handshake with the game engine: learn our player id and the starting map.
myID, gameMap = getInit()
sendInit("dpetkerPythonBot")
def create_move(location):
    """Choose a move for the owned site at `location`.

    Priority: capture a weaker adjacent enemy; otherwise hold still until
    strong enough; otherwise drift north/west at random.
    Reads the module-level `gameMap`, which is rebound every frame below.
    """
    site = gameMap.getSite(location)
    # See if there's an enemy adjacent to us with less strength. If so, capture it
    for d in CARDINALS:
        neighbour_site = gameMap.getSite(location, d)
        if neighbour_site.owner != myID and neighbour_site.strength < site.strength:
            return Move(location, d)
    # Don't move until we're sufficiently strong
    if site.strength < site.production * 5:
        return Move(location, STILL)
    # Strong enough and nothing to capture: wander toward the north-west.
    return Move(location, NORTH if random.random() > 0.5 else WEST)
# Main loop: one iteration per frame; gameMap is refreshed before moves are chosen.
while True:
    moves = []
    gameMap = getFrame()
    for y in range(gameMap.height):
        for x in range(gameMap.width):
            location = Location(x, y)
            if gameMap.getSite(location).owner == myID:
                moves.append(create_move(location))
    sendFrame(moves)
|
Improve my bot according to tutorial
|
Improve my bot according to tutorial
|
Python
|
mit
|
dpetker/halite,dpetker/halite
|
9eabdbc6b73661865c4d785cbc57d7ee51fe59cd
|
future/tests/test_imports_urllib.py
|
future/tests/test_imports_urllib.py
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
    def test_urllib(self):
        """
        This should perhaps fail: importing urllib first means that the import hooks
        won't be consulted when importing urllib.response.
        """
        # Import order is the point of this test: stdlib urllib is imported
        # BEFORE future's import hooks are installed.
        import urllib
        print(urllib.__file__)
        from future import standard_library
        # The hooks are only active inside this context manager.
        with standard_library.hooks():
            import urllib.response
        print(urllib.__file__)
        print(urllib.response.__file__)
if __name__ == '__main__':
    unittest.main()
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
    def test_urllib(self):
        """Explicitly importing future's urllib.response leaves stdlib urllib untouched."""
        # Record the stdlib module's path before touching the backport.
        import urllib
        orig_file = urllib.__file__
        # An explicit import of the backported submodule must not rebind urllib.
        from future.standard_library.urllib import response as urllib_response
        self.assertEqual(orig_file, urllib.__file__)
        print(urllib_response.__file__)
if __name__ == '__main__':
    unittest.main()
|
Change urllib test to use an explicit import
|
Change urllib test to use an explicit import
|
Python
|
mit
|
QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future,krischer/python-future,krischer/python-future,QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future
|
cad4e7e9feaf7fefe9ef91dea18594b095861204
|
content_editor/models.py
|
content_editor/models.py
|
from types import SimpleNamespace
from django.db import models
__all__ = ("Region", "Template", "create_plugin_base")
class Region(SimpleNamespace):
    # Identifier used in the ``region`` field of plugin instances.
    key = ""
    # Human-readable name for the region.
    title = "unnamed"
    # Whether child pages inherit this region's content.
    inherited = False
class Template(SimpleNamespace):
    # Identifier for this template choice.
    key = ""
    # Path of the template to render.
    template_name = None
    # Human-readable name for the template.
    title = ""
    # Regions available when this template is active.
    regions = []
def create_plugin_base(content_base):
    """
    Create and return an abstract base class for concrete content-type models.

    NOTE(review): despite earlier wording calling this "purely internal",
    it is exported in ``__all__`` above, i.e. part of the public API.
    The three fields added to build a concrete content type class/model
    are ``parent``, ``region`` and ``ordering``.
    """
    class PluginBase(models.Model):
        # Link back to the page/content object that owns this plugin.
        parent = models.ForeignKey(
            content_base,
            related_name="%(app_label)s_%(class)s_set",
            on_delete=models.CASCADE,
        )
        # Region key this plugin is placed in (matches Region.key).
        region = models.CharField(max_length=255)
        # Sort order within the region.
        ordering = models.IntegerField(default=0)
        class Meta:
            abstract = True
            # Register concrete subclasses in the same app as the content base.
            app_label = content_base._meta.app_label
            ordering = ["ordering"]
        def __str__(self):
            return "%s<region=%s ordering=%s pk=%s>" % (
                self._meta.label,
                self.region,
                self.ordering,
                self.pk,
            )
        @classmethod
        def get_queryset(cls):
            # Hook for subclasses to customize the queryset used when rendering.
            return cls.objects.all()
    return PluginBase
|
from types import SimpleNamespace
from django.db import models
__all__ = ("Region", "Template", "create_plugin_base")
class Region(SimpleNamespace):
    # Identifier used in the ``region`` field of plugin instances.
    key = ""
    # Human-readable name for the region.
    title = "unnamed"
    # Whether child pages inherit this region's content.
    inherited = False
class Template(SimpleNamespace):
    # Identifier for this template choice.
    key = ""
    # Path of the template to render.
    template_name = None
    # Human-readable name for the template.
    title = ""
    # Regions available when this template is active.
    regions = []
def create_plugin_base(content_base):
    """
    Create and return a base class for plugins
    The base class contains a ``parent`` foreign key and the required
    ``region`` and ``ordering`` fields.
    """
    class PluginBase(models.Model):
        # Link back to the page/content object that owns this plugin.
        parent = models.ForeignKey(
            content_base,
            related_name="%(app_label)s_%(class)s_set",
            on_delete=models.CASCADE,
        )
        # Region key this plugin is placed in (matches Region.key).
        region = models.CharField(max_length=255)
        # Sort order within the region.
        ordering = models.IntegerField(default=0)
        class Meta:
            abstract = True
            # Register concrete subclasses in the same app as the content base.
            app_label = content_base._meta.app_label
            ordering = ["ordering"]
        def __str__(self):
            return "%s<region=%s ordering=%s pk=%s>" % (
                self._meta.label,
                self.region,
                self.ordering,
                self.pk,
            )
        @classmethod
        def get_queryset(cls):
            # Hook for subclasses to customize the queryset used when rendering.
            return cls.objects.all()
    return PluginBase
|
Fix the docstring of create_plugin_base: Not internal, it's the main API
|
Fix the docstring of create_plugin_base: Not internal, it's the main API
|
Python
|
bsd-3-clause
|
matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor
|
0ecff906f8d504576f00f28c46be6d4594008f38
|
parcels/interaction/distance_utils.py
|
parcels/interaction/distance_utils.py
|
import numpy as np


def fast_distance(lat1, lon1, lat2, lon2):
    '''Compute the arc distance (central angle, radians) assuming the earth is a sphere.

    Inputs are latitudes/longitudes in radians.
    '''
    # Spherical law of cosines.
    g = np.sin(lat1)*np.sin(lat2)+np.cos(lat1)*np.cos(lat2)*np.cos(lon1-lon2)
    # Clamp to arccos's full domain [-1, 1]: the previous np.minimum(1, g)
    # only guarded the +1 side, so floating-point error near antipodal
    # points (g marginally below -1) could yield NaN.
    return np.arccos(np.clip(g, -1.0, 1.0))


def spherical_distance(depth1_m, lat1_deg, lon1_deg, depth2_m, lat2_deg,
                       lon2_deg):
    """Return (vertical, horizontal) distance in metres between two points.

    Latitudes/longitudes are given in degrees, depths in metres.
    """
    R_earth = 6371000  # mean earth radius in metres
    # Convert degrees to radians for fast_distance.
    lat1 = np.pi*lat1_deg/180
    lon1 = np.pi*lon1_deg/180
    lat2 = np.pi*lat2_deg/180
    lon2 = np.pi*lon2_deg/180
    horiz_dist = R_earth*fast_distance(lat1, lon1, lat2, lon2)
    vert_dist = np.abs(depth1_m-depth2_m)
    return (vert_dist, horiz_dist)
|
import numpy as np


def fast_distance(lat1, lon1, lat2, lon2):
    '''Compute the arc distance assuming the earth is a sphere.

    This is not the only possible implementation. It was taken from:
    https://www.mkompf.com/gps/distcalc.html
    '''
    # Spherical law of cosines; the minimum() keeps rounding noise from
    # pushing the value past arccos's +1 domain boundary.
    cos_angle = np.sin(lat1)*np.sin(lat2)+np.cos(lat1)*np.cos(lat2)*np.cos(lon1-lon2)
    return np.arccos(np.minimum(1, cos_angle))


def spherical_distance(depth1_m, lat1_deg, lon1_deg, depth2_m, lat2_deg,
                       lon2_deg):
    "Compute the arc distance, uses degrees as input."
    R_earth = 6371000
    # Degrees -> radians before handing off to fast_distance.
    lat1 = np.pi*lat1_deg/180
    lon1 = np.pi*lon1_deg/180
    lat2 = np.pi*lat2_deg/180
    lon2 = np.pi*lon2_deg/180
    arc = fast_distance(lat1, lon1, lat2, lon2)
    return (np.abs(depth1_m-depth2_m), R_earth*arc)
|
Add link for distance computation
|
Add link for distance computation
|
Python
|
mit
|
OceanPARCELS/parcels,OceanPARCELS/parcels
|
c07234bb3142df96dc9e02a236975bc3de2415cc
|
nailgun/nailgun/test/test_plugin.py
|
nailgun/nailgun/test/test_plugin.py
|
# -*- coding: utf-8 -*-
from nailgun.test.base import BaseHandlers
class TestPluginStateMachine(BaseHandlers):
    # Placeholder suite for the plugin state machine.
    def test_attrs_creation(self):
        # TODO: not implemented yet.
        pass
|
# -*- coding: utf-8 -*-
from nailgun.test.base import BaseHandlers
from nailgun.plugin.process import get_queue, PluginProcessor
from nailgun.api.models import Task
class TestPluginProcess(BaseHandlers):
    def setUp(self):
        # Start a real PluginProcessor subprocess for each test.
        super(TestPluginProcess, self).setUp()
        self.plugin_processor = PluginProcessor()
        self.plugin_processor.start()
    def tearDown(self):
        # Kill the processor so it cannot outlive the test.
        super(TestPluginProcess, self).tearDown()
        self.plugin_processor.terminate()
    def test_task_set_to_error_when_exception_raised(self):
        """A task referencing a nonexistent plugin must end up in the 'error' state."""
        queue = get_queue()
        # plugin_id=-1 cannot resolve, so processing is expected to fail.
        task = Task(name='install_plugin', cache={'plugin_id': -1})
        self.env.db.add(task)
        self.env.db.commit()
        queue.put(task.uuid)
        def check_task_status_is_error():
            # Refresh from the DB: the processor updates the row out-of-process.
            self.env.db.refresh(task)
            return task.status == 'error'
        self.env.wait_for_true(check_task_status_is_error, timeout=2)
        # Even failed tasks are driven to 100% progress.
        self.assertEquals(task.progress, 100)
|
Implement plugin test on exception handling
|
Implement plugin test on exception handling
|
Python
|
apache-2.0
|
SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,zhaochao/fuel-main,zhaochao/fuel-main,huntxu/fuel-main,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-main,SmartInfrastructures/fuel-main-dev,huntxu/fuel-web,teselkin/fuel-main,ddepaoli3/fuel-main-dev,teselkin/fuel-main,SmartInfrastructures/fuel-web-dev,SergK/fuel-main,dancn/fuel-main-dev,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SergK/fuel-main,zhaochao/fuel-web,dancn/fuel-main-dev,nebril/fuel-web,dancn/fuel-main-dev,AnselZhangGit/fuel-main,Fiware/ops.Fuel-main-dev,AnselZhangGit/fuel-main,nebril/fuel-web,SmartInfrastructures/fuel-main-dev,eayunstack/fuel-web,AnselZhangGit/fuel-main,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-main,SergK/fuel-main,prmtl/fuel-web,zhaochao/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,zhaochao/fuel-web,SmartInfrastructures/fuel-web-dev,koder-ua/nailgun-fcert,koder-ua/nailgun-fcert,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,zhaochao/fuel-web,zhaochao/fuel-web,teselkin/fuel-main,stackforge/fuel-main,prmtl/fuel-web,AnselZhangGit/fuel-main,zhaochao/fuel-main,eayunstack/fuel-web,eayunstack/fuel-main,koder-ua/nailgun-fcert,stackforge/fuel-web,nebril/fuel-web,stackforge/fuel-web,zhaochao/fuel-main,ddepaoli3/fuel-main-dev,Fiware/ops.Fuel-main-dev,prmtl/fuel-web,SmartInfrastructures/fuel-main-dev,Fiware/ops.Fuel-main-dev,zhaochao/fuel-main,koder-ua/nailgun-fcert,huntxu/fuel-web,dancn/fuel-main-dev,stackforge/fuel-main,eayunstack/fuel-web,prmtl/fuel-web,stackforge/fuel-web,huntxu/fuel-web,huntxu/fuel-main,Fiware/ops.Fuel-main-dev,teselkin/fuel-main,nebril/fuel-web,eayunstack/fuel-web,stackforge/fuel-main
|
09edd3b548baaa4f6d1e31d5a9891f2b6eef45d6
|
noopy/project_template/dispatcher.py
|
noopy/project_template/dispatcher.py
|
from noopy import lambda_functions
from noopy.endpoint import Endpoint
from noopy.endpoint import methods
def dispatch(event, context):
print event
if event['type'] == 'APIGateway':
path = event['path']
method = getattr(methods, event['method'])
endpoint = Endpoint.endpoints[Endpoint(path, method)]
return endpoint(event.get('params', {}), context)
if event['type'] == 'Lambda':
funcs = [f for f in lambda_functions if f.func_name == event['function_name']]
if len(funcs) != 1:
raise ValueError('One and only one function "{}" needed.'.format(event['function_name']))
funcs[0](event.get('params', {}), context)
|
from noopy import lambda_functions
from noopy.endpoint import Endpoint
from noopy.endpoint import methods
def dispatch(event, context):
    """Route an incoming event to an API Gateway endpoint or a registered Lambda function.

    ``event['type']`` selects the branch; unknown types raise ValueError.
    """
    print event
    if event['type'] == 'APIGateway':
        path = event['path']
        method = getattr(methods, event['method'])
        # Look up the registered endpoint by (path, method).
        endpoint = Endpoint.endpoints[Endpoint(path, method)]
        return endpoint(event.get('params', {}), context)
    if event['type'] == 'Lambda':
        # func_name: Python 2 function attribute (this module targets Python 2).
        funcs = [f for f in lambda_functions if f.func_name == event['function_name']]
        if len(funcs) != 1:
            raise ValueError('One and only one function "{}" needed.'.format(event['function_name']))
        return funcs[0](event.get('params', {}), context)
    raise ValueError('Undefined type: "%s"' % event['type'])
|
Raise error on undefined type
|
Raise error on undefined type
|
Python
|
mit
|
acuros/noopy
|
e861e74374d22d3684dccfa5e0063ff37549bcfc
|
api/app.py
|
api/app.py
|
from flask import Flask
from flask import request
from flask import jsonify

from y_text_recommender_system.recommender import recommend

app = Flask(__name__)


class InvalidUsage(Exception):
    """API error carrying an HTTP 400 status and a JSON-serializable payload."""
    status_code = 400

    def __init__(self, message, payload=None):
        Exception.__init__(self)
        self.message = message
        self.payload = payload

    def to_dict(self):
        # Merge the optional payload with the message for the JSON response.
        rv = dict(self.payload or ())
        rv['message'] = self.message
        return rv


@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
    """Render InvalidUsage exceptions as JSON error responses."""
    response = jsonify(error.to_dict())
    response.status_code = error.status_code
    return response


@app.route('/recommender/', methods=['POST'])
def recommender():
    """POST endpoint: expects JSON with `doc` (dict) and `docs` (list)."""
    content = request.get_json()
    if content is not None:
        doc = content.get('doc', {})
        docs = content.get('docs', [])
        _verify_parameters(doc, docs)
        result = recommend(doc, docs)
        return jsonify(result)
    else:
        msg = 'You need to send the parameters: doc and docs'
        raise InvalidUsage(msg)


def _verify_parameters(doc, docs):
    """Validate request parameters, raising InvalidUsage on bad input."""
    # Bug fix: `doc == {}` missed a JSON null (None), which then hit the
    # isinstance check below with the misleading "should be a dict" message.
    # `not doc` reports missing/empty input correctly.
    if not doc:
        msg = 'The parameter `doc` is missing or empty'
        raise InvalidUsage(msg)
    if not isinstance(doc, dict):
        msg = 'The parameter `doc` should be a dict'
        raise InvalidUsage(msg)
    if len(docs) == 0:
        msg = 'The parameter `docs` is missing or empty'
        raise InvalidUsage(msg)
|
from flask import Flask
from flask import request
from flask import jsonify

from y_text_recommender_system.recommender import recommend

app = Flask(__name__)


class InvalidUsage(Exception):
    """API error carrying an HTTP 400 status and a JSON-serializable payload."""
    status_code = 400

    def __init__(self, message, payload=None):
        Exception.__init__(self)
        self.message = message
        self.payload = payload

    def to_dict(self):
        # Merge the optional payload with the message for the JSON response.
        rv = dict(self.payload or ())
        rv['message'] = self.message
        return rv


@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
    """Render InvalidUsage exceptions as JSON error responses."""
    response = jsonify(error.to_dict())
    response.status_code = error.status_code
    return response


@app.route('/recommender/', methods=['POST'])
def recommender():
    """POST endpoint: expects JSON with `doc` (dict) and `docs` (list)."""
    content = request.get_json()
    if content is not None:
        doc = content.get('doc', {})
        docs = content.get('docs', [])
        _verify_parameters(doc, docs)
        result = recommend(doc, docs)
        return jsonify(result)
    else:
        msg = 'You need to send the parameters: doc and docs'
        raise InvalidUsage(msg)


def _verify_parameters(doc, docs):
    """Validate request parameters, raising InvalidUsage on bad input."""
    # Idiom fix: `not doc` is the Pythonic, exactly-equivalent spelling of
    # `bool(doc) is False` (catches None and empty dict alike).
    if not doc:
        msg = 'The parameter `doc` is missing or empty'
        raise InvalidUsage(msg)
    if not isinstance(doc, dict):
        msg = 'The parameter `doc` should be a dict'
        raise InvalidUsage(msg)
    if len(docs) == 0:
        msg = 'The parameter `docs` is missing or empty'
        raise InvalidUsage(msg)
|
Refactor to change the comparator of dict
|
Refactor to change the comparator of dict
|
Python
|
mit
|
joaojunior/y_text_recommender_system
|
89fd94bb06e81f38b40bd75d793107599a1b7c48
|
freedomain.py
|
freedomain.py
|
from flask import Flask

app = Flask(__name__)


@app.route('/')
def start():
    """Root endpoint.

    Bug fix: the view previously declared a `count` parameter, but the
    route '/' captures no URL variables, so Flask raised a TypeError on
    every request.
    """
    return 'SETUP APP'


if __name__ == '__main__':
    app.run(host="172.31.27.41", port=8080)
|
from flask import Flask
import time
app = Flask(__name__)
# NOTE(review): 'v' is missing between 'u' and 'w' — confirm whether intentional.
alphabet = 'abcdefghijklmnopqrstuwxyz'
number = '1234567890'
def numbering_system():
    """Build a mapping from base-36 digit character to its integer value."""
    base_system = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    result = {}
    for csc_n in base_system:
        result[csc_n] = base_system.find(csc_n)
    return result
# Module-level cache of the digit -> value mapping.
ns = numbering_system()
def csc(sym):
    """Concatenate the decimal values of each digit character in `sym`."""
    result = ''
    for s in sym:
        result += str(ns[s])
    return result
def r_csc(num):
    """Reverse lookup: return the digit character whose value equals `num`.

    Returns the sentinel string 'out_of_range' when no digit matches.
    NOTE(review): linear scan over a freshly rebuilt dict each call; the
    cached `ns` could be inverted once instead.
    """
    for key in numbering_system().keys():
        if ns[key] == int(num):
            return key
    return 'out_of_range'
def increment(csc_number):
    """Increment a little-endian base-36 string, appending a digit on overflow."""
    csc_len = len(csc_number)
    i = 0
    while 1:
        # NOTE(review): this branch appears unreachable — i only grows by 1
        # and the i == csc_len branch below breaks first.
        if i > csc_len:
            csc_number += '0'
        if i == csc_len:
            # All digits overflowed: widen the number with a fresh digit.
            csc_number += '0'
            break
        num = csc_number[i]
        if num in ns.keys():
            csc_result = r_csc(int(csc(num))+1)
            if csc_result != 'out_of_range':
                # Digit incremented without overflow: done.
                csc_number = csc_number[:i] + csc_result + csc_number[i+1:]
                break
            else:
                # Overflow: reset this digit and carry into the next one.
                csc_number = csc_number[:i] + '0' + csc_number[i+1:]
                i += 1
        else:
            # Unknown character: normalize to '0' and carry on.
            csc_number = csc_number[:i] + '0' + csc_number[i+1:]
            i += 1
    return csc_number
def word_generator(csc_number):
    # TODO: stub — always returns 0; real generation not implemented yet.
    return 0
def getDifTime(s):
    """Return the elapsed seconds since the millisecond timestamp `s`."""
    current_milli_time2 = lambda: int(round(time.time() * 1000))
    endTime = current_milli_time2()
    return float(endTime - s) / 1000
@app.route('/<count>')
def freedomain(count):
    # Placeholder endpoint; `count` is captured from the URL but unused so far.
    return 'TEST'
if __name__ == '__main__':
    app.run(host="172.31.27.41", port=8080)
    #app.run()
|
Add base methods for dictionary generation
|
Add base methods for dictionary generation
|
Python
|
mit
|
cludtk/freedomain,cludtk/freedomain
|
1f4ee4e9d978322938579abc4c6723fdc783937d
|
build.py
|
build.py
|
#!/usr/bin/env python
import os
import subprocess
import sys
def build(pkgpath):
os.chdir(pkgpath)
targets = [
'build',
'package',
'install',
'clean',
'clean-depends',
]
for target in targets:
p = subprocess.Popen(
['bmake', target],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = p.communicate()
log = 'bmake-' + target + '-log.txt'
with open(log, 'w+') as f:
f.write(out)
f.write(err)
assert p.returncode == 0, '%s %s' % (pkg, target)
if __name__ == '__main__':
home = os.environ['HOME']
localbase = os.path.join(home, 'usr', 'pkgsrc')
lines = sys.stdin.readlines()
pkgs = [line.rstrip('\n') for line in lines]
pkgpaths = [pkg.split(' ')[0] for pkg in pkgs]
for pkgpath in pkgpaths:
print pkgpath
os.chdir(localbase)
assert os.path.exists(os.path.join(localbase, pkgpath))
build(pkgpath)
|
#!/usr/bin/env python
from __future__ import print_function
import os
import subprocess
import sys
def build(pkgpath):
    """Run the standard pkgsrc build targets for one package directory.

    Changes into *pkgpath*, then runs each bmake target in order, writing
    the target's stdout and stderr to a per-target log file.

    :param pkgpath: package directory, relative to the current directory.
    :raises AssertionError: if any bmake target exits non-zero.
    """
    os.chdir(pkgpath)
    targets = [
        'build',
        'package',
        'install',
        'clean',
        'clean-depends',
    ]
    for target in targets:
        p = subprocess.Popen(
            ['bmake', target],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        out, err = p.communicate()
        log = 'bmake-' + target + '-log.txt'
        with open(log, 'w+') as f:
            f.write(out)
            f.write(err)
        # Bug fix: the assert message referenced undefined 'pkg', which
        # would raise NameError instead of the intended AssertionError.
        assert p.returncode == 0, '%s %s' % (pkgpath, target)
if __name__ == '__main__':
    # Read "pkgpath ..." lines from stdin; only the first field is used.
    home = os.environ['HOME']
    localbase = os.path.join(home, 'usr', 'pkgsrc')
    lines = sys.stdin.readlines()
    pkgs = [line.rstrip('\n') for line in lines]
    pkgpaths = [pkg.split(' ')[0] for pkg in pkgs]
    for pkgpath in pkgpaths:
        print(pkgpath)
        # build() chdirs into the package, so return to localbase each time.
        os.chdir(localbase)
        assert os.path.exists(os.path.join(localbase, pkgpath))
        build(pkgpath)
|
Use the Python 3 print function.
|
Use the Python 3 print function.
|
Python
|
isc
|
eliteraspberries/minipkg,eliteraspberries/minipkg
|
c12cbae226f42405a998b93c6fd7049aadc6a19c
|
build.py
|
build.py
|
import os
import string
if __name__ == '__main__':
    patch_file = 'example.patch'
    base_name, _ = os.path.splitext(patch_file)
    # Values substituted into the $-placeholders of the spec template.
    values = {
        'name': 'kpatch-module-{}'.format(base_name),
        'patch_file': patch_file,
        'kmod_filename': 'kpatch-{}.ko'.format(base_name),
        'description': 'Package generated from {} by '
                       'kpatch-package-builder'.format(patch_file),
    }
    with open('kpatch-patch.spec') as f:
        spec_template = string.Template(f.read())
    print(spec_template.substitute(values))
|
import os
import string
def generate_rpm_spec(template, patch_file):
    """Render an RPM spec for a kpatch module from a $-style template.

    :param template: spec file text with $name, $patch_file,
        $kmod_filename and $description placeholders.
    :param patch_file: patch file name; its basename (without extension)
        seeds the package and kernel-module names.
    :return: the rendered spec text.
    """
    base_name = os.path.splitext(patch_file)[0]
    substitutions = {
        'name': 'kpatch-module-{}'.format(base_name),
        'patch_file': patch_file,
        'kmod_filename': 'kpatch-{}.ko'.format(base_name),
        'description': 'Package generated from {} by '
                       'kpatch-package-builder'.format(patch_file),
    }
    return string.Template(template).substitute(substitutions)
if __name__ == '__main__':
    # Render the bundled spec template against a sample patch and print it.
    with open('kpatch-patch.spec') as f:
        template = f.read()
    print(generate_rpm_spec(template, 'example.patch'))
|
Split spec generation into function
|
Split spec generation into function
|
Python
|
mit
|
centos-livepatching/kpatch-package-builder
|
916a02a609af6dc125b0a82215adb94858f4d597
|
yutu.py
|
yutu.py
|
import discord
from discord.ext.commands import Bot
import json
client = Bot("~", game=discord.Game(name="~help"))
@client.event
async def on_ready():
    # Log once the bot has connected and authenticated with Discord.
    print('We have logged in as {0.user}'.format(client))
@client.command()
async def highfive(ctx):
    '''
    Give Yutu a high-five
    '''
    # Replies mentioning both the bot (ctx.me) and the command author.
    await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@client.command()
async def cute(ctx, member: discord.Member = None):
    '''
    Tell someone they are cute!

    With no member given, the bot compliments the command author instead.
    '''
    if member is None:
        # No target: the bot itself compliments the author.
        first = ctx.me
        second = ctx.author
    else:
        first = ctx.author
        second = member
    post = discord.Embed(description='**{0.display_name}** thinks that **{1.display_name}** is cute!'.format(first, second))
    post.set_image(url="https://i.imgur.com/MuVAkV2.gif")
    await ctx.send(embed=post)
if __name__ == "__main__":
with open("cfg.json") as fh:
token = json.load(fh)['token']
client.run(token)
|
import discord
from discord.ext.commands import Bot
import json
client = Bot("~", game=discord.Game(name="~help"))
@client.event
async def on_ready():
    # Log once the bot has connected and authenticated with Discord.
    print('We have logged in as {0.user}'.format(client))
@client.command()
async def highfive(ctx):
    '''
    Give Yutu a high-five
    '''
    # Replies mentioning both the bot (ctx.me) and the command author.
    await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@client.command()
async def cute(ctx, user: discord.Member = None):
    """
    Tell someone they are cute!

    Tells a user that you think they are cute, if you don't give a user, then Yutu will let you know that you are cute.
    """
    if user is None:
        # No target: the bot itself compliments the author.
        first = ctx.me
        second = ctx.author
    else:
        first = ctx.author
        second = user
    post = discord.Embed(description='**{0.display_name}** thinks that **{1.display_name}** is cute!'.format(first, second))
    post.set_image(url="https://i.imgur.com/MuVAkV2.gif")
    await ctx.send(embed=post)
if __name__ == "__main__":
with open("cfg.json") as fh:
token = json.load(fh)['token']
client.run(token)
|
Add help text for cute
|
Add help text for cute
|
Python
|
mit
|
HarkonenBade/yutu
|
e35767544e7c6b4461e511eaad42c047abcbe911
|
openprocurement/tender/esco/utils.py
|
openprocurement/tender/esco/utils.py
|
# -*- coding: utf-8 -*-
from openprocurement.api.utils import get_now
def request_get_now(request):
    # Request-scoped wrapper around get_now(); the request argument is
    # unused but kept to satisfy the expected call signature.
    return get_now()
|
# -*- coding: utf-8 -*-
from decimal import Decimal
from openprocurement.api.utils import get_now
def request_get_now(request):
    # Request-scoped wrapper around get_now(); the request argument is
    # unused but kept to satisfy the expected call signature.
    return get_now()
def to_decimal(fraction):
    """Convert a fraction-like object (numerator/denominator) to Decimal.

    :param fraction: object exposing integer ``numerator`` and
        ``denominator`` attributes (e.g. fractions.Fraction).
    :return: the ratio as a decimal.Decimal.
    """
    numerator = Decimal(fraction.numerator)
    denominator = Decimal(fraction.denominator)
    return numerator / denominator
|
Add function to convert fraction to decimal
|
Add function to convert fraction to decimal
|
Python
|
apache-2.0
|
openprocurement/openprocurement.tender.esco
|
aeac11d889695f17aab3b972b64101eaefd322f2
|
fuzzycount.py
|
fuzzycount.py
|
from django.conf import settings
from django.db import connections
from django.db.models.query import QuerySet
from model_utils.managers import PassThroughManager
class FuzzyCountQuerySet(QuerySet):
    """QuerySet whose count() uses PostgreSQL's planner estimate.

    For unfiltered querysets on a PostgreSQL backend, reads the
    ``reltuples`` estimate from ``pg_class`` instead of running an
    exact (and potentially slow) COUNT(*).
    """

    def count(self):
        is_postgresql = settings.DATABASES[self.db]["ENGINE"].endswith(("postgis", "postgresql"))
        is_filtered = self.query.where or self.query.having
        # Fall back to an exact count for non-PostgreSQL backends or when
        # filters would make the table-level estimate wrong.
        if not is_postgresql or is_filtered:
            return super(FuzzyCountQuerySet, self).count()
        cursor = connections[self.db].cursor()
        # reltuples is the planner's row-count estimate for the table.
        cursor.execute("SELECT reltuples FROM pg_class "
                       "WHERE relname = '%s';" % self.model._meta.db_table)
        return int(cursor.fetchone()[0])


FuzzyCountManager = PassThroughManager.for_queryset_class(FuzzyCountQuerySet)
|
from django.conf import settings
from django.db import connections
from django.db.models.query import QuerySet
from model_utils.managers import PassThroughManager
class FuzzyCountQuerySet(QuerySet):
    """QuerySet whose count() uses PostgreSQL's planner estimate.

    For unfiltered querysets on a PostgreSQL backend (including postgis
    and django_postgrespool), reads the ``reltuples`` estimate from
    ``pg_class`` instead of running an exact (and potentially slow)
    COUNT(*).
    """

    def count(self):
        postgres_engines = ("postgis", "postgresql", "django_postgrespool")
        engine = settings.DATABASES[self.db]["ENGINE"].split(".")[-1]
        # Bug fix: original referenced undefined 'postgresql_engines'
        # (the tuple is named 'postgres_engines') and was missing the
        # colon on the if statement below.
        is_postgres = engine.startswith(postgres_engines)
        is_filtered = self.query.where or self.query.having
        # Fall back to an exact count for non-PostgreSQL backends or when
        # filters would make the table-level estimate wrong.
        if not is_postgres or is_filtered:
            return super(FuzzyCountQuerySet, self).count()
        cursor = connections[self.db].cursor()
        # reltuples is the planner's row-count estimate for the table.
        cursor.execute("SELECT reltuples FROM pg_class "
                       "WHERE relname = '%s';" % self.model._meta.db_table)
        return int(cursor.fetchone()[0])


FuzzyCountManager = PassThroughManager.for_queryset_class(FuzzyCountQuerySet)
|
Fix engine check and add a check for django_postgrespool.
|
Fix engine check and add a check for django_postgrespool.
|
Python
|
bsd-2-clause
|
stephenmcd/django-postgres-fuzzycount
|
acce959e4885a52ba4a80beaed41a56aac63844e
|
tests/opwen_email_server/api/test_client_read.py
|
tests/opwen_email_server/api/test_client_read.py
|
from contextlib import contextmanager
from os import environ
from unittest import TestCase
class DownloadTests(TestCase):
    """Tests for the client_read.download endpoint's client authorization."""

    def test_denies_unknown_client(self):
        # A client id not present in the configured clients gets HTTP 403.
        with self._given_clients('{"client1": "id1"}') as download:
            message, status = download('unknown_client')

        self.assertEqual(status, 403)

    @classmethod
    @contextmanager
    def _given_clients(cls, clients: str):
        # The module reads LOKOLE_CLIENTS at import time, so the variable
        # must be set before client_read is imported.
        environ['LOKOLE_CLIENTS'] = clients
        from opwen_email_server.api import client_read
        yield client_read.download
        del client_read
|
from contextlib import contextmanager
from unittest import TestCase
from opwen_email_server.api import client_read
from opwen_email_server.services.auth import EnvironmentAuth
class DownloadTests(TestCase):
    """Tests for the client_read.download endpoint's client authorization."""

    def test_denies_unknown_client(self):
        # A client id not present in CLIENTS gets HTTP 403.
        with self.given_clients({'client1': 'id1'}):
            message, status = client_read.download('unknown_client')

        self.assertEqual(status, 403)

    @contextmanager
    def given_clients(self, clients):
        # Swap in an EnvironmentAuth built from the given mapping, restoring
        # the module's original CLIENTS on exit.
        original_clients = client_read.CLIENTS
        client_read.CLIENTS = EnvironmentAuth(clients)
        yield
        client_read.CLIENTS = original_clients
|
Remove need to set environment variables in test
|
Remove need to set environment variables in test
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
bfc7e08ba70ba0e3acb9e4cc69b70c816845b6cb
|
djofx/views/home.py
|
djofx/views/home.py
|
from django.db.models import Sum
from django.views.generic import TemplateView
from djofx.forms import OFXForm
from djofx.views.base import PageTitleMixin, UserRequiredMixin
from djofx import models
class HomePageView(PageTitleMixin, UserRequiredMixin, TemplateView):
    """Dashboard view: the user's accounts, an OFX upload form, and a
    per-category spending breakdown."""

    template_name = "djofx/home.html"

    def get_context_data(self, **kwargs):
        context = super(HomePageView, self).get_context_data(**kwargs)
        context['accounts'] = models.Account.objects.filter(
            owner=self.request.user
        )
        context['form'] = OFXForm()

        # Spending (negative amounts) grouped by non-void category,
        # largest spend first.
        breakdown = models.Transaction.objects.filter(
            amount__lt=0,
            transaction_category__is_void=False
        ).values(
            'transaction_category__pk',
            'transaction_category__name'
        ).annotate(
            total=Sum('amount')
        ).order_by('-total')
        # Tuples of (absolute amount, category pk, category name).
        context['breakdown'] = [
            (
                abs(item['total']),
                item['transaction_category__pk'],
                item['transaction_category__name']
            )
            for item in breakdown
        ]

        return context
|
from datetime import date, timedelta
from django.db.models import Sum
from django.views.generic import TemplateView
from djofx.forms import OFXForm
from djofx.views.base import PageTitleMixin, UserRequiredMixin
from djofx import models
from operator import itemgetter
class HomePageView(PageTitleMixin, UserRequiredMixin, TemplateView):
    """Dashboard view: the user's accounts, an OFX upload form, and a
    per-category spending breakdown for the last 120 days (including a
    synthetic "Uncategorised" bucket)."""

    template_name = "djofx/home.html"

    def get_context_data(self, **kwargs):
        context = super(HomePageView, self).get_context_data(**kwargs)
        context['accounts'] = models.Account.objects.filter(
            owner=self.request.user
        )
        context['form'] = OFXForm()

        # Only consider spending from the last 120 days.
        cutoff = date.today() - timedelta(days=120)

        uncategorised_breakdown = models.Transaction.objects.filter(
            amount__lt=0,
            transaction_category__isnull=True,
            date__gte=cutoff
        ).aggregate(
            total=Sum('amount')
        )
        # Bug fix: Sum() aggregates to None when no rows match, which
        # would crash the '* -1' arithmetic below; treat it as zero.
        uncategorised_total = uncategorised_breakdown['total'] or 0

        # Spending (negative amounts) grouped by non-void category.
        breakdown = models.Transaction.objects.filter(
            amount__lt=0,
            transaction_category__is_void=False,
            date__gte=cutoff
        ).values(
            'transaction_category__pk',
            'transaction_category__name'
        ).annotate(
            total=Sum('amount')
        ).order_by('-total')
        # Tuples of (absolute amount, category pk, category name);
        # pk 0 marks the synthetic "Uncategorised" bucket.
        context['breakdown'] = [
            (
                abs(item['total']),
                item['transaction_category__pk'],
                item['transaction_category__name']
            )
            for item in breakdown
        ]
        context['breakdown'].append(
            (
                uncategorised_total * -1,
                0,
                'Uncategorised'
            )
        )
        context['breakdown'] = sorted(context['breakdown'],
                                      key=itemgetter(0),
                                      reverse=True)

        return context
|
Include uncategorised spending in overview pie chart
|
Include uncategorised spending in overview pie chart
Also, only show last 120 days
|
Python
|
mit
|
dominicrodger/djofx,dominicrodger/djofx,dominicrodger/djofx
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.