commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
cdc6390ec88a14b339cb336fcc0d77e747aae99a
|
sieve/sieve.py
|
sieve/sieve.py
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
Revert back to a generator - it's actually slight faster
|
Revert back to a generator - it's actually slight faster
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
b32900269b3f8c701702e74734ffe248c521fa73
|
dooblr/transformers/__init__.py
|
dooblr/transformers/__init__.py
|
from influxdbclient import InfluxDBClient, InfluxDBClientError, DooblrInfluxDBError
__all__ = ["InfluxDBClient", "InfluxDBClientError", "DooblrInfluxDBError"]
|
from dooblr.transformers.influxdbclient import InfluxDBClient, InfluxDBClientError, DooblrInfluxDBError
__all__ = ["InfluxDBClient", "InfluxDBClientError", "DooblrInfluxDBError"]
|
Fix py3 'relative' import error.
|
Fix py3 'relative' import error.
|
Python
|
isc
|
makerslocal/dooblr
|
288127c575c7672e3a41d7ada360d56a4853f279
|
scripts/examples/14-WiFi-Shield/fw_update.py
|
scripts/examples/14-WiFi-Shield/fw_update.py
|
# WINC Firmware Update Script.
#
# This script updates the ATWINC1500 WiFi module firmware.
# Copy the firmware image to uSD card before running this script.
# NOTE: Firmware version 19.5.2 does NOT support ATWINC1500-MR210PA.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
# Start the firmware update process.
# For ATWINC1500-MR210PA/B
#wlan.fw_update("/winc_19_4_4.bin")
# For ATWINC1500-MR210PB only.
wlan.fw_update("/winc_19_5_2.bin")
|
# WINC Firmware Update Script.
#
# This script updates the ATWINC1500 WiFi module firmware.
# Copy the firmware image to uSD card before running this script.
# NOTE: Older fimware versions are no longer supported by the host driver.
# NOTE: The latest firmware (19.6.1) only works on ATWINC1500-MR210PB.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
# For ATWINC1500-MR210PB only.
wlan.fw_update("/winc_19_6_1.bin")
|
Update WiFi firmware update script.
|
Update WiFi firmware update script.
|
Python
|
mit
|
openmv/openmv,openmv/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv
|
8f80099062a03fcf6be783f3e5260882f704ec22
|
scss/tests/test_files.py
|
scss/tests/test_files.py
|
from __future__ import absolute_import
import glob
import os.path
import pytest
from scss import Scss
HERE = os.path.join(os.path.split(__file__)[0], 'files')
@pytest.mark.parametrize(
('scss_fn', 'css_fn'), [
(scss_fn, os.path.splitext(scss_fn)[0] + '.css')
for scss_fn in glob.glob(os.path.join(HERE, '*/*.scss'))
]
)
def test_pair(scss_fn, css_fn):
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
compiler = Scss(scss_opts=dict(compress=0))
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert actual == expected
|
from __future__ import absolute_import
import glob
import os.path
import pytest
from scss import Scss
HERE = os.path.join(os.path.split(__file__)[0], 'files')
@pytest.mark.parametrize(
('scss_fn', 'css_fn'), [
(scss_fn, os.path.splitext(scss_fn)[0] + '.css')
for scss_fn in glob.glob(os.path.join(HERE, '*/*.scss'))
]
)
def test_pair(scss_fn, css_fn):
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
compiler = Scss(scss_opts=dict(compress=0))
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
Swap the test-file assertion, to make output more sensible.
|
Swap the test-file assertion, to make output more sensible.
|
Python
|
mit
|
cpfair/pyScss,cpfair/pyScss,Kronuz/pyScss,hashamali/pyScss,hashamali/pyScss,Kronuz/pyScss,Kronuz/pyScss,hashamali/pyScss,Kronuz/pyScss,cpfair/pyScss
|
7b3267b2bae436e0580e2a229a64bd8d6a04bc1f
|
manila_ui/local/local_settings.d/_90_manila_shares.py
|
manila_ui/local/local_settings.d/_90_manila_shares.py
|
# Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
# Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
settings.POLICY_FILES.update({
'share': 'manila_policy.json',
})
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
Define the default policy file
|
Define the default policy file
This change ensures that the default policy file for Manila API access
is defined by default, so that operators can deploy their own policy
more easily.
Change-Id: Ie890766ea2a274791393304cdfe532e024171195
|
Python
|
apache-2.0
|
openstack/manila-ui,openstack/manila-ui,openstack/manila-ui
|
fd90fc7ce0c8a8070966e4a8273c69b8c13955d3
|
masters/master.tryserver.webrtc/master_site_config.py
|
masters/master.tryserver.webrtc/master_site_config.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = 'tryserver@webrtc.org'
reply_to = 'chrome-troopers+tryserver@google.com'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = 'tryserver@webrtc.org'
reply_to = 'chrome-troopers+tryserver@google.com'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = None
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
Make trybots use HEAD instead of LKGR
|
WebRTC: Make trybots use HEAD instead of LKGR
It's about time we make this change, which turned out
to be very simple.
Review URL: https://codereview.chromium.org/776233003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@293261 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
d88a6afddcc9ac90b1fb2327d4df4ece29f1c031
|
docs/plans/wordpress/mysql_boot.py
|
docs/plans/wordpress/mysql_boot.py
|
#!/usr/bin/env python
import sys
import os
import simplejson as json
f = open("bootconf.json", "r")
vals_dict = json.load(f)
f.close()
os.putenv('DEBIAN_FRONTEND', 'noninteractive')
os.putenv('TERM', 'dumb')
password=vals_dict['dbpassword']
dbname=vals_dict['dbname']
commands = []
commands.append('sudo -E apt-get -y -q install mysql-server-5.1')
commands.append('sudo -E mysqladmin -u root password %s' % (password))
commands.append('sudo -E mysqladmin --password=%s create %s' % (password, dbname))
commands.append("sudo -E mysql --password=%s -e \"GRANT Select, Insert, Update ON *.* TO 'root'@'%%' IDENTIFIED BY '%s';\"" % (password, password))
commands.append("sudo -E sed -i 's/bind-address.*/bind-address = 0.0.0.0/' /etc/mysql/my.cnf")
commands.append("sudo -E restart mysql")
for cmd in commands:
print cmd
rc = os.system(cmd)
if rc != 0:
print "ERROR! %d" % (rc)
sys.exit(rc)
print "SUCCESS"
sys.exit(0)
|
#!/usr/bin/env python
import sys
import os
import simplejson as json
f = open("bootconf.json", "r")
vals_dict = json.load(f)
f.close()
os.putenv('DEBIAN_FRONTEND', 'noninteractive')
os.putenv('TERM', 'dumb')
password=vals_dict['dbpassword']
dbname=vals_dict['dbname']
commands = []
commands.append('sudo -E apt-get -y -q install mysql-server-5.1')
commands.append('sudo -E mysqladmin -u root password %s' % (password))
commands.append('sudo -E mysqladmin --password=%s create %s' % (password, dbname))
commands.append("sudo -E mysql --password=%s -e \"GRANT Select, Insert, Update, Create, Delete ON *.* TO 'root'@'%%' IDENTIFIED BY '%s';\"" % (password, password))
commands.append("sudo -E sed -i 's/bind-address.*/bind-address = 0.0.0.0/' /etc/mysql/my.cnf")
commands.append("sudo -E restart mysql")
for cmd in commands:
print cmd
rc = os.system(cmd)
if rc != 0:
print "ERROR! %d" % (rc)
sys.exit(rc)
print "SUCCESS"
sys.exit(0)
|
Add create and delete permissions to mysql user in wordpress example
|
Add create and delete permissions to mysql user in wordpress example
|
Python
|
apache-2.0
|
buzztroll/cloudinit.d,nimbusproject/cloudinit.d,buzztroll/cloudinit.d,nimbusproject/cloudinit.d
|
68b499ea6b73232b3b8a860b3c8b808a1736b733
|
myfedora/controllers/template.py
|
myfedora/controllers/template.py
|
from ${package}.lib.base import *
class TemplateController(BaseController):
def view(self, url):
"""By default, the final controller tried to fulfill the request
when no other routes match. It may be used to display a template
when all else fails, e.g.::
def view(self, url):
return render('/%s' % url)
Or if you're using Mako and want to explicitly send a 404 (Not
Found) response code when the requested template doesn't exist::
import mako.exceptions
def view(self, url):
try:
return render('/%s' % url)
except mako.exceptions.TopLevelLookupException:
abort(404)
By default this controller aborts the request with a 404 (Not
Found)
"""
abort(404)
|
from myfedora.lib.base import *
class TemplateController(BaseController):
def view(self, url):
"""By default, the final controller tried to fulfill the request
when no other routes match. It may be used to display a template
when all else fails, e.g.::
def view(self, url):
return render('/%s' % url)
Or if you're using Mako and want to explicitly send a 404 (Not
Found) response code when the requested template doesn't exist::
import mako.exceptions
def view(self, url):
try:
return render('/%s' % url)
except mako.exceptions.TopLevelLookupException:
abort(404)
By default this controller aborts the request with a 404 (Not
Found)
"""
abort(404)
|
Fix a busted import statement in our TemplateController
|
Fix a busted import statement in our TemplateController
|
Python
|
agpl-3.0
|
Fale/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages,Fale/fedora-packages,Fale/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages
|
926ddeb63f0366a59f14adbab5421ccb7f78f144
|
exercises/book-store/example.py
|
exercises/book-store/example.py
|
BOOK_PRICE = 8
def _group_price(size):
discounts = [0, .05, .1, .2, .25]
if not (0 < size <= 5):
raise ValueError('size must be in 1..' + len(discounts))
return 8 * size * (1 - discounts[size - 1])
def calculate_total(books, price_so_far=0.):
if not books:
return price_so_far
groups = list(set(books))
min_price = float('inf')
for i in range(len(groups)):
remaining_books = books[:]
for v in groups[:i + 1]:
remaining_books.remove(v)
price = calculate_total(remaining_books,
price_so_far + _group_price(i + 1))
min_price = min(min_price, price)
return min_price
|
BOOK_PRICE = 8
def _group_price(size):
discounts = [0, .05, .1, .2, .25]
if not (0 < size <= 5):
raise ValueError('size must be in 1..' + len(discounts))
return BOOK_PRICE * size * (1 - discounts[size - 1])
def calculate_total(books, price_so_far=0.):
if not books:
return price_so_far
groups = list(set(books))
min_price = float('inf')
for i in range(len(groups)):
remaining_books = books[:]
for v in groups[:i + 1]:
remaining_books.remove(v)
price = calculate_total(remaining_books,
price_so_far + _group_price(i + 1))
min_price = min(min_price, price)
return min_price
|
Use book price constant in calculation
|
book-store: Use book price constant in calculation
|
Python
|
mit
|
N-Parsons/exercism-python,pheanex/xpython,jmluy/xpython,behrtam/xpython,exercism/xpython,smalley/python,exercism/xpython,exercism/python,N-Parsons/exercism-python,smalley/python,pheanex/xpython,jmluy/xpython,exercism/python,behrtam/xpython
|
a220a62e4444e75974ad28915e7216a276f60c9c
|
test_valid_object_file.py
|
test_valid_object_file.py
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
# increase timeout so that the Travis builds succeed
name_resolve.NAME_RESOLVE_TIMEOUT.set(30)
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
Increase timeout for satrapy name lookups
|
Increase timeout for satrapy name lookups
|
Python
|
bsd-2-clause
|
mwcraig/feder-object-list
|
5cd3b53f677fd6ab6e77bee5b7d42cf2ac85e47f
|
feincms/apps.py
|
feincms/apps.py
|
# flake8: noqa
from feincms.content.application.models import *
|
def __getattr__(key):
# Work around Django 3.2's autoloading of *.apps modules (AppConfig
# autodiscovery)
if key in {
"ApplicationContent",
"app_reverse",
"app_reverse_lazy",
"permalink",
"UnpackTemplateResponse",
"standalone",
"unpack",
}:
from feincms.content.application import models
return getattr(models, key)
raise AttributeError("Unknown attribute '%s'" % key)
|
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
|
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
|
Python
|
bsd-3-clause
|
mjl/feincms,feincms/feincms,mjl/feincms,feincms/feincms,feincms/feincms,mjl/feincms
|
65e4aba86730525a75e915fe61eb15b681817cc3
|
app/commander.py
|
app/commander.py
|
import rethinkdb as r
class Commander:
def process_message(self, message):
return "I got your message"
|
import re
import rethinkdb as r
class Commander:
def process_message(self, message):
return self.parse_message(message)
def parse_message(self, message):
stripped_message = message.strip()
commander_match = re.match(r'commander\s*(.*)',
stripped_message,
flags=re.IGNORECASE)
if commander_match:
# parse message as incident commander message
task_match = re.match(r'add task\s*(.*)',
commander_match.groups()[0],
flags=re.I)
if task_match:
return self.add_task(task_match.groups()[0])
return 'no match for this command'
def add_task(self, task):
# add task to task list
print(task)
return 'Added task to list!'
|
Add parsing for adding message
|
Add parsing for adding message
|
Python
|
mit
|
henryfjordan/incident-commander
|
a28826a0b57742d3cb2ac57c0a17b37f2afff302
|
homedisplay/control_milight/management/commands/listen_433.py
|
homedisplay/control_milight/management/commands/listen_433.py
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
Use logging. Remove hardcoded settings
|
Use logging. Remove hardcoded settings
|
Python
|
bsd-3-clause
|
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
|
c2fe4483ba70f0ca37b4713a51baf0804a68accd
|
lms/djangoapps/course_wiki/plugins/markdownedx/wiki_plugin.py
|
lms/djangoapps/course_wiki/plugins/markdownedx/wiki_plugin.py
|
# -*- coding: utf-8 -*-
from wiki.core.plugins.base import BasePlugin
from wiki.core.plugins import registry as plugin_registry
from course_wiki.plugins.markdownedx import mdx_mathjax, mdx_video
class ExtendMarkdownPlugin(BasePlugin):
"""
This plugin simply loads all of the markdown extensions we use in edX.
"""
markdown_extensions = [
mdx_mathjax.MathJaxExtension(configs={}),
mdx_video.VideoExtension(configs={})]
plugin_registry.register(ExtendMarkdownPlugin)
|
# -*- coding: utf-8 -*-
from wiki.core.plugins.base import BasePlugin
from wiki.core.plugins import registry as plugin_registry
from course_wiki.plugins.markdownedx import mdx_mathjax, mdx_video
class ExtendMarkdownPlugin(BasePlugin):
"""
This plugin simply loads all of the markdown extensions we use in edX.
"""
markdown_extensions = [
mdx_mathjax.MathJaxExtension(configs={}),
mdx_video.VideoExtension(configs={}),
]
plugin_registry.register(ExtendMarkdownPlugin)
|
Fix PEP8: E126 continuation line over-indented
|
Fix PEP8: E126 continuation line over-indented
for hanging indent
|
Python
|
agpl-3.0
|
IndonesiaX/edx-platform,mbareta/edx-platform-ft,proversity-org/edx-platform,IONISx/edx-platform,Edraak/edx-platform,doganov/edx-platform,shabab12/edx-platform,lduarte1991/edx-platform,deepsrijit1105/edx-platform,pomegranited/edx-platform,prarthitm/edxplatform,fintech-circle/edx-platform,prarthitm/edxplatform,waheedahmed/edx-platform,xingyepei/edx-platform,jbzdak/edx-platform,louyihua/edx-platform,TeachAtTUM/edx-platform,stvstnfrd/edx-platform,nttks/edx-platform,cognitiveclass/edx-platform,jjmiranda/edx-platform,Endika/edx-platform,antoviaque/edx-platform,JCBarahona/edX,ampax/edx-platform,zubair-arbi/edx-platform,wwj718/edx-platform,bigdatauniversity/edx-platform,zhenzhai/edx-platform,ahmadiga/min_edx,synergeticsedx/deployment-wipro,doganov/edx-platform,waheedahmed/edx-platform,itsjeyd/edx-platform,waheedahmed/edx-platform,solashirai/edx-platform,miptliot/edx-platform,inares/edx-platform,MakeHer/edx-platform,JCBarahona/edX,edx-solutions/edx-platform,bigdatauniversity/edx-platform,teltek/edx-platform,fintech-circle/edx-platform,amir-qayyum-khan/edx-platform,hamzehd/edx-platform,IONISx/edx-platform,caesar2164/edx-platform,Livit/Livit.Learn.EdX,cpennington/edx-platform,defance/edx-platform,stvstnfrd/edx-platform,amir-qayyum-khan/edx-platform,tanmaykm/edx-platform,eduNEXT/edunext-platform,ahmedaljazzar/edx-platform,UOMx/edx-platform,iivic/BoiseStateX,CourseTalk/edx-platform,ovnicraft/edx-platform,kmoocdev2/edx-platform,arbrandes/edx-platform,cpennington/edx-platform,edx-solutions/edx-platform,defance/edx-platform,franosincic/edx-platform,arbrandes/edx-platform,IONISx/edx-platform,arbrandes/edx-platform,halvertoluke/edx-platform,IONISx/edx-platform,Lektorium-LLC/edx-platform,halvertoluke/edx-platform,Edraak/edraak-platform,kmoocdev2/edx-platform,devs1991/test_edx_docmode,simbs/edx-platform,solashirai/edx-platform,Edraak/circleci-edx-platform,marcore/edx-platform,Stanford-Online/edx-platform,Endika/edx-platform,tanmaykm/edx-platform,hamzehd/edx-platform,pomegranited/edx-pl
atform,procangroup/edx-platform,msegado/edx-platform,zubair-arbi/edx-platform,procangroup/edx-platform,deepsrijit1105/edx-platform,nttks/edx-platform,RPI-OPENEDX/edx-platform,appsembler/edx-platform,shurihell/testasia,kursitet/edx-platform,edx-solutions/edx-platform,ahmedaljazzar/edx-platform,zubair-arbi/edx-platform,longmen21/edx-platform,a-parhom/edx-platform,ahmedaljazzar/edx-platform,devs1991/test_edx_docmode,wwj718/edx-platform,jzoldak/edx-platform,cognitiveclass/edx-platform,antoviaque/edx-platform,naresh21/synergetics-edx-platform,edx/edx-platform,gsehub/edx-platform,MakeHer/edx-platform,alexthered/kienhoc-platform,jbzdak/edx-platform,Livit/Livit.Learn.EdX,ahmadiga/min_edx,Edraak/circleci-edx-platform,caesar2164/edx-platform,pabloborrego93/edx-platform,defance/edx-platform,IndonesiaX/edx-platform,cognitiveclass/edx-platform,waheedahmed/edx-platform,wwj718/edx-platform,synergeticsedx/deployment-wipro,stvstnfrd/edx-platform,Endika/edx-platform,alu042/edx-platform,Edraak/edraak-platform,ZLLab-Mooc/edx-platform,CourseTalk/edx-platform,IndonesiaX/edx-platform,longmen21/edx-platform,amir-qayyum-khan/edx-platform,appsembler/edx-platform,romain-li/edx-platform,chrisndodge/edx-platform,lduarte1991/edx-platform,jbzdak/edx-platform,cecep-edu/edx-platform,wwj718/edx-platform,naresh21/synergetics-edx-platform,EDUlib/edx-platform,Lektorium-LLC/edx-platform,ampax/edx-platform,jzoldak/edx-platform,Ayub-Khan/edx-platform,shurihell/testasia,philanthropy-u/edx-platform,antoviaque/edx-platform,alu042/edx-platform,nttks/edx-platform,philanthropy-u/edx-platform,ZLLab-Mooc/edx-platform,BehavioralInsightsTeam/edx-platform,solashirai/edx-platform,franosincic/edx-platform,caesar2164/edx-platform,CredoReference/edx-platform,10clouds/edx-platform,eduNEXT/edx-platform,RPI-OPENEDX/edx-platform,Lektorium-LLC/edx-platform,hastexo/edx-platform,itsjeyd/edx-platform,a-parhom/edx-platform,raccoongang/edx-platform,nttks/edx-platform,jzoldak/edx-platform,mbareta/edx-platform-ft,mcgachey/edx-platf
orm,JCBarahona/edX,pomegranited/edx-platform,marcore/edx-platform,a-parhom/edx-platform,JioEducation/edx-platform,shurihell/testasia,ZLLab-Mooc/edx-platform,bigdatauniversity/edx-platform,teltek/edx-platform,inares/edx-platform,edx/edx-platform,lduarte1991/edx-platform,mcgachey/edx-platform,chrisndodge/edx-platform,synergeticsedx/deployment-wipro,pomegranited/edx-platform,JCBarahona/edX,RPI-OPENEDX/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,alexthered/kienhoc-platform,CourseTalk/edx-platform,teltek/edx-platform,Stanford-Online/edx-platform,mbareta/edx-platform-ft,EDUlib/edx-platform,eduNEXT/edunext-platform,prarthitm/edxplatform,longmen21/edx-platform,xingyepei/edx-platform,romain-li/edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,simbs/edx-platform,BehavioralInsightsTeam/edx-platform,cognitiveclass/edx-platform,jolyonb/edx-platform,pepeportela/edx-platform,proversity-org/edx-platform,mbareta/edx-platform-ft,proversity-org/edx-platform,IndonesiaX/edx-platform,iivic/BoiseStateX,cpennington/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,xingyepei/edx-platform,hamzehd/edx-platform,zhenzhai/edx-platform,ESOedX/edx-platform,miptliot/edx-platform,mitocw/edx-platform,eduNEXT/edx-platform,Edraak/circleci-edx-platform,RPI-OPENEDX/edx-platform,jbzdak/edx-platform,JCBarahona/edX,jjmiranda/edx-platform,kursitet/edx-platform,fintech-circle/edx-platform,ampax/edx-platform,edx-solutions/edx-platform,alu042/edx-platform,Edraak/circleci-edx-platform,raccoongang/edx-platform,TeachAtTUM/edx-platform,itsjeyd/edx-platform,ampax/edx-platform,JioEducation/edx-platform,jolyonb/edx-platform,UOMx/edx-platform,ESOedX/edx-platform,hastexo/edx-platform,iivic/BoiseStateX,Stanford-Online/edx-platform,mcgachey/edx-platform,gsehub/edx-platform,proversity-org/edx-platform,angelapper/edx-platform,CredoReference/edx-platform,CredoReference/edx-platform,zubair-arbi/edx-platform,solashirai/edx-platform,romain-li/edx-platform,cecep-edu/edx-platform,Ayub-Khan/edx-
platform,franosincic/edx-platform,pepeportela/edx-platform,10clouds/edx-platform,Ayub-Khan/edx-platform,halvertoluke/edx-platform,tanmaykm/edx-platform,appsembler/edx-platform,ZLLab-Mooc/edx-platform,eduNEXT/edunext-platform,gsehub/edx-platform,Edraak/edx-platform,solashirai/edx-platform,inares/edx-platform,angelapper/edx-platform,msegado/edx-platform,RPI-OPENEDX/edx-platform,JioEducation/edx-platform,tanmaykm/edx-platform,kursitet/edx-platform,nttks/edx-platform,pabloborrego93/edx-platform,simbs/edx-platform,longmen21/edx-platform,MakeHer/edx-platform,gymnasium/edx-platform,MakeHer/edx-platform,ovnicraft/edx-platform,a-parhom/edx-platform,shabab12/edx-platform,xingyepei/edx-platform,EDUlib/edx-platform,kmoocdev2/edx-platform,jzoldak/edx-platform,defance/edx-platform,franosincic/edx-platform,philanthropy-u/edx-platform,longmen21/edx-platform,prarthitm/edxplatform,eduNEXT/edx-platform,gsehub/edx-platform,itsjeyd/edx-platform,angelapper/edx-platform,gymnasium/edx-platform,analyseuc3m/ANALYSE-v1,zhenzhai/edx-platform,analyseuc3m/ANALYSE-v1,edx/edx-platform,simbs/edx-platform,devs1991/test_edx_docmode,kmoocdev2/edx-platform,alexthered/kienhoc-platform,Stanford-Online/edx-platform,teltek/edx-platform,msegado/edx-platform,Edraak/edraak-platform,Ayub-Khan/edx-platform,alexthered/kienhoc-platform,pepeportela/edx-platform,bigdatauniversity/edx-platform,mitocw/edx-platform,romain-li/edx-platform,cognitiveclass/edx-platform,waheedahmed/edx-platform,BehavioralInsightsTeam/edx-platform,Edraak/edx-platform,synergeticsedx/deployment-wipro,cecep-edu/edx-platform,deepsrijit1105/edx-platform,Edraak/edx-platform,devs1991/test_edx_docmode,louyihua/edx-platform,Ayub-Khan/edx-platform,procangroup/edx-platform,jolyonb/edx-platform,shurihell/testasia,10clouds/edx-platform,pepeportela/edx-platform,jolyonb/edx-platform,eduNEXT/edx-platform,alexthered/kienhoc-platform,IONISx/edx-platform,TeachAtTUM/edx-platform,antoviaque/edx-platform,gymnasium/edx-platform,cpennington/edx-platform,hamzehd/ed
x-platform,simbs/edx-platform,MakeHer/edx-platform,jjmiranda/edx-platform,doganov/edx-platform,naresh21/synergetics-edx-platform,ahmedaljazzar/edx-platform,10clouds/edx-platform,chrisndodge/edx-platform,naresh21/synergetics-edx-platform,Livit/Livit.Learn.EdX,cecep-edu/edx-platform,zubair-arbi/edx-platform,deepsrijit1105/edx-platform,ovnicraft/edx-platform,mitocw/edx-platform,ovnicraft/edx-platform,marcore/edx-platform,gymnasium/edx-platform,Edraak/circleci-edx-platform,iivic/BoiseStateX,louyihua/edx-platform,Edraak/edraak-platform,xingyepei/edx-platform,ahmadiga/min_edx,inares/edx-platform,marcore/edx-platform,angelapper/edx-platform,alu042/edx-platform,zhenzhai/edx-platform,procangroup/edx-platform,Endika/edx-platform,TeachAtTUM/edx-platform,jjmiranda/edx-platform,mcgachey/edx-platform,jbzdak/edx-platform,CourseTalk/edx-platform,miptliot/edx-platform,wwj718/edx-platform,ESOedX/edx-platform,raccoongang/edx-platform,pabloborrego93/edx-platform,UOMx/edx-platform,iivic/BoiseStateX,mcgachey/edx-platform,raccoongang/edx-platform,kursitet/edx-platform,eduNEXT/edunext-platform,ahmadiga/min_edx,shabab12/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,caesar2164/edx-platform,Livit/Livit.Learn.EdX,romain-li/edx-platform,shurihell/testasia,halvertoluke/edx-platform,BehavioralInsightsTeam/edx-platform,halvertoluke/edx-platform,devs1991/test_edx_docmode,JioEducation/edx-platform,pomegranited/edx-platform,msegado/edx-platform,inares/edx-platform,UOMx/edx-platform,hastexo/edx-platform,amir-qayyum-khan/edx-platform,msegado/edx-platform,ESOedX/edx-platform,kursitet/edx-platform,arbrandes/edx-platform,franosincic/edx-platform,ovnicraft/edx-platform,IndonesiaX/edx-platform,pabloborrego93/edx-platform,hastexo/edx-platform,mitocw/edx-platform,doganov/edx-platform,louyihua/edx-platform,bigdatauniversity/edx-platform,doganov/edx-platform,philanthropy-u/edx-platform,zhenzhai/edx-platform,lduarte1991/edx-platform,kmoocdev2/edx-platform,ahmadiga/min_edx,miptliot/edx-platform,
Edraak/edx-platform,edx/edx-platform,analyseuc3m/ANALYSE-v1,CredoReference/edx-platform,analyseuc3m/ANALYSE-v1,devs1991/test_edx_docmode,shabab12/edx-platform,Lektorium-LLC/edx-platform,fintech-circle/edx-platform,EDUlib/edx-platform
|
351e88dd95db81418cc6d2deb4a943e2659292bc
|
wsgi.py
|
wsgi.py
|
import os
import sys
import site
VIRTUALENV="venv"
# Get site root from this file's location:
SITE_ROOT=os.path.abspath(os.path.dirname(__file__))
# Add virtualenv path to site package root:
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages"))
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages"))
# Add site package root to start of pythonpath:
sys.path.insert(0, SITE_ROOT)
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
import os
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Remove virtualenv setup from WSGI entrypoint
|
Remove virtualenv setup from WSGI entrypoint
Handle it in front end server instead.
|
Python
|
agpl-3.0
|
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
|
da5ca6baf75b2230e3e8a62066bebaa96a16bf3d
|
test/server.py
|
test/server.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Server used for tests
"""
import sys
import os
# ensure sys knows about pyqode.core in the test env
sys.path.insert(0, os.getcwd())
from pyqode.core import backend
if __name__ == '__main__':
print('Server started')
print(sys.path)
print(os.getcwd())
backend.CodeCompletionWorker.providers.append(
backend.DocumentWordsProvider())
backend.serve_forever()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Server used for tests
"""
import sys
import os
# ensure sys knows about pyqode.core in the test env
sys.path.insert(0, os.getcwd())
sys.path.insert(0, os.path.abspath(".."))
from pyqode.core import backend
if __name__ == '__main__':
print('Server started')
print(sys.path)
print(os.getcwd())
backend.CodeCompletionWorker.providers.append(
backend.DocumentWordsProvider())
backend.serve_forever()
|
Fix test suite on travis (restore previous path config)
|
Fix test suite on travis (restore previous path config)
|
Python
|
mit
|
pyQode/pyqode.core,zwadar/pyqode.core,pyQode/pyqode.core
|
9a81d58bfb1088c8c6286c65150cd13c54c0b4c5
|
wagtail/wagtailredirects/middleware.py
|
wagtail/wagtailredirects/middleware.py
|
from django import http
from wagtail.wagtailredirects import models
# Originally pinched from: https://github.com/django/django/blob/master/django/contrib/redirects/middleware.py
class RedirectMiddleware(object):
def process_response(self, request, response):
# No need to check for a redirect for non-404 responses.
if response.status_code != 404:
return response
# Get the path
path = models.Redirect.normalise_path(request.get_full_path())
# Find redirect
try:
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
if redirect.is_permanent:
return http.HttpResponsePermanentRedirect(redirect.link)
else:
return http.HttpResponseRedirect(redirect.link)
except:
pass
return response
|
from django import http
from wagtail.wagtailredirects import models
# Originally pinched from: https://github.com/django/django/blob/master/django/contrib/redirects/middleware.py
class RedirectMiddleware(object):
def process_response(self, request, response):
# No need to check for a redirect for non-404 responses.
if response.status_code != 404:
return response
# Get the path
path = models.Redirect.normalise_path(request.get_full_path())
# Find redirect
try:
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
except models.Redirect.DoesNotExist:
# No redirect found, return the 400 page
return response
if redirect.is_permanent:
return http.HttpResponsePermanentRedirect(redirect.link)
else:
return http.HttpResponseRedirect(redirect.link)
|
Refactor out a bare except: statement
|
Refactor out a bare except: statement
It now catches `Redirect.DoesNotExist`, returning the normal 404 page if
no redirect is found. Any other exception should not be caught here.
|
Python
|
bsd-3-clause
|
rjsproxy/wagtail,jnns/wagtail,chrxr/wagtail,Klaudit/wagtail,iansprice/wagtail,kaedroho/wagtail,wagtail/wagtail,mixxorz/wagtail,kurtrwall/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,chrxr/wagtail,mayapurmedia/wagtail,JoshBarr/wagtail,Klaudit/wagtail,torchbox/wagtail,nrsimha/wagtail,nimasmi/wagtail,iansprice/wagtail,hamsterbacke23/wagtail,FlipperPA/wagtail,Toshakins/wagtail,hanpama/wagtail,Toshakins/wagtail,takeflight/wagtail,takeshineshiro/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,nutztherookie/wagtail,hamsterbacke23/wagtail,gogobook/wagtail,rjsproxy/wagtail,jnns/wagtail,kurtw/wagtail,gasman/wagtail,hanpama/wagtail,nealtodd/wagtail,torchbox/wagtail,Pennebaker/wagtail,hanpama/wagtail,kurtw/wagtail,Pennebaker/wagtail,inonit/wagtail,takeshineshiro/wagtail,takeflight/wagtail,kaedroho/wagtail,rjsproxy/wagtail,nilnvoid/wagtail,timorieber/wagtail,JoshBarr/wagtail,quru/wagtail,Pennebaker/wagtail,serzans/wagtail,nealtodd/wagtail,hamsterbacke23/wagtail,thenewguy/wagtail,gasman/wagtail,davecranwell/wagtail,thenewguy/wagtail,davecranwell/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,mikedingjan/wagtail,Tivix/wagtail,tangentlabs/wagtail,nrsimha/wagtail,rsalmaso/wagtail,takeflight/wagtail,takeflight/wagtail,nealtodd/wagtail,timorieber/wagtail,kurtw/wagtail,zerolab/wagtail,JoshBarr/wagtail,FlipperPA/wagtail,nrsimha/wagtail,nilnvoid/wagtail,nutztherookie/wagtail,gogobook/wagtail,mayapurmedia/wagtail,Klaudit/wagtail,thenewguy/wagtail,wagtail/wagtail,nimasmi/wagtail,chrxr/wagtail,rsalmaso/wagtail,rjsproxy/wagtail,davecranwell/wagtail,Toshakins/wagtail,takeshineshiro/wagtail,quru/wagtail,kurtrwall/wagtail,nimasmi/wagtail,JoshBarr/wagtail,serzans/wagtail,thenewguy/wagtail,iansprice/wagtail,nutztherookie/wagtail,hanpama/wagtail,gogobook/wagtail,tangentlabs/wagtail,kaedroho/wagtail,Pennebaker/wagtail,jnns/wagtail,mikedingjan/wagtail,Klaudit/wagtail,iansprice/wagtail,kurtw/wagtail,timorieber/wagtail,nealtodd/wagtail,inonit/wagtail,Tivix/wagtail,nilnvoid/wagtail,Tivix/wagtail,quru/w
agtail,rsalmaso/wagtail,wagtail/wagtail,gasman/wagtail,jnns/wagtail,FlipperPA/wagtail,davecranwell/wagtail,takeshineshiro/wagtail,zerolab/wagtail,quru/wagtail,inonit/wagtail,wagtail/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,gasman/wagtail,kaedroho/wagtail,rsalmaso/wagtail,gasman/wagtail,nrsimha/wagtail,serzans/wagtail,nimasmi/wagtail,inonit/wagtail,rsalmaso/wagtail,wagtail/wagtail,mixxorz/wagtail,timorieber/wagtail,Tivix/wagtail,serzans/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,tangentlabs/wagtail,torchbox/wagtail,zerolab/wagtail,mixxorz/wagtail,thenewguy/wagtail,zerolab/wagtail,Toshakins/wagtail,chrxr/wagtail,torchbox/wagtail,mixxorz/wagtail,gogobook/wagtail,zerolab/wagtail,mixxorz/wagtail,mayapurmedia/wagtail,nilnvoid/wagtail
|
60efbb9b6b70036b72f3c756139524c4ca7698d2
|
carepoint/models/cph/fdb_gcn_seq.py
|
carepoint/models/cph/fdb_gcn_seq.py
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from carepoint import Carepoint
from sqlalchemy import (Column,
Integer,
String,
Boolean,
)
class FdbGcnSeq(Carepoint.BASE):
__tablename__ = 'fdrgcnseq'
__dbname__ = 'cph'
gcn_seqno = Column(Integer, primary_key=True)
hic3 = Column(String)
hicl_seqno = Column(Integer)
gcdf = Column(String)
gcrt = Column(String)
str = Column(String)
gtc = Column(Integer)
tc = Column(Integer)
dcc = Column(Integer)
gcnseq_gi = Column(Integer)
gender = Column(Integer)
hic3_seqn = Column(Integer)
str60 = Column(String)
update_yn = Column(Boolean)
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from carepoint import Carepoint
from sqlalchemy import (Column,
Integer,
String,
Boolean,
ForeignKey,
)
class FdbGcnSeq(Carepoint.BASE):
__tablename__ = 'fdrgcnseq'
__dbname__ = 'cph'
gcn_seqno = Column(Integer, primary_key=True)
hic3 = Column(String)
hicl_seqno = Column(Integer)
gcdf = Column(
String,
ForeignKey('fdrdosed.gcdf'),
)
gcrt = Column(
String,
ForeignKey('fdrrouted.gcrt'),
)
str = Column(String)
gtc = Column(Integer)
tc = Column(Integer)
dcc = Column(Integer)
gcnseq_gi = Column(Integer)
gender = Column(Integer)
hic3_seqn = Column(Integer)
str60 = Column(String)
update_yn = Column(Boolean)
|
Add foreign keys for form and route in Fdb Gcn Seq in carepoint cph
|
Add foreign keys for form and route in Fdb Gcn Seq in carepoint cph
|
Python
|
mit
|
laslabs/Python-Carepoint
|
628d777e3751ec8e38f1b98f558799b28cda1569
|
src/services/TemperatureMonitor/TemperatureMonitor.py
|
src/services/TemperatureMonitor/TemperatureMonitor.py
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS), observers=sys.argv[1:])
tempMonitor.run()
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
import argparse
parser = argparse.ArgumentParser(description='Broadcast temperatures to URLs')
parser.add_argument('observers', metavar='N', type=str, nargs='+',
help='the observers', default=())
parser.add_argument("-i", "--interval", type=int, help="the period between testing the temperature", default=60)
parser.add_argument("-s", "--smoothing", type=int, help="the number of samples to average when broadcasting a result", default=60)
args = parser.parse_args()
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS),
interval=args.interval,
smoothing=args.smoothing,
observers=args.observers)
tempMonitor.run()
|
Allow Control of Interval and Observers
|
Allow Control of Interval and Observers
|
Python
|
mit
|
IAPark/PITherm
|
f8c9cb7d353680f48146d0b37e01ac6761ad7904
|
example/bayesian-dark-knowledge/random_num_generator_bug.py
|
example/bayesian-dark-knowledge/random_num_generator_bug.py
|
import mxnet as mx
import mxnet.ndarray as nd
for i in range(1000):
noise = mx.random.normal(0,10,(i,i),ctx=mx.gpu())
|
import mxnet as mx
mx.random.normal(0,10,(3,3), ctx=mx.gpu()).asnumpy()
|
Update Bug for Normal Genrator
|
Update Bug for Normal Genrator
|
Python
|
apache-2.0
|
sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet
|
b77956a993f7f703626dbc9fc85003d6840b24fe
|
partner_compassion/models/partner_bank_compassion.py
|
partner_compassion/models/partner_bank_compassion.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Steve Ferry
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, _
# pylint: disable=C8107
class ResPartnerBank(models.Model):
""" This class upgrade the partners.bank to match Compassion needs.
"""
_inherit = 'res.partner.bank'
@api.model
def create(self, data):
"""Override function to notify creation in a message
"""
result = super(ResPartnerBank, self).create(data)
part = result.partner_id
part.message_post(_("<b>Account number: </b>" + result.acc_number),
_("New account created"), 'comment')
return result
@api.multi
def unlink(self):
"""Override function to notify delte in a message
"""
for account in self:
part = account.partner_id
part.message_post(_("<b>Account number: </b>" +
account.acc_number),
_("Account deleted"), 'comment')
result = super(ResPartnerBank, self).unlink()
return result
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Steve Ferry
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, _
# pylint: disable=C8107
class ResPartnerBank(models.Model):
""" This class upgrade the partners.bank to match Compassion needs.
"""
_inherit = 'res.partner.bank'
@api.model
def create(self, data):
"""Override function to notify creation in a message
"""
result = super(ResPartnerBank, self).create(data)
part = result.partner_id
if part:
part.message_post(_("<b>Account number: </b>" + result.acc_number),
_("New account created"), 'comment')
return result
@api.multi
def unlink(self):
"""Override function to notify delte in a message
"""
for account in self:
part = account.partner_id
part.message_post(_("<b>Account number: </b>" +
account.acc_number),
_("Account deleted"), 'comment')
result = super(ResPartnerBank, self).unlink()
return result
|
FIX only post message if a partner is existent
|
FIX only post message if a partner is existent
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland
|
031d31c65b66dafe15470aeefe6b2a3240bb4969
|
pysis/__init__.py
|
pysis/__init__.py
|
# -*- coding: utf-8 -*-
import os
ISIS_ROOT = os.environ.get('ISISROOT')
if ISIS_ROOT is None:
print 'Warning! ISISROOT is not defined. Bitch.'
(ISIS_VERSION, ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = 5 * (None,)
else:
with open(filename) as _f:
ISIS_VERSION = _f.readline().strip()
(ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = map(int, version.split('.'))
def require_isis_version(major, minor=None, patch=None, build=None):
err_msg = 'Version %s.%s.%s.%s of isis required (%s found).'
err = Exception(err_msg % (major, minor, patch, build, ISIS_VERSION))
if major != ISIS_VERSION_MAJOR:
raise err
if minor is not None and minor != ISIS_VERSION_MINOR:
raise err
if patch is not None and patch != ISIS_VERSION_PATCH:
raise err
if build is not None and build != ISIS_VERSION_BUILD:
raise err
|
# -*- coding: utf-8 -*-
import os, sys
ISIS_ROOT = os.environ.get('ISISROOT')
if ISIS_ROOT is None:
sys.stderr.write('Warning! ISISROOT is not defined. Bitch.\n')
(ISIS_VERSION, ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = 5 * (None,)
else:
with open(filename) as _f:
ISIS_VERSION = _f.readline().strip()
(ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = map(int, version.split('.'))
def require_isis_version(major, minor=None, patch=None, build=None):
err_msg = 'Version %s.%s.%s.%s of isis required (%s found).'
err = Exception(err_msg % (major, minor, patch, build, ISIS_VERSION))
if major != ISIS_VERSION_MAJOR:
raise err
if minor is not None and minor != ISIS_VERSION_MINOR:
raise err
if patch is not None and patch != ISIS_VERSION_PATCH:
raise err
if build is not None and build != ISIS_VERSION_BUILD:
raise err
|
Write warning to std err instead.
|
Write warning to std err instead.
|
Python
|
bsd-3-clause
|
wtolson/pysis,wtolson/pysis,michaelaye/Pysis,michaelaye/Pysis
|
040324578680a26f3816aef6f05a731be54a377d
|
pyroSAR/tests/test_dev_config.py
|
pyroSAR/tests/test_dev_config.py
|
import pytest
from pyroSAR._dev_config import Storage, LOOKUP, URL, STORAGE
class TestStorage:
def test_insert(self):
storage = Storage(a=1, b=2)
assert storage.a == 1
assert storage.b == 2
class TestLookup:
def test_suffix(self):
assert LOOKUP.snap.suffix[0]['Apply-Orbit-File'] == 'Orb'
assert LOOKUP.snap.suffix[0]['Terrain-Correction'] == 'TC'
def test_attributes(self):
assert LOOKUP.attributes['sensor'] == 'TEXT'
assert LOOKUP.attributes['vh'] == 'INTEGER'
class TestSTORAGE:
def test_STORAGE_URL(self):
assert STORAGE.URL.dem.ace == URL.dem.ace
assert STORAGE.URL.orbit.doris == URL.orbit.doris
assert STORAGE.URL.auxcal.ers == URL.auxcal.ers
def test_STORAGE_LOOKUP(self):
assert LOOKUP.snap.suffix[0]['Apply-Orbit-File'] == STORAGE.LOOKUP.snap.suffix[0]['Apply-Orbit-File']
assert LOOKUP.snap.suffix[0]['Terrain-Correction'] == STORAGE.LOOKUP.snap.suffix[0]['Terrain-Correction'] == 'TC'
assert LOOKUP.attributes['sensor'] == STORAGE.LOOKUP.attributes['sensor']
assert LOOKUP.attributes['vh'] == STORAGE.LOOKUP.attributes['vh']
|
import pytest
from pyroSAR._dev_config import Storage, LOOKUP, URL, STORAGE
class TestStorage:
def test_insert(self):
storage = Storage(a=1, b=2)
assert storage.a == 1
assert storage.b == 2
class TestLookup:
def test_suffix(self):
assert LOOKUP.snap.suffix['Apply-Orbit-File'] == 'Orb'
assert LOOKUP.snap.suffix['Terrain-Correction'] == 'TC'
def test_attributes(self):
assert LOOKUP.attributes['sensor'] == 'TEXT'
assert LOOKUP.attributes['vh'] == 'INTEGER'
class TestSTORAGE:
def test_STORAGE_URL(self):
assert STORAGE.URL.dem.ace == URL.dem.ace
assert STORAGE.URL.orbit.doris == URL.orbit.doris
assert STORAGE.URL.auxcal.ers == URL.auxcal.ers
def test_STORAGE_LOOKUP(self):
assert LOOKUP.snap.suffix['Apply-Orbit-File'] == STORAGE.LOOKUP.snap.suffix['Apply-Orbit-File']
assert LOOKUP.snap.suffix['Terrain-Correction'] == STORAGE.LOOKUP.snap.suffix['Terrain-Correction'] == 'TC'
assert LOOKUP.attributes['sensor'] == STORAGE.LOOKUP.attributes['sensor']
assert LOOKUP.attributes['vh'] == STORAGE.LOOKUP.attributes['vh']
|
Update due to changes in LOOKUP.
|
Update due to changes in LOOKUP.
|
Python
|
mit
|
johntruckenbrodt/pyroSAR,johntruckenbrodt/pyroSAR
|
63afb46b7a39881c3a3655af645d5414bdd730ea
|
edumed/forum.py
|
edumed/forum.py
|
from pybb.permissions import DefaultPermissionHandler
class ForumPermissionHandler(DefaultPermissionHandler):
def may_post_as_admin(self, user):
""" return True if `user` may post as admin """
return False
|
from pybb.permissions import DefaultPermissionHandler
class ForumPermissionHandler(DefaultPermissionHandler):
def may_post_as_admin(self, user):
""" return True if `user` may post as admin """
return False
def may_create_topic(self, user, forum):
""" return True if `user` is allowed to create a new topic in `forum` """
return user.is_authenticated()
def may_create_post(self, user, topic):
""" return True if `user` is allowed to create a new post in `topic` """
if topic.forum.hidden and (not user.is_staff):
# if topic is hidden, only staff may post
return False
if topic.closed and (not user.is_staff):
# if topic is closed, only staff may post
return False
return user.is_authenticated()
|
Allow for authenticated non super users to create posts and topics
|
Allow for authenticated non super users to create posts and topics
|
Python
|
agpl-3.0
|
fnp/edumed,fnp/edumed,fnp/edumed
|
ff63299cde0fe34fe3bfdac16593e1a0a989bec4
|
Hydv2/ScreenTools.py
|
Hydv2/ScreenTools.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'Olivier Larrieu'
from gtk import gdk
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
width = gdk.screen_width()
height = gdk.screen_height()
return {'width': width, 'height': height}
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'Olivier Larrieu'
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
from Xlib import display
display = display.Display()
root = display.screen().root
desktop = root.get_geometry()
return {'width': desktop.width, 'height': desktop.height}
|
Use Xlib instead of gtk to get screen width and screen height This limit dependances
|
Use Xlib instead of gtk to get screen width and screen height
This limit dependances
|
Python
|
artistic-2.0
|
OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL
|
7ce51c694e44e8503acd86de0f90dbc4078f4b82
|
user_deletion/managers.py
|
user_deletion/managers.py
|
from dateutil.relativedelta import relativedelta
from django.apps import apps
from django.utils import timezone
user_deletion_config = apps.get_app_config('user_deletion')
class UserDeletionManagerMixin:
def users_to_notify(self):
"""Finds all users who have been inactive and not yet notified."""
inactive_boundary = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_NOTIFICATION,
)
return self.filter(last_login__lte=inactive_boundary, notified=False)
def users_to_delete(self):
"""Finds all users who have been inactive and were notified."""
one_year = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_DELETION,
)
return self.filter(last_login__lte=one_year, notified=True)
|
from dateutil.relativedelta import relativedelta
from django.apps import apps
from django.utils import timezone
user_deletion_config = apps.get_app_config('user_deletion')
class UserDeletionManagerMixin:
def users_to_notify(self):
"""Finds all users who have been inactive and not yet notified."""
threshold = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_NOTIFICATION,
)
return self.filter(last_login__lte=threshold, notified=False)
def users_to_delete(self):
"""Finds all users who have been inactive and were notified."""
threshold = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_DELETION,
)
return self.filter(last_login__lte=threshold, notified=True)
|
Use threshold for time boundary in manager
|
Use threshold for time boundary in manager
|
Python
|
bsd-2-clause
|
incuna/django-user-deletion
|
58f8f4881a9e97206ddf49ea6cfb7f48dd34bfb3
|
example/urls.py
|
example/urls.py
|
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r"^$", TemplateView.as_view(template_name="homepage.html")),
url(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
from django.urls import path, re_path
from django.views.generic import TemplateView
urlpatterns = [
path('', TemplateView.as_view(template_name="homepage.html")),
re_path(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
Python
|
bsd-3-clause
|
bashu/django-fancybox,bashu/django-fancybox
|
9912974a283912acd31fa4ee85de2fb44c2cf862
|
nn/model.py
|
nn/model.py
|
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Tensor: # scalar loss
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Operation: # training operation
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
Fix type annotation for Model.train()
|
Fix type annotation for Model.train()
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
33775cd9e740ac70e9213c37825077516e683e55
|
pyatv/support/device_info.py
|
pyatv/support/device_info.py
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
"17L256": "13.4",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
Add tvOS 13.4 build number
|
mrp: Add tvOS 13.4 build number
|
Python
|
mit
|
postlund/pyatv,postlund/pyatv
|
694a85c71c315ccdb3e2f2946f86ce95936ee684
|
sahara_dashboard/api/__init__.py
|
sahara_dashboard/api/__init__.py
|
from sahara_dashboard.api import sahara
__all__ = [
"sahara"
]
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara_dashboard.api import sahara
__all__ = [
"sahara"
]
|
Add licensing info in source file.
|
Add licensing info in source file.
[H102 H103] Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I4f9ead44b5efa3616086f5a62a2e0e68854baf44
|
Python
|
apache-2.0
|
openstack/sahara-dashboard,openstack/sahara-dashboard,openstack/sahara-dashboard,openstack/sahara-dashboard
|
3016872091618c78f60e17338f5581856a17f7af
|
endpoints/tests/test_utils.py
|
endpoints/tests/test_utils.py
|
from utils.testcase import EndpointTestCase
from rest_framework import status
from rest_framework.test import APIClient
from django.utils.translation import ugettext_lazy as _
import sure
class TestUtils(EndpointTestCase):
def test_fail_authentication(self):
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Bearer ' + 'wrongToken')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token.')})
def test_bad_formatted_authentication(self):
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Bearer')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
client.credentials(HTTP_AUTHORIZATION='Bearer token1 token2')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. Token string should not contain spaces.')})
client.credentials(HTTP_AUTHORIZATION='token')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
|
from utils.testcase import EndpointTestCase
from rest_framework import status
from rest_framework.test import APIClient
from django.utils.translation import ugettext_lazy as _
import sure
class TestUtils(EndpointTestCase):
def test_fail_authentication(self):
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Bearer ' + 'wrongToken')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token.')})
def test_bad_formatted_authentication(self):
client = APIClient()
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
client.credentials(HTTP_AUTHORIZATION='Bearer')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
client.credentials(HTTP_AUTHORIZATION='Bearer token1 token2')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. Token string should not contain spaces.')})
client.credentials(HTTP_AUTHORIZATION='token')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
|
Add test for no HTTP_AUTHORIZATION header at all
|
Add test for no HTTP_AUTHORIZATION header at all
|
Python
|
mit
|
Amoki/Amoki-Music,Amoki/Amoki-Music,Amoki/Amoki-Music
|
5e2943b8e17ee753ddfafd1420c9e8155c496aba
|
example/tests/test_parsers.py
|
example/tests/test_parsers.py
|
import json
from django.test import TestCase
from io import BytesIO
from rest_framework_json_api.parsers import JSONParser
class TestJSONParser(TestCase):
def setUp(self):
class MockRequest(object):
def __init__(self):
self.method = 'GET'
request = MockRequest()
self.parser_context = {'request': request, 'kwargs': {}, 'view': 'BlogViewSet'}
data = {
'data': {
'id': 123,
'type': 'Blog'
},
'meta': {
'random_key': 'random_value'
}
}
self.string = json.dumps(data)
def test_parse_include_metadata(self):
parser = JSONParser()
stream = BytesIO(self.string.encode('utf-8'))
data = parser.parse(stream, None, self.parser_context)
self.assertEqual(data['_meta'], {'random_key': 'random_value'})
|
import json
from io import BytesIO
from django.test import TestCase
from rest_framework.exceptions import ParseError
from rest_framework_json_api.parsers import JSONParser
class TestJSONParser(TestCase):
def setUp(self):
class MockRequest(object):
def __init__(self):
self.method = 'GET'
request = MockRequest()
self.parser_context = {'request': request, 'kwargs': {}, 'view': 'BlogViewSet'}
data = {
'data': {
'id': 123,
'type': 'Blog'
},
'meta': {
'random_key': 'random_value'
}
}
self.string = json.dumps(data)
def test_parse_include_metadata(self):
parser = JSONParser()
stream = BytesIO(self.string.encode('utf-8'))
data = parser.parse(stream, None, self.parser_context)
self.assertEqual(data['_meta'], {'random_key': 'random_value'})
def test_parse_include_metadata(self):
parser = JSONParser()
string = json.dumps([])
stream = BytesIO(string.encode('utf-8'))
with self.assertRaises(ParseError):
parser.parse(stream, None, self.parser_context)
|
Test case for parsing invalid data.
|
Test case for parsing invalid data.
|
Python
|
bsd-2-clause
|
django-json-api/rest_framework_ember,Instawork/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api
|
d2b4ec50442a00df85ef525cc82aca971b72eb86
|
erpnext/patches/v11_0/rename_field_max_days_allowed.py
|
erpnext/patches/v11_0/rename_field_max_days_allowed.py
|
import frappe
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "leave_type")
frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed")
|
import frappe
def execute():
frappe.db.sql("""
UPDATE `tabLeave Type`
SET max_days_allowed = '0'
WHERE trim(coalesce(max_days_allowed, '')) = ''
""")
frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
|
Set null values to '0' before changing column type
|
[fix] Set null values to '0' before changing column type
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
d3a2e344caa34f763f7e46710db5b9ddefe73c55
|
doc/mkapidoc.py
|
doc/mkapidoc.py
|
#!/usr/bin/env python
# Generates the *public* API documentation.
# Remember to hide your private parts, people!
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.AbstractMethod',
'--exclude Exscript.AccountManager',
'--exclude Exscript.HostAction',
'--exclude Exscript.Log',
'--exclude Exscript.Logfile',
'--exclude Exscript.QueueLogger',
'--exclude Exscript.QueueListener',
'--exclude Exscript.util.otp',
'--exclude Exscript.interpreter',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.stdlib',
'--exclude Exscript.workqueue',
'--exclude Exscript.version',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
#!/usr/bin/env python
# Generates the *public* API documentation.
# Remember to hide your private parts, people!
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.AbstractMethod',
'--exclude Exscript.AccountManager',
'--exclude Exscript.HostAction',
'--exclude Exscript.Log',
'--exclude Exscript.Logfile',
'--exclude Exscript.QueueLogger',
'--exclude Exscript.QueueListener',
'--exclude Exscript.util.otp',
'--exclude Exscript.interpreter',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.StreamAnalyzer',
'--exclude Exscript.protocols.OsGuesser',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.stdlib',
'--exclude Exscript.workqueue',
'--exclude Exscript.version',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
Hide StreamAnalyzer and OsGuesser from the API docs.
|
Hide StreamAnalyzer and OsGuesser from the API docs.
|
Python
|
mit
|
maximumG/exscript,knipknap/exscript,knipknap/exscript,maximumG/exscript
|
91229ab93609f66af866f7ef87b576a84546aeab
|
api/base/parsers.py
|
api/base/parsers.py
|
from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
|
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
if not isinstance(result, dict):
raise ParseError()
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
object_type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': object_type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
|
Raise Parse Error if request data is not a dictionary
|
Raise Parse Error if request data is not a dictionary
|
Python
|
apache-2.0
|
mluo613/osf.io,adlius/osf.io,alexschiller/osf.io,samanehsan/osf.io,doublebits/osf.io,Ghalko/osf.io,rdhyee/osf.io,GageGaskins/osf.io,erinspace/osf.io,samanehsan/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,doublebits/osf.io,caseyrygt/osf.io,adlius/osf.io,caneruguz/osf.io,kwierman/osf.io,erinspace/osf.io,icereval/osf.io,cslzchen/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,zamattiac/osf.io,zachjanicki/osf.io,asanfilippo7/osf.io,chennan47/osf.io,KAsante95/osf.io,caneruguz/osf.io,abought/osf.io,RomanZWang/osf.io,leb2dg/osf.io,acshi/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,brianjgeiger/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,laurenrevere/osf.io,rdhyee/osf.io,mluke93/osf.io,billyhunt/osf.io,mattclark/osf.io,TomBaxter/osf.io,KAsante95/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,mluo613/osf.io,caseyrollins/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,Ghalko/osf.io,chrisseto/osf.io,doublebits/osf.io,kwierman/osf.io,mluo613/osf.io,mluke93/osf.io,amyshi188/osf.io,crcresearch/osf.io,zamattiac/osf.io,jnayak1/osf.io,aaxelb/osf.io,crcresearch/osf.io,cwisecarver/osf.io,emetsger/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,zamattiac/osf.io,wearpants/osf.io,doublebits/osf.io,samchrisinger/osf.io,kch8qx/osf.io,mattclark/osf.io,aaxelb/osf.io,ticklemepierce/osf.io,rdhyee/osf.io,mfraezz/osf.io,zachjanicki/osf.io,mfraezz/osf.io,adlius/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,KAsante95/osf.io,njantrania/osf.io,emetsger/osf.io,emetsger/osf.io,saradbowman/osf.io,hmoco/osf.io,caseyrollins/osf.io,binoculars/osf.io,abought/osf.io,ticklemepierce/osf.io,monikagrabowska/osf.io,brandonPurvis/osf.io,pattisdr/osf.io,chrisseto/osf.io,samchrisinger/osf.io,cosenal/osf.io,KAsante95/osf.io,acshi/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,crcresearch/osf.io,chennan47/osf.io,acshi/osf.io,pattisdr/osf.io,RomanZWang/osf.io,alexschiller/osf.io,saradbowman/osf.io,samchrisinger
/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,alexschiller/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,cosenal/osf.io,danielneis/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,alexschiller/osf.io,RomanZWang/osf.io,jnayak1/osf.io,caneruguz/osf.io,acshi/osf.io,caseyrollins/osf.io,mattclark/osf.io,hmoco/osf.io,billyhunt/osf.io,samchrisinger/osf.io,jnayak1/osf.io,kch8qx/osf.io,cosenal/osf.io,rdhyee/osf.io,SSJohns/osf.io,chrisseto/osf.io,pattisdr/osf.io,amyshi188/osf.io,KAsante95/osf.io,billyhunt/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,asanfilippo7/osf.io,GageGaskins/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,GageGaskins/osf.io,njantrania/osf.io,TomHeatwole/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,kwierman/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,hmoco/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,kwierman/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,sloria/osf.io,Ghalko/osf.io,kch8qx/osf.io,danielneis/osf.io,laurenrevere/osf.io,zamattiac/osf.io,binoculars/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,felliott/osf.io,abought/osf.io,sloria/osf.io,brandonPurvis/osf.io,petermalcolm/osf.io,abought/osf.io,felliott/osf.io,TomBaxter/osf.io,chrisseto/osf.io,caseyrygt/osf.io,billyhunt/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,acshi/osf.io,HalcyonChimera/osf.io,cosenal/osf.io,emetsger/osf.io,monikagrabowska/osf.io,erinspace/osf.io,Johnetordoff/osf.io,wearpants/osf.io,icereval/osf.io,hmoco/osf.io,njantrania/osf.io,mluo613/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,jnayak1/osf.io,wearpants/osf.io,aaxelb/osf.io,haoyuchen1992/osf.io,Johnetordoff/osf.io,danielneis/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,njantrania/osf.io,adlius/osf.io,felliott/osf.io,amyshi188/osf.io,leb2dg/osf.io,sloria/osf.io,brandonPurvis/osf.io,mluo613/osf.io,samanehsan/osf.
io,SSJohns/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,ZobairAlijan/osf.io,DanielSBrown/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,Nesiehr/osf.io,SSJohns/osf.io,mluke93/osf.io,mluke93/osf.io,leb2dg/osf.io,alexschiller/osf.io,binoculars/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,aaxelb/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io
|
bdbff5ea5548067713951a85b05f3818e537c8d4
|
streamparse/bootstrap/project/src/bolts/wordcount.py
|
streamparse/bootstrap/project/src/bolts/wordcount.py
|
from __future__ import absolute_import, print_function, unicode_literals
from collections import Counter
from streamparse.bolt import Bolt
class WordCounter(Bolt):
def initialize(self, conf, ctx):
self.counts = Counter()
def process(self, tup):
word = tup.values[0]
self.counts[word] += 1
self.emit([word, self.counts[word]])
self.log('%s: %d' % (word, self.counts[word]))
|
from __future__ import absolute_import, print_function, unicode_literals
from collections import Counter
from streamparse.bolt import Bolt
class WordCounter(Bolt):
AUTO_ACK = True # automatically acknowledge tuples after process()
AUTO_ANCHOR = True # automatically anchor tuples to current tuple
AUTO_FAIL = True # automatically fail tuples when exceptions occur
def initialize(self, conf, ctx):
self.counts = Counter()
def process(self, tup):
word = tup.values[0]
self.counts[word] += 1
self.emit([word, self.counts[word]])
self.log('%s: %d' % (word, self.counts[word]))
|
Update quickstart project to use AUTO_*
|
Update quickstart project to use AUTO_*
|
Python
|
apache-2.0
|
crohling/streamparse,petchat/streamparse,petchat/streamparse,eric7j/streamparse,msmakhlouf/streamparse,codywilbourn/streamparse,Parsely/streamparse,codywilbourn/streamparse,msmakhlouf/streamparse,scrapinghub/streamparse,Parsely/streamparse,petchat/streamparse,phanib4u/streamparse,petchat/streamparse,scrapinghub/streamparse,msmakhlouf/streamparse,scrapinghub/streamparse,scrapinghub/streamparse,crohling/streamparse,msmakhlouf/streamparse,phanib4u/streamparse,petchat/streamparse,msmakhlouf/streamparse,hodgesds/streamparse,hodgesds/streamparse,scrapinghub/streamparse,eric7j/streamparse
|
95988fc4e5d7b5b5fa3235000ad9680c168c485c
|
aiospamc/__init__.py
|
aiospamc/__init__.py
|
#!/usr/bin/env python3
'''aiospamc package.
An asyncio-based library to communicate with SpamAssassin's SPAMD service.'''
from aiospamc.client import Client
__all__ = ('Client',
'MessageClassOption',
'ActionOption')
__author__ = 'Michael Caley'
__copyright__ = 'Copyright 2016, 2017 Michael Caley'
__license__ = 'MIT'
__version__ = '0.3.0'
__email__ = 'mjcaley@darkarctic.com'
|
#!/usr/bin/env python3
'''aiospamc package.
An asyncio-based library to communicate with SpamAssassin's SPAMD service.'''
from aiospamc.client import Client
from aiospamc.options import ActionOption, MessageClassOption
__all__ = ('Client',
'MessageClassOption',
'ActionOption')
__author__ = 'Michael Caley'
__copyright__ = 'Copyright 2016, 2017 Michael Caley'
__license__ = 'MIT'
__version__ = '0.3.0'
__email__ = 'mjcaley@darkarctic.com'
|
Add import ActionOption and MessageClassOption to __all__
|
Add import ActionOption and MessageClassOption to __all__
|
Python
|
mit
|
mjcaley/aiospamc
|
f1793ed8a494701271b4a4baff8616e9c6202e80
|
message_view.py
|
message_view.py
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
panel_view = self.window.create_output_panel(PANEL_NAME)
panel_view.set_read_only(False)
panel_view.run_command('append', {'characters': msg})
panel_view.set_read_only(True)
panel_view.show(0)
self.window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel_view = window.find_output_panel(PANEL_NAME)
else:
panel_view = window.create_output_panel(PANEL_NAME)
scroll_to = panel_view.size()
msg = msg.rstrip() + '\n\n\n'
panel_view.set_read_only(False)
panel_view.run_command('append', {'characters': msg})
panel_view.set_read_only(True)
panel_view.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
Append messages if message view is currently open
|
Append messages if message view is currently open
|
Python
|
mit
|
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
|
4803578ebb306e2e142b629d98cb82899c0b0270
|
authorize/__init__.py
|
authorize/__init__.py
|
import xml.etree.ElementTree as E
from authorize.configuration import Configuration
from authorize.address import Address
from authorize.bank_account import BankAccount
from authorize.batch import Batch
from authorize.credit_card import CreditCard
from authorize.customer import Customer
from authorize.environment import Environment
from authorize.exceptions import AuthorizeError
from authorize.exceptions import AuthorizeConnectionError
from authorize.exceptions import AuthorizeResponseError
from authorize.exceptions import AuthorizeInvalidError
from authorize.recurring import Recurring
from authorize.transaction import Transaction
# Monkeypatch the ElementTree module so that we can use CDATA element types
E._original_serialize_xml = E._serialize_xml
def _serialize_xml(write, elem, *args):
if elem.tag == '![CDATA[':
write('<![CDATA[%s]]>' % elem.text)
return
return E._original_serialize_xml(write, elem, *args)
E._serialize_xml = E._serialize['xml'] = _serialize_xml
|
import xml.etree.ElementTree as E
from authorize.configuration import Configuration
from authorize.address import Address
from authorize.bank_account import BankAccount
from authorize.batch import Batch
from authorize.credit_card import CreditCard
from authorize.customer import Customer
from authorize.environment import Environment
from authorize.exceptions import AuthorizeError
from authorize.exceptions import AuthorizeConnectionError
from authorize.exceptions import AuthorizeResponseError
from authorize.exceptions import AuthorizeInvalidError
from authorize.recurring import Recurring
from authorize.transaction import Transaction
# Monkeypatch the ElementTree module so that we can use CDATA element types
E._original_serialize_xml = E._serialize_xml
def _serialize_xml(write, elem, *args, **kwargs):
if elem.tag == '![CDATA[':
write('<![CDATA[%s]]>' % elem.text)
return
return E._original_serialize_xml(write, elem, *args, **kwargs)
E._serialize_xml = E._serialize['xml'] = _serialize_xml
|
Fix monkey patching to pass kwargs required by Python 3.4
|
Fix monkey patching to pass kwargs required by Python 3.4
|
Python
|
mit
|
aryeh/py-authorize,uglycitrus/py-authorize,vcatalano/py-authorize
|
e8dd4ca8bd51b84d5d7d5a6a1c4144475e066bf1
|
zabbix.py
|
zabbix.py
|
import requests
class Api(object):
def __init__(self, server='http://localhost/zabbix'):
self.session = requests.Session()
self.session.headers.update({
'Content-Type': 'application/json'
})
self.url = server + '/api_jsonrpc.php'
self.auth = ''
self.id = 0
|
import requests
class ZabbixError(Exception):
pass
class Api(object):
def __init__(self, server='http://localhost/zabbix'):
self.session = requests.Session()
self.session.headers.update({
'Content-Type': 'application/json'
})
self.url = server + '/api_jsonrpc.php'
self.auth = ''
self.id = 0
def do_request(self, method, params=None):
json_payload = {
'jsonrpc': '2.0',
'method': method,
'params': params or {},
'auth': self.auth,
'id': self.id,
}
self.id += 1
response = self.session.post(self.url, data = json.dumps(json_payload))
if response.status_code != 200:
raise ZabbixError("HTTP ERROR %S: %S" % (response.status, response.reason))
if response.text == '':
raise ZabbixError("Received empty response")
return response.json()
|
Create do_requestion function to be used by other methods.
|
Create do_requestion function to be used by other methods.
|
Python
|
apache-2.0
|
supasate/PythonZabbixApi
|
1b20116059b21905688b7fd6153ecd7c42bdc4a1
|
parseSAMOutput.py
|
parseSAMOutput.py
|
#!python
# Load libraries
import sys, getopt
import pysam
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseSAMOutput
parseSAMOutput [OPTIONS] SAMFILE
#
DESCRIPTION
parseSAMOutput.py
Parses SAM alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
--rmdup Remove duplicate reads (reduces PCR effects)
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Set defaults
rmdup = False
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help", "rmdup"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
elif opt == "--rmdup":
rmdup = True
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentPath = args[0]
else:
print >> sys.stderr, "Error -- need path to SAM file"
sys.exit(1)
libPipeline.processSAMOutput(alignmentPath, sys.stdout)
|
#!python
# Load libraries
import sys, getopt
import pysam
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseSAMOutput
parseSAMOutput [OPTIONS] SAMFILE
#
DESCRIPTION
parseSAMOutput.py
Parses SAM alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
--rmdup Remove duplicate reads (reduces PCR effects)
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Set defaults
rmdup = False
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help", "rmdup"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
elif opt == "--rmdup":
rmdup = True
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentPath = args[0]
else:
print >> sys.stderr, "Error -- need path to SAM file"
sys.exit(1)
libPipeline.processSAMOutput(alignmentPath, sys.stdout, rmdup=rmdup)
|
Fix rmdup handling in wrapper script.
|
Fix rmdup handling in wrapper script.
|
Python
|
apache-2.0
|
awblocker/paired-end-pipeline,awblocker/paired-end-pipeline
|
613a6f7947b46b9a6c4c679b638d1c4d946b644d
|
neutron/conf/policies/network_ip_availability.py
|
neutron/conf/policies/network_ip_availability.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from neutron.conf.policies import base
rules = [
policy.DocumentedRuleDefault(
'get_network_ip_availability',
base.RULE_ADMIN_ONLY,
'Get network IP availability',
[
{
'method': 'GET',
'path': '/network-ip-availabilities',
},
{
'method': 'GET',
'path': '/network-ip-availabilities/{network_id}',
},
]
),
]
def list_rules():
return rules
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from neutron.conf.policies import base
DEPRECATED_REASON = """
The network IP availability API now support system scope and default roles.
"""
rules = [
policy.DocumentedRuleDefault(
name='get_network_ip_availability',
check_str=base.SYSTEM_READER,
scope_types=['system'],
description='Get network IP availability',
operations=[
{
'method': 'GET',
'path': '/network-ip-availabilities',
},
{
'method': 'GET',
'path': '/network-ip-availabilities/{network_id}',
},
],
deprecated_rule=policy.DeprecatedRule(
name='get_network_ip_availability',
check_str=base.RULE_ADMIN_ONLY),
deprecated_reason=DEPRECATED_REASON,
deprecated_since=versionutils.deprecated.WALLABY
),
]
def list_rules():
return rules
|
Implement secure RBAC for the network IP availability
|
Implement secure RBAC for the network IP availability
This commit updates the network IP availability policies to understand scope
checking and account for a read-only role. This is part of a broader series of
changes across OpenStack to provide a consistent RBAC experience and improve
security.
Change-Id: Ia965e549ec5d8b23e837b41c304004d8e57785e9
|
Python
|
apache-2.0
|
openstack/neutron,mahak/neutron,mahak/neutron,mahak/neutron,openstack/neutron,openstack/neutron
|
3f9623766b58c02b21abb967315bfe30a2b3974f
|
tests/TestTransaction.py
|
tests/TestTransaction.py
|
import Transaction
import unittest
class TestTransaction(unittest.TestCase) :
def setUp(self) :
self.test_object = Transaction.Transaction()
def tearDown(self) :
pass
def test_not_None(self) :
self.assertIsNotNone(self.test_object)
def test_can_assign_data(self) :
self.test_object['foo'] = 'bar'
self.assertIn('foo', self.test_object)
self.assertEqual(self.test_object['foo'], 'bar')
|
import Transaction
import unittest
class TestTransaction(unittest.TestCase) :
def setUp(self) :
self.test_object = Transaction.Transaction()
def tearDown(self) :
pass
def test_not_None(self) :
self.assertIsNotNone(self.test_object)
def test_can_assign_data(self) :
self.test_object['foo'] = 'bar'
self.assertIn('foo', self.test_object)
self.assertEqual(self.test_object['foo'], 'bar')
def test_different_transactions_are_not_each_other(self) :
emptyTransaction = Transaction.Transaction()
self.assertIsNot(self.test_object, emptyTransaction)
def test_different_transactions_with_same_data_are_equal(self) :
self.test_object['foo'] = 'bar'
newTransaction = Transaction.Transaction()
newTransaction['foo'] = 'bar'
self.assertEqual(self.test_object, newTransaction)
def test_transaction_is_itself(self) :
self.assertIs(self.test_object, self.test_object)
def test_different_transactions_with_same_data_are_equal(self) :
self.test_object['foo'] = 'bar'
newTransaction = Transaction.Transaction()
newTransaction['foo'] = 'baz'
self.assertNotEqual(self.test_object, newTransaction)
|
Add test to clarify equality/is behavior
|
Add test to clarify equality/is behavior
|
Python
|
apache-2.0
|
mattdeckard/wherewithal
|
2728f33a0c8477d75b3716ea39fe2e3c8db9378d
|
tests/test_OrderedSet.py
|
tests/test_OrderedSet.py
|
from twisted.trial import unittest
from better_od import OrderedSet
class TestOrderedSet(unittest.TestCase):
def setUp(self):
self.values = 'abcddefg'
self.s = OrderedSet(self.values)
def test_order(self):
expected = list(enumerate('abcdefg'))
self.assertEquals(list(enumerate(self.s)), expected)
def test_index(self):
self.assertEquals(self.s.key_index('c'), 2)
class TestOrderedSetMutations(unittest.TestCase):
def test_add_new_value(self):
prev = len(self.s)
self.s.add('z')
self.assertEqual(len(self.s), prev + 1)
|
from twisted.trial import unittest
from better_od import OrderedSet
class TestOrderedSet(unittest.TestCase):
def setUp(self):
self.s = OrderedSet('abcdefg')
def test_order(self):
expected = list(enumerate('abcdefg'))
self.assertEquals(list(enumerate(self.s)), expected)
def test_reorder(self):
new_order = 'gdcbaef'
self.s.reorder_keys(new_order)
self.assertEquals(list(enumerate(self.s)), list(enumerate(new_order)))
def test_index(self):
self.assertEquals(self.s.key_index('c'), 2)
class TestOrderedSetMutations(unittest.TestCase):
def test_add_new_value(self):
s = OrderedSet('abcdef')
prev = len(s)
s.add('z')
self.assertEqual(len(s), prev + 1)
def test_add_existing_value(self):
s = OrderedSet('abcdef')
prev = len(s)
s.add('a')
self.assertEqual(len(s), prev)
def test_discard_existing_value(self):
s = OrderedSet('abcdef')
self.assertIs(s.discard('a'), None)
def test_discard_nonexistent_value(self):
s = OrderedSet('abcdef')
self.assertIs(s.discard('z'), None)
|
Add OrderedSet mutation tests. Refactor tests.
|
Add OrderedSet mutation tests. Refactor tests.
Refactored the tests to rely less on setUp because I've got to test
mutating the objects.
|
Python
|
mit
|
JustusW/BetterOrderedDict,therealfakemoot/collections2
|
756860e325edd06eb98bed7c6fd5fa6c4a78243e
|
tests/test_migrations.py
|
tests/test_migrations.py
|
"""
Tests that migrations are not missing
"""
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
import pytest
from django.core.management import call_command
def test_no_missing_migrations():
"""Check no model changes have been made since the last `./manage.py makemigrations`.
Pulled from mozilla/treeherder #dd53914, subject to MPL
"""
with pytest.raises(SystemExit) as e:
# Replace with `check_changes=True` once we're using a Django version that includes:
# https://code.djangoproject.com/ticket/25604
# https://github.com/django/django/pull/5453
call_command('makemigrations', interactive=False, dry_run=True, exit_code=True)
assert str(e.value) == '1'
|
"""
Tests that migrations are not missing
"""
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
import pytest
from django.core.management import call_command
@pytest.mark.django_db
def test_no_missing_migrations():
"""Check no model changes have been made since the last `./manage.py makemigrations`.
Pulled from mozilla/treeherder #dd53914, subject to MPL
"""
with pytest.raises(SystemExit) as e:
# Replace with `check_changes=True` once we're using a Django version that includes:
# https://code.djangoproject.com/ticket/25604
# https://github.com/django/django/pull/5453
call_command('makemigrations', interactive=False, dry_run=True, exit_code=True)
assert str(e.value) == '1'
|
Add missing pytest DB marker
|
Add missing pytest DB marker
|
Python
|
bsd-2-clause
|
bennylope/django-organizations,bennylope/django-organizations
|
d4003a3b07e4ead9bccbc6a9c8ff835970ad99a3
|
pymatgen/core/design_patterns.py
|
pymatgen/core/design_patterns.py
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
class NullFile(object):
"""A file object that is associated to /dev/null."""
def __new__(cls):
import os
return open(os.devnull, 'w')
def __init__(self):
"""no-op"""
class NullStream(object):
"""A fake stream with a no-op write.."""
def write(*args):
"""no-op"""
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
|
Move NullFile and NullStream to monty
|
Move NullFile and NullStream to monty
Former-commit-id: 5492c3519fdfc444fd8cbb92cbf4b9c67c8a0883 [formerly 4aa6714284cb45a2747cea8e0f38e8fbcd8ec0bc]
Former-commit-id: e6119512027c605a8277d0a99f37a6ab0d73b6c7
|
Python
|
mit
|
xhqu1981/pymatgen,tallakahath/pymatgen,fraricci/pymatgen,aykol/pymatgen,mbkumar/pymatgen,ndardenne/pymatgen,czhengsci/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,czhengsci/pymatgen,gVallverdu/pymatgen,nisse3000/pymatgen,dongsenfo/pymatgen,aykol/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,gpetretto/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,gmatteo/pymatgen,mbkumar/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,mbkumar/pymatgen,vorwerkc/pymatgen,setten/pymatgen,gVallverdu/pymatgen,tallakahath/pymatgen,vorwerkc/pymatgen,montoyjh/pymatgen,setten/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,tallakahath/pymatgen,montoyjh/pymatgen,tschaume/pymatgen,xhqu1981/pymatgen,vorwerkc/pymatgen,dongsenfo/pymatgen,blondegeek/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,gpetretto/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,tschaume/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,matk86/pymatgen,montoyjh/pymatgen,matk86/pymatgen,gpetretto/pymatgen,aykol/pymatgen,nisse3000/pymatgen,davidwaroquiers/pymatgen,xhqu1981/pymatgen,gpetretto/pymatgen,dongsenfo/pymatgen,richardtran415/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,ndardenne/pymatgen,setten/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,tschaume/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,setten/pymatgen,johnson1228/pymatgen,davidwaroquiers/pymatgen,nisse3000/pymatgen,nisse3000/pymatgen,fraricci/pymatgen,matk86/pymatgen
|
c4903f5b631bba21e17be1b7deb118c0c9571432
|
Lab3/PalindromeExercise.py
|
Lab3/PalindromeExercise.py
|
# Asks the user for input of the word and makes it lower case.
normStr = raw_input("Enter the word:\n").lower();
# Inverts the string so it can compare it with the original input.
invertStr = normStr[::-1];
|
# Asks the user for input of the word and makes it lower case.
normStr = raw_input("Enter the word:\n").lower();
# Inverts the string so it can compare it with the original input.
invertStr = normStr[::-1];
# Tests if the string is a palindrome. If so, it prints True. Else, prints False.
if normStr == invertStr:
print 'True';
else:
print 'False';
|
Test added. Program should be complete.
|
Test added. Program should be complete.
|
Python
|
mit
|
lgomie/dt228-3B-cloud-repo
|
98eead2549f4a2793011ffe8107e64530ddbf782
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
import django
from django.conf import settings
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'relatives',
'relatives.tests',
'django.contrib.admin',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF='relatives.tests.urls',
STATIC_URL='/static/',
)
def runtests(*test_args):
if hasattr(django, 'setup'):
django.setup()
if not test_args:
test_args = ['tests']
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
import django
from django.conf import settings
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'relatives',
'relatives.tests',
'django.contrib.admin',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
MIDDLEWARE_CLASSES=[
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
],
ROOT_URLCONF='relatives.tests.urls',
STATIC_URL='/static/',
)
def runtests(*test_args):
if hasattr(django, 'setup'):
django.setup()
if not test_args:
test_args = ['tests']
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests(*sys.argv[1:])
|
Add missing auth middleware for 1.7 tests
|
Add missing auth middleware for 1.7 tests
|
Python
|
mit
|
treyhunner/django-relatives,treyhunner/django-relatives
|
de02f354e406e5b9a3f742697d3979d54b9ee581
|
fvserver/urls.py
|
fvserver/urls.py
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^macnamer/', include('macnamer.foo.urls')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
#url(r'^$', 'namer.views.index', name='home'),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^macnamer/', include('macnamer.foo.urls')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
#url(r'^$', 'namer.views.index', name='home'),
)
|
Update password functions for Django 1.8
|
Update password functions for Django 1.8
|
Python
|
apache-2.0
|
grahamgilbert/Crypt-Server,squarit/Crypt-Server,arubdesu/Crypt-Server,grahamgilbert/Crypt-Server,squarit/Crypt-Server,arubdesu/Crypt-Server,squarit/Crypt-Server,arubdesu/Crypt-Server,squarit/Crypt-Server,grahamgilbert/Crypt-Server,grahamgilbert/Crypt-Server
|
cc8f0760aa5497d2285dc85c6f3c17c6ce327c35
|
core/__init__.py
|
core/__init__.py
|
# Offer forward compatible imports of datastore_rpc and datastore_query.
import logging
try:
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
logging.warning('Importing local datastore_{rpc,query}')
from . import datastore_rpc
from . import datastore_query
from . import monkey
|
# Offer forward compatible imports of datastore_rpc and datastore_query.
import logging
import sys
try:
    from google.appengine.datastore import datastore_rpc
    from google.appengine.datastore import datastore_query
    # Alias the official SDK modules under this package's names so that
    # ``import core.datastore_rpc`` (and the dotted ``core.datastore_query``)
    # resolve to the very same module objects instead of the local copies.
    sys.modules['core.datastore_rpc'] = datastore_rpc
    sys.modules['core.datastore_query'] = datastore_query
    logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
    # SDK modules unavailable: fall back to the bundled local implementations.
    logging.warning('Importing local datastore_{rpc,query}')
    from . import datastore_rpc
    from . import datastore_query
# NOTE(review): imported for its side effects (monkey patching, presumably
# of the datastore modules above) — confirm against the package's monkey.py.
from . import monkey
|
Make official google imports actually work.
|
Make official google imports actually work.
|
Python
|
apache-2.0
|
GoogleCloudPlatform/datastore-ndb-python,GoogleCloudPlatform/datastore-ndb-python
|
c566fa8f49ea826b29937c9c128350494eb10bf6
|
rrd/__init__.py
|
rrd/__init__.py
|
#-*- coding:utf-8 -*-
import os
from flask import Flask
#-- create app --
app = Flask(__name__)
app.config.from_object("rrd.config")
@app.errorhandler(Exception)
def all_exception_handler(error):
print "exception: %s" %error
return u'dashboard 暂时无法访问,请联系管理员', 500
from view import api, chart, screen, index
|
#-*- coding:utf-8 -*-
import os
from flask import Flask
from flask import request
from flask import redirect
#-- create app --
app = Flask(__name__)
app.config.from_object("rrd.config")
@app.errorhandler(Exception)
def all_exception_handler(error):
print "exception: %s" %error
return u'dashboard 暂时无法访问,请联系管理员', 500
from view import api, chart, screen, index
@app.before_request
def before_request():
sig = request.cookies.get('sig')
if not sig:
return redirect(config.JSONCFG['redirectUrl'], code=302)
|
Add before_request and it works for this bug
|
Add before_request and it works for this bug
Check whether the signature cookie exists and redirect to the login page when it does not; `rrd/view/index.py` served as the reference implementation.
|
Python
|
apache-2.0
|
Cepave/dashboard,Cepave/dashboard,Cepave/dashboard,Cepave/dashboard
|
4735804f4951835e4e3c7d116628344bddf45aa3
|
atomicpress/admin.py
|
atomicpress/admin.py
|
# -*- coding: utf-8 -*-
from flask import current_app
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin import AdminIndexView, expose, Admin
from flask_admin.contrib.sqla import ModelView
from atomicpress import models
from atomicpress.app import db
class HomeView(AdminIndexView):
@expose("/")
def index(self):
return self.render('admin/home.html')
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(ModelView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
admin.add_view(FileAdmin(app.config["UPLOADS_PATH"],
app.config["UPLOADS_URL"],
name='Upload files'))
|
# -*- coding: utf-8 -*-
from flask import current_app
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin import AdminIndexView, expose, Admin
from flask_admin.contrib.sqla import ModelView
from atomicpress import models
from atomicpress.app import db
class HomeView(AdminIndexView):
@expose("/")
def index(self):
return self.render('admin/home.html')
class PostView(ModelView):
column_default_sort = ('date', True)
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(PostView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
admin.add_view(FileAdmin(app.config["UPLOADS_PATH"],
app.config["UPLOADS_URL"],
name='Upload files'))
|
Update post view sorting (so latest comes first)
|
Update post view sorting (so latest comes first)
|
Python
|
mit
|
marteinn/AtomicPress,marteinn/AtomicPress,marteinn/AtomicPress,marteinn/AtomicPress
|
3a204de33589de943ff09525895812530baac0b2
|
saylua/modules/pets/models/db.py
|
saylua/modules/pets/models/db.py
|
from google.appengine.ext import ndb
# This is to store alternate linart versions of the same pets
class SpeciesVersion(ndb.Model):
name = ndb.StringProperty()
base_image = ndb.StringProperty()
base_psd = ndb.StringProperty()
default_image = ndb.StringProperty()
# Pets are divided into species and species are divided into variations
class Species(ndb.Model):
name = ndb.StringProperty(indexed=True)
versions = ndb.StructuredProperty(SpeciesVersion, repeated=True)
description = ndb.StringProperty()
class SpeciesVariation(ndb.Model):
species_key = ndb.KeyProperty(indexed=True)
name = ndb.StringProperty(indexed=True)
description = ndb.StringProperty()
class Pet(ndb.Model):
user_key = ndb.KeyProperty(indexed=True)
variation_key = ndb.KeyProperty(indexed=True) # Only set if the pet is a variation
species_name = ndb.StringProperty(indexed=True) # Note the denormalization
# Personal profile information for the pet
name = ndb.StringProperty()
css = ndb.StringProperty()
description = ndb.StringProperty()
# If either of these is set to a number other than 0, the pet is for sale
ss_price = ndb.IntegerProperty(default=0, indexed=True)
cc_price = ndb.IntegerProperty(default=0, indexed=True)
|
from google.appengine.ext import ndb
# This is to store alternate linart versions of the same pets
class SpeciesVersion(ndb.Model):
name = ndb.StringProperty()
base_image = ndb.StringProperty()
base_psd = ndb.StringProperty()
default_image = ndb.StringProperty()
# Pets are divided into species and species are divided into variations
class Species(ndb.Model):
name = ndb.StringProperty()
versions = ndb.StructuredProperty(SpeciesVersion)
description = ndb.TextProperty()
class SpeciesVariation(ndb.Model):
species_id = ndb.StringProperty()
name = ndb.StringProperty()
description = ndb.TextProperty()
class Pet(ndb.Model):
pet_id = ndb.StringProperty()
owner_id = ndb.IntegerProperty()
variation_key = ndb.KeyProperty() # Only set if the pet is a variation
species_name = ndb.StringProperty() # Note the denormalization
# Personal profile information for the pet
name = ndb.StringProperty()
css = ndb.TextProperty()
description = ndb.TextProperty()
# If either of these is set to a number other than 0, the pet is for sale
ss_price = ndb.IntegerProperty(default=0)
cc_price = ndb.IntegerProperty(default=0)
|
Update to pet model for provisioner
|
Update to pet model for provisioner
|
Python
|
agpl-3.0
|
saylua/SayluaV2,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua
|
4aeb2e57a05491973c761eb169a42cb5e1e32737
|
gtr/__init__.py
|
gtr/__init__.py
|
__all__ = [
"gtr.services.funds.Funds"
]
__version__ = "0.1.0"
from gtr.services.base import _Service
from gtr.services.funds import Funds
from gtr.services.organisations import Organisations
from gtr.services.persons import Persons
from gtr.services.projects import Projects
|
# Names exported by ``from gtr import *``.
# BUG FIX: ``__all__`` must contain bare attribute names of this package,
# not dotted module paths — dotted entries make a star-import fail with
# AttributeError because no attribute by that literal name exists.
__all__ = [
    "Funds",
    "Organisations",
    "Persons",
    "Projects",
]
__version__ = "0.1.0"

from gtr.services.base import _Service
from gtr.services.funds import Funds
from gtr.services.organisations import Organisations
from gtr.services.persons import Persons
from gtr.services.projects import Projects
|
Add all service Classes to import
|
Add all service Classes to import
|
Python
|
apache-2.0
|
nestauk/gtr
|
1d84a3b58aa752834aed31123dd16e3bfa723609
|
tests/storage_adapter_tests/test_storage_adapter.py
|
tests/storage_adapter_tests/test_storage_adapter.py
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_find(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.find('')
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_get_response_statements(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_response_statements()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
from unittest import TestCase
from chatterbot.storage import StorageAdapter
class StorageAdapterTestCase(TestCase):
"""
This test case is for the StorageAdapter base class.
Although this class is not intended for direct use,
this test case ensures that exceptions requiring
basic functionality are triggered when needed.
"""
def setUp(self):
super(StorageAdapterTestCase, self).setUp()
self.adapter = StorageAdapter()
def test_count(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.count()
def test_filter(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.filter()
def test_remove(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.remove('')
def test_create(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.create()
def test_update(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.update('')
def test_get_random(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.get_random()
def test_drop(self):
with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError):
self.adapter.drop()
|
Remove tests for storage adapter methods being removed.
|
Remove tests for storage adapter methods being removed.
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,gunthercox/ChatterBot
|
2d95f8fe9c9e9edf5b1a0b5dee2992187b0d89ed
|
src/pytest_django_lite/plugin.py
|
src/pytest_django_lite/plugin.py
|
import os
import pytest
try:
from django.conf import settings
except ImportError:
settings = None # NOQA
def is_configured():
if settings is None:
return False
return settings.configured or os.environ.get('DJANGO_SETTINGS_MODULE')
@pytest.fixture(autouse=True, scope='session')
def _django_runner(request):
if not is_configured():
return
from django.test.simple import DjangoTestSuiteRunner
runner = DjangoTestSuiteRunner(interactive=False)
runner.setup_test_environment()
request.addfinalizer(runner.teardown_test_environment)
config = runner.setup_databases()
def teardown_database():
runner.teardown_databases(config)
request.addfinalizer(teardown_database)
return runner
|
import os
import pytest
try:
from django.conf import settings
except ImportError:
settings = None # NOQA
def is_configured():
if settings is None:
return False
return settings.configured or os.environ.get('DJANGO_SETTINGS_MODULE')
@pytest.fixture(autouse=True, scope='session')
def _django_runner(request):
if not is_configured():
return
from django.test.simple import DjangoTestSuiteRunner
try:
import django
django.setup()
except AttributeError:
pass
runner = DjangoTestSuiteRunner(interactive=False)
runner.setup_test_environment()
request.addfinalizer(runner.teardown_test_environment)
config = runner.setup_databases()
def teardown_database():
runner.teardown_databases(config)
request.addfinalizer(teardown_database)
return runner
|
Deal with the Django app refactoring.
|
Deal with the Django app refactoring.
|
Python
|
apache-2.0
|
pombredanne/pytest-django-lite,dcramer/pytest-django-lite
|
2f152c5036d32a780741edd8fb6ce75684728824
|
singleuser/user-config.py
|
singleuser/user-config.py
|
import os
mylang = 'test'
family = 'wikipedia'
# Not defining any extra variables here at all since that causes pywikibot
# to issue a warning about potential misspellings
if os.path.exists(os.path.expanduser('~/user-config.py')):
with open(os.path.expanduser('~/user-config.py'), 'r') as f:
exec(
compile(f.read(), os.path.expanduser('~/user-config.py'), 'exec'),
globals())
# Things that should be non-easily-overridable
usernames['*']['*'] = os.environ['JPY_USER']
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
# Things that should be non-easily-overridable
usernames['*']['*'] = os.environ['JPY_USER']
|
Revert "Do not introduce extra variables"
|
Revert "Do not introduce extra variables"
Since the 'f' is considered an extra variable and introduces
a warning anyway :( Let's fix this the right way
This reverts commit a03de68fb772d859098327d0e54a219fe4507072.
|
Python
|
mit
|
yuvipanda/paws,yuvipanda/paws
|
2d4016d8e4245a6e85c2bbea012d13471718b1b0
|
journal/views.py
|
journal/views.py
|
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.http import JsonResponse
from rest_framework.parsers import JSONParser
from .models import Entry
from .serializers import EntrySerializer
@method_decorator(csrf_exempt, name='dispatch')
class RestView(View):
def get(self, request):
last = request.GET.get('last', None)
if last is None:
entries = Entry.objects.all()
else:
last_entry = Entry.objects.get(uuid=last)
entries = Entry.objects.filter(id__gt=last_entry.id)
serializer = EntrySerializer(entries, many=True)
return JsonResponse({'entries': serializer.data})
@csrf_exempt
def put(self, request):
body = JSONParser().parse(request)
serializer = EntrySerializer(data=body['entries'], many=True)
if serializer.is_valid():
serializer.save()
return JsonResponse({}, status=201)
return JsonResponse(serializer.errors, status=400)
|
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.http import JsonResponse
from rest_framework.parsers import JSONParser
from .models import Entry
from .serializers import EntrySerializer
@method_decorator(csrf_exempt, name='dispatch')
class RestView(View):
    """Minimal JSON endpoint for journal entries, scoped by a ``tag``
    query parameter on both GET and PUT."""

    def get(self, request):
        """Return entries for ``tag``; with ``last``, only newer entries.

        ``last`` is the UUID of the newest entry the client already holds;
        entries with a higher primary key (created after it) are returned.
        """
        last = request.GET.get('last', None)
        tag = request.GET.get('tag', None)
        entries = Entry.objects.filter(tag=tag)
        if last is not None:
            # Raises Entry.DoesNotExist if the UUID is unknown for this tag.
            last_entry = entries.get(uuid=last)
            entries = entries.filter(id__gt=last_entry.id)
        serializer = EntrySerializer(entries, many=True)
        return JsonResponse({'entries': serializer.data})

    @csrf_exempt
    def put(self, request):
        """Bulk-create entries from the JSON body under ``tag``.

        Returns 201 with an empty object on success, 400 with the
        serializer errors otherwise.
        """
        tag = request.GET.get('tag', None)
        body = JSONParser().parse(request)
        serializer = EntrySerializer(data=body['entries'], many=True)
        if serializer.is_valid():
            # ``tag`` is passed through to the serializer's save/create.
            serializer.save(tag=tag)
            return JsonResponse({}, status=201)
        return JsonResponse(serializer.errors, status=400)
|
Add a way to specific tag.
|
Add a way to specific tag.
|
Python
|
agpl-3.0
|
etesync/journal-manager
|
a4c9dd451062b83b907a350ea30f2d36badb6522
|
parsers/__init__.py
|
parsers/__init__.py
|
import importlib
parsers = """
singtao.STParser
apple.AppleParser
""".split()
parser_dict = {}
# Import the parser and fill in parser_dict: domain -> parser
for parser_name in parsers:
module, class_name = parser_name.rsplit('.', 1)
parser = getattr(importlib.import_module('parsers.' + module), class_name)
for domain in parser.domains:
parser_dict[domain] = parser
def get_parser(url):
return parser_dict[url.split('/')[2]]
# Each feeder places URLs into the database to be checked periodically.
parsers = [parser for parser in parser_dict.values()]
__all__ = ['parsers', 'get_parser']
|
"""Registry mapping news-site domains to their parser classes."""
import importlib

# Dotted paths (relative to the ``parsers`` package) of all parser classes.
parsers = """
singtao.STParser
apple.AppleParser
tvb.TVBParser
""".split()

parser_dict = {}

# Import the parser and fill in parser_dict: domain -> parser
for parser_name in parsers:
    module, class_name = parser_name.rsplit('.', 1)
    parser = getattr(importlib.import_module('parsers.' + module), class_name)
    for domain in parser.domains:
        parser_dict[domain] = parser


def get_parser(url):
    """Return the parser class registered for *url*'s domain.

    Raises KeyError when no parser handles the domain.  The domain is the
    third ``/``-separated component, e.g. ``http://host/path`` -> ``host``.
    """
    return parser_dict[url.split('/')[2]]

# Each feeder places URLs into the database to be checked periodically.
# (Plain list() instead of the previous identity comprehension.)
parsers = list(parser_dict.values())

__all__ = ['parsers', 'get_parser']
|
Add tvb Parser to the init
|
Add tvb Parser to the init
|
Python
|
mit
|
code4hk/hk-news-scrapper
|
a90889b773010d2fe2ed1dff133f951c0b5baea4
|
demo/__init__.py
|
demo/__init__.py
|
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
PYTHON_VERSION = 2, 7
import sys
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
"""Package for PythonTemplateDemo."""
import sys
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
PYTHON_VERSION = 2, 7
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
Deploy Travis CI build 387 to GitHub
|
Deploy Travis CI build 387 to GitHub
|
Python
|
mit
|
jacebrowning/template-python-demo
|
7e738ddbc1a4585f92e605369f8d6dc1d986dbec
|
scripts/get_cuda_version.py
|
scripts/get_cuda_version.py
|
import os
nvcc_version_cmd = 'nvcc -V > output.txt'
os.system(nvcc_version_cmd)
with open('output.txt') as f:
lines = f.readlines()
for line in lines:
if ", release" in line:
start = line.index(', release') + 10
end = line.index('.', start)
result = line[start:end]
print(result)
quit()
|
import os
nvcc_version_cmd = 'nvcc -V > output.txt'
os.system(nvcc_version_cmd)
with open('output.txt') as f:
lines = f.readlines()
for line in lines:
if ", release" in line:
start = line.index(', release') + 10
end = line.index('.', start)
result = line[start:end]
print(result)
os.remove("output.txt")
quit()
|
Remove output.txt file after done
|
Remove output.txt file after done
After we are done detecting nvcc version, let's delete the temporary output.txt file.
|
Python
|
mit
|
GOMC-WSU/GOMC,GOMC-WSU/GOMC,GOMC-WSU/GOMC,GOMC-WSU/GOMC,GOMC-WSU/GOMC
|
a6f0b0db3e32c71e89d73db8997308e67aae294f
|
setup_cython.py
|
setup_cython.py
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
core = Extension(
'geopy.core',
["geopy/core.pyx"],
language='c++',
libraries=['stdc++'],
)
setup(
cmdclass = {'build_ext': build_ext},
include_dirs = [],
ext_modules = [core]
)
|
import os
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
core = Extension(
'geometry.core',
[os.path.join("geometry", "core.pyx")],
language='c++',
libraries=['stdc++'],
include_dirs = ['.'],
)
setup(
cmdclass = {'build_ext': build_ext},
include_dirs = [],
ext_modules = [core]
)
|
Make module path OS independent by using os.path.join
|
Make module path OS independent by using os.path.join
|
Python
|
bsd-3-clause
|
FRidh/python-geometry
|
0571864eb2d99b746386ace721b8e218f127c6ac
|
email_obfuscator/templatetags/email_obfuscator.py
|
email_obfuscator/templatetags/email_obfuscator.py
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
register = template.Library()
def obfuscate_string(value):
return ''.join(['&#%s;'.format(str(ord(char))) for char in value])
@register.filter
@stringfilter
def obfuscate(value):
return mark_safe(obfuscate_string(value))
@register.filter
@stringfilter
def obfuscate_mailto(value, text=False):
mail = obfuscate_string(value)
if text:
link_text = text
else:
link_text = mail
return mark_safe('<a href="%s%s">%s</a>'.format(
obfuscate_string('mailto:'), mail, link_text))
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
register = template.Library()
def obfuscate_string(value):
    """Return *value* with every character replaced by its decimal HTML
    entity (e.g. ``'a'`` -> ``'&#97;'``), obscuring addresses from naive
    scrapers while rendering identically in a browser."""
    # ``ord`` already yields an int, so format it directly with the ``d``
    # presentation type (the old ``str(ord(c))`` + ``:s`` was redundant);
    # a generator avoids building a throwaway list.
    return ''.join('&#{0:d};'.format(ord(char)) for char in value)
@register.filter
@stringfilter
def obfuscate(value):
return mark_safe(obfuscate_string(value))
@register.filter
@stringfilter
def obfuscate_mailto(value, text=False):
mail = obfuscate_string(value)
if text:
link_text = text
else:
link_text = mail
return mark_safe('<a href="{0:s}{1:s}">{2:s}</a>'.format(
obfuscate_string('mailto:'), mail, link_text))
|
Fix mixup of old and new-style string formatting
|
Fix mixup of old and new-style string formatting
|
Python
|
mit
|
morninj/django-email-obfuscator
|
41ac7e2d85126c2fe5dd16230ed678d72a8d048f
|
jax/__init__.py
|
jax/__init__.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1')
version_file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"version.py")
with open(version_file) as f:
exec(f.read(), globals())
from jax.api import *
import jax.numpy as np # side-effecting import sets up operator overloads
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1')
from jax.version import __version__
from jax.api import *
import jax.numpy as np # side-effecting import sets up operator overloads
|
Use a regular import to add jax.__version__ rather than exec() trickery.
|
Use a regular import to add jax.__version__ rather than exec() trickery.
(The exec() trickery is needed for setup.py, but not for jax/__init__.py.)
|
Python
|
apache-2.0
|
tensorflow/probability,google/jax,google/jax,google/jax,google/jax,tensorflow/probability
|
e1c6b7c369395208b467fcf169b6e3d0eb8c8dd9
|
src/rlib/string_stream.py
|
src/rlib/string_stream.py
|
from rpython.rlib.streamio import Stream, StreamError
class StringStream(Stream):
    """An in-memory, read-only stream over a Python string.

    Implements the ``rpython.rlib.streamio.Stream`` interface; write and
    truncate are rejected because the backing string is immutable.
    """

    def __init__(self, string):
        self._string = string
        self.pos = 0
        # Index of the last valid character (len - 1).
        self.max = len(string) - 1

    def write(self, data):
        """Always raise: the stream is read-only."""
        raise StreamError("StringStream is not writable")

    def truncate(self, size):
        """Always raise: the backing string cannot be resized."""
        raise StreamError("StringStream is immutable")

    def peek(self):
        """Return the unread remainder without advancing the offset."""
        # BUG FIX (off-by-one): pos == self.max still has one character
        # left to read, so use <= — the previous strict < silently dropped
        # the final character of the stream.
        if self.pos <= self.max:
            return self._string[self.pos:]
        else:
            return ''

    def tell(self):
        """Return the current read offset."""
        return self.pos

    def seek(self, offset, whence):
        """Move the read offset, clamped at 0.

        whence 0 = absolute, 1 = relative to current position,
        2 = relative to the last index (len - 1).
        """
        if whence == 0:
            self.pos = max(0, offset)
        elif whence == 1:
            self.pos = max(0, self.pos + offset)
        elif whence == 2:
            self.pos = max(0, self.max + offset)
        else:
            raise StreamError("seek(): whence must be 0, 1 or 2")

    def read(self, n):
        """Read and return up to ``n`` characters, advancing the offset."""
        assert isinstance(n, int)
        end = self.pos + n
        # Guard against a negative n: a negative slice end would wrap
        # around and return the wrong span.
        assert end >= 0
        data = self._string[self.pos:end]
        self.pos += len(data)
        return data
|
from rpython.rlib.streamio import Stream, StreamError
class StringStream(Stream):
    """An in-memory, read-only stream over a Python string.

    Implements the ``rpython.rlib.streamio.Stream`` interface; write and
    truncate are rejected because the backing string is immutable.
    """

    def __init__(self, string):
        self._string = string
        self.pos = 0
        # Index of the last valid character (len - 1).
        self.max = len(string) - 1

    def write(self, data):
        """Always raise: the stream is read-only."""
        raise StreamError("StringStream is not writable")

    def truncate(self, size):
        """Always raise: the backing string cannot be resized."""
        raise StreamError("StringStream is immutable")

    def tell(self):
        """Return the current read offset."""
        return self.pos

    def seek(self, offset, whence):
        """Move the read offset, clamped at 0.

        whence 0 = absolute, 1 = relative to current position,
        2 = relative to the last index (len - 1).
        """
        if whence == 0:
            self.pos = max(0, offset)
        elif whence == 1:
            self.pos = max(0, self.pos + offset)
        elif whence == 2:
            self.pos = max(0, self.max + offset)
        else:
            raise StreamError("seek(): whence must be 0, 1 or 2")

    def read(self, n):
        """Read and return up to ``n`` characters, advancing the offset."""
        assert isinstance(n, int)
        end = self.pos + n
        # Non-negative slice end: guards against a negative n producing
        # Python's wrap-around slice semantics (and satisfies RPython).
        assert end >= 0
        data = self._string[self.pos:end]
        self.pos += len(data)
        return data
|
Fix StringStream to conform to latest pypy
|
Fix StringStream to conform to latest pypy
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
Python
|
mit
|
SOM-st/RPySOM,smarr/RTruffleSOM,smarr/RTruffleSOM,smarr/PySOM,SOM-st/RPySOM,SOM-st/RTruffleSOM,SOM-st/PySOM,SOM-st/RTruffleSOM,SOM-st/PySOM,smarr/PySOM
|
98925a82dfb45a4c76496cd11af8d1483a678e6e
|
sigh/views/api.py
|
sigh/views/api.py
|
import json
from functools import wraps
from flask import Blueprint
from flask import Response
from ..models import Tag
api_views = Blueprint('api', __name__, url_prefix='/api/')
def jsonify(func):
@wraps(func)
def _(*args, **kwargs):
result = func(*args, **kwargs)
return Response(json.dumps(result), mimetype='application/json')
return _
@api_views.route('tag/autocompletion/<q>')
@jsonify
def autocomplete_tag(q):
tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()
tags = [tag.to_dict('id_', 'display_name') for tag in tags]
return tags
|
import json
from functools import wraps
from flask import Blueprint
from flask import Response
from ..models import Tag
from ..models import User
api_views = Blueprint('api', __name__, url_prefix='/api/')
def jsonify(func):
@wraps(func)
def _(*args, **kwargs):
result = func(*args, **kwargs)
return Response(json.dumps(result), mimetype='application/json')
return _
@api_views.route('tag/autocompletion/<q>')
@jsonify
def autocomplete_tag(q):
tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()
tags = [tag.to_dict('id_', 'display_name') for tag in tags]
return tags
@api_views.route('user/autocompletion/<q>')
@jsonify
def autocomplete_user(q):
users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all()
users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users]
return users
|
Create a new API for User autocompletion
|
Create a new API for User autocompletion
|
Python
|
mit
|
kxxoling/Programmer-Sign,kxxoling/Programmer-Sign,kxxoling/Programmer-Sign
|
83cc9a5304e41e4ce517cfc739238a37f13f626a
|
matchzoo/data_pack/build_unit_from_data_pack.py
|
matchzoo/data_pack/build_unit_from_data_pack.py
|
from tqdm import tqdm
from .data_pack import DataPack
from matchzoo import processor_units
def build_unit_from_data_pack(
unit: processor_units.StatefulProcessorUnit,
data_pack: DataPack, flatten: bool = True,
verbose: int = 1
) -> processor_units.StatefulProcessorUnit:
"""
Build a :class:`StatefulProcessorUnit` from a :class:`DataPack` object.
:param unit: :class:`StatefulProcessorUnit` object to be built.
:param data_pack: The input :class:`DataPack` object.
:param flatten: Flatten the datapack or not. `True` to organize the
:class:`DataPack` text as a list, and `False` to organize
:class:`DataPack` text as a list of list.
:param verbose: Verbosity.
:return: A built :class:`StatefulProcessorUnit` object.
"""
corpus = []
if flatten:
data_pack.apply_on_text(corpus.extend, verbose=verbose)
else:
data_pack.apply_on_text(corpus.append, verbose=verbose)
if verbose:
description = 'Building ' + unit.__class__.__name__ + \
' from a datapack.'
corpus = tqdm(corpus, desc=description)
unit.fit(corpus)
return unit
|
"""Build unit from data pack."""
from tqdm import tqdm
from matchzoo import processor_units
from .data_pack import DataPack
def build_unit_from_data_pack(
unit: processor_units.StatefulProcessorUnit,
data_pack: DataPack, flatten: bool = True,
verbose: int = 1
) -> processor_units.StatefulProcessorUnit:
"""
Build a :class:`StatefulProcessorUnit` from a :class:`DataPack` object.
:param unit: :class:`StatefulProcessorUnit` object to be built.
:param data_pack: The input :class:`DataPack` object.
:param flatten: Flatten the datapack or not. `True` to organize the
:class:`DataPack` text as a list, and `False` to organize
:class:`DataPack` text as a list of list.
:param verbose: Verbosity.
:return: A built :class:`StatefulProcessorUnit` object.
"""
corpus = []
if flatten:
data_pack.apply_on_text(corpus.extend, verbose=verbose)
else:
data_pack.apply_on_text(corpus.append, verbose=verbose)
if verbose:
description = 'Building ' + unit.__class__.__name__ + \
' from a datapack.'
corpus = tqdm(corpus, desc=description)
unit.fit(corpus)
return unit
|
Update docs for build unit.
|
Update docs for build unit.
|
Python
|
apache-2.0
|
faneshion/MatchZoo,faneshion/MatchZoo
|
880b5257d549c2150d8888a2f062acd9cc948480
|
array/is-crypt-solution.py
|
array/is-crypt-solution.py
|
# You have an array of strings crypt, the cryptarithm, and an an array containing the mapping of letters and digits, solution. The array crypt will contain three non-empty strings that follow the structure: [word1, word2, word3], which should be interpreted as the word1 + word2 = word3 cryptarithm
# Write a solution where if crypt, when it is decoded by replacing all of the letters in the cryptarithm with digits using the mapping in solution, becomes a valid arithmetic equation containing no numbers with leading zeroes, the answer is true. If it does not become a valid arithmetic solution, the answer is false
def isCryptSolution(crypt, solution):
# map letters to given numbers
dic = {}
for key in solution:
dic[key[0]] = int(key[1])
# generate input strings into numbers
arr = []
for string in crypt:
arr.append(0)
for letter in string:
arr[-1] = arr[-1]*10 + dic[letter]
|
# You have an array of strings crypt, the cryptarithm, and an an array containing the mapping of letters and digits, solution. The array crypt will contain three non-empty strings that follow the structure: [word1, word2, word3], which should be interpreted as the word1 + word2 = word3 cryptarithm
# Write a solution where if crypt, when it is decoded by replacing all of the letters in the cryptarithm with digits using the mapping in solution, becomes a valid arithmetic equation containing no numbers with leading zeroes, the answer is true. If it does not become a valid arithmetic solution, the answer is false
def isCryptSolution(crypt, solution):
# map letters to given numbers
dic = {}
for key in solution:
dic[key[0]] = int(key[1])
# generate input strings into numbers
arr = []
for string in crypt:
arr.append(0)
for letter in string:
arr[-1] = arr[-1]*10 + dic[letter]
# check if sum of decoded numbers of first and second strings equal to decoded number of third string
if arr[0] + arr[1] == arr[2]:
if len(`arr[0]`) == len(crypt[0]): # check if decoded number of first string has any leading zero
if len(`arr[1]`) == len(crypt[1]): # check if decoded number of second string has any leading zero
if len(`arr[2]`) == len(crypt[2]): # check if decoded number of third string has any leading zero
return True
return False
|
Check if sum of decoded numbers of first and second strings equal to decoded number of third string
|
Check if sum of decoded numbers of first and second strings equal to decoded number of third string
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
fef260c3731408592fd88e73817fe0f0cd7fe769
|
telemetry/telemetry/core/chrome/inspector_memory_unittest.py
|
telemetry/telemetry/core/chrome/inspector_memory_unittest.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.test import tab_test_case
class InspectorMemoryTest(tab_test_case.TabTestCase):
def testGetDOMStats(self):
unittest_data_dir = os.path.join(os.path.dirname(__file__),
'..', '..', '..', 'unittest_data')
self._browser.SetHTTPServerDirectories(unittest_data_dir)
self._tab.Navigate(
self._browser.http_server.UrlOf('dom_counter_sample.html'))
self._tab.WaitForDocumentReadyStateToBeComplete()
counts = self._tab.dom_stats
self.assertEqual(counts['document_count'], 1)
self.assertEqual(counts['node_count'], 14)
self.assertEqual(counts['event_listener_count'], 2)
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.test import tab_test_case
class InspectorMemoryTest(tab_test_case.TabTestCase):
def testGetDOMStats(self):
unittest_data_dir = os.path.join(os.path.dirname(__file__),
'..', '..', '..', 'unittest_data')
self._browser.SetHTTPServerDirectories(unittest_data_dir)
# Due to an issue with CrOS, we create a new tab here rather than
# using self._tab to get a consistent starting page on all platforms
tab = self._browser.tabs.New()
tab.Navigate(
self._browser.http_server.UrlOf('dom_counter_sample.html'))
tab.WaitForDocumentReadyStateToBeComplete()
counts = tab.dom_stats
self.assertEqual(counts['document_count'], 2)
self.assertEqual(counts['node_count'], 18)
self.assertEqual(counts['event_listener_count'], 2)
|
Fix InspectorMemoryTest.testGetDOMStats to have consistent behaviour on CrOS and desktop versions of Chrome. Starting the browser in CrOS requires navigating through an initial setup that does not leave us with a tab at "chrome://newtab". This workaround runs the test in a new tab on all platforms for consistency.
|
Fix InspectorMemoryTest.testGetDOMStats to have consistent
behaviour on CrOS and desktop versions of Chrome. Starting the
browser in CrOS requires navigating through an initial setup
that does not leave us with a tab at "chrome://newtab". This workaround
runs the test in a new tab on all platforms for consistency.
BUG=235634
TEST=InspectorMemoryTest.testGetDOMStats passes on cros and system
NOTRY=true
Review URL: https://chromiumcodereview.appspot.com/14672002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@197490 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm
|
3a0c7caadb46a69fb29fe34bd64de28c9b263fd6
|
restconverter.py
|
restconverter.py
|
# -*- coding: utf-8 -*-
"""
flaskjk.restconverter
~~~~~~~~~~~~~~~~~~~~~
Helper functions for converting RestructuredText
This class heavily depends on the functionality provided by the docutils
package.
:copyright: (c) 2010 by Jochem Kossen.
:license: BSD, see LICENSE for more details.
"""
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
|
# -*- coding: utf-8 -*-
"""
flaskjk.restconverter
~~~~~~~~~~~~~~~~~~~~~
Helper functions for converting RestructuredText
This class heavily depends on the functionality provided by the docutils
package.
See http://wiki.python.org/moin/ReStructuredText for more information
:copyright: (c) 2010 by Jochem Kossen.
:license: BSD, see LICENSE for more details.
"""
from docutils import core
from docutils.writers.html4css1 import Writer, HTMLTranslator
class HTMLFragmentTranslator(HTMLTranslator):
def __init__(self, document):
HTMLTranslator.__init__(self, document)
self.head_prefix = ['','','','','']
self.body_prefix = []
self.body_suffix = []
self.stylesheet = []
def astext(self):
return ''.join(self.body)
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator
def rest_to_html(s):
"""Convert ReST input to HTML output"""
return core.publish_string(s, writer=html_fragment_writer)
def rest_to_html_fragment(s):
parts = core.publish_parts(
source=s,
writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
Add rest_to_html_fragment to be able to convert just the body part
|
Add rest_to_html_fragment to be able to convert just the body part
|
Python
|
bsd-2-clause
|
jkossen/flaskjk
|
17db9de51b816210728db7f58685b7d8e5545c65
|
src/__init__.py
|
src/__init__.py
|
from pkg_resources import get_distribution
import codecs
import json
__version__ = get_distribution('rasa_nlu').version
class Interpreter(object):
def parse(self, text):
raise NotImplementedError()
@staticmethod
def load_synonyms(entity_synonyms):
if entity_synonyms:
with codecs.open(entity_synonyms, encoding='utf-8') as infile:
return json.loads(infile.read())
@staticmethod
def replace_synonyms(entities, entity_synonyms):
for i in range(len(entities)):
entity_value = entities[i]["value"]
if (type(entity_value) == unicode and type(entity_synonyms) == unicode and
entity_value.lower() in entity_synonyms):
entities[i]["value"] = entity_synonyms[entity_value]
|
from pkg_resources import get_distribution
import codecs
import json
__version__ = get_distribution('rasa_nlu').version
class Interpreter(object):
def parse(self, text):
raise NotImplementedError()
@staticmethod
def load_synonyms(entity_synonyms):
if entity_synonyms:
with codecs.open(entity_synonyms, encoding='utf-8') as infile:
return json.loads(infile.read())
@staticmethod
def replace_synonyms(entities, entity_synonyms):
for i in range(len(entities)):
entity_value = entities[i]["value"]
if entity_value.lower() in entity_synonyms:
entities[i]["value"] = entity_synonyms[entity_value.lower()]
|
Fix entity dict access key
|
Fix entity dict access key
|
Python
|
apache-2.0
|
RasaHQ/rasa_nlu,beeva-fernandocerezal/rasa_nlu,beeva-fernandocerezal/rasa_nlu,verloop/rasa_nlu,PHLF/rasa_nlu,verloop/rasa_nlu,PHLF/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu
|
2a950c91416d3b92a91f4f245a37a95b418b4bab
|
custom/uth/tasks.py
|
custom/uth/tasks.py
|
from custom.uth.utils import create_case, match_case, attach_images_to_case, submit_error_case
from custom.uth.models import SonositeUpload, VscanUpload
from celery.task import task
import io
def get_files_from_doc(doc):
files = {}
for f in doc._attachments.keys():
files[f] = io.BytesIO(doc.fetch_attachment(f))
return files
@task
def async_create_case(upload_id):
upload_doc = SonositeUpload.get(upload_id)
files = get_files_from_doc(upload_doc)
create_case(upload_doc.related_case_id, files)
# TODO delete doc if processing is successful
@task
def async_find_and_attach(upload_id):
try:
upload_doc = VscanUpload.get(upload_id)
files = get_files_from_doc(upload_doc)
case = match_case(
upload_doc.scanner_serial,
upload_doc.scan_id,
# upload_doc.date
)
if case:
files = {}
for f in upload_doc._attachments.keys():
files[f] = io.BytesIO(upload_doc.fetch_attachment(f))
attach_images_to_case(case._id, files)
else:
return -1
# TODO delete doc if successful
except:
# mark the case as having errored (if we know what it is)
# but reraise the error since we don't want to hide it
if case:
submit_error_case(case._id)
raise
|
from custom.uth.utils import create_case, match_case, attach_images_to_case, submit_error_case
from custom.uth.models import SonositeUpload, VscanUpload
from celery.task import task
import io
def get_files_from_doc(doc):
files = {}
for f in doc._attachments.keys():
files[f] = io.BytesIO(doc.fetch_attachment(f))
return files
@task
def async_create_case(upload_id):
upload_doc = SonositeUpload.get(upload_id)
files = get_files_from_doc(upload_doc)
create_case(upload_doc.related_case_id, files)
upload_doc.delete()
@task
def async_find_and_attach(upload_id):
case = None
try:
upload_doc = VscanUpload.get(upload_id)
files = get_files_from_doc(upload_doc)
case = match_case(
upload_doc.scanner_serial,
upload_doc.scan_id,
)
if case:
files = {}
for f in upload_doc._attachments.keys():
files[f] = io.BytesIO(upload_doc.fetch_attachment(f))
attach_images_to_case(case._id, files)
else:
return -1
upload_doc.delete()
except:
# mark the case as having errored (if we know what it is)
# but reraise the error since we don't want to hide it
if case:
submit_error_case(case._id)
raise
|
Delete docs on task completion
|
Delete docs on task completion
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq
|
5bb84d5eac353cd4bbe1843fccaca64161830591
|
savu/__init__.py
|
savu/__init__.py
|
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Base level for Savu
use with :
import savu
.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>
"""
|
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Base level for Savu
use with :
import savu
.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>
"""
from . import core
from . import data
from . import plugins
|
Update to make import of savu a little more useful
|
Update to make import of savu a little more useful
|
Python
|
apache-2.0
|
mjn19172/Savu,swtp1v07/Savu,swtp1v07/Savu,swtp1v07/Savu,mjn19172/Savu,swtp1v07/Savu,mjn19172/Savu,mjn19172/Savu,mjn19172/Savu
|
43ae9bdec900081d6ff91fc3847a4d8d9a42eaeb
|
contrib/plugins/w3cdate.py
|
contrib/plugins/w3cdate.py
|
"""
Add a 'w3cdate' key to every entry -- this contains the date in ISO8601 format
WARNING: you must have PyXML installed as part of your python installation
in order for this plugin to work
Place this plugin early in your load_plugins list, so that the w3cdate will
be available to subsequent plugins
"""
__author__ = "Ted Leung <twl@sauria.com>"
__version__ = "$Id:"
__copyright__ = "Copyright (c) 2003 Ted Leung"
__license__ = "Python"
import xml.utils.iso8601
import time
def cb_prepare(args):
request = args["request"]
form = request.getHttp()['form']
config = request.getConfiguration()
data = request.getData()
entry_list = data['entry_list']
for i in range(len(entry_list)):
entry = entry_list[i]
entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(entry['timetuple']))
|
"""
Add a 'w3cdate' key to every entry -- this contains the date in ISO8601 format
WARNING: you must have PyXML installed as part of your python installation
in order for this plugin to work
Place this plugin early in your load_plugins list, so that the w3cdate will
be available to subsequent plugins
"""
__author__ = "Ted Leung <twl@sauria.com>"
__version__ = "$Id:"
__copyright__ = "Copyright (c) 2003 Ted Leung"
__license__ = "Python"
import xml.utils.iso8601
import time
def cb_prepare(args):
request = args["request"]
form = request.getHttp()['form']
config = request.getConfiguration()
data = request.getData()
entry_list = data['entry_list']
for i in range(len(entry_list)):
entry = entry_list[i]
t = entry['timetuple']
# adjust for daylight savings time
t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8]
entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t))
|
Fix daylight savings time bug
|
Fix daylight savings time bug
|
Python
|
mit
|
daitangio/pyblosxom,daitangio/pyblosxom,willkg/douglas,willkg/douglas
|
e0a6ea3d48691bedfb39a0a92d569ea4aaf61810
|
pavement.py
|
pavement.py
|
import paver.doctools
import paver.setuputils
from schevo.release import setup_meta
options(
setup=setup_meta,
sphinx=Bunch(
docroot='doc',
builddir='build',
sourcedir='source',
),
)
@task
@needs('paver.doctools.html')
def openhtml():
index_file = path('doc/build/html/index.html')
sh('open ' + index_file)
|
from schevo.release import setup_meta
options(
setup=setup_meta,
sphinx=Bunch(
docroot='doc',
builddir='build',
sourcedir='source',
),
)
try:
import paver.doctools
except ImportError:
pass
else:
@task
@needs('paver.doctools.html')
def openhtml():
index_file = path('doc/build/html/index.html')
sh('open ' + index_file)
|
Make paver.doctools optional, to allow for downloading of ==dev eggs
|
Make paver.doctools optional, to allow for downloading of ==dev eggs
Signed-off-by: Matthew R. Scott <878b2bb7d7b44067d87275810e479f4abd7737ae@gmail.com>
|
Python
|
mit
|
Schevo/schevo,Schevo/schevo
|
f408d7e61753ecdeb280e59ecb35485385ec3f6a
|
Tools/compiler/compile.py
|
Tools/compiler/compile.py
|
import sys
import getopt
from compiler import compile, visitor
##import profile
def main():
VERBOSE = 0
DISPLAY = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdc')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
compile(filename, DISPLAY)
## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
## filename + ".prof")
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
import sys
import getopt
from compiler import compile, visitor
import profile
def main():
VERBOSE = 0
DISPLAY = 0
PROFILE = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdcp')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if k == '-p':
PROFILE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
if PROFILE:
profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
filename + ".prof")
else:
compile(filename, DISPLAY)
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
Add -p option to invoke Python profiler
|
Add -p option to invoke Python profiler
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
a67176ba0ba06d1a7cfff5d8e21446bb78a30518
|
subscription/api.py
|
subscription/api.py
|
from tastypie import fields
from tastypie.resources import ModelResource, ALL
from tastypie.authentication import ApiKeyAuthentication
from tastypie.authorization import Authorization
from subscription.models import Subscription, MessageSet
from djcelery.models import PeriodicTask
class PeriodicTaskResource(ModelResource):
class Meta:
queryset = PeriodicTask.objects.all()
resource_name = 'periodic_task'
list_allowed_methods = ['get']
include_resource_uri = True
always_return_data = True
authentication = ApiKeyAuthentication()
class MessageSetResource(ModelResource):
class Meta:
queryset = MessageSet.objects.all()
resource_name = 'message_set'
list_allowed_methods = ['get']
include_resource_uri = True
always_return_data = True
authentication = ApiKeyAuthentication()
class SubscriptionResource(ModelResource):
schedule = fields.ToOneField(PeriodicTaskResource, 'schedule')
message_set = fields.ToOneField(MessageSetResource, 'message_set')
class Meta:
queryset = Subscription.objects.all()
resource_name = 'subscription'
list_allowed_methods = ['post', 'get']
include_resource_uri = True
always_return_data = True
authentication = ApiKeyAuthentication()
authorization = Authorization()
filtering = {
'to_addr': ALL,
'user_account': ALL
}
|
from tastypie import fields
from tastypie.resources import ModelResource, ALL
from tastypie.authentication import ApiKeyAuthentication
from tastypie.authorization import Authorization
from subscription.models import Subscription, MessageSet
from djcelery.models import PeriodicTask
class PeriodicTaskResource(ModelResource):
class Meta:
queryset = PeriodicTask.objects.all()
resource_name = 'periodic_task'
list_allowed_methods = ['get']
include_resource_uri = True
always_return_data = True
authentication = ApiKeyAuthentication()
class MessageSetResource(ModelResource):
class Meta:
queryset = MessageSet.objects.all()
resource_name = 'message_set'
list_allowed_methods = ['get']
include_resource_uri = True
always_return_data = True
authentication = ApiKeyAuthentication()
class SubscriptionResource(ModelResource):
schedule = fields.ToOneField(PeriodicTaskResource, 'schedule')
message_set = fields.ToOneField(MessageSetResource, 'message_set')
class Meta:
queryset = Subscription.objects.all()
resource_name = 'subscription'
list_allowed_methods = ['post', 'get', 'put', 'patch']
include_resource_uri = True
always_return_data = True
authentication = ApiKeyAuthentication()
authorization = Authorization()
filtering = {
'to_addr': ALL,
'user_account': ALL
}
|
Update tastypie methods allowed for subscriptions
|
Update tastypie methods allowed for subscriptions
|
Python
|
bsd-3-clause
|
praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control
|
dfb6d41be3acf5fc4d4d0f3d8a7fb9d3507e9ae7
|
labware/microplates.py
|
labware/microplates.py
|
from .grid import GridContainer, GridItem
from .liquids import LiquidWell
class Microplate(GridContainer):
rows = 12
cols = 8
volume = 100
min_vol = 50
max_vol = 90
height = 14.45
length = 127.76
width = 85.47
diameter = 7.15
depth = 3.25
a1_x = 14.38
a1_y = 11.24
spacing = 9
child_class = LiquidWell
def well(self, position):
return self.get_child(position)
def calibrate(self, **kwargs):
"""
Coordinates should represent the center and near-bottom of well
A1 with the pipette tip in place.
"""
super(Microplate, self).calibrate(**kwargs)
|
from .grid import GridContainer, GridItem
from .liquids import LiquidWell
class Microplate(GridContainer):
rows = 12
cols = 8
volume = 100
min_vol = 50
max_vol = 90
height = 14.45
length = 127.76
width = 85.47
diameter = 7.15
depth = 3.25
a1_x = 14.38
a1_y = 11.24
spacing = 9
child_class = LiquidWell
def well(self, position):
return self.get_child(position)
def calibrate(self, **kwargs):
"""
Coordinates should represent the center and near-bottom of well
A1 with the pipette tip in place.
"""
super(Microplate, self).calibrate(**kwargs)
class Microplate_96(Microplate):
pass
class Microplate_96_Deepwell(Microplate_96):
volume = 400
min_vol = 50
max_vol = 380
height = 14.6
depth = 10.8
|
Revert of af99d4483acb36eda65b; Microplate subsets are special and important.
|
Revert of af99d4483acb36eda65b; Microplate subsets are special and important.
|
Python
|
apache-2.0
|
OpenTrons/opentrons-api,OpenTrons/opentrons-api,Opentrons/labware,OpenTrons/opentrons-api,OpenTrons/opentrons-api,OpenTrons/opentrons_sdk,OpenTrons/opentrons-api
|
abd4859f8bac46fd6d114352ffad4ee9af28aa5f
|
common/lib/xmodule/xmodule/tests/test_mongo_utils.py
|
common/lib/xmodule/xmodule/tests/test_mongo_utils.py
|
"""Tests for methods defined in mongo_utils.py"""
import os
from unittest import TestCase
from uuid import uuid4
from pymongo import ReadPreference
from django.conf import settings
from xmodule.mongo_utils import connect_to_mongodb
class MongoUtilsTests(TestCase):
"""
Tests for methods exposed in mongo_utils
"""
def test_connect_to_mongo_read_preference(self):
"""
Test that read_preference parameter gets converted to a valid pymongo read preference.
"""
host = 'edx.devstack.mongo' if 'BOK_CHOY_HOSTNAME' in os.environ else 'localhost'
db = 'test_read_preference_%s' % uuid4().hex
# Support for read_preference given in constant name form (ie. PRIMARY, SECONDARY_PREFERRED)
connection = connect_to_mongodb(db, host, read_preference='SECONDARY_PREFERRED')
self.assertEqual(connection.client.read_preference, ReadPreference.SECONDARY_PREFERRED)
# Support for read_preference given as mongos name.
connection = connect_to_mongodb(db, host, read_preference='secondaryPreferred')
self.assertEqual(connection.client.read_preference, ReadPreference.SECONDARY_PREFERRED)
|
"""
Tests for methods defined in mongo_utils.py
"""
import ddt
import os
from unittest import TestCase
from uuid import uuid4
from pymongo import ReadPreference
from django.conf import settings
from xmodule.mongo_utils import connect_to_mongodb
@ddt.ddt
class MongoUtilsTests(TestCase):
"""
Tests for methods exposed in mongo_utils
"""
@ddt.data(
('PRIMARY', 'primary', ReadPreference.PRIMARY),
('SECONDARY_PREFERRED', 'secondaryPreferred', ReadPreference.SECONDARY_PREFERRED),
('NEAREST', 'nearest', ReadPreference.NEAREST),
)
@ddt.unpack
def test_connect_to_mongo_read_preference(self, enum_name, mongos_name, expected_read_preference):
"""
Test that read_preference parameter gets converted to a valid pymongo read preference.
"""
host = 'edx.devstack.mongo' if 'BOK_CHOY_HOSTNAME' in os.environ else 'localhost'
db = 'test_read_preference_%s' % uuid4().hex
# Support for read_preference given in constant name form (ie. PRIMARY, SECONDARY_PREFERRED)
connection = connect_to_mongodb(db, host, read_preference=enum_name)
self.assertEqual(connection.client.read_preference, expected_read_preference)
# Support for read_preference given as mongos name.
connection = connect_to_mongodb(db, host, read_preference=mongos_name)
self.assertEqual(connection.client.read_preference, expected_read_preference)
|
Convert test to DDT and test for primary, nearest modes.
|
Convert test to DDT and test for primary, nearest modes.
|
Python
|
agpl-3.0
|
teltek/edx-platform,arbrandes/edx-platform,kmoocdev2/edx-platform,CredoReference/edx-platform,Stanford-Online/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,appsembler/edx-platform,ahmedaljazzar/edx-platform,appsembler/edx-platform,gsehub/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,a-parhom/edx-platform,stvstnfrd/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,jolyonb/edx-platform,ahmedaljazzar/edx-platform,msegado/edx-platform,ESOedX/edx-platform,ESOedX/edx-platform,philanthropy-u/edx-platform,appsembler/edx-platform,kmoocdev2/edx-platform,gsehub/edx-platform,a-parhom/edx-platform,gymnasium/edx-platform,hastexo/edx-platform,ESOedX/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,lduarte1991/edx-platform,gymnasium/edx-platform,kmoocdev2/edx-platform,a-parhom/edx-platform,msegado/edx-platform,Edraak/edraak-platform,procangroup/edx-platform,jolyonb/edx-platform,jolyonb/edx-platform,teltek/edx-platform,edx/edx-platform,cpennington/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,gymnasium/edx-platform,CredoReference/edx-platform,hastexo/edx-platform,procangroup/edx-platform,ahmedaljazzar/edx-platform,BehavioralInsightsTeam/edx-platform,lduarte1991/edx-platform,mitocw/edx-platform,procangroup/edx-platform,eduNEXT/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,lduarte1991/edx-platform,mitocw/edx-platform,hastexo/edx-platform,angelapper/edx-platform,CredoReference/edx-platform,proversity-org/edx-platform,edx-solutions/edx-platform,edx/edx-platform,msegado/edx-platform,philanthropy-u/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,teltek/edx-platform,gsehub/edx-platform,eduNEXT/edunext-platform,Edraak/edraak-platform,TeachAtTUM/edx-platform,Stanford-Online/edx-platform,philanthropy-u/edx-platform,EDUlib/edx-platform,cpennington/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,mitocw/edx-platform,arbrandes/edx-platform,gsehub/edx-platform,proversity-org/edx-platform,eduNEXT/edx-
platform,Stanford-Online/edx-platform,TeachAtTUM/edx-platform,EDUlib/edx-platform,edx/edx-platform,ESOedX/edx-platform,procangroup/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,eduNEXT/edunext-platform,proversity-org/edx-platform,Stanford-Online/edx-platform,arbrandes/edx-platform,teltek/edx-platform,ahmedaljazzar/edx-platform,mitocw/edx-platform,BehavioralInsightsTeam/edx-platform,Edraak/edraak-platform,philanthropy-u/edx-platform,BehavioralInsightsTeam/edx-platform,TeachAtTUM/edx-platform,Edraak/edraak-platform,CredoReference/edx-platform,a-parhom/edx-platform,msegado/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,edx-solutions/edx-platform,hastexo/edx-platform,stvstnfrd/edx-platform,stvstnfrd/edx-platform,lduarte1991/edx-platform,TeachAtTUM/edx-platform,edx-solutions/edx-platform
|
956cb919554c8103149fa6442254bdfed0ce32d1
|
lms/djangoapps/experiments/factories.py
|
lms/djangoapps/experiments/factories.py
|
import factory
from experiments.models import ExperimentData, ExperimentKeyValue
from student.tests.factories import UserFactory
class ExperimentDataFactory(factory.DjangoModelFactory):
class Meta(object):
model = ExperimentData
user = factory.SubFactory(UserFactory)
experiment_id = factory.fuzzy.FuzzyInteger(0)
key = factory.Sequence(lambda n: n)
value = factory.Faker('word')
class ExperimentKeyValueFactory(factory.DjangoModelFactory):
class Meta(object):
model = ExperimentKeyValue
experiment_id = factory.fuzzy.FuzzyInteger(0)
key = factory.Sequence(lambda n: n)
value = factory.Faker('word')
|
import factory
import factory.fuzzy
from experiments.models import ExperimentData, ExperimentKeyValue
from student.tests.factories import UserFactory
class ExperimentDataFactory(factory.DjangoModelFactory):
class Meta(object):
model = ExperimentData
user = factory.SubFactory(UserFactory)
experiment_id = factory.fuzzy.FuzzyInteger(0)
key = factory.Sequence(lambda n: n)
value = factory.Faker('word')
class ExperimentKeyValueFactory(factory.DjangoModelFactory):
class Meta(object):
model = ExperimentKeyValue
experiment_id = factory.fuzzy.FuzzyInteger(0)
key = factory.Sequence(lambda n: n)
value = factory.Faker('word')
|
Add an import of a submodule to make pytest less complainy
|
Add an import of a submodule to make pytest less complainy
|
Python
|
agpl-3.0
|
angelapper/edx-platform,TeachAtTUM/edx-platform,a-parhom/edx-platform,CredoReference/edx-platform,gsehub/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,stvstnfrd/edx-platform,eduNEXT/edx-platform,TeachAtTUM/edx-platform,Stanford-Online/edx-platform,a-parhom/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,msegado/edx-platform,lduarte1991/edx-platform,kmoocdev2/edx-platform,edx-solutions/edx-platform,kmoocdev2/edx-platform,a-parhom/edx-platform,Edraak/edraak-platform,cpennington/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,gymnasium/edx-platform,hastexo/edx-platform,stvstnfrd/edx-platform,lduarte1991/edx-platform,hastexo/edx-platform,appsembler/edx-platform,gsehub/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,jolyonb/edx-platform,a-parhom/edx-platform,Edraak/edraak-platform,gymnasium/edx-platform,kmoocdev2/edx-platform,teltek/edx-platform,teltek/edx-platform,TeachAtTUM/edx-platform,lduarte1991/edx-platform,msegado/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,cpennington/edx-platform,teltek/edx-platform,mitocw/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,jolyonb/edx-platform,edx-solutions/edx-platform,philanthropy-u/edx-platform,arbrandes/edx-platform,stvstnfrd/edx-platform,EDUlib/edx-platform,proversity-org/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,gymnasium/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,angelapper/edx-platform,kmoocdev2/edx-platform,gymnasium/edx-platform,cpennington/edx-platform,teltek/edx-platform,hastexo/edx-platform,CredoReference/edx-platform,procangroup/edx-platform,arbrandes/edx-platform,Stanford-Online/edx-platform,Edraak/edraak-platform,EDUlib/edx-platform,proversity-org/edx-platform,Stanford-Online/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,angelapper/edx-platform,procangroup/edx-platform,ahmedaljazzar/edx-platform,ahmedaljazzar/edx-platform,CredoRe
ference/edx-platform,BehavioralInsightsTeam/edx-platform,gsehub/edx-platform,edx/edx-platform,EDUlib/edx-platform,mitocw/edx-platform,appsembler/edx-platform,Edraak/edraak-platform,mitocw/edx-platform,arbrandes/edx-platform,appsembler/edx-platform,proversity-org/edx-platform,eduNEXT/edunext-platform,philanthropy-u/edx-platform,ESOedX/edx-platform,edx/edx-platform,ESOedX/edx-platform,jolyonb/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,procangroup/edx-platform,msegado/edx-platform,TeachAtTUM/edx-platform,philanthropy-u/edx-platform,procangroup/edx-platform,hastexo/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,proversity-org/edx-platform,ESOedX/edx-platform,BehavioralInsightsTeam/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,CredoReference/edx-platform,lduarte1991/edx-platform,philanthropy-u/edx-platform
|
ea3e9270788b251440b5f6fab1605361e0dc2ade
|
inonemonth/challenges/tests/test_forms.py
|
inonemonth/challenges/tests/test_forms.py
|
import unittest
import django.test
from django.core.exceptions import ValidationError
from core.tests.setups import RobrechtSocialUserFactory
from ..validators import RepoExistanceValidator
###############################################################################
# Forms #
###############################################################################
'''
from ..forms import InvestmentModelForm
class InvestmentModelFormTestCase(TestCase):
"""
Tests for InvestmentModelForm
"""
def test_initial_value_of_investor_type(self):
"""
Verify initial value of investor_type field of InvestmentModelForm.
"""
investor_type_initial = InvestmentModelForm().fields["investor_type"].initial
self.assertEqual(investor_type_initial, "PERSON")
'''
###############################################################################
# Validators #
###############################################################################
class RepoExistanceValidatorTestCase(django.test.TestCase):
def test_name(self):
user_rob = RobrechtSocialUserFactory()
self.assertRaises(ValidationError, RepoExistanceValidator(user_rob), "asiakas/non_existing_branch")
|
import unittest
import django.test
from django.core.exceptions import ValidationError
from core.tests.setups import RobrechtSocialUserFactory
from ..validators import RepoExistanceValidator
###############################################################################
# Forms #
###############################################################################
'''
from ..forms import InvestmentModelForm
class InvestmentModelFormTestCase(TestCase):
"""
Tests for InvestmentModelForm
"""
def test_initial_value_of_investor_type(self):
"""
Verify initial value of investor_type field of InvestmentModelForm.
"""
investor_type_initial = InvestmentModelForm().fields["investor_type"].initial
self.assertEqual(investor_type_initial, "PERSON")
'''
###############################################################################
# Validators #
###############################################################################
# Test takes longer than average test because of requests call
#@unittest.skip("")
class RepoExistanceValidatorTestCase(django.test.TestCase):
def test_repo_existance_validator(self):
user_rob = RobrechtSocialUserFactory()
self.assertRaises(ValidationError, RepoExistanceValidator(user_rob), "asiakas/non_existing_branch")
|
Add Comment to RepoExistanceValidator test and correct test name
|
Add Comment to RepoExistanceValidator test and correct test name
|
Python
|
mit
|
robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth
|
0bdcb1c36432cfa0506c6dd667e4e1910edcd371
|
ixprofile_client/management/commands/createsuperuser.py
|
ixprofile_client/management/commands/createsuperuser.py
|
"""
A management command to create a user with a given email.
"""
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from ixprofile_client.webservice import UserWebService
from optparse import make_option
class Command(BaseCommand):
"""
The command to create a superuser with a given email.
"""
option_list = BaseCommand.option_list + (
make_option('--email', default=None,
help='Specifies the email for the superuser.'),
make_option('--noinput',
action='store_false',
dest='interactive',
default=True,
help='Tells Django to NOT prompt the user for input of ' +
'any kind. You must use --email with --noinput.'),
)
def handle(self, *args, **options):
interactive = options.get('interactive')
email = options.get('email')
verbosity = int(options.get('verbosity', 1))
if interactive and not email:
email = raw_input("Email: ")
if not email:
raise CommandError("No email given.")
user = User()
user.email = email
user.set_password(None)
user.is_active = True
user.is_staff = True
user.is_superuser = True
user_ws = UserWebService()
user_ws.connect(user)
if verbosity >= 1:
self.stdout.write("Superuser created successfully.")
|
"""
A management command to create a user with a given email.
"""
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from ixprofile_client.webservice import UserWebService
from optparse import make_option
class Command(BaseCommand):
"""
The command to create a superuser with a given email.
"""
option_list = BaseCommand.option_list + (
make_option('--email', default=None,
help='Specifies the email for the superuser.'),
make_option('--noinput',
action='store_false',
dest='interactive',
default=True,
help='Tells Django to NOT prompt the user for input of ' +
'any kind. You must use --email with --noinput.'),
)
def handle(self, *args, **options):
interactive = options.get('interactive')
email = options.get('email')
verbosity = int(options.get('verbosity', 1))
if interactive and not email:
email = raw_input("Email: ")
if not email:
raise CommandError("No email given.")
with transaction.atomic():
user, created = User.objects.get_or_create(email=email)
user.set_password(None)
user.is_active = True
user.is_staff = True
user.is_superuser = True
user_ws = UserWebService()
user_ws.connect(user)
if verbosity >= 1:
if created:
self.stdout.write("Superuser created successfully.")
else:
self.stdout.write("Superuser flag added successfully.")
|
Handle the case where the user may already exist in the database
|
Handle the case where the user may already exist in the database
|
Python
|
mit
|
infoxchange/ixprofile-client,infoxchange/ixprofile-client
|
1cd3096322b5d4b4c4df0f1fba6891e29c911c53
|
spaces/utils.py
|
spaces/utils.py
|
import re
import os
def normalize_path(path):
"""
Normalizes a path:
* Removes extra and trailing slashes
* Converts special characters to underscore
"""
path = re.sub(r'/+', '/', path) # repeated slash
path = re.sub(r'/*$', '', path) # trailing slash
path = [to_slug(p) for p in path.split(os.sep)]
return os.sep.join(path) # preserves leading slash
def to_slug(value):
"""
Convert a string to a URL slug
"""
# Space to dashes
value = re.sub(r'[\s_]+', '-', value)
# Special characters
value = re.sub(r'[^a-z0-9\-]+', '', value, flags=re.I)
# Extra dashes
value = re.sub(r'\-{2,}', '-', value)
value = re.sub(r'(^\-)|(\-$)', '', value)
return value
|
import re
import os
def normalize_path(path):
"""
Normalizes a path:
* Removes extra and trailing slashes
* Converts special characters to underscore
"""
if path is None:
return ""
path = re.sub(r'/+', '/', path) # repeated slash
path = re.sub(r'/*$', '', path) # trailing slash
path = [to_slug(p) for p in path.split(os.sep)]
return os.sep.join(path) # preserves leading slash
def to_slug(value):
""" Convert a string to a URL slug. """
value = value.lower()
# Space to dashes
value = re.sub(r'[\s_]+', '-', value)
# Special characters
value = re.sub(r'[^a-z0-9\-]+', '', value, flags=re.I)
# Extra dashes
value = re.sub(r'\-{2,}', '-', value)
value = re.sub(r'(^\-)|(\-$)', '', value)
return value
|
Convert path to lowercase when normalizing
|
Convert path to lowercase when normalizing
|
Python
|
mit
|
jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces
|
33c03c8d50524dca3b9c5990958a0b44e9fe399e
|
isserviceup/services/models/statuspage.py
|
isserviceup/services/models/statuspage.py
|
import requests
from bs4 import BeautifulSoup
from isserviceup.services.models.service import Service, Status
class StatusPagePlugin(Service):
def get_status(self):
r = requests.get(self.status_url)
if r.status_code != 200:
return Status.unavailable
b = BeautifulSoup(r.content, 'html.parser')
status = next(x for x in b.find(class_='page-status').attrs['class'] if x.startswith('status-'))
if status == 'status-none':
return Status.ok
elif status == 'status-critical':
return Status.critical
elif status == 'status-major':
return Status.major
elif status == 'status-minor':
return Status.minor
elif status == 'status-maintenance':
return Status.maintenance
else:
raise Exception('unexpected status')
|
import requests
from bs4 import BeautifulSoup
from isserviceup.services.models.service import Service, Status
class StatusPagePlugin(Service):
def get_status(self):
r = requests.get(self.status_url)
if r.status_code != 200:
return Status.unavailable
b = BeautifulSoup(r.content, 'html.parser')
page_status = b.find(class_='page-status')
if page_status is None:
if b.find(class_='unresolved-incidents'):
return Status.major
status = next(x for x in page_status.attrs['class'] if x.startswith('status-'))
if status == 'status-none':
return Status.ok
elif status == 'status-critical':
return Status.critical
elif status == 'status-major':
return Status.major
elif status == 'status-minor':
return Status.minor
elif status == 'status-maintenance':
return Status.maintenance
else:
raise Exception('unexpected status')
|
Use unresolved-incidents when page-status is empty
|
Use unresolved-incidents when page-status is empty
|
Python
|
apache-2.0
|
marcopaz/is-service-up,marcopaz/is-service-up,marcopaz/is-service-up
|
e49ac8daeabf82708f2ba7bb623d7db73e1fcaff
|
readthedocs/core/subdomain_urls.py
|
readthedocs/core/subdomain_urls.py
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^projects/(?P<project_slug>[\w.-]+)',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^$', 'core.views.subdomain_handler'),
)
urlpatterns += main_patterns
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^projects/(?P<project_slug>[\w.-]+)',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'core.views.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'core.views.subdomain_handler'),
)
urlpatterns += main_patterns
|
Add verison_slug redirection back in for now.
|
Add verison_slug redirection back in for now.
|
Python
|
mit
|
agjohnson/readthedocs.org,kdkeyser/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,espdev/readthedocs.org,ojii/readthedocs.org,singingwolfboy/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,hach-que/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,GovReady/readthedocs.org,rtfd/readthedocs.org,titiushko/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,mhils/readthedocs.org,kenshinthebattosai/readthedocs.org,CedarLogic/readthedocs.org,SteveViss/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,wijerasa/readthedocs.org,michaelmcandrew/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,mrshoki/readthedocs.org,d0ugal/readthedocs.org,VishvajitP/readthedocs.org,Carreau/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,kenshinthebattosai/readthedocs.org,ojii/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,laplaceliu/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,tddv/readthedocs.org,LukasBoersma/readthedocs.org,sunnyzwh/readthedocs.org,Tazer/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,emawind84/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,attakei/readthedocs-oauth,cgourlay/readthedocs.org,laplaceliu/readthedocs.org,atsuyim/readthedocs.org,es
pdev/readthedocs.org,sid-kap/readthedocs.org,wanghaven/readthedocs.org,raven47git/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,asampat3090/readthedocs.org,sils1297/readthedocs.org,jerel/readthedocs.org,GovReady/readthedocs.org,atsuyim/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,takluyver/readthedocs.org,raven47git/readthedocs.org,singingwolfboy/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,KamranMackey/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,attakei/readthedocs-oauth,takluyver/readthedocs.org,takluyver/readthedocs.org,dirn/readthedocs.org,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,ojii/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,fujita-shintaro/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,agjohnson/readthedocs.org,emawind84/readthedocs.org,cgourlay/readthedocs.org,CedarLogic/readthedocs.org,mrshoki/readthedocs.org,VishvajitP/readthedocs.org,ojii/readthedocs.org,sils1297/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,nyergler/pythonslides,titiushko/readthedocs.org,CedarLogic/readthedocs.org,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,nyergler/pythonslides,hach-que/readthedocs.org,jerel/readthedocs.org,pombredanne/readthedocs.org,nikolas/readthedocs.org,asampat3090/readthedocs.org,soulshake/readthedocs.org,d0ugal/readthedocs.org,LukasBoersma/readthedocs.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,Carreau/readthedocs.org,nikolas/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,soulshake/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,dirn/readthedocs.org,Tazer/readthedocs.org,davidfischer/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,gjtorikian/readth
edocs.org,LukasBoersma/readthedocs.org,kenwang76/readthedocs.org,GovReady/readthedocs.org,stevepiercy/readthedocs.org,mrshoki/readthedocs.org,mrshoki/readthedocs.org,atsuyim/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,gjtorikian/readthedocs.org,mhils/readthedocs.org,kenwang76/readthedocs.org,michaelmcandrew/readthedocs.org,KamranMackey/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,nyergler/pythonslides,wanghaven/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,KamranMackey/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,LukasBoersma/readthedocs.org,asampat3090/readthedocs.org,stevepiercy/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,wanghaven/readthedocs.org,sunnyzwh/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,clarkperkins/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,kdkeyser/readthedocs.org,wijerasa/readthedocs.org
|
0484d3f14f29aa489bc848f1d83a9fb20183532e
|
plaidml/keras/tile_sandbox.py
|
plaidml/keras/tile_sandbox.py
|
from collections import OrderedDict
import numpy as np
import plaidml
import plaidml.keras
plaidml.keras.install_backend()
import keras.backend as K
def main(code, tensor_A, tensor_B, output_shape):
print(K.backend())
op = K._Op('sandbox_op', A.dtype, output_shape, code,
OrderedDict([('A', tensor_A), ('B', tensor_B)]), ['O'])
print(op.eval())
if __name__ == '__main__':
plaidml._internal_set_vlog(3)
A = K.variable(np.array([[1., 2., 3.], [4., 5., 6.]]))
B = K.variable(np.array([-7., -1., 2.]))
# code = """function (A[N, M], B[M]) -> (O) {
# O[i, j: N, M] = =(A[i, j] + B[j]), i/2 + j/2 + 1/2 < 2;
# }"""
# out_shape = (2, 3)
code = """function (A[N, M], B[M]) -> (O) {
O[i: N] = +(A[i - j, 0] + B[0]), j < N;
}"""
out_shape = (3,)
main(code, A, B, out_shape)
|
from collections import OrderedDict
import numpy as np
import plaidml
import plaidml.tile as tile
import plaidml.keras
plaidml.keras.install_backend()
import keras.backend as K
class SandboxOp(tile.Operation):
def __init__(self, code, a, b, output_shape):
super(SandboxOp, self).__init__(code, [('A', a), ('B', b)], [('O', output_shape)])
def main(code, tensor_A, tensor_B, output_shape):
print(K.backend())
op = SandboxOp(code, tensor_A, tensor_B, tile.Shape(plaidml.DType.FLOAT32, output_shape))
print(op.sole_output().shape)
print(op.sole_output().eval())
if __name__ == '__main__':
plaidml._internal_set_vlog(1)
A = K.variable(np.arange(12).reshape(4, 3))
B = K.variable(np.arange(3).reshape(3))
code = """function (A[N, M], B[M]) -> (O) {
O[i, j: N, M] = =(A[i, j] + B[j]), i/2 + j/2 + 1/2 < 2;
}"""
out_shape = (2, 3)
main(code, A, B, out_shape)
|
Update Tile sandbox for op lib
|
Update Tile sandbox for op lib
|
Python
|
apache-2.0
|
plaidml/plaidml,plaidml/plaidml,plaidml/plaidml,plaidml/plaidml
|
583c946061f8af815c32254655f4aed8f0c18dc9
|
watcher/tests/api/test_config.py
|
watcher/tests/api/test_config.py
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import imp
from oslo_config import cfg
from watcher.api import config as api_config
from watcher.tests.api import base
class TestRoot(base.FunctionalTest):
def test_config_enable_webhooks_auth(self):
acl_public_routes = ['/']
cfg.CONF.set_override('enable_webhooks_auth', True, 'api')
imp.reload(api_config)
self.assertEqual(acl_public_routes,
api_config.app['acl_public_routes'])
def test_config_disable_webhooks_auth(self):
acl_public_routes = ['/', '/v1/webhooks/.*']
cfg.CONF.set_override('enable_webhooks_auth', False, 'api')
imp.reload(api_config)
self.assertEqual(acl_public_routes,
api_config.app['acl_public_routes'])
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
from oslo_config import cfg
from watcher.api import config as api_config
from watcher.tests.api import base
class TestRoot(base.FunctionalTest):
def test_config_enable_webhooks_auth(self):
acl_public_routes = ['/']
cfg.CONF.set_override('enable_webhooks_auth', True, 'api')
importlib.reload(api_config)
self.assertEqual(acl_public_routes,
api_config.app['acl_public_routes'])
def test_config_disable_webhooks_auth(self):
acl_public_routes = ['/', '/v1/webhooks/.*']
cfg.CONF.set_override('enable_webhooks_auth', False, 'api')
importlib.reload(api_config)
self.assertEqual(acl_public_routes,
api_config.app['acl_public_routes'])
|
Use importlib to take place of im module
|
Use importlib to take place of im module
The imp module is deprecated[1] since version 3.4, use importlib to
instead
1: https://docs.python.org/3/library/imp.html#imp.reload
Change-Id: Ic126bc8e0936e5d7a2c7a910b54b7348026fedcb
|
Python
|
apache-2.0
|
openstack/watcher,openstack/watcher
|
ed5a151942ff6aeddeaab0fb2e23428821f89fc4
|
rovercode/drivers/grovepi_ultrasonic_ranger_binary.py
|
rovercode/drivers/grovepi_ultrasonic_ranger_binary.py
|
"""
Class for communicating with the GrovePi ultrasonic ranger.
Here we treat it as a binary sensor.
"""
import logging
logging.basicConfig()
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.getLevelName('INFO'))
try:
from GrovePi.Software.Python.grovepi import ultrasonicRead
except ImportError:
LOGGER.warning("GrovePi lib unavailable. Using dummy.")
from drivers.dummy_grovepi_interface import ultrasonicRead
class GrovePiUltrasonicRangerBinary:
"""A module to read from the GrovePi Ultrasonic as a binary sensor."""
def __init__(self, port, binary_threshold):
"""Create a GrovePi Ultrasonic Ranger (Binary) driver module."""
self.port = int(port)
self.binary_threshold = binary_threshold
print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}")
def is_high(self):
"""HIGH, meaning "not seeing something"."""
# to match the old GPIO sensors, we'll make this sensor active low
# False output means object detected
# True output means no object detected
return ultrasonicRead(self.port) > self.binary_threshold
|
"""
Class for communicating with the GrovePi ultrasonic ranger.
Here we treat it as a binary sensor.
"""
import logging
logging.basicConfig()
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.getLevelName('INFO'))
try:
from grovepi import ultrasonicRead
except ImportError:
LOGGER.warning("GrovePi lib unavailable. Using dummy.")
from drivers.dummy_grovepi_interface import ultrasonicRead
class GrovePiUltrasonicRangerBinary:
"""A module to read from the GrovePi Ultrasonic as a binary sensor."""
def __init__(self, port, binary_threshold):
"""Create a GrovePi Ultrasonic Ranger (Binary) driver module."""
self.port = int(port)
self.binary_threshold = binary_threshold
print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}")
def is_high(self):
"""HIGH, meaning "not seeing something"."""
# to match the old GPIO sensors, we'll make this sensor active low
# False output means object detected
# True output means no object detected
return ultrasonicRead(self.port) > self.binary_threshold
|
Fix grovepi import in sensor driver
|
Fix grovepi import in sensor driver
|
Python
|
apache-2.0
|
aninternetof/rover-code,aninternetof/rover-code,aninternetof/rover-code
|
95788f09949e83cf39588444b44eda55e13c6071
|
wluopensource/accounts/models.py
|
wluopensource/accounts/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True, verify_exists=False)
def __unicode__(self):
return self.user.username
def profile_creation_handler(sender, **kwargs):
if kwargs.get('created', False):
UserProfile.objects.get_or_create(user=kwargs['instance'])
post_save.connect(profile_creation_handler, sender=User)
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True)
def __unicode__(self):
return self.user.username
def profile_creation_handler(sender, **kwargs):
if kwargs.get('created', False):
UserProfile.objects.get_or_create(user=kwargs['instance'])
post_save.connect(profile_creation_handler, sender=User)
|
Remove verify false from user URL to match up with comment URL
|
Remove verify false from user URL to match up with comment URL
|
Python
|
bsd-3-clause
|
jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website
|
d3bc714478c3f7a665b39dfb1b8d65e7bc59ccd0
|
utuputki-webui/utuputki/handlers/logout.py
|
utuputki-webui/utuputki/handlers/logout.py
|
# -*- coding: utf-8 -*-
from handlers.handlerbase import HandlerBase
from db import db_session, Session
class LogoutHandler(HandlerBase):
def handle(self, packet_msg):
# Remove session
s = db_session()
s.query(Session).filter_by(key=self.sock.sid).delete()
s.commit()
s.close()
# Dump out log
self.log.info("Logged out.")
self.log.set_sid(None)
# Deauthenticate & clear session ID
self.sock.authenticated = False
self.sock.sid = None
|
# -*- coding: utf-8 -*-
from handlers.handlerbase import HandlerBase
from db import db_session, Session
class LogoutHandler(HandlerBase):
def handle(self, packet_msg):
# Remove session
s = db_session()
s.query(Session).filter_by(key=self.sock.sid).delete()
s.commit()
s.close()
# Dump out log
self.log.info("Logged out.")
self.log.set_sid(None)
# Deauthenticate & clear session ID
self.sock.authenticated = False
self.sock.sid = None
self.sock.uid = None
self.sock.level = 0
|
Clear all session data from websocket obj
|
Clear all session data from websocket obj
|
Python
|
mit
|
katajakasa/utuputki2,katajakasa/utuputki2,katajakasa/utuputki2,katajakasa/utuputki2
|
5d5b59bde655fbeb2d07bd5539c2ff9b29879d1d
|
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
|
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
|
# This program uses the csv module to manipulate .csv files
import csv
# Writer Objects
outputFile = open("output.csv", "w", newline='')
outputWriter = csv.writer(outputFile)
print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))
print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))
print(outputWriter.writerow([1, 2, 3.141592, 4]))
outputFile.close()
# Delimiter and lineterminator Keyword Arguments
csvFile = open("example.tsv", 'w', newline='')
csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n')
print(csvWriter.writerow(['apples', 'oranges', 'grapes']))
print(csvWriter.writerow(['eggs', 'bacon', 'ham']))
print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))
csvFile.close()
|
"""Write CSV
This program uses :py:mod:`csv` to write .csv files.
Note:
Creates 'output.csv' and 'example.tsv' files.
"""
def main():
import csv
# Writer Objects
outputFile = open("output.csv", "w", newline='')
outputWriter = csv.writer(outputFile)
print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))
print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))
print(outputWriter.writerow([1, 2, 3.141592, 4]))
outputFile.close()
# Delimiter and lineterminator Keyword Arguments
csvFile = open("example.tsv", 'w', newline='')
csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n')
print(csvWriter.writerow(['apples', 'oranges', 'grapes']))
print(csvWriter.writerow(['eggs', 'bacon', 'ham']))
print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))
csvFile.close()
if __name__ == '__main__':
main()
|
Update P1_writeCSV.py added docstring and wrapped in main function
|
Update P1_writeCSV.py
added docstring and wrapped in main function
|
Python
|
mit
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
20d94336b163c1e98458f14ab44651e2df8ed659
|
web/social/management/commands/stream_twitter.py
|
web/social/management/commands/stream_twitter.py
|
import logging
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from social.models import *
from social.utils import *
from tweetstream import FilterStream
class Command(BaseCommand):
help = "Start Twitter streaming"
def handle(self, *args, **options):
self.logger = logging.getLogger(__name__)
self.updater = FeedUpdater(self.logger)
feed_ids = Feed.objects.filter(type='TW').values_list('origin_id', flat=True)
stream = FilterStream(settings.TWITTER_USERNAME, settings.TWITTER_PASSWORD,
follow=feed_ids)
self.logger.info("Waiting for tweets for %d feeds" % len(feed_ids))
for tweet in stream:
self.updater.process_tweet(tweet)
|
import logging
import time
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from social.models import *
from social.utils import *
from tweetstream import FilterStream, ConnectionError
class Command(BaseCommand):
help = "Start Twitter streaming"
def handle(self, *args, **options):
self.logger = logging.getLogger(__name__)
self.updater = FeedUpdater(self.logger)
feed_ids = Feed.objects.filter(type='TW').values_list('origin_id', flat=True)
self.logger.info("Waiting for tweets for %d feeds" % len(feed_ids))
reconnect_timeout = 1
while True:
stream = FilterStream(settings.TWITTER_USERNAME, settings.TWITTER_PASSWORD,
follow=feed_ids)
try:
for tweet in stream:
reconnect_timeout = 1
self.updater.process_tweet(tweet)
except ConnectionError as e:
self.logger.error("%s" % e)
reconnect_timeout = 2 * reconnect_timeout
time.sleep(reconnect_timeout)
|
Add ConnectionError handling and reconnection to Twitter streamer
|
Add ConnectionError handling and reconnection to Twitter streamer
|
Python
|
agpl-3.0
|
kansanmuisti/datavaalit,kansanmuisti/datavaalit
|
6bf762b7aeabcb47571fe4d23fe13ae8e4b3ebc3
|
editorsnotes/main/views.py
|
editorsnotes/main/views.py
|
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from models import Term, Reference
@login_required
def index(request):
o = {}
o['term_list'] = Term.objects.all()
return render_to_response('index.html', o)
@login_required
def term(request, slug):
o = {}
o['contact'] = { 'name': settings.ADMINS[0][0],
'email': settings.ADMINS[0][1] }
o['term'] = Term.objects.get(slug=slug)
o['note_list'] = list(o['term'].note_set.filter(type__exact='N'))
o['query_list'] = list(o['term'].note_set.filter(type__exact='Q'))
o['note_dict'] = [ (n, n.references.all()) for n in o['note_list'] ]
o['query_dict'] = [ (q, q.references.all()) for q in o['query_list'] ]
for note in o['note_list'] + o['query_list']:
if ('last_updated' not in o) or (note.last_updated > o['last_updated']):
o['last_updated'] = note.last_updated
o['last_updater'] = note.last_updater.username
o['last_updated_display'] = note.last_updated_display()
return render_to_response('term.html', o)
|
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required
from models import Term, Reference
@login_required
def index(request):
o = {}
o['term_list'] = Term.objects.all()
return render_to_response('index.html', o)
@login_required
def term(request, slug):
o = {}
o['term'] = get_object_or_404(Term, slug=slug)
o['contact'] = { 'name': settings.ADMINS[0][0],
'email': settings.ADMINS[0][1] }
o['note_list'] = list(o['term'].note_set.filter(type__exact='N'))
o['query_list'] = list(o['term'].note_set.filter(type__exact='Q'))
o['note_dict'] = [ (n, n.references.all()) for n in o['note_list'] ]
o['query_dict'] = [ (q, q.references.all()) for q in o['query_list'] ]
for note in o['note_list'] + o['query_list']:
if ('last_updated' not in o) or (note.last_updated > o['last_updated']):
o['last_updated'] = note.last_updated
o['last_updater'] = note.last_updater.username
o['last_updated_display'] = note.last_updated_display()
return render_to_response('term.html', o)
|
Throw 404 for non-existent terms.
|
Throw 404 for non-existent terms.
|
Python
|
agpl-3.0
|
editorsnotes/editorsnotes,editorsnotes/editorsnotes
|
1fa22ca68394d4ce55a4e10aa7c23f7bcfa02f79
|
zc_common/remote_resource/mixins.py
|
zc_common/remote_resource/mixins.py
|
"""
Class Mixins.
"""
from django.db import IntegrityError
from django.http import Http404
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
if hasattr(self.request, 'query_params') and 'ids' in self.request.query_params:
query_param_ids = self.request.query_params.get('ids')
ids = [] if not query_param_ids else query_param_ids.split(',')
try:
self.queryset = self.queryset.filter(pk__in=ids)
except (ValueError, IntegrityError):
raise Http404
return self.queryset
|
"""
Class Mixins.
"""
from django.db import IntegrityError
from django.http import Http404
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
if hasattr(self.request, 'query_params') and 'filter[id]' in self.request.query_params:
query_param_ids = self.request.query_params.get('filter[id]')
ids = [] if not query_param_ids else query_param_ids.split(',')
try:
self.queryset = self.queryset.filter(pk__in=ids)
except (ValueError, IntegrityError):
raise Http404
return self.queryset
|
Update query param for mixin
|
Update query param for mixin
|
Python
|
mit
|
ZeroCater/zc_common,ZeroCater/zc_common
|
c5f6a9632b6d996fc988bfc9317915208ff69a42
|
domain/companies.py
|
domain/companies.py
|
# -*- coding: utf-8 -*-
"""
'companies' resource and schema settings.
:copyright: (c) 2014 by Nicola Iarocci and CIR2000.
:license: BSD, see LICENSE for more details.
"""
from common import required_string
_schema = {
# company id ('id')
'n': required_string, # name
'p': {'type': 'string', 'nullable': True}, # password
}
definition = {
'url': 'companies',
'item_title': 'company',
# 'additional_lookup': company_lookup,
'schema': _schema,
}
|
# -*- coding: utf-8 -*-
"""
'companies' resource and schema settings.
:copyright: (c) 2014 by Nicola Iarocci and CIR2000.
:license: BSD, see LICENSE for more details.
"""
from common import required_string
_schema = {
# company id ('id')
'name': required_string,
'password': {'type': 'string', 'nullable': True},
'state_or_province': {'type': 'string', 'nullable': True},
}
definition = {
'url': 'companies',
'item_title': 'company',
# 'additional_lookup': company_lookup,
'schema': _schema,
}
|
Add a snake_cased field to the test document.
|
Add a snake_cased field to the test document.
|
Python
|
bsd-3-clause
|
nicolaiarocci/Eve.NET-testbed
|
9502de0e6be30e4592f4f0cf141abc27db64ccf4
|
dependencies.py
|
dependencies.py
|
import os
import pkgutil
import site
if pkgutil.find_loader("gi"):
try:
import gi
print('Found gi:', os.path.abspath(gi.__file__))
gi.require_version('Gst', '1.0')
# from gi.repository import GLib, Gst
except ValueError:
print('Couldn\'t find Gst')
print('Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'')
return False
print('Environment seems to be ok.')
else:
print('No gi installed', '\n',
'Please run \'sudo apt-get install python3-gi\'',
'\n',
'A virtual environment might need extra actions like symlinking, ',
'\n',
'you might need to do a symlink looking similar to this:',
'\n',
'ln -s /usr/lib/python3/dist-packages/gi ',
'/srv/homeassistant/lib/python3.4/site-packages',
'\n',
'run this script inside and outside of the virtual environment to find the paths needed')
print(site.getsitepackages())
|
import os
import pkgutil
import site
from sys import exit
if pkgutil.find_loader('gi'):
try:
import gi
print("Found gi at:", os.path.abspath(gi.__file__))
gi.require_version('Gst', '1.0')
# from gi.repository import Gst
except ValueError:
print("Couldn\'t find Gst",
'\n',
"Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'")
exit(False)
print("Environment seems to be ok.")
else:
print("No gi available in this environment",
'\n',
"Please run \'sudo apt-get install python3-gi\'",
'\n',
"A virtual environment might need extra actions like symlinking,",
'\n',
"you might need to do a symlink looking similar to this:",
'\n',
"ln -s /usr/lib/python3/dist-packages/gi",
"/srv/homeassistant/lib/python3.4/site-packages",
'\n',
"run this script inside and outside of the virtual environment",
"to find the paths needed")
print(site.getsitepackages())
|
Clean up of text Proper exit when exception has been raised
|
Clean up of text
Proper exit when exception has been raised
|
Python
|
mit
|
Kane610/axis
|
08cbb4ebd44b5dca26d55a0e177c03930a2beb57
|
stopspam/forms/widgets.py
|
stopspam/forms/widgets.py
|
from django import forms
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
# RECAPTCHA widgets
class RecaptchaResponse(forms.Widget):
def render(self, *args, **kwargs):
from recaptcha.client import captcha as recaptcha
recaptcha_options = "<script> var RecaptchaOptions = { theme: '" + self.theme + \
"', lang: '" + get_language()[0:2] + \
("', custom_theme_widget: 'recaptcha_widget'" if self.theme == 'custom' else "'") + " }; </script>\n"
return mark_safe(recaptcha_options + recaptcha.displayhtml(self.public_key))
class RecaptchaChallenge(forms.Widget):
is_hidden = True
def render(self, *args, **kwargs):
return ""
# return mark_safe('')
# Honeypot widget -- most automated spam posters will check any checkbox
# assuming it's an "I accept terms and conditions" box
class HoneypotWidget(forms.CheckboxInput):
is_hidden = True
def render(self, *args, **kwargs):
wrapper_html = '<div style="display:none"><label for="id_accept_terms">' + _('Are you a robot?') + '</label>%s</div>'
return mark_safe(wrapper_html % super(HoneypotWidget, self).render(*args, **kwargs))
|
from django import forms
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
# RECAPTCHA widgets
class RecaptchaResponse(forms.Widget):
is_hidden = True
def render(self, *args, **kwargs):
from recaptcha.client import captcha as recaptcha
recaptcha_options = u"<script> var RecaptchaOptions = { theme: '" + self.theme + \
"', lang: '" + get_language()[0:2] + \
("', custom_theme_widget: 'recaptcha_widget'" if self.theme == 'custom' else "'") + " }; </script>\n"
return mark_safe(recaptcha_options + recaptcha.displayhtml(self.public_key))
class RecaptchaChallenge(forms.Widget):
is_hidden = True
def render(self, *args, **kwargs):
return ""
# return mark_safe('')
# Honeypot widget -- most automated spam posters will check any checkbox
# assuming it's an "I accept terms and conditions" box
class HoneypotWidget(forms.CheckboxInput):
is_hidden = True
def render(self, *args, **kwargs):
wrapper_html = '<div style="display:none"><label for="id_accept_terms">' + _('Are you a robot?') + '</label>%s</div>'
return mark_safe(wrapper_html % super(HoneypotWidget, self).render(*args, **kwargs))
|
Fix skipping of recaptcha field widget HTML by marking it is_hidden
|
Fix skipping of recaptcha field widget HTML by marking it is_hidden
|
Python
|
bsd-3-clause
|
pombredanne/glamkit-stopspam
|
3d385898592b07249b478b37854d179d27a27bbb
|
OmniMarkupLib/Renderers/MarkdownRenderer.py
|
OmniMarkupLib/Renderers/MarkdownRenderer.py
|
from base_renderer import *
import re
import markdown
@renderer
class MarkdownRenderer(MarkupRenderer):
FILENAME_PATTERN_RE = re.compile(r'\.(md|mkdn?|mdwn|mdown|markdown)$')
def load_settings(self, renderer_options, global_setting):
super(MarkdownRenderer, self).load_settings(renderer_options, global_setting)
if 'extensions' in renderer_options:
self.extensions = renderer_options['extensions']
else:
# Fallback to the default GFM style
self.extensions = ['tables', 'strikeout', 'fenced_code', 'codehilite']
if global_setting.mathjax_enabled:
if 'mathjax' not in self.extensions:
self.extensions.append('mathjax')
@classmethod
def is_enabled(cls, filename, syntax):
if syntax == "text.html.markdown":
return True
return cls.FILENAME_PATTERN_RE.search(filename) is not None
def render(self, text, **kwargs):
return markdown.markdown(text, output_format='html5',
extensions=self.extensions
)
|
from base_renderer import *
import re
import markdown
@renderer
class MarkdownRenderer(MarkupRenderer):
FILENAME_PATTERN_RE = re.compile(r'\.(md|mkdn?|mdwn|mdown|markdown|litcoffee)$')
def load_settings(self, renderer_options, global_setting):
super(MarkdownRenderer, self).load_settings(renderer_options, global_setting)
if 'extensions' in renderer_options:
self.extensions = renderer_options['extensions']
else:
# Fallback to the default GFM style
self.extensions = ['tables', 'strikeout', 'fenced_code', 'codehilite']
if global_setting.mathjax_enabled:
if 'mathjax' not in self.extensions:
self.extensions.append('mathjax')
@classmethod
def is_enabled(cls, filename, syntax):
if syntax == "text.html.markdown":
return True
return cls.FILENAME_PATTERN_RE.search(filename) is not None
def render(self, text, **kwargs):
return markdown.markdown(text, output_format='html5',
extensions=self.extensions
)
|
Add litcoffee to Markdown extensions
|
Add litcoffee to Markdown extensions
|
Python
|
mit
|
timonwong/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer
|
2216caf836c1f2864103e8930f60713c226a8464
|
src/sql/parse.py
|
src/sql/parse.py
|
from ConfigParser import ConfigParser
from sqlalchemy.engine.url import URL
def parse(cell, config):
parts = [part.strip() for part in cell.split(None, 1)]
if not parts:
return {'connection': '', 'sql': ''}
if parts[0].startswith('[') and parts[0].endswith(']'):
parser = ConfigParser()
parser.read(config.dsn_filename)
section = parts[0].lstrip('[').rstrip(']')
connection = str(URL(drivername=parser.get(section, 'drivername'),
username=parser.get(section, 'username'),
password=parser.get(section, 'password'),
host=parser.get(section, 'host'),
database=parser.get(section, 'database')))
sql = parts[1] if len(parts) > 1 else ''
elif '@' in parts[0] or '://' in parts[0]:
connection = parts[0]
if len(parts) > 1:
sql = parts[1]
else:
sql = ''
else:
connection = ''
sql = cell
return {'connection': connection.strip(),
'sql': sql.strip()
}
|
from ConfigParser import ConfigParser
from sqlalchemy.engine.url import URL
def parse(cell, config):
parts = [part.strip() for part in cell.split(None, 1)]
if not parts:
return {'connection': '', 'sql': ''}
if parts[0].startswith('[') and parts[0].endswith(']'):
section = parts[0].lstrip('[').rstrip(']')
parser = ConfigParser()
parser.read(config.dsn_filename)
cfg_dict = dict(parser.items(section))
connection = str(URL(**cfg_dict))
sql = parts[1] if len(parts) > 1 else ''
elif '@' in parts[0] or '://' in parts[0]:
connection = parts[0]
if len(parts) > 1:
sql = parts[1]
else:
sql = ''
else:
connection = ''
sql = cell
return {'connection': connection.strip(),
'sql': sql.strip()}
|
Allow DNS file to be less specific
|
Allow DNS file to be less specific
|
Python
|
mit
|
catherinedevlin/ipython-sql,catherinedevlin/ipython-sql
|
4522de348aab4cc99904b0bc210c223b2477b4b7
|
tests/config.py
|
tests/config.py
|
# our constants.
import os
local_path = os.path.dirname(__file__)
xml_doc = os.path.abspath(os.path.join(local_path, 'data', 'fagatelebay_zone.xml'))
csv_doc = os.path.abspath(os.path.join(local_path, 'data', 'fagatelebay_zone.csv'))
bathy_raster = os.path.abspath(os.path.join(local_path, 'data', 'bathy5m_clip.tif'))
tbx_file = os.path.abspath(os.path.join(local_path, '..', 'Install', 'toolbox', 'btm_model.tbx'))
|
# our constants.
import os
local_path = os.path.dirname(__file__)
xml_doc = os.path.abspath(os.path.join(local_path, 'data', 'fagatelebay_zone.xml'))
csv_doc = os.path.abspath(os.path.join(local_path, 'data', 'fagatelebay_zone.csv'))
bathy_raster = os.path.abspath(os.path.join(local_path, 'data', 'bathy5m_clip.tif'))
pyt_file = os.path.abspath(os.path.join(local_path, '..', 'Install', 'toolbox', 'btm.pyt'))
|
Use pyt file instead of stand-alone tbx for testing.
|
Use pyt file instead of stand-alone tbx for testing.
|
Python
|
mpl-2.0
|
EsriOceans/btm
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.