| commit (stringlengths 40-40) | old_file (stringlengths 4-236) | new_file (stringlengths 4-236) | old_contents (stringlengths 1-3.26k) | new_contents (stringlengths 16-4.43k) | subject (stringlengths 16-624) | message (stringlengths 17-3.29k) | lang (stringclasses, 5 values) | license (stringclasses, 13 values) | repos (stringlengths 5-91.5k) |
|---|---|---|---|---|---|---|---|---|---|
32508dea4ef00fb54919c0260b7ba2902835faf5
|
prepareupload.py
|
prepareupload.py
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
    '''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
    '''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
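            # Overwrite one console line with a running count of parsed files.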
            sys.stdout.write("\r{0} parsed.".format(COUNT))
            sys.stdout.flush()
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
Print messages for prepare upload.
|
Print messages for prepare upload.
|
Python
|
bsd-3-clause
|
OLRC/SwiftBulkUploader,cudevmaxwell/SwiftBulkUploader
|
66b20aa7fbd322a051ab7ae26ecd8c46f7605763
|
ptoolbox/tags.py
|
ptoolbox/tags.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
TAG_ORIENTATION = 'Image Orientation'
# XXX: this is a terrible way to retrieve the orientations. Exifread regrettably does not
# expose raw EXIF orientation values, and no other library is available on pip as of today.
ORIENTATIONS = [
'Horizontal (normal)',
'Mirrored horizontal',
'Rotated 180',
'Mirrored vertical',
'Mirrored horizontal then rotated 90 CCW',
'Rotated 90 CCW',
'Mirrored horizontal then rotated 90 CW',
'Rotated 90 CW',
]
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
def parse_orientation(tags):
tag = tags.get(TAG_ORIENTATION, None)
if not tag:
raise KeyError(TAG_ORIENTATION)
return ORIENTATIONS.index(str(tag)) + 1 # XXX: convert back to original EXIF orientation
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
Remove orientation tag parsing, not needed.
|
Remove orientation tag parsing, not needed.
|
Python
|
mit
|
vperron/picasa-toolbox
|
ad3a495e38e22f3759a724a23ce0492cd42e0bc4
|
qual/calendar.py
|
qual/calendar.py
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return DateWithCalendar(JulianCalendar, d)
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
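        # Julian dates lag Gregorian dates by 10 days in the 1582-1700 era; add the offset to convert.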
d = d + timedelta(days=10)
return self.from_date(d)
|
Use from_date to construct from year, month, day.
|
Use from_date to construct from year, month, day.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
4303a5cb38f2252dfe09a0ca21320d4bd67bd966
|
byceps/blueprints/user/current/forms.py
|
byceps/blueprints/user/current/forms.py
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = StringField('Telefonnummer', [Optional(), Length(max=20)])
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.fields.html5 import TelField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = TelField('Telefonnummer', [Optional(), Length(max=20)])
|
Use `<input type="tel">` for phone number field
|
Use `<input type="tel">` for phone number field
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
71c9235a7e48882fc8c1393e9527fea4531c536c
|
filter_plugins/fap.py
|
filter_plugins/fap.py
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
class FilterModule(object):
def filters(self):
return {"site_code": site_code}
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
if "https://" in repo:
repo = repo.replace("https://", "")
print(repo)
type_, address, *_ = repo.split(":")
(r, *_) = address.split("/")
return "_".join([type_, r]).replace(".", "_")
elif ":" not in repo:
# Most likely a file path
type_ = "disk"
path = list(filter(None, repo.split("/")))
if path[0] == "Volumes":
return "_".join([type_, path[1]])
return "_".join([type_, repo.replace("/", "_")])
else:
type_, *rest = repo.split(":")
return "_".join([type_, rest[0]])
class FilterModule(object):
def filters(self):
return {
"site_code": site_code,
"restic_repo_friendly_name": restic_repo_friendly_name,
}
|
Add really hacky way to reformat restic repos
|
Add really hacky way to reformat restic repos
|
Python
|
mit
|
kradalby/plays,kradalby/plays
|
417415283d87654b066c11d807516d3cd5b5bf3d
|
tests/test_probabilistic_interleave_speed.py
|
tests/test_probabilistic_interleave_speed.py
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
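        # Four partially overlapping rankings of 100 items each.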
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
Add tests for measuring the speed of probabilistic interleaving
|
Add tests for measuring the speed of probabilistic interleaving
|
Python
|
mit
|
mpkato/interleaving
|
b8839af335757f58fa71916ff3394f5a6806165d
|
user_management/api/tests/test_exceptions.py
|
user_management/api/tests/test_exceptions.py
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as e:
raise InvalidExpiredToken
self.assertEqual(e.exception.status_code, HTTP_400_BAD_REQUEST)
message = e.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as error:
raise InvalidExpiredToken
self.assertEqual(error.exception.status_code, HTTP_400_BAD_REQUEST)
message = error.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
Use more explicit name for error
|
Use more explicit name for error
|
Python
|
bsd-2-clause
|
incuna/django-user-management,incuna/django-user-management
|
287dc6f7a7f0321fec8e35d1dc08f07a3b12f63b
|
test/342-winter-sports-pistes.py
|
test/342-winter-sports-pistes.py
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
# Way: 49'er (313466490) http://www.openstreetmap.org/way/313466490
assert_has_feature(
16, 10939, 25061, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'intermediate',
'id': 313466490 })
|
Add piste test to catch dev issue
|
Add piste test to catch dev issue
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
40653d829efcc0461d0da9472111aa89b41e08f1
|
hasjob/views/login.py
|
hasjob/views/login.py
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
Support for Lastuser push notifications.
|
Support for Lastuser push notifications.
|
Python
|
agpl-3.0
|
qitianchan/hasjob,qitianchan/hasjob,hasgeek/hasjob,hasgeek/hasjob,nhannv/hasjob,hasgeek/hasjob,sindhus/hasjob,sindhus/hasjob,nhannv/hasjob,sindhus/hasjob,qitianchan/hasjob,sindhus/hasjob,qitianchan/hasjob,ashwin01/hasjob,ashwin01/hasjob,hasgeek/hasjob,ashwin01/hasjob,ashwin01/hasjob,nhannv/hasjob,nhannv/hasjob,qitianchan/hasjob,ashwin01/hasjob,nhannv/hasjob,sindhus/hasjob
|
dce404d65f1f2b8f297cfa066210b885621d38d0
|
graphene/commands/exit_command.py
|
graphene/commands/exit_command.py
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
def execute(self, storage_manager, timer=None):
# This should never be used anyway.
pass
|
Fix EXIT command to have execute method for abstract class
|
Fix EXIT command to have execute method for abstract class
|
Python
|
apache-2.0
|
PHB-CS123/graphene,PHB-CS123/graphene,PHB-CS123/graphene
|
c7578896036bc07bb1edc2d79f699968c25ca89e
|
bika/lims/upgrade/to1117.py
|
bika/lims/upgrade/to1117.py
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
Upgrade 1117 - add run_dependencies=False
|
Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step, causes
a beforeDelete handler to fail a HoldingReference
check.
|
Python
|
agpl-3.0
|
labsanmartin/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,rockfruit/bika.lims,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS
|
7cde5e713ace2b0a1d9cdef01ac912f3a53814cd
|
run_scripts/build_phylogenies.py
|
run_scripts/build_phylogenies.py
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
# multiprocessing replaced by joblib for the worker pool
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
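    # Fan per-sequence jobs out to a pool of 24 workers via joblib.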
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
    A hacky and unnecessary way to provide a pickle-serialisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
Change run script to use worker pool
|
Change run script to use worker pool
|
Python
|
bsd-3-clause
|
fmaguire/dendrogenous
|
a11c839988b71e9f769cb5ba856474205b7aeefb
|
jsonschema/tests/fuzz_validate.py
|
jsonschema/tests/fuzz_validate.py
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
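    # Instrument all loaded Python code so the fuzzer can collect coverage feedback.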
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
Fix fuzzer to include instrumentation
|
Fix fuzzer to include instrumentation
|
Python
|
mit
|
python-jsonschema/jsonschema
|
224d9f4e243f6645e88b32ad7342a55128f19eeb
|
html5lib/__init__.py
|
html5lib/__init__.py
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage::
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
Fix formatting of docstring example
|
Fix formatting of docstring example
It runs together in the built HTML.
|
Python
|
mit
|
html5lib/html5lib-python,html5lib/html5lib-python,html5lib/html5lib-python
|
f748facb9edd35ca6c61be336cad3109cafbbc89
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
from flask import json
from api import create_app, db
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app(config_name='TestingEnv')
self.client = self.app.test_client()
# Binds the app to current context
with self.app.app_context():
# Create all tables
db.create_all()
def tearDown(self):
# Drop all tables
with self.app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
def test_something(self):
self.assertTrue(1)
if __name__ == '__main__':
unittest.main()
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
Add test for index route
|
Add test for index route
|
Python
|
mit
|
patlub/BucketListAPI,patlub/BucketListAPI
|
644896c856b1e6ad20a3790234439b8ac8403917
|
examples/dft/12-camb3lyp.py
|
examples/dft/12-camb3lyp.py
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
Update the camb3lyp example to libxc 5 series
|
Update the camb3lyp example to libxc 5 series
|
Python
|
apache-2.0
|
sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf
|
bf007267246bd317dc3ccad9f5cf8a9f452b3e0b
|
firecares/utils/__init__.py
|
firecares/utils/__init__.py
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
Remove the unused convert_png_to_jpg method.
|
Remove the unused convert_png_to_jpg method.
|
Python
|
mit
|
FireCARES/firecares,FireCARES/firecares,meilinger/firecares,meilinger/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares,HunterConnelly/firecares,FireCARES/firecares,HunterConnelly/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares
|
49f5802a02a550cc8cee3be417426a83c31de5c9
|
Source/Git/Experiments/git_log.py
|
Source/Git/Experiments/git_log.py
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
Exit the loop early when experimenting.
|
Exit the loop early when experimenting.
|
Python
|
apache-2.0
|
barry-scott/scm-workbench,barry-scott/scm-workbench,barry-scott/scm-workbench
|
4e6ec9cc5b052341094723433f58a21020fa82f0
|
tools/scheduler/scheduler/core.py
|
tools/scheduler/scheduler/core.py
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles):
self.roles = roles
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers=0, variables={}, inputs={}, hostmask=r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles, binary_url):
self.roles = roles
self.binary_url = binary_url
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers=0, variables={}, inputs={}, hostmask=r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
Add binary_url member to Job.
|
Add binary_url member to Job.
|
Python
|
apache-2.0
|
DaMSL/K3,DaMSL/K3,yliu120/K3
|
40d204c996e41a030dac240c99c66a25f8f8586e
|
scripts/generate-bcrypt-hashed-password.py
|
scripts/generate-bcrypt-hashed-password.py
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned-up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
scripts/generate-bcrpyt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned-up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
scripts/generate-bcrpyt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
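    # password.encode('utf-8') works on Python 2 and 3; bytes(password) raises on Python 3 without an encoding.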
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
Fix "string argument without an encoding" python3 error in bcrypt script
|
Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
4037036de79f6503921bbd426bb5352f2f86f12b
|
plyer/platforms/android/camera.py
|
plyer/platforms/android/camera.py
|
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
activity.unbind(on_activity_result=self._on_activity_result)
activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
import android
import android.activity
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
android.activity.unbind(on_activity_result=self._on_activity_result)
android.activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
android.activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
Revert "Activity was imported twice"
|
Revert "Activity was imported twice"
This reverts commit a0600929774c1e90c7dc43043ff87b5ea84213b4.
|
Python
|
mit
|
johnbolia/plyer,johnbolia/plyer,kived/plyer,kivy/plyer,cleett/plyer,kived/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,cleett/plyer,kostyll/plyer,KeyWeeUsr/plyer,kostyll/plyer
|
7a5b86bcb8c0a2e8c699c7602cef50bed2acef1b
|
src/keybar/tests/factories/user.py
|
src/keybar/tests/factories/user.py
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='md5')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='pbkdf2_sha256')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
Use pbkdf2_sha256 hasher for testing too.
|
Use pbkdf2_sha256 hasher for testing too.
|
Python
|
bsd-3-clause
|
keybar/keybar
|
5c29b4322d1a24c4f389076f2a9b8acbeabd89e2
|
python/lumidatumclient/classes.py
|
python/lumidatumclient/classes.py
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = model_id if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
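        # os.path.join requires string components, so coerce the model id up front.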
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
Python
|
mit
|
Lumidatum/lumidatumclients,Lumidatum/lumidatumclients,daws/lumidatumclients,Lumidatum/lumidatumclients
|
7b2dac39cdcbc8d5f05d4979df06bf1ab1ae065f
|
goetia/pythonizors/pythonize_parsing.py
|
goetia/pythonizors/pythonize_parsing.py
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
pair = self.next()
left = pair.left if pair.has_left else None
right = pair.right if pair.has_right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
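                # next() returns a pair of optional-like records; unwrap with .value() only when present.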
left, right = self.next()
left, right = left.value() if left else None, right.value() if right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
Fix std::optional access in SplitPairedReader pythonization
|
Fix std::optional access in SplitPairedReader pythonization
|
Python
|
mit
|
camillescott/boink,camillescott/boink,camillescott/boink,camillescott/boink
|
ccb7446b02b394af308f4fba0500d402240f117e
|
home/migrations/0002_create_homepage.py
|
home/migrations/0002_create_homepage.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
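    # Remove any existing localhost sites before creating the new default site.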
    Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
Python
|
mit
|
OpenCanada/lindinitiative,OpenCanada/lindinitiative
|
1179163881fe1dedab81a02a940c711479a334ab
|
Instanssi/admin_auth/forms.py
|
Instanssi/admin_auth/forms.py
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
Use passwordinput in password field.
|
admin_auth: Use passwordinput in password field.
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
ab7856950c058d00aac99874669839e09bc116c6
|
models.py
|
models.py
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
|
Order feedback items by their timestamp.
|
Order feedback items by their timestamp.
|
Python
|
bsd-3-clause
|
littleweaver/django-talkback,littleweaver/django-talkback,littleweaver/django-talkback
|
cdaffa187b41f3a84cb5a6b44f2e781a9b249f2b
|
tests/test_users.py
|
tests/test_users.py
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.convert_dict_to_user_instance({})
assert result
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.return_user_instance_or_anonymous({})
assert result
|
Update test to reflect new method name.
|
Update test to reflect new method name.
|
Python
|
mit
|
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
|
a3cce9e4840cc687f6dcdd0b88577d2f13f3258e
|
onlineweb4/settings/raven.py
|
onlineweb4/settings/raven.py
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
}
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
'tags': { 'app': config('OW4_RAVEN_APP_NAME', default='') },
}
|
Make it possible to specify which app to represent in sentry
|
Make it possible to specify which app to represent in sentry
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
e2cc9c822abb675a196468ee89b063e0162c16d5
|
changes/api/author_build_index.py
|
changes/api/author_build_index.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
if author_id == 'me':
return '', 401
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
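        # Reject unauthenticated 'me' lookups with a 401 before touching the author query.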
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
Move 'me' check outside of author lookup
|
Move 'me' check outside of author lookup
|
Python
|
apache-2.0
|
wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,dropbox/changes
|
b8a7dd2dfc9322498dc7500f840bedd20d807ae1
|
samples/numpy_blir.py
|
samples/numpy_blir.py
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
x[i-1,j-1] = 10;
}
}
"""
N = 14
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
}
}
"""
N = 15
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
Fix dumb out of bounds error.
|
Fix dumb out of bounds error.
|
Python
|
bsd-2-clause
|
seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core
|
379c6254da0d6a06f8c01cd7cd2632a1d59624ac
|
comics/sets/context_processors.py
|
comics/sets/context_processors.py
|
from comics.sets.models import UserSet
def user_set(request):
try:
user_set = UserSet.objects.get(user=request.user)
return {
'user_set': user_set,
'user_set_comics': user_set.comics.all(),
}
except UserSet.DoesNotExist:
return {}
|
def user_set(request):
if hasattr(request, 'user_set'):
return {
'user_set': request.user_set,
'user_set_comics': request.user_set.comics.all(),
}
else:
return {}
|
Use request.user_set in context processor
|
Use request.user_set in context processor
|
Python
|
agpl-3.0
|
datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics
|
3aa2f858f93ed3945bf1960d5c5d1d90df34422c
|
MoodJournal/entries/serializers.py
|
MoodJournal/entries/serializers.py
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
Revert "unique for date validator"
|
Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.
|
Python
|
mit
|
swpease/MoodJournal,swpease/MoodJournal,swpease/MoodJournal
|
e394c1889eccb5806a480033dca467da51d515e5
|
scripts/test_setup.py
|
scripts/test_setup.py
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"Jinja2",
"nose",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"sqlalchemy",
"simplejson",
])
else:
deps.append("sqlalchemy")
deps.append("pyzmq")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"nose",
"sqlalchemy",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"simplejson",
])
else:
deps.extend([
"pyzmq",
])
# Jinja2 is a bit fragmented...
if python_version < "3.3":
deps.append("Jinja2==2.6")
else:
deps.append("Jinja2")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
Test dependencies: On Python 2.5, require Jinja 2.6
|
Test dependencies: On Python 2.5, require Jinja 2.6
|
Python
|
bsd-3-clause
|
omergertel/logbook,pombredanne/logbook,Rafiot/logbook,alonho/logbook,Rafiot/logbook,alonho/logbook,omergertel/logbook,Rafiot/logbook,FintanH/logbook,DasIch/logbook,DasIch/logbook,RazerM/logbook,mitsuhiko/logbook,omergertel/logbook,DasIch/logbook,dommert/logbook,alonho/logbook
|
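One caveat worth flagging in the script above, separate from the Jinja pin: platform.python_version() returns a string, so comparisons like python_version < "2.6" are lexicographic and would misorder a hypothetical "2.10". A hedged sketch of the tuple comparison that avoids the pitfall (an alternative, not what the script does):

import platform

# Numeric tuples compare element-wise: (2, 10) > (2, 6), as intended.
version = tuple(int(part) for part in platform.python_version().split('.')[:2])
if version < (2, 7):
    print("would install unittest2")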
da54fa6d681ab7f2e3146b55d562e5a4d68623cc
|
luigi/tasks/export/ftp/__init__.py
|
luigi/tasks/export/ftp/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
Make GO term export part of FTP export
|
Make GO term export part of FTP export
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
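For context on why one extra yield suffices: a luigi.WrapperTask produces no output of its own and counts as complete once everything it requires is complete. A minimal self-contained sketch of the pattern (task name and target path are made up):

import luigi

class StepA(luigi.Task):
    def output(self):
        return luigi.LocalTarget('a.txt')

    def run(self):
        with self.output().open('w') as handle:
            handle.write('done')

class Everything(luigi.WrapperTask):
    # Done exactly when every yielded requirement is done.
    def requires(self):
        yield StepA()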
eb496468d61ff3245adbdec4108a04bc40a357fc
|
Grid.py
|
Grid.py
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
Allow gridlines on both major and minor axes.
|
Allow gridlines on both major and minor axes.
|
Python
|
bsd-3-clause
|
alexras/boomslang
|
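The new which attribute is handed straight to matplotlib, whose Axes.grid accepts 'major', 'minor', or 'both'. A minimal matplotlib sketch of the same effect (assuming boomslang delegates to Axes.grid, as the draw method above suggests):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot(range(10))
ax.minorticks_on()  # minor gridlines only appear where minor ticks exist
ax.grid(color="#dddddd", linestyle="-", which="both")
ax.set_axisbelow(True)  # keep gridlines underneath the plotted data
plt.show()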
8ae82e08fc42d89402550f5f545dbaa258196c8c
|
ibmcnx/test/test.py
|
ibmcnx/test/test.py
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
test = FilesPolicyService.browse( "title", "true", 1, 25 )
print test
|
Customize scripts to work with menu
|
Customize scripts to work with menu
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
fc42c3cf72abeb053560c21e1870e8507aa2d666
|
examples/framework/faren/faren.py
|
examples/framework/faren/faren.py
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
Use insert_text instead of changed
|
Use insert_text instead of changed
|
Python
|
lgpl-2.1
|
stoq/kiwi
|
a859890c9f17b2303061b2d68e5c58ad27e07b35
|
grizli/pipeline/__init__.py
|
grizli/pipeline/__init__.py
|
"""
Automated processing of associated exposures
"""
|
"""
Automated processing of associated exposures
"""
def fetch_from_AWS_bucket(root='j022644-044142', id=1161, product='.beams.fits', bucket_name='aws-grivam', verbose=True, dryrun=False, output_path='./', get_fit_args=False, skip_existing=True):
"""
Fetch products from the Grizli AWS bucket.
Boto3 will require that you have set up your AWS credentials in, e.g.,
~/.aws/credentials
"""
import os
import boto3
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}_{1:05d}{2}'.format(root, id, product))]
if get_fit_args:
files += ['Pipeline/{0}/Extractions/fit_args.npy'.format(root)]
for file in files:
local = os.path.join(output_path, os.path.basename(file))
if verbose:
print('{0} -> {1}'.format(file, output_path))
if not dryrun:
if os.path.exists(local) & skip_existing:
continue
bkt.download_file(file, local,
ExtraArgs={"RequestPayer": "requester"})
|
Add script to fetch data from AWS
|
Add script to fetch data from AWS
|
Python
|
mit
|
gbrammer/grizli
|
f46059285851d47a9bee2174e32e9e084efe1182
|
jirafs/constants.py
|
jirafs/constants.py
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@adamcoddington.net>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@localhost>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
Remove my personal domain from the public jirafs git config.
|
Remove my personal domain from the public jirafs git config.
|
Python
|
mit
|
coddingtonbear/jirafs,coddingtonbear/jirafs
|
1690c1981614e20183d33de4d117af0aa62ae9c5
|
kboard/board/urls.py
|
kboard/board/urls.py
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-\w]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-\w]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
Modify board_slug in URL regex so numeric post ids pass through
|
Modify board_slug in URL regex so numeric post ids pass through
|
Python
|
mit
|
kboard/kboard,guswnsxodlf/k-board,kboard/kboard,cjh5414/kboard,hyesun03/k-board,cjh5414/kboard,hyesun03/k-board,guswnsxodlf/k-board,kboard/kboard,hyesun03/k-board,darjeeling/k-board,cjh5414/kboard,guswnsxodlf/k-board
|
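The narrowing matters because Django tries URL patterns in order and stops at the first match: [-\w]+ also matches digits, so a purely numeric id could be captured as a board slug, while [-a-z]+ lets it fall through to the \d+ patterns. A standalone check with made-up values:

import re

old_slug = re.compile(r'^(?P<board_slug>[-\w]+)/$')
new_slug = re.compile(r'^(?P<board_slug>[-a-z]+)/$')

print(bool(old_slug.match('42/')))          # True: '42' is swallowed as a slug
print(bool(new_slug.match('42/')))          # False: numeric ids fall through
print(bool(new_slug.match('free-board/')))  # True: ordinary slugs still match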
ff34a0b9ffc3fed7be9d30d65f9e8f0c24a3cf83
|
abusehelper/contrib/spamhaus/xbl.py
|
abusehelper/contrib/spamhaus/xbl.py
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
entries = []
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
entries.append(line)
self.log.info("Read %d entries" % len(entries))
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
for entry in entries:
event = events.Event()
event.add("ip", entry)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + entry)
yield idiokit.send(event)
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
event = events.Event()
event.add("ip", line)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + line)
yield idiokit.send(event)
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
Make the bot save memory by sending events as soon as it reads the corresponding lines of the input file.
|
Make the bot save memory by sending events as soon as it reads the corresponding lines of the input file.
|
Python
|
mit
|
abusesa/abusehelper
|
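The memory saving comes from the classic streaming pattern: yield each entry while iterating the file instead of accumulating a list first. The same idea in a plain-Python sketch (file name and the exact filtering are illustrative):

def entries(path, skip_chars=("#", ":", "$")):
    with open(path, "r") as handle:
        for line in handle:
            line = line.strip()
            if not line or line[0] in skip_chars:
                continue
            yield line  # constant memory: one entry in flight at a time

# Consumers pull entries lazily instead of waiting for the whole file:
# for ip in entries("xbl.txt"):
#     send_event(ip)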
0d73cc1b38703653c3302d8f9ff4efbeaaa2b406
|
credentials/apps/records/models.py
|
credentials/apps/records/models.py
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
Revert early removal of certificate field
|
Revert early removal of certificate field
|
Python
|
agpl-3.0
|
edx/credentials,edx/credentials,edx/credentials,edx/credentials
|
efd44be24e84a35db353ac79dae7cc7392a18b0c
|
matador/commands/deploy_ticket.py
|
matador/commands/deploy_ticket.py
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
Add ticket and branch arguments
|
Add ticket and branch arguments
|
Python
|
mit
|
Empiria/matador
|
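Unrelated to the new arguments, the '--package' option above illustrates a common argparse pitfall: type=bool does not parse strings (bool('False') is True), and its short flag is a bare '-'. Boolean switches are normally declared with store_true; a hedged sketch (the help text is made up):

import argparse

parser = argparse.ArgumentParser(prog='matador deploy-ticket')
parser.add_argument('-p', '--package', action='store_true',
                    help='Deploy from a package instead of the repository')

args = parser.parse_args(['--package'])
print(args.package)  # True; defaults to False when the flag is omitted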
6795e02c14fa99da2c0812fe6694bbd503f89ad1
|
tests/mock_vws/test_invalid_given_id.py
|
tests/mock_vws/test_invalid_given_id.py
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
any_endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
endpoint = any_endpoint
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
Use any_endpoint on invalid id test
|
Use any_endpoint on invalid id test
|
Python
|
mit
|
adamtheturtle/vws-python,adamtheturtle/vws-python
|
4eda3f3535d28e2486745f33504c417ba6837c3a
|
stdnum/nz/__init__.py
|
stdnum/nz/__init__.py
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
Add missing vat alias for New Zealand
|
Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202
|
Python
|
lgpl-2.1
|
arthurdejong/python-stdnum,arthurdejong/python-stdnum,arthurdejong/python-stdnum
|
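The noqa'd import is a plain module alias: it re-binds stdnum.nz.ird under the name vat, so both names resolve to the same module object. A two-line check (assuming a python-stdnum release that ships this alias):

from stdnum.nz import ird, vat

assert vat is ird  # the alias and the original are one module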
ea3a72443f2fa841ea0bc73ec461968c447f39c1
|
egg_timer/apps/utils/management/commands/check_requirements.py
|
egg_timer/apps/utils/management/commands/check_requirements.py
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
Revert "Added a prod option to the rquirements checker"
|
Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.
|
Python
|
mit
|
jessamynsmith/eggtimer-server,jessamynsmith/eggtimer-server,jessamynsmith/eggtimer-server,jessamynsmith/eggtimer-server
|
8014285e5dc8fb13377b729f9fd19b4187fbaf29
|
fireplace/carddata/spells/other.py
|
fireplace/carddata/spells/other.py
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
# RFG
# Adrenaline Rush
class NEW1_006(Card):
action = drawCard
combo = drawCards(2)
|
Implement Adrenaline Rush why not
|
Implement Adrenaline Rush why not
|
Python
|
agpl-3.0
|
Ragowit/fireplace,butozerca/fireplace,Meerkov/fireplace,smallnamespace/fireplace,amw2104/fireplace,liujimj/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,oftc-ftw/fireplace,butozerca/fireplace,NightKev/fireplace,jleclanche/fireplace,liujimj/fireplace,Meerkov/fireplace,beheh/fireplace,smallnamespace/fireplace,amw2104/fireplace
|
9410ceb83d85d70a484bbf08ecc274216fa0589f
|
mythril/support/source_support.py
|
mythril/support/source_support.py
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
"""Class to handle to source data"""
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
"""
:param source_type: whether it is a solidity-file or evm-bytecode
:param source_format: whether it is bytecode, ethereum-address or text
:param source_list: List of files
:param meta: meta data
"""
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
"""
get the source data from the contracts list
:param contracts: the list of contracts
:return:
"""
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
Add documentation for Source class
|
Add documentation for Source class
|
Python
|
mit
|
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
|
db46374695aed370aa8d8a51c34043d6a48a702d
|
waldo/tests/unit/test_contrib_config.py
|
waldo/tests/unit/test_contrib_config.py
|
# pylint: disable=R0904,W0212
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
# pylint: disable=C0103,C0111,R0903,R0904,W0212,W0232
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
Add common ignore list per README
|
Add common ignore list per README
|
Python
|
apache-2.0
|
checkmate/simpl,ryandub/simpl,ziadsawalha/simpl,samstav/simpl,larsbutler/simpl
|
ad98e3c25434dc251fe6d7ace3acfe418a4d8955
|
simplekv/db/mongo.py
|
simplekv/db/mongo.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = self.db[self.collection].find({"_id": key}).next()
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = next(self.db[self.collection].find({"_id": key}))
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
Fix Python 3's lack of .next().
|
Fix Python 3's lack of .next().
|
Python
|
mit
|
mbr/simplekv,karteek/simplekv,mbr/simplekv,fmarczin/simplekv,karteek/simplekv,fmarczin/simplekv
|
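Background on the one-line change: .next() was the Python 2 iterator method; Python 3 renamed it to __next__, and the builtin next() works on both. A tiny illustration:

it = iter([1, 2, 3])
print(next(it))   # 1 -- portable across Python 2 and 3
# it.next()       # Python 2 only; AttributeError on Python 3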
f87a923678f5d7e9f6390ffcb42eae6b2a0f9cc2
|
services/views.py
|
services/views.py
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
Use separate API key for feedback about the app.
|
Use separate API key for feedback about the app.
|
Python
|
agpl-3.0
|
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
|
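The new branch reads the control key and then deletes it before forwarding the payload; dict.pop with a default does both steps in one call. The same logic in isolation (key names follow the view, values are made up):

outgoing = {'description': 'app feedback', 'internal_feedback': True}

# pop() returns the flag (or False) and removes it from the payload.
if outgoing.pop('internal_feedback', False):
    api_key = 'INTERNAL-FEEDBACK-KEY'  # placeholder for the real setting
else:
    api_key = 'PUBLIC-KEY'
outgoing['api_key'] = api_key
print(outgoing)  # the control key is gone; only Open311 fields remain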
b92c1caa8e19376c17f503de1464d4466e547cdf
|
api/base/content_negotiation.py
|
api/base/content_negotiation.py
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the first renderer in the `.renderer_classes` list.
"""
return (renderers[0], renderers[0].media_type)
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the third renderer in the `.renderer_classes` list for the browsable API,
otherwise use the first renderer which has media_type "application/vnd.api+json"
"""
if 'text/html' in request.META['HTTP_ACCEPT'] :
return (renderers[2], renderers[2].media_type)
return (renderers[0], renderers[0].media_type)
|
Select third renderer if 'text/html' in accept
|
Select third renderer if 'text/html' in accept
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,arpitar/osf.io,TomHeatwole/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,doublebits/osf.io,crcresearch/osf.io,amyshi188/osf.io,acshi/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,kwierman/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,njantrania/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,doublebits/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,samchrisinger/osf.io,mluke93/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,jnayak1/osf.io,kwierman/osf.io,samanehsan/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,baylee-d/osf.io,mattclark/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,kch8qx/osf.io,aaxelb/osf.io,zachjanicki/osf.io,cslzchen/osf.io,chennan47/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,binoculars/osf.io,sloria/osf.io,danielneis/osf.io,leb2dg/osf.io,laurenrevere/osf.io,zamattiac/osf.io,ckc6cz/osf.io,rdhyee/osf.io,acshi/osf.io,petermalcolm/osf.io,SSJohns/osf.io,KAsante95/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,binoculars/osf.io,caseyrollins/osf.io,arpitar/osf.io,mluke93/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,SSJohns/osf.io,acshi/osf.io,saradbowman/osf.io,billyhunt/osf.io,leb2dg/osf.io,arpitar/osf.io,billyhunt/osf.io,mluo613/osf.io,alexschiller/osf.io,RomanZWang/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,jmcarp/osf.io,chennan47/osf.io,KAsante95/osf.io,GageGaskins/osf.io,jmcarp/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,erinspace/osf.io,amyshi188/osf.io,mfraezz/osf.io,GageGaskins/osf.io,mfraezz/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,samanehsan/osf.io,mattclark/osf.io,KAsante95/osf.io,adlius/osf.io,chrisseto/osf.io,kwierman/osf.io,rdhyee/osf.io,erinspace/osf.io,danielneis/osf.io,caseyrygt/osf.io,aaxelb/osf.io,mluo613/osf.io,doublebits/osf.io,Ghalko/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,abought/osf.io,njantrania/osf.io,crcresearch/osf.io,KAsante95/osf.io,Ghalko/osf.io,erinspace/osf.io,chrisseto/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,icereval/osf.io,MerlinZhang/osf.io,wearpants/osf.io,chrisseto/osf.io,danielneis/osf.io,cosenal/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,njantrania/osf.io,SSJohns/osf.io,GageGaskins/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,kwierman/osf.io,leb2dg/osf.io,samchrisinger/osf.io,caneruguz/osf.io,samanehsan/osf.io,felliott/osf.io,samanehsan/osf.io,icereval/osf.io,jnayak1/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,Ghalko/osf.io,caseyrollins/osf.io,adlius/osf.io,ckc6cz/osf.io,brianjgeiger/osf.io,wearpants/osf.io,jnayak1/osf.io,cosenal/osf.io,sbt9uc/osf.io,danielneis/osf.io,mluke93/osf.io,sloria/osf.io,Ghalko/osf.io,monikagrabowska/osf.io
|
69de2261c30a8bab1ac4d0749cf32baec49e0cc4
|
webapp/byceps/blueprints/board/views.py
|
webapp/byceps/blueprints/board/views.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
Throw 404 if category/topic with the given id is not found.
|
Throw 404 if category/topic with the given id is not found.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
b890c9046d36687a65d46be724cfaa8726417b5d
|
selectable/tests/runtests.py
|
selectable/tests/runtests.py
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
SITE_ID=1,
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Add SITE_ID to test settings setup for Django 1.3.
|
Add SITE_ID to test settings setup for Django 1.3.
|
Python
|
bsd-2-clause
|
mlavin/django-selectable,affan2/django-selectable,makinacorpus/django-selectable,makinacorpus/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable
|
2d3b899011c79324195a36aaf3bd53dae6abe961
|
seleniumrequests/__init__.py
|
seleniumrequests/__init__.py
|
from selenium.webdriver import Firefox, Chrome, Ie, Edge, Opera, Safari, BlackBerry, PhantomJS, Android, Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, Firefox):
pass
class Chrome(RequestsSessionMixin, Chrome):
pass
class Ie(RequestsSessionMixin, Ie):
pass
class Edge(RequestsSessionMixin, Edge):
pass
class Opera(RequestsSessionMixin, Opera):
pass
class Safari(RequestsSessionMixin, Safari):
pass
class BlackBerry(RequestsSessionMixin, BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, PhantomJS):
pass
class Android(RequestsSessionMixin, Android):
pass
class Remote(RequestsSessionMixin, Remote):
pass
|
from selenium.webdriver import _Firefox, _Chrome, _Ie, _Edge, _Opera, _Safari, _BlackBerry, _PhantomJS, _Android, \
_Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, _Firefox):
pass
class Chrome(RequestsSessionMixin, _Chrome):
pass
class Ie(RequestsSessionMixin, _Ie):
pass
class Edge(RequestsSessionMixin, _Edge):
pass
class Opera(RequestsSessionMixin, _Opera):
pass
class Safari(RequestsSessionMixin, _Safari):
pass
class BlackBerry(RequestsSessionMixin, _BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, _PhantomJS):
pass
class Android(RequestsSessionMixin, _Android):
pass
class Remote(RequestsSessionMixin, _Remote):
pass
|
Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"
|
Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"
|
Python
|
mit
|
cryzed/Selenium-Requests
|
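Worth noting that the original code already worked at runtime, because inside the class statement the bare name still referred to the import; the underscore aliases mainly stop static analyzers from conflating the base class with the subclass that reuses its name. The pattern in miniature, with a stdlib class so it runs without selenium:

from collections import OrderedDict as _OrderedDict

class OrderedDict(_OrderedDict):
    """Reuses the public name; the alias keeps the base class reference
    unambiguous for readers and tooling alike."""

d = OrderedDict(a=1)
print(d.__class__.__bases__[0] is _OrderedDict)  # True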
1e0327c852b851f867d21a182ba7604b42d15331
|
examples/charts/file/stacked_bar.py
|
examples/charts/file/stacked_bar.py
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
from bokeh.models.tools import HoverTool
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals")
bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')]))
output_file("stacked_bar.html")
show(bar)
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals",
hover=[('medal', '@medal'), ('country', '@abbr')])
output_file("stacked_bar.html")
show(bar)
|
Update stacked bar example to use the hover kwarg.
|
Update stacked bar example to use the hover kwarg.
|
Python
|
bsd-3-clause
|
Karel-van-de-Plassche/bokeh,rs2/bokeh,jakirkham/bokeh,msarahan/bokeh,DuCorey/bokeh,schoolie/bokeh,schoolie/bokeh,quasiben/bokeh,timsnyder/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,stonebig/bokeh,bokeh/bokeh,ericmjl/bokeh,bokeh/bokeh,aavanian/bokeh,dennisobrien/bokeh,clairetang6/bokeh,DuCorey/bokeh,ericmjl/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,justacec/bokeh,ptitjano/bokeh,rs2/bokeh,timsnyder/bokeh,draperjames/bokeh,msarahan/bokeh,ptitjano/bokeh,dennisobrien/bokeh,justacec/bokeh,aiguofer/bokeh,draperjames/bokeh,ptitjano/bokeh,jakirkham/bokeh,azjps/bokeh,mindriot101/bokeh,philippjfr/bokeh,mindriot101/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,philippjfr/bokeh,timsnyder/bokeh,ptitjano/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,jakirkham/bokeh,dennisobrien/bokeh,aiguofer/bokeh,schoolie/bokeh,phobson/bokeh,mindriot101/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,draperjames/bokeh,bokeh/bokeh,phobson/bokeh,aavanian/bokeh,philippjfr/bokeh,ericmjl/bokeh,clairetang6/bokeh,draperjames/bokeh,percyfal/bokeh,percyfal/bokeh,aavanian/bokeh,mindriot101/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,bokeh/bokeh,percyfal/bokeh,philippjfr/bokeh,ptitjano/bokeh,aavanian/bokeh,quasiben/bokeh,percyfal/bokeh,msarahan/bokeh,DuCorey/bokeh,aiguofer/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,phobson/bokeh,timsnyder/bokeh,timsnyder/bokeh,phobson/bokeh,rs2/bokeh,bokeh/bokeh,justacec/bokeh,schoolie/bokeh,stonebig/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,clairetang6/bokeh,azjps/bokeh,jakirkham/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,msarahan/bokeh,dennisobrien/bokeh,quasiben/bokeh,percyfal/bokeh,aiguofer/bokeh,jakirkham/bokeh,DuCorey/bokeh,DuCorey/bokeh,philippjfr/bokeh,clairetang6/bokeh,stonebig/bokeh,azjps/bokeh,dennisobrien/bokeh,aiguofer/bokeh
|
05419e49c438c3f867c1ab4bd37021755ec09332
|
skimage/exposure/__init__.py
|
skimage/exposure/__init__.py
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log']
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
from .unwrap import unwrap
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log',
'unwrap']
|
Make unwrap visible in the exposure package.
|
Make unwrap visible in the exposure package.
|
Python
|
bsd-3-clause
|
SamHames/scikit-image,ClinicalGraphics/scikit-image,chintak/scikit-image,bennlich/scikit-image,robintw/scikit-image,rjeli/scikit-image,youprofit/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,blink1073/scikit-image,youprofit/scikit-image,GaZ3ll3/scikit-image,Britefury/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,michaelpacer/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,juliusbierk/scikit-image,ofgulban/scikit-image,paalge/scikit-image,almarklein/scikit-image,emon10005/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,SamHames/scikit-image,almarklein/scikit-image,michaelaye/scikit-image,newville/scikit-image,Hiyorimi/scikit-image,jwiggins/scikit-image,Midafi/scikit-image,robintw/scikit-image,bsipocz/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,paalge/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,SamHames/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,Midafi/scikit-image,paalge/scikit-image,bsipocz/scikit-image,oew1v07/scikit-image,michaelpacer/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,keflavich/scikit-image,newville/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,dpshelio/scikit-image,bennlich/scikit-image,WarrenWeckesser/scikits-image,keflavich/scikit-image,Britefury/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,GaZ3ll3/scikit-image,chriscrosscutler/scikit-image
|
27e137ef5f3b6c4f6c8679edc6412b2c237b8fb4
|
plasmapy/physics/tests/test_parameters_cython.py
|
plasmapy/physics/tests/test_parameters_cython.py
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from ...utils.exceptions import RelativityWarning, RelativityError
from ...utils.exceptions import PhysicsError
from ...constants import c, m_p, m_e, e, mu0
from ..parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from plasmapy.utils.exceptions import RelativityWarning, RelativityError
from plasmapy.utils.exceptions import PhysicsError
from plasmapy.constants import c, m_p, m_e, e, mu0
from plasmapy.physics.parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
Update tests for cython parameters
|
Update tests for cython parameters
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
81bb47c28af70936be76f319ba780f2ad89ba2a0
|
Train_SDAE/tools/evaluate_model.py
|
Train_SDAE/tools/evaluate_model.py
|
import numpy as np
# import pandas as pd
# import sys
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(nHLayers, exp_data, labels, *args):
print len(args[0]), len(args[0][0]), len(args[0][1])
print len(args[0][2])
print "NewLine!\n", len(args[0][3])
print "NewLine!\n", len(args[0][4])
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
for i in range(nHLayers):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(args[0][i]).shape, np.asarray(args[0][nHLayers + i]).shape
act = get_activations(act.T, args[0][i], args[0][nHLayers + i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
import numpy as np
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(exp_data, labels, weights, biases, n_layers=None):
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
# Using ternary operator for shortness
n = n_layers if n_layers else len(weights)
for i in range(n):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(weights[i]).shape, np.asarray(biases[i]).shape
act = get_activations(act.T, weights[i], biases[i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
Support for variable number of layers
|
Support for variable number of layers
|
Python
|
apache-2.0
|
glrs/StackedDAE,glrs/StackedDAE
|
4986f02edbe45d73f8509b01270490cd8c8f90dd
|
docs/source/examples/chapel.sfile-inline.py
|
docs/source/examples/chapel.sfile-inline.py
|
from pych.extern import Chapel
@Chapel(sfile="/home/safl/pychapel/module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
from pych.extern import Chapel
import os
currentloc = os.getcwd();
# Note: depends on test living in a specific location relative to
# mymodule.chpl. Not ideal, but also not a huge issue.
@Chapel(sfile=currentloc + "/../../../module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
Use repository hierarchy instead of absolute path for sfile
|
Use repository hierarchy instead of absolute path for sfile
The test chapel.sfile-inline.py was depending on an absolute path to find the
location of a chapel file that was outside the normal sfile storage location.
The absolute location was both machine and user-specific. I replaced the path
with a relative path that depends on the directory structure for the repository.
This allows a wider ability to successfully run this test. It's a little
finicky, but I intend to make the test more robust later.
|
Python
|
apache-2.0
|
chapel-lang/pychapel,chapel-lang/pychapel,russel/pychapel,safl/pychapel,chapel-lang/pychapel,safl/pychapel,safl/pychapel,russel/pychapel,russel/pychapel,safl/pychapel
|
90974a088813dcc3a0c4a7cae5758f67c4b52a15
|
qual/tests/test_calendar.py
|
qual/tests/test_calendar.py
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def test_valid_date(self):
d = self.calendar.date(1200, 2, 29)
self.assertIsNotNone(d)
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
Check a leap year date from before the start of the calendar.
|
Check a leap year date from before the start of the calendar.
This is not really a strong test of the proleptic calendar. All days back to year 1 which are valid in the Julian calendar are valid in the Gregorian calendar.
|
Python
|
apache-2.0
|
jwg4/qual,jwg4/calexicon
|
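Editor's note on the record above: the claim that pre-1582 leap days are valid in a proleptic Gregorian calendar can be cross-checked with the standard library alone. This sketch does not use the qual package; it relies on Python's datetime module, which also implements the proleptic Gregorian calendar.
from datetime import date
# Year 1200 is divisible by 400, so it is a Gregorian leap year even
# though it predates the calendar's introduction in 1582.
d = date(1200, 2, 29)  # constructing this date does not raise
assert (d.month, d.day) == (2, 29)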
dc70fb35a104e260b40425fce23cba84b9770994
|
addons/event/models/res_partner.py
|
addons/event/models/res_partner.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
self.event_count = 0
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
Set default value for event_count
|
[FIX] event: Set default value for event_count
Fixes https://github.com/odoo/odoo/pull/39583
This commit adds a default value for event_count
Assigning default value for non-stored compute fields is required in 13.0
closes odoo/odoo#39974
X-original-commit: 9ca72b98f54d7686c0e6019870b40f14dbdd2881
Signed-off-by: Victor Feyens (vfe) <433cda6c0f0b5b2dac2ef769109a6da90db60157@odoo.com>
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
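Editor's sketch of the general pattern behind the fix above, with hypothetical model and field names (not Odoo's actual event code): a non-stored compute method in Odoo 13 must assign a value to every record in self, even when it returns early.
from odoo import fields, models
class DemoModel(models.Model):
    _name = 'demo.model'
    _description = 'Demo of default values in compute methods'
    demo_count = fields.Integer(compute='_compute_demo_count')
    def _compute_demo_count(self):
        # Assign a default across the whole recordset first, so the
        # early return below never leaves a record without a value.
        self.demo_count = 0
        if not self.env.user.has_group('base.group_user'):
            return
        for record in self:
            record.demo_count = 42  # placeholder for the real computation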
4f776ca2260419c06c2594568c73ce279426d039
|
GenotypeNetwork/test_genotype_network.py
|
GenotypeNetwork/test_genotype_network.py
|
import GenotypeNetwork as gn
import os
import networkx as nx
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
import GenotypeNetwork as gn
import os
import networkx as nx
# Change cwd for tests to the current path.
here = os.path.dirname(os.path.realpath(__file__))
os.chdir(here)
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
Make tests run from correct directory.
|
Make tests run from correct directory.
|
Python
|
mit
|
ericmjl/genotype-network
|
453af98b1a05c62acd55afca431236d8f54fdae3
|
test_bert_trainer.py
|
test_bert_trainer.py
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
        self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
Test for deterministic results when testing BERT model
|
Test for deterministic results when testing BERT model
|
Python
|
apache-2.0
|
googleinterns/smart-news-query-embeddings,googleinterns/smart-news-query-embeddings
|
1d3e956dcf667601feb871eab2a462fa09d0d101
|
tests/test_length.py
|
tests/test_length.py
|
from math import sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
|
from math import fabs, sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import floats, isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
@given(v=vectors())
def test_length_zero(v: Vector):
"""1st axiom of normed vector spaces: |v| = 0 iff v = 0"""
assert (v.length == 0) == (v == (0, 0))
@given(v=vectors(), scalar=floats())
def test_length_scalar(v: Vector, scalar: float):
"""2nd axiom of normed vector spaces: |λv| = |λ| |v|"""
assert isclose((scalar * v).length, fabs(scalar) * v.length)
@given(v=vectors(), w=vectors())
def test_length_triangle(v: Vector, w: Vector):
"""3rd axiom of normed vector spaces: |v+w| = |v| + |w|"""
assert (v + w).length <= v.length + w.length
|
Test the axioms of normed vector spaces
|
tests/length: Test the axioms of normed vector spaces
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
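For reference, the three axioms of a normed vector space that the new property-based tests above exercise, for vectors v, w and a scalar λ (the third is the triangle inequality, an upper bound rather than an equality):
\|v\| = 0 \iff v = 0, \qquad \|\lambda v\| = |\lambda|\,\|v\|, \qquad \|v + w\| \le \|v\| + \|w\|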
052f06b0ef4f3c2befaf0cbbfd605e42553b48da
|
h2o-hadoop-common/tests/python/pyunit_trace.py
|
h2o-hadoop-common/tests/python/pyunit_trace.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
import os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
assert err is not None
assert str(err.message).startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys, os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
msg = str(err.message)
assert err is not None
print("<Error message>")
print(msg)
print("</Error Message>")
# exact message depends on Jetty Version
assert (msg.startswith("HTTP 500") and "TRACE method is not supported" in msg) or \
msg.startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
Fix TRACE test also in rel-yau
|
Fix TRACE test also in rel-yau
|
Python
|
apache-2.0
|
h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3
|
209a8e029e14766027376bb6d8f0b2e0a4a07f1b
|
simulator-perfect.py
|
simulator-perfect.py
|
#!/usr/bin/env python3
import timer
import sys
import utils
# A set of files already in the storage
seen = set()
# The total number of uploads
total_uploads = 0
# The number of files in the storage
files_in = 0
tmr = timer.Timer()
for (hsh, _) in utils.read_upload_stream():
if hsh not in seen:
files_in += 1
seen.add(hsh)
total_uploads += 1
if total_uploads % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, %s" % (
utils.num_fmt(total_uploads),
1 - files_in / total_uploads,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - files_in / total_uploads
print("+++ Simulation complete. dedup_percentage=%f" % dedup_percentage,
file=sys.stderr)
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
# The size of the all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)
|
Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)
|
Python
|
apache-2.0
|
sjakthol/dedup-simulator,sjakthol/dedup-simulator
|
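A toy check of the size-based formula named in the message above, using made-up numbers (editor's sketch, not part of the commit):
total_in = 100 * 2**30  # 100 GiB offered for upload
data_in = 60 * 2**30    # 60 GiB actually stored after deduplication
dedup_percentage = 1 - data_in / total_in
assert abs(dedup_percentage - 0.40) < 1e-9  # 40% of the bytes were duplicates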
dbe57e9b76194b13d90834163ebe8bf924464dd0
|
src/mcedit2/util/lazyprop.py
|
src/mcedit2/util/lazyprop.py
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
Add a property descriptor for weakref'd members
|
Add a property descriptor for weakref'd members
|
Python
|
bsd-3-clause
|
vorburger/mcedit2,Rubisk/mcedit2,Rubisk/mcedit2,vorburger/mcedit2
|
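A minimal usage sketch of the weakrefprop descriptor added above (editor's example with hypothetical Parent/Child classes, assumed to run in the same module as the definitions; the final assert relies on CPython's immediate reference counting):
class Parent(object):
    pass
class Child(object):
    parent = weakrefprop("parent")
p = Parent()
c = Child()
c.parent = p             # stored internally as weakref.ref(p)
assert c.parent is p     # dereferenced transparently on access
del p                    # drop the only strong reference
assert c.parent is None  # the weak reference is now dead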
c89e30d1a33df2d9d8c5ceb03df98d29b3b08724
|
spacy/tests/en/test_exceptions.py
|
spacy/tests/en/test_exceptions.py
|
# coding: utf-8
"""Test that tokenizer exceptions are handled correctly."""
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["e.g.", "p.m.", "Jan.", "Dec.", "Inc."])
def test_tokenizer_handles_abbr(en_tokenizer, text):
tokens = en_tokenizer(text)
assert len(tokens) == 1
def test_tokenizer_handles_exc_in_text(en_tokenizer):
text = "It's mediocre i.e. bad."
tokens = en_tokenizer(text)
assert len(tokens) == 6
assert tokens[3].text == "i.e."
|
# coding: utf-8
"""Test that tokenizer exceptions are handled correctly."""
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["e.g.", "p.m.", "Jan.", "Dec.", "Inc."])
def test_tokenizer_handles_abbr(en_tokenizer, text):
tokens = en_tokenizer(text)
assert len(tokens) == 1
def test_tokenizer_handles_exc_in_text(en_tokenizer):
text = "It's mediocre i.e. bad."
tokens = en_tokenizer(text)
assert len(tokens) == 6
assert tokens[3].text == "i.e."
@pytest.mark.parametrize('text', ["1am", "12a.m.", "11p.m.", "4pm"])
def test_tokenizer_handles_times(en_tokenizer, text):
tokens = en_tokenizer(text)
assert len(tokens) == 2
assert tokens[1].lemma_ in ["a.m.", "p.m."]
|
Add test for English time exceptions ("1a.m." etc.)
|
Add test for English time exceptions ("1a.m." etc.)
|
Python
|
mit
|
honnibal/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,explosion/spaCy,raphael0202/spaCy,spacy-io/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,spacy-io/spaCy,explosion/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,Gregory-Howard/spaCy
|
99b1610fad7224d2efe03547c5114d2f046f50ca
|
bin/cgroup-limits.py
|
bin/cgroup-limits.py
|
#!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
|
#!/usr/bin/python
from __future__ import print_function
import sys
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit is None:
print("Warning: Can't detect memory limit from cgroups",
file=sys.stderr)
return
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
print("Warning: Can't detect number of CPU cores from cgroups",
file=sys.stderr)
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
|
Print warnings to standard error
|
Print warnings to standard error
|
Python
|
apache-2.0
|
soltysh/sti-base,mfojtik/sti-base,hhorak/sti-base,bparees/sti-base,openshift/sti-base,sclorg/s2i-base-container,openshift/sti-base,mfojtik/sti-base,bparees/sti-base
|
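A worked example of the cpuset parsing logic above, using a made-up cgroup value: "0-3,6" denotes cores 0 through 3 plus core 6, i.e. five cores (editor's sketch).
line = "0-3,6"
core_count = 0
for group in line.split(','):
    core_ids = list(map(int, group.split('-')))
    if len(core_ids) == 2:
        core_count += core_ids[1] - core_ids[0] + 1  # inclusive range, e.g. 0-3
    else:
        core_count += 1                              # single core id, e.g. 6
assert core_count == 5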
e93dadc8215f3946e4e7b64ca8ab3481fcf3c197
|
froide/foirequestfollower/apps.py
|
froide/foirequestfollower/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class FoiRequestFollowerConfig(AppConfig):
name = 'froide.foirequestfollower'
verbose_name = _('FOI Request Follower')
def ready(self):
from froide.account import account_canceled
import froide.foirequestfollower.signals # noqa
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import FoiRequestFollower
if user is None:
return
FoiRequestFollower.objects.filter(user=user).delete()
|
import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class FoiRequestFollowerConfig(AppConfig):
name = 'froide.foirequestfollower'
verbose_name = _('FOI Request Follower')
def ready(self):
from froide.account import account_canceled
import froide.foirequestfollower.signals # noqa
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import FoiRequestFollower
if user is None:
return
FoiRequestFollower.objects.filter(user=user).delete()
def export_user_data(user):
from .models import FoiRequestFollower
from froide.foirequest.models.request import get_absolute_domain_short_url
following = FoiRequestFollower.objects.filter(
user=user
)
if not following:
return
yield ('followed_requests.json', json.dumps([
{
'timestamp': frf.timestamp.isoformat(),
'url': get_absolute_domain_short_url(frf.request_id),
}
for frf in following]).encode('utf-8')
)
|
Add user data export for foirequest follower
|
Add user data export for foirequest follower
|
Python
|
mit
|
fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide
|
29ffe1df88927aa568d3e86b07e372e5ba589310
|
indra/sources/eidos/server.py
|
indra/sources/eidos/server.py
|
"""This is a Python-based web server that can be run to
read with Eidos. To run the server, do
python -m indra.sources.eidos.server
and then submit POST requests to the `localhost:5000/process_text` endpoint
with JSON content as `{'text': 'text to read'}`. The response will be the
Eidos JSON-LD output.
"""
import json
import requests
from flask import Flask, request
from indra.sources.eidos.reader import EidosReader
from indra.preassembler.make_wm_ontologies import wm_ont_url
wm_yml = requests.get(wm_ont_url).text
app = Flask(__name__)
@app.route('/process_text', methods=['POST'])
def process_text():
text = request.json.get('text')
if not text:
return {}
res = er.process_text(text, 'json_ld')
return json.dumps(res)
@app.route('/reground_text', methods=['POST'])
def reground_text():
text = request.json.get('text')
if not text:
return []
res = er.reground_texts([text], wm_yml)
return json.dumps(res)
if __name__ == '__main__':
er = EidosReader()
er.process_text('hello', 'json_ld')
app.run(host='0.0.0.0', port=6666)
|
"""This is a Python-based web server that can be run to
read with Eidos. To run the server, do
python -m indra.sources.eidos.server
and then submit POST requests to the `localhost:5000/process_text` endpoint
with JSON content as `{'text': 'text to read'}`. The response will be the
Eidos JSON-LD output.
"""
import json
import requests
from flask import Flask, request
from indra.sources.eidos.reader import EidosReader
from indra.preassembler.make_wm_ontologies import wm_ont_url
wm_yml = requests.get(wm_ont_url).text
app = Flask(__name__)
@app.route('/process_text', methods=['POST'])
def process_text():
text = request.json.get('text')
if not text:
return {}
res = er.process_text(text, 'json_ld')
return json.dumps(res)
@app.route('/reground_text', methods=['POST'])
def reground_text():
text = request.json.get('text')
if not text:
return []
if isinstance(text, str):
res = er.reground_texts([text], wm_yml)
elif isinstance(text, list):
res = er.reground_texts(text, wm_yml)
return json.dumps(res)
if __name__ == '__main__':
er = EidosReader()
er.process_text('hello', 'json_ld')
app.run(host='0.0.0.0', port=6666)
|
Allow one or multiple texts to reground
|
Allow one or multiple texts to reground
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,johnbachman/indra,bgyori/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/indra
|
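An editor's example of exercising the updated endpoint with either payload shape; it assumes the server above is running locally (note that app.run binds port 6666 even though the module docstring mentions localhost:5000):
import requests
single = requests.post('http://localhost:6666/reground_text',
                       json={'text': 'rainfall'})
batch = requests.post('http://localhost:6666/reground_text',
                      json={'text': ['rainfall', 'food insecurity']})
print(single.json(), batch.json())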
46b60e3bb2b84685e27035a270e8ae81551f3f72
|
silver/management/commands/generate_docs.py
|
silver/management/commands/generate_docs.py
|
from optparse import make_option
from datetime import datetime as dt
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
from dateutil.relativedelta import *
from silver.documents_generator import DocumentsGenerator
from silver.models import Subscription
class Command(BaseCommand):
help = 'Generates the billing documents (Invoices, Proformas).'
option_list = BaseCommand.option_list + (
make_option('--subscription',
action='store',
dest='subscription_id',
type="int"),
make_option('--date',
action='store',
dest='billing_date',
type="string"),
)
def handle(self, *args, **options):
translation.activate(settings.LANGUAGE_CODE)
date = None
if options['billing_date']:
billing_date = dt.strptime(options['billing_date'], '%Y-%m-%d').date()
docs_generator = DocumentsGenerator()
if options['subscription_id']:
try:
subscription = Subscription.objects.get(id=options['subscription_id'])
docs_generator.generate(subscription=subscription)
self.stdout.write('Done. You can have a Club-Mate now. :)')
except Subscription.DoesNotExist:
msg = 'The subscription with the provided id does not exist.'
self.stdout.write(msg)
else:
docs_generator.generate(billing_date=billing_date)
self.stdout.write('Done. You can have a Club-Mate now. :)')
|
from optparse import make_option
from datetime import datetime as dt
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
from dateutil.relativedelta import *
from silver.documents_generator import DocumentsGenerator
from silver.models import Subscription
class Command(BaseCommand):
help = 'Generates the billing documents (Invoices, Proformas).'
option_list = BaseCommand.option_list + (
make_option('--subscription',
action='store',
dest='subscription_id',
type="int"),
make_option('--date',
action='store',
dest='billing_date',
type="string"),
)
def handle(self, *args, **options):
translation.activate('en-us')
date = None
if options['billing_date']:
billing_date = dt.strptime(options['billing_date'], '%Y-%m-%d').date()
docs_generator = DocumentsGenerator()
if options['subscription_id']:
try:
subscription = Subscription.objects.get(id=options['subscription_id'])
docs_generator.generate(subscription=subscription)
self.stdout.write('Done. You can have a Club-Mate now. :)')
except Subscription.DoesNotExist:
msg = 'The subscription with the provided id does not exist.'
self.stdout.write(msg)
else:
docs_generator.generate(billing_date=billing_date)
self.stdout.write('Done. You can have a Club-Mate now. :)')
|
Add language code in the command
|
Add language code in the command
|
Python
|
apache-2.0
|
PressLabs/silver,PressLabs/silver,PressLabs/silver
|
13be198c8aec08f5738eecbb7da2bfdcafd57a48
|
pygraphc/clustering/MaxCliquesPercolationSA.py
|
pygraphc/clustering/MaxCliquesPercolationSA.py
|
from MaxCliquesPercolation import MaxCliquesPercolationWeighted
class MaxCliquesPercolationSA(MaxCliquesPercolationWeighted):
def __init__(self, graph, edges_weight, nodes_id, k, threshold):
super(MaxCliquesPercolationSA, self).__init__(graph, edges_weight, nodes_id, k, threshold)
def get_maxcliques_percolation_sa(self):
pass
|
from MaxCliquesPercolation import MaxCliquesPercolationWeighted
from pygraphc.optimization.SimulatedAnnealing import SimulatedAnnealing
from numpy import linspace
class MaxCliquesPercolationSA(MaxCliquesPercolationWeighted):
def __init__(self, graph, edges_weight, nodes_id, k, threshold, tmin, tmax, alpha, energy_type, max_iteration):
super(MaxCliquesPercolationSA, self).__init__(graph, edges_weight, nodes_id, k, threshold)
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_maxcliques_percolation_sa(self):
# run max_clique
max_cliques = self._find_maxcliques()
# get maximal node for all maximal cliques to generate k
max_node = 0
for max_clique in max_cliques:
current_len = len(max_clique)
if max_node < current_len:
max_node = current_len
parameters = {
'k': list(xrange(2, max_node)),
'I': linspace(0.1, 0.9, 9)
}
sa = SimulatedAnnealing(self.Tmin, self.Tmax, self.alpha, parameters, self.energy_type, self.max_iteration)
initial_parameter = sa.get_parameter()
# get maximal clique percolation
|
Add constructor and get method with SA
|
Add constructor and get method with SA
|
Python
|
mit
|
studiawan/pygraphc
|
33309df85823bde19fcdd2b21b73db9f1da131ab
|
requests_oauthlib/compliance_fixes/facebook.py
|
requests_oauthlib/compliance_fixes/facebook.py
|
from json import dumps
from oauthlib.common import urldecode
from urlparse import parse_qsl
def facebook_compliance_fix(session):
def _compliance_fix(r):
# if Facebook claims to be sending us json, let's trust them.
if 'application/json' in r.headers['content-type']:
return r
# Facebook returns a content-type of text/plain when sending their
# x-www-form-urlencoded responses, along with a 200. If not, let's
# assume we're getting JSON and bail on the fix.
if 'text/plain' in r.headers['content-type'] and r.status_code == 200:
token = dict(parse_qsl(r.text, keep_blank_values=True))
else:
return r
expires = token.get('expires')
if expires is not None:
token['expires_in'] = expires
token['token_type'] = 'Bearer'
r._content = dumps(token)
return r
session.register_compliance_hook('access_token_response', _compliance_fix)
return session
|
from json import dumps
try:
from urlparse import parse_qsl
except ImportError:
from urllib.parse import parse_qsl
def facebook_compliance_fix(session):
def _compliance_fix(r):
# if Facebook claims to be sending us json, let's trust them.
if 'application/json' in r.headers['content-type']:
return r
# Facebook returns a content-type of text/plain when sending their
# x-www-form-urlencoded responses, along with a 200. If not, let's
# assume we're getting JSON and bail on the fix.
if 'text/plain' in r.headers['content-type'] and r.status_code == 200:
token = dict(parse_qsl(r.text, keep_blank_values=True))
else:
return r
expires = token.get('expires')
if expires is not None:
token['expires_in'] = expires
token['token_type'] = 'Bearer'
r._content = dumps(token)
return r
session.register_compliance_hook('access_token_response', _compliance_fix)
return session
|
Remove unused import. Facebook compliance support python3
|
Remove unused import. Facebook compliance support python3
|
Python
|
isc
|
abhi931375/requests-oauthlib,gras100/asks-oauthlib,requests/requests-oauthlib,singingwolfboy/requests-oauthlib,jayvdb/requests-oauthlib,lucidbard/requests-oauthlib,dongguangming/requests-oauthlib,jsfan/requests-oauthlib,jayvdb/requests-oauthlib,sigmavirus24/requests-oauthlib,elafarge/requests-oauthlib
|
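The compatibility idiom from the fix above in isolation (editor's sketch): try the Python 2 module path first and fall back to the Python 3 location on ImportError.
try:
    from urlparse import parse_qsl        # Python 2
except ImportError:
    from urllib.parse import parse_qsl    # Python 3
print(dict(parse_qsl('a=1&b=', keep_blank_values=True)))  # {'a': '1', 'b': ''}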
60a9ace22f219f7b125b3a618090c4dd36cded4c
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
if response is not None and response.data['errors'][0].get('detail') == "Authentication credentials were not provided.":
response.status_code = 401
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
error_message = response.data['errors'][0].get('detail')
errors_401 = ["Authentication credentials were not provided.", 'Incorrect authentication credentials.']
if response is not None and error_message in errors_401:
response.status_code = 401
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
Add an additional error detail string to cover other circumstances that are intended to return 401
|
Add an additional error detail string to cover other circumstances that are intended to return 401
|
Python
|
apache-2.0
|
abought/osf.io,mfraezz/osf.io,rdhyee/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,cosenal/osf.io,danielneis/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,acshi/osf.io,emetsger/osf.io,Nesiehr/osf.io,samanehsan/osf.io,saradbowman/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,chrisseto/osf.io,cslzchen/osf.io,kwierman/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,chennan47/osf.io,njantrania/osf.io,samchrisinger/osf.io,abought/osf.io,leb2dg/osf.io,mluo613/osf.io,monikagrabowska/osf.io,acshi/osf.io,samanehsan/osf.io,alexschiller/osf.io,doublebits/osf.io,danielneis/osf.io,DanielSBrown/osf.io,hmoco/osf.io,mluke93/osf.io,njantrania/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,alexschiller/osf.io,chennan47/osf.io,acshi/osf.io,samanehsan/osf.io,saradbowman/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,mluo613/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,doublebits/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,danielneis/osf.io,chennan47/osf.io,acshi/osf.io,samchrisinger/osf.io,pattisdr/osf.io,SSJohns/osf.io,zachjanicki/osf.io,billyhunt/osf.io,SSJohns/osf.io,mattclark/osf.io,zamattiac/osf.io,adlius/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,chennan47/osf.io,njantrania/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,mfraezz/osf.io,cslzchen/osf.io,baylee-d/osf.io,samanehsan/osf.io,abought/osf.io,cosenal/osf.io,cwisecarver/osf.io,cosenal/osf.io,jnayak1/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,adlius/osf.io,wearpants/osf.io,mluke93/osf.io,crcresearch/osf.io,KAsante95/osf.io,cslzchen/osf.io,adlius/osf.io,KAsante95/osf.io,sloria/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,aaxelb/osf.io,GageGaskins/osf.io,emetsger/osf.io,emetsger/osf.io,zamattiac/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,abought/osf.io,baylee-d/osf.io,sbt9uc/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,icereval/osf.io,icereval/osf.io,amyshi188/osf.io,TomBaxter/osf.io,petermalcolm/osf.io,erinspace/osf.io,baylee-d/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,felliott/osf.io,erinspace/osf.io,zamattiac/osf.io,caneruguz/osf.io,billyhunt/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,kwierman/osf.io,mluo613/osf.io,arpitar/osf.io,Ghalko/osf.io,caseyrygt/osf.io,emetsger/osf.io,jnayak1/osf.io,hmoco/osf.io,saradbowman/osf.io,billyhunt/osf.io,mluo613/osf.io,TomBaxter/osf.io,njantrania/osf.io,leb2dg/osf.io,TomBaxter/osf.io,SSJohns/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,aaxelb/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,acshi/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,haoyuchen1992/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,zamattiac/osf.io,doublebits/osf.io,RomanZWang/osf.io,sloria/osf.io,KAsante95/osf.io
|
18d06379a2dd89ef3d8db0d045f563b8f38f57db
|
badgekit_webhooks/urls.py
|
badgekit_webhooks/urls.py
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from . import views
from django.contrib.admin.views.decorators import staff_member_required
urlpatterns = patterns(
"",
url(r"^hello/$", "badgekit_webhooks.views.hello", name="badgekit_webhooks_hello"),
url(r"^issued/$", "badgekit_webhooks.views.badge_issued_hook",
name="badge_issued_hook"),
url(r"^instances/$", views.InstanceListView.as_view()),
url(r"^claim/([-A-Za-z0-9_]+)/$", 'badgekit_webhooks.views.claim_page'),
url(r"^claim/([-A-Za-z0-9_]+)/email/(html|text)$", 'badgekit_webhooks.views.show_claim_email',
name="show_claim_email"),
url(r"^issue/$", staff_member_required(views.SendClaimCodeView.as_view()),
name="badge_issue_form"),
url(r"^claimcode/([-A-Za-z.0-9_]+)/$",
views.ClaimCodeClaimView.as_view(), name='claimcode_claim'),
url(r"^badges/$", "badgekit_webhooks.views.list_badges_view", name="badges_list"),
)
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from . import views
from django.contrib.admin.views.decorators import staff_member_required
urlpatterns = patterns(
"",
url(r"^hello/$", "badgekit_webhooks.views.hello", name="badgekit_webhooks_hello"),
url(r"^issued/$", "badgekit_webhooks.views.badge_issued_hook",
name="badge_issued_hook"),
url(r"^instances/$", staff_member_required(views.InstanceListView.as_view()),
name="badge_instance_list"),
url(r"^claim/([-A-Za-z0-9_]+)/$", 'badgekit_webhooks.views.claim_page'),
url(r"^claim/([-A-Za-z0-9_]+)/email/(html|text)$", 'badgekit_webhooks.views.show_claim_email',
name="show_claim_email"),
url(r"^issue/$", staff_member_required(views.SendClaimCodeView.as_view()),
name="badge_issue_form"),
url(r"^claimcode/([-A-Za-z.0-9_]+)/$",
views.ClaimCodeClaimView.as_view(), name='claimcode_claim'),
url(r"^badges/$", "badgekit_webhooks.views.list_badges_view", name="badges_list"),
)
|
Mark instance list as staff-only, and give it a view name
|
Mark instance list as staff-only, and give it a view name
|
Python
|
mit
|
tgs/django-badgekit-webhooks
|
fb15b0735a8d2710baa33ac4e74d1dc88de209bc
|
suplemon/lexer.py
|
suplemon/lexer.py
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
if not type(code) is str:
            # if no suitable lexer is found, return decoded code
code = code.decode("utf-8")
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
Make sure that Lexer.lex() returns str instead of bytes
|
Make sure that Lexer.lex() returns str instead of bytes
|
Python
|
mit
|
twolfson/suplemon,richrd/suplemon,richrd/suplemon,severin31/suplemon,twolfson/suplemon,trylle/suplemon
|
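The decode guard from the change above in isolation (editor's sketch; the bytes input simulates an editor handing the lexer raw file contents):
code = "print('kaixo')".encode("utf-8")  # bytes, e.g. read straight from disk
if not isinstance(code, str):
    code = code.decode("utf-8")
assert isinstance(code, str)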
d8247d43c8026a8de39b09856a3f7beb235dc4f6
|
antxetamedia/multimedia/handlers.py
|
antxetamedia/multimedia/handlers.py
|
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
Handle the case where the bucket already exists
|
Handle the case where the bucket already exists
|
Python
|
agpl-3.0
|
GISAElkartea/antxetamedia,GISAElkartea/antxetamedia,GISAElkartea/antxetamedia
|
e61bd9a56b31dde461ad0cb82e3140bd0dbfa958
|
ckanext/tayside/logic/action/update.py
|
ckanext/tayside/logic/action/update.py
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.upload(uploader.get_max_image_size())
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
Fix bug for saving images in config
|
Fix bug for saving images in config
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside
|
b0fd983269fca4c514a8a21d0bb17d47d46780c3
|
system_maintenance/tests/functional/base.py
|
system_maintenance/tests/functional/base.py
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.password_inputbox.send_keys(Keys.ENTER)
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.login_button.click()
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
Make functional testing compatible with selenium 3.141.0
|
Make functional testing compatible with selenium 3.141.0
|
Python
|
bsd-3-clause
|
mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance
|
7408862af1a6dc618e9dd78ece2120533466ab75
|
test/settings/gyptest-settings.py
|
test/settings/gyptest-settings.py
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
Make new settings test not run for xcode generator.
|
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
|
Python
|
bsd-3-clause
|
csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp
|
ff2def37816fbf1a8cf726914368036c0081e869
|
tests/integration/shared.py
|
tests/integration/shared.py
|
class ServiceTests(object):
    def test_bash(self):
        return self.check(
            input='bc -q\n1+1\nquit()',
            type='org.tyrion.service.bash',
            output='2',
            error='',
            code='0',
        )
    def test_python(self):
        return self.check(
            input='print 1+1',
            type='org.tyrion.service.python',
            output='2',
            error='',
            code='0',
        )
    def test_ruby(self):
        return self.check(
            input='puts 1+1',
            type='org.tyrion.service.ruby',
            output='2',
            error='',
            code='0',
        )
    def test_timeout_error(self):
        return self.check(
            input='sleep 10',
            type='org.tyrion.service.bash',
            output='',
            error=None,
            code='15',
            timeout=2,
        )
|
class ServiceTests(object):
    def test_bash(self):
        return self.check(
            input='bc -q\n1+1\nquit()',
            type='org.tyrion.service.bash',
            output='2',
            error='',
            code='0',
        )
    def test_python(self):
        return self.check(
            input='print 1+1',
            type='org.tyrion.service.python',
            output='2',
            error='',
            code='0',
        )
    def test_ruby(self):
        return self.check(
            input='puts 1+1',
            type='org.tyrion.service.ruby',
            output='2',
            error='',
            code='0',
        )
    def test_timeout_error(self):
        return self.check(
            input='echo test\nsleep 10',
            type='org.tyrion.service.bash',
            output='test',
            error=None,
            code='15',
            timeout=1,
        )
|
Tweak integration timeout test to match gtest
|
Tweak integration timeout test to match gtest
|
Python
|
mit
|
silas/tyrion,silas/tyrion,silas/tyrion,silas/tyrion,silas/tyrion
|
9aaf3bd6c376f608911b232d5f811e0b7964022f
|
tests/django_mysql_tests/tests.py
|
tests/django_mysql_tests/tests.py
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
    def test_simple(self):
        MyModel.objects.create()
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
    def test_simple(self):
        MyModel.objects.create()
    def test_two(self):
        MyModel.objects.create()
        MyModel.objects.create()
|
Add second test, trying to trigger travis
|
Add second test, trying to trigger travis
|
Python
|
mit
|
nickmeharry/django-mysql,nickmeharry/django-mysql,arnau126/django-mysql,adamchainz/django-mysql,arnau126/django-mysql,graingert/django-mysql,graingert/django-mysql
|
b0dd95950058d174e50589ceeb18c6a0e2a16ec8
|
docs/source/_static/export_all_data.py
|
docs/source/_static/export_all_data.py
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None):
    """
    Export table data to CSV files.
    Parameters
    ----------
    output_dir : str
        Path to the output directory.
    """
    output_dir = output_dir if output_dir else ''
    for tabledataset_subclass in TableDataset.__subclasses__():
        instance = tabledataset_subclass()
        if instance.database_query.count() > 0:  # no files without data
            instance.from_database(quiet=False)
            filename = instance.model.__name__.lower() + '.csv'
            filepath = os.path.join(output_dir, filename)
            instance.to_csv(filepath, update_sourceid=True, quiet=False)
if __name__ == '__main__':
    apply_user_settings('settings.cfg')  # change to your own config-file!
    export_all_data()  # call the export function
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None, quiet=True):
    """
    Export table data to CSV files.
    Parameters
    ----------
    output_dir : str
        Path to the output directory.
    """
    output_dir = output_dir if output_dir else ''
    for tabledataset_subclass in TableDataset.__subclasses__():
        instance = tabledataset_subclass()
        if instance.database_query.count() > 0:  # no files without data
            instance.from_database(quiet=quiet)
            filename = instance.model.__name__.lower() + '.csv'
            filepath = os.path.join(output_dir, filename)
            instance.to_csv(
                filepath, update_sourceid=True, quiet=quiet)
if __name__ == '__main__':
    apply_user_settings('settings.cfg')  # change to your own config-file!
    export_all_data(quiet=False)  # call the export function
|
Add parameter "quiet" to export function
|
Add parameter "quiet" to export function
|
Python
|
mit
|
jmenglund/CollectionBatchTool
|
3d027b8d4d39fcdbc839bd0e186ea225e1c7b976
|
tests/__init__.py
|
tests/__init__.py
|
from .test_great_expectations import *
from .test_util import *
from .test_dataset import *
from .test_pandas_dataset import *
from tests.pandas.test_pandas_dataset_distributional_expectations import *
from .test_expectation_decorators import *
from .test_cli import *
|
# from .test_great_expectations import *
# from .test_util import *
# from .test_dataset import *
# from .test_pandas_dataset import *
# from tests.pandas.test_pandas_dataset_distributional_expectations import *
# from .test_expectation_decorators import *
# from .test_cli import *
|
Remove explicit import in tests module.
|
Remove explicit import in tests module.
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
fa1b111e63ebd069c027a3b969f679b2de54949f
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
from sanic import Sanic
from sanic_openapi import swagger_blueprint
@pytest.fixture()
def app():
    app = Sanic('test')
    app.blueprint(swagger_blueprint)
    return app
|
import pytest
from sanic import Sanic
import sanic_openapi
@pytest.fixture()
def app():
    app = Sanic("test")
    app.blueprint(sanic_openapi.swagger_blueprint)
    yield app
    # Clean up
    sanic_openapi.swagger.definitions = {}
    sanic_openapi.swagger._spec = {}
|
Add clean up in app fixture
|
Test: Add clean up in app fixture
|
Python
|
mit
|
channelcat/sanic-openapi,channelcat/sanic-openapi
|
04e5083006ee1faffbbdc73bd71b4601ff1db3ae
|
tests/workers/test_merge.py
|
tests/workers/test_merge.py
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
    def test_run(self):
        mocked_queue = MagicMock()
        mocked_idle = MagicMock(side_effect=ValueError)
        mocked_queue.get.side_effect = ValueError()
        worker = MergeWorker("name", "email", "name", "email",
                             strategy="strategy", merge_queue=mocked_queue)
        worker.on_idle = mocked_idle
        worker.timeout = 1
        with pytest.raises(ValueError):
            worker.run()
        mocked_queue.get.assert_called_once_with(timeout=1, block=True)
        mocked_idle.assert_called_once_with([], [])
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
    def test_run(self):
        mocked_queue = MagicMock()
        mocked_idle = MagicMock(side_effect=ValueError)
        mocked_queue.get.side_effect = ValueError()
        worker = MergeWorker("name", "email", "name", "email",
                             strategy="strategy", merge_queue=mocked_queue)
        worker.on_idle = mocked_idle
        worker.timeout = 1
        with pytest.raises(ValueError):
            worker.run()
        mocked_queue.get.assert_called_once_with(timeout=1, block=True)
        mocked_idle.assert_called_once_with([], [])
    def test_on_idle_with_commits_and_merges(self):
        mocked_want_to_merge = MagicMock()
        mocked_commit = MagicMock()
        worker = MergeWorker("name", "email", "name", "email",
                             strategy="strategy",
                             want_to_merge=mocked_want_to_merge)
        worker.commit = mocked_commit
        commits, merges = worker.on_idle("commits", "merges")
        mocked_commit.assert_called_once_with("commits")
        assert mocked_want_to_merge.set.call_count == 1
        assert commits == []
        assert merges == []
|
Test merge worker with commits and merges
|
test: Test merge worker with commits and merges
|
Python
|
apache-2.0
|
rowhit/gitfs,bussiere/gitfs,PressLabs/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs
|
3a2936bf55019dfd9203031ebe73966846b6f041
|
tests/test_dpp.py
|
tests/test_dpp.py
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
import replay_buffer
from test_dqn_like import _TestDQNLike
from chainer import testing
@testing.parameterize(
    {'eta': 1e-2},
    {'eta': 1e-1},
    {'eta': 1e-0},
    {'eta': 1e+1},
)
class TestDQN(_TestDQNLike):
    def make_agent(self, gpu, q_func, explorer, opt):
        rbuf = replay_buffer.ReplayBuffer(10 ** 5)
        return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
                   replay_start_size=100, target_update_frequency=100,
                   eta=self.eta)
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
from agents.dpp import DPPL
from agents.dpp import DPPGreedy
import replay_buffer
from test_dqn_like import _TestDQNLike
class TestDPP(_TestDQNLike):
    def make_agent(self, gpu, q_func, explorer, opt):
        rbuf = replay_buffer.ReplayBuffer(10 ** 5)
        return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
                   replay_start_size=100, target_update_frequency=100)
    def test_abc_continuous_gpu(self):
        print("DPP doesn't support continuous action spaces.")
    def test_abc_continuous_cpu(self):
        print("DPP doesn't support continuous action spaces.")
class TestDPPL(_TestDQNLike):
    def make_agent(self, gpu, q_func, explorer, opt):
        rbuf = replay_buffer.ReplayBuffer(10 ** 5)
        return DPPL(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
                    replay_start_size=100, target_update_frequency=100)
    def test_abc_continuous_gpu(self):
        print("DPPL doesn't support continuous action spaces.")
    def test_abc_continuous_cpu(self):
        print("DPPL doesn't support continuous action spaces.")
class TestDPPGreedy(_TestDQNLike):
    def make_agent(self, gpu, q_func, explorer, opt):
        rbuf = replay_buffer.ReplayBuffer(10 ** 5)
        return DPPGreedy(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
                         explorer=explorer,
                         replay_start_size=100, target_update_frequency=100)
|
Add tests for DPPL and DPPGreedy.
|
Add tests for DPPL and DPPGreedy.
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
7c1b539436b1f27896bc0e193b52838e2323519b
|
tutorials/urls.py
|
tutorials/urls.py
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
    url(r'^$', views.ListTutorials.as_view()),
    url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'),
    url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
    # This must be last, otherwise it will match anything
    url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
    url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'),
    url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'),
    url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
    url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'),
    # This must be last, otherwise it will match anything
    url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
|
Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
|
Python
|
agpl-3.0
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
835b8adfb610cdac0233840497f3a1cf9860f946
|
cerebro/tests/core/test_usecases.py
|
cerebro/tests/core/test_usecases.py
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
    def setUp(self):
        self.neurons_path = ["./cerebro/neurons"]
        self.neuron_test = ("system check")
        self.neuron_test_response = "All working properly."
        self.command_args = ("arg1", "arg2")
        self.test_command = en.Command(self.neuron_test, self.command_args)
        self.error_test = ("asd asdasd ")
        self.error_test_response = "Sorry, I could not process that."
        self.error_command = en.Command(self.error_test, self.command_args)
        self.total_neurons = 2
        uc.get_all_neurons(self.neurons_path)
    def test_get_all_neurons(self):
        assert len(uc.NEURONS) == self.total_neurons
    def test_neuron_execution(self):
        assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
    def test_command_execution(self):
        response = uc.process_command(self.test_command)
        assert response == self.neuron_test_response
    def test_command_execution_faliure(self):
        response = uc.process_command(self.error_command)
        assert response == self.error_test_response
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
    def setUp(self):
        self.neurons_path = ["./cerebro/neurons"]
        self.neuron_test = ("system check")
        self.neuron_test_response = "All working properly."
        self.command_args = ("arg1", "arg2")
        self.test_command = en.Command(self.neuron_test, self.command_args)
        self.total_neurons = 2
        uc.get_all_neurons(self.neurons_path)
    def test_get_all_neurons(self):
        assert len(uc.NEURONS) == self.total_neurons
    def test_neuron_execution(self):
        assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
    def test_command_execution(self):
        response = uc.process_command(self.test_command)
        assert response == self.neuron_test_response
    def test_command_execution_faliure(self):
        error_test = ("asd asdasd ")
        error_test_response = "Sorry, I could not process that."
        error_command = en.Command(error_test, self.command_args)
        response = uc.process_command(error_command)
        assert response == error_test_response
|
Test cases changed and minor optimization
|
Test cases changed and minor optimization
|
Python
|
mit
|
Le-Bot/cerebro
|
a0775510c81494777ab1adf7c822c4ca9a0227b2
|
tensorbayes/distributions.py
|
tensorbayes/distributions.py
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits):
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var):
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit')
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var, eps=0.0):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
Add eps factor for numerical stability
|
Add eps factor for numerical stability
|
Python
|
mit
|
RuiShu/tensorbayes
|
f603d382ab8b93677713d6c9c26f9b6a2616ba13
|
src/utils/indices.py
|
src/utils/indices.py
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
    SMARTAPI_MAPPING = json.load(file)
def setup():
    """
    Setup Elasticsearch Index.
    Primary index with dynamic template.
    Secondary index with static mappings.
    """
    if not Index(APIDoc.Index.name).exists():
        APIDoc.init()
        elastic = Elasticsearch()
        elastic.indices.put_mapping(
            index=APIDoc.Index.name,
            body=SMARTAPI_MAPPING
        )
def reset():
    index = Index(APIDoc.Index.name)
    if index.exists():
        index.delete()
    setup()
def refresh():
    index = Index(APIDoc.Index.name)
    index.refresh()
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
    SMARTAPI_MAPPING = json.load(file)
def exists():
    return Index(APIDoc.Index.name).exists()
def setup():
    """
    Setup Elasticsearch Index.
    Primary index with dynamic template.
    Secondary index with static mappings.
    """
    if not exists():
        APIDoc.init()
        elastic = Elasticsearch()
        elastic.indices.put_mapping(
            index=APIDoc.Index.name,
            body=SMARTAPI_MAPPING
        )
def delete():
    Index(APIDoc.Index.name).delete()
def reset():
    if exists():
        delete()
    setup()
def refresh():
    index = Index(APIDoc.Index.name)
    index.refresh()
|
Add a few methods used in admin.py
|
Add a few methods used in admin.py
|
Python
|
mit
|
Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI
|
e28c9da712574618eb28b6ff82631462fee67c16
|
changes/utils/times.py
|
changes/utils/times.py
|
def duration(value):
    ONE_SECOND = 1000
    ONE_MINUTE = ONE_SECOND * 60
    if not value:
        return '0 s'
    if value < 3 * ONE_SECOND:
        return '%d ms' % (value,)
    elif value < 5 * ONE_MINUTE:
        return '%d s' % (value / ONE_SECOND,)
    else:
        return '%d m' % (value / ONE_MINUTE,)
|
def duration(value):
    ONE_SECOND = 1000
    ONE_MINUTE = ONE_SECOND * 60
    if not value:
        return '0 s'
    abs_value = abs(value)
    if abs_value < 3 * ONE_SECOND:
        return '%d ms' % (value,)
    elif abs_value < 5 * ONE_MINUTE:
        return '%d s' % (value / ONE_SECOND,)
    else:
        return '%d m' % (value / ONE_MINUTE,)
|
Fix for negative values in duration
|
Fix for negative values in duration
|
Python
|
apache-2.0
|
bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes
|
876d414f85297d45dca4f2c9158f9257dfd6cf5f
|
wagtailgeowidget/edit_handlers.py
|
wagtailgeowidget/edit_handlers.py
|
import warnings
import wagtail
if wagtail.VERSION < (2, 0):
    warnings.warn("GeoPanel only works in Wagtail 2+", Warning)  # NOQA
    warnings.warn("Please import GeoPanel from wagtailgeowidget.legacy_edit_handlers instead", Warning)  # NOQA
    warnings.warn("All support for Wagtail 1.13 and below will be droppen in April 2018", Warning)  # NOQA
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
    GeoField,
)
from wagtailgeowidget.app_settings import (
    GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
    def __init__(self, *args, **kwargs):
        self.classname = kwargs.pop('classname', "")
        self.address_field = kwargs.pop('address_field', "")
        self.hide_latlng = kwargs.pop('hide_latlng', False)
        self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
        super().__init__(*args, **kwargs)
    def widget_overrides(self):
        field = self.model._meta.get_field(self.field_name)
        srid = getattr(field, 'srid', 4326)
        return {
            self.field_name: GeoField(
                address_field=self.address_field,
                hide_latlng=self.hide_latlng,
                zoom=self.zoom,
                srid=srid,
                id_prefix='id_',
            )
        }
    def clone(self):
        return self.__class__(
            field_name=self.field_name,
            classname=self.classname,
            address_field=self.address_field,
            hide_latlng=self.hide_latlng,
            zoom=self.zoom,
        )
|
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
    GeoField,
)
from wagtailgeowidget.app_settings import (
    GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
    def __init__(self, *args, **kwargs):
        self.classname = kwargs.pop('classname', "")
        self.address_field = kwargs.pop('address_field', "")
        self.hide_latlng = kwargs.pop('hide_latlng', False)
        self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
        super().__init__(*args, **kwargs)
    def widget_overrides(self):
        field = self.model._meta.get_field(self.field_name)
        srid = getattr(field, 'srid', 4326)
        return {
            self.field_name: GeoField(
                address_field=self.address_field,
                hide_latlng=self.hide_latlng,
                zoom=self.zoom,
                srid=srid,
                id_prefix='id_',
                used_in='GeoPanel',
            )
        }
    def clone(self):
        return self.__class__(
            field_name=self.field_name,
            classname=self.classname,
            address_field=self.address_field,
            hide_latlng=self.hide_latlng,
            zoom=self.zoom,
        )
|
Remove no-longer needed wagtail 2.0 warning
|
Remove no-longer needed wagtail 2.0 warning
|
Python
|
mit
|
Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget
|
d466785a4faaf1c01519935317ededf336f9dd14
|
contentstore/management/commands/tests/test_sync_schedules.py
|
contentstore/management/commands/tests/test_sync_schedules.py
|
from six import BytesIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
    @patch('contentstore.management.commands.sync_schedules.sync_schedule')
    def test_schedule_sync_called(self, sync_task):
        """
        The sync schedules management command should call the sync schedule
        task for every schedule.
        """
        utils.disable_signals()
        schedule = Schedule.objects.create()
        utils.enable_signals()
        out = BytesIO()
        call_command('sync_schedules', stdout=out)
        sync_task.assert_called_once_with(str(schedule.id))
        self.assertIn(str(schedule.id), out.getvalue())
        self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
from six import StringIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
    @patch('contentstore.management.commands.sync_schedules.sync_schedule')
    def test_schedule_sync_called(self, sync_task):
        """
        The sync schedules management command should call the sync schedule
        task for every schedule.
        """
        utils.disable_signals()
        schedule = Schedule.objects.create()
        utils.enable_signals()
        out = StringIO()
        call_command('sync_schedules', stdout=out)
        sync_task.assert_called_once_with(str(schedule.id))
        self.assertIn(str(schedule.id), out.getvalue())
        self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
Use StringIO instead of BytesIO
|
Use StringIO instead of BytesIO
|
Python
|
bsd-3-clause
|
praekelt/seed-staged-based-messaging,praekelt/seed-stage-based-messaging,praekelt/seed-stage-based-messaging
|
fcd523105e9f158f423018d45b05527435a41fb0
|
geotrek/altimetry/tests/test_models.py
|
geotrek/altimetry/tests/test_models.py
|
import os
from django.test import TestCase
from django.conf import settings
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
    def test_get_elevation_chart_none(self):
        trek = TrekFactory.create(no_path=True)
        trek.get_elevation_chart_path()
        basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
        self.assertTrue(os.listdir(basefolder))
        directory = os.listdir(basefolder)
        self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'), directory)
|
import os
from django.test import TestCase
from django.conf import settings
from django.utils.translation import get_language
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
    def test_get_elevation_chart_none(self):
        trek = TrekFactory.create(no_path=True, published=True)
        response = self.client.get('/media/profiles/trek-%s.png' % trek.pk)
        self.assertEqual(response.status_code, 200)
        # In PDF
        trek.get_elevation_chart_path()
        basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
        self.assertTrue(os.listdir(basefolder))
        directory = os.listdir(basefolder)
        self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language()), directory)
|
Change test model elevation chart
|
Change test model elevation chart
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek
|
68452ffc8490d976b043f660a0e3e1f19c4ed98e
|
great_expectations/actions/__init__.py
|
great_expectations/actions/__init__.py
|
from .actions import (
    BasicValidationAction,
    NamespacedValidationAction,
    NoOpAction,
    SummarizeAndStoreAction,
)
from .validation_operators import (
    DefaultActionAwareValidationOperator
)
|
from .actions import (
    BasicValidationAction,
    NamespacedValidationAction,
    NoOpAction,
    SummarizeAndStoreAction,
    SlackNotificationAction
)
from .validation_operators import (
    DefaultActionAwareValidationOperator
)
|
Add Slack action to init
|
Add Slack action to init
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
dabd787a647e345bdd9f3fd2fee1474b04347512
|
website/addons/base/utils.py
|
website/addons/base/utils.py
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
    lookup = config.template_lookup
    return {
        'addon_short_name': config.short_name,
        'addon_full_name': config.full_name,
        'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
        'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
    }
def get_addons_by_config_type(config_type, user):
    addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
    addon_settings = []
    for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
        short_name = addon_config.short_name
        config = serialize_addon_config(addon_config)
        user_settings = user.get_addon(short_name)
        if user_settings:
            user_settings = user_settings.to_json(user)
        config.update({
            'user_settings': user_settings,
        })
        addon_settings.append(config)
    return addon_settings
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
    lookup = config.template_lookup
    return {
        'addon_short_name': config.short_name,
        'addon_full_name': config.full_name,
        'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
        'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
    }
def get_addons_by_config_type(config_type, user):
    addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
    addon_settings = []
    for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
        short_name = addon_config.short_name
        config = serialize_addon_config(addon_config)
        user_settings = user.get_addon(short_name)
        if user_settings:
            user_settings = user_settings.to_json(user)
        config.update({
            'user_settiongs': user_settings or addon_config.DEFAULT_SETTINGS,
        })
        addon_settings.append(config)
    return addon_settings
|
Use default settings if no user settings
|
Use default settings if no user settings
|
Python
|
apache-2.0
|
aaxelb/osf.io,DanielSBrown/osf.io,bdyetton/prettychart,HalcyonChimera/osf.io,rdhyee/osf.io,caseyrygt/osf.io,dplorimer/osf,icereval/osf.io,alexschiller/osf.io,pattisdr/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,jmcarp/osf.io,KAsante95/osf.io,acshi/osf.io,cwisecarver/osf.io,adlius/osf.io,cldershem/osf.io,kch8qx/osf.io,cslzchen/osf.io,zachjanicki/osf.io,cslzchen/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,wearpants/osf.io,acshi/osf.io,jnayak1/osf.io,lyndsysimon/osf.io,asanfilippo7/osf.io,cosenal/osf.io,baylee-d/osf.io,ZobairAlijan/osf.io,mluo613/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,caneruguz/osf.io,rdhyee/osf.io,caseyrollins/osf.io,lyndsysimon/osf.io,adlius/osf.io,petermalcolm/osf.io,zamattiac/osf.io,chrisseto/osf.io,ckc6cz/osf.io,petermalcolm/osf.io,kch8qx/osf.io,samchrisinger/osf.io,icereval/osf.io,cosenal/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,zamattiac/osf.io,baylee-d/osf.io,wearpants/osf.io,monikagrabowska/osf.io,abought/osf.io,amyshi188/osf.io,chennan47/osf.io,reinaH/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,HarryRybacki/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,jolene-esposito/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,cosenal/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,doublebits/osf.io,zamattiac/osf.io,zamattiac/osf.io,hmoco/osf.io,brandonPurvis/osf.io,jmcarp/osf.io,jolene-esposito/osf.io,TomBaxter/osf.io,mluke93/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,abought/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,samanehsan/osf.io,Nesiehr/osf.io,erinspace/osf.io,sloria/osf.io,mattclark/osf.io,adlius/osf.io,TomBaxter/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,jolene-esposito/osf.io,haoyuchen1992/osf.io,bdyetton/prettychart,alexschiller/osf.io,doublebits/osf.io,jmcarp/osf.io,reinaH/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,danielneis/osf.io,arpitar/osf.io,cldershem/osf.io,KAsante95/osf.io,danielneis/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,jmcarp/osf.io,zachjanicki/osf.io,mfraezz/osf.io,RomanZWang/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,doublebits/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,RomanZWang/osf.io,hmoco/osf.io,abought/osf.io,binoculars/osf.io,crcresearch/osf.io,felliott/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,SSJohns/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,HarryRybacki/osf.io,njantrania/osf.io,TomHeatwole/osf.io,icereval/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,mluo613/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,HarryRybacki/osf.io,acshi/osf.io,brianjgeiger/osf.io,felliott/osf.io,cwisecarver/osf.io,cslzchen/osf.io,aaxelb/osf.io,mattclark/osf.io,amyshi188/osf.io,caneruguz/osf.io,felliott/osf.io,TomHeatwole/osf.io,icereval/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,sbt9uc/osf.io,jnayak1/osf.io,doublebits/osf.io,rdhyee/osf.io,HarryRybacki/osf.io,leb2dg/osf.io,billyhunt/osf.io,kwierman/osf.io,kch8qx/osf.io,dplorimer/osf,mluo613/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,leb2dg/osf.io,abought/osf.io,wearpants/osf.io,arpitar/osf.io,Ghalko/osf.io,leb2dg/osf.io,cosenal/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,lyndsysimon/osf.io,mluke93/osf.io,ckc6cz/osf.io,emetsger/osf.io,haoyuchen1992/osf.io,mluke93/osf.io,Johnetordoff/osf.io,bdyetton/prettychart,laurenrevere/osf.io,haoyuchen1992/osf.io,caseyrollins/osf.io,adlius/osf.io,sloria/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,KAsante95/osf.io,laurenrevere/osf.io,chrisseto/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,baylee-d/osf.io,samchrisinger/osf.io,mattclark/osf.io,chrisseto/osf.io,saradbowman/osf.io,ckc6cz/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,arpitar/osf.io,billyhunt/osf.io,acshi/osf.io,dplorimer/osf,samanehsan/osf.io,bdyetton/prettychart,ckc6cz/osf.io,crcresearch/osf.io,samchrisinger/osf.io,acshi/osf.io,erinspace/osf.io,RomanZWang/osf.io,mluke93/osf.io,mfraezz/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,mluo613/osf.io,aaxelb/osf.io,saradbowman/osf.io,dplorimer/osf,caneruguz/osf.io,njantrania/osf.io,jnayak1/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,kwierman/osf.io,caneruguz/osf.io,KAsante95/osf.io,hmoco/osf.io,binoculars/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,mfraezz/osf.io,SSJohns/osf.io,danielneis/osf.io,mfraezz/osf.io,jolene-esposito/osf.io,MerlinZhang/osf.io,chennan47/osf.io,MerlinZhang/osf.io,jnayak1/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,amyshi188/osf.io,GageGaskins/osf.io,Ghalko/osf.io,emetsger/osf.io,felliott/osf.io,chennan47/osf.io,emetsger/osf.io,leb2dg/osf.io,reinaH/osf.io,caseyrygt/osf.io,emetsger/osf.io,doublebits/osf.io,sbt9uc/osf.io,sloria/osf.io
|