commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
093c9065de9e0e08f248bbb84696bf30309bd536
|
examples/parallel/timer.py
|
examples/parallel/timer.py
|
import rx
import concurrent.futures
import time
seconds = [5, 1, 2, 4, 3]
def sleep(t):
time.sleep(t)
return t
def output(result):
print '%d seconds' % result
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
lambda s: executor.submit(sleep, s)
).subscribe(output)
# 1 seconds
# 2 seconds
# 3 seconds
# 4 seconds
# 5 seconds
|
from __future__ import print_function
import rx
import concurrent.futures
import time
seconds = [5, 1, 2, 4, 3]
def sleep(t):
time.sleep(t)
return t
def output(result):
print('%d seconds' % result)
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
lambda s: executor.submit(sleep, s)
).subscribe(output)
# 1 seconds
# 2 seconds
# 3 seconds
# 4 seconds
# 5 seconds
|
Fix parallel example for Python 3
|
Fix parallel example for Python 3
|
Python
|
mit
|
dbrattli/RxPY,ReactiveX/RxPY,ReactiveX/RxPY
|
351dd3d0540b6169a58897f9cb2ec6b1c20d57a5
|
core/forms/games.py
|
core/forms/games.py
|
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, HTML, Submit, Button, Fieldset
from django.forms import ModelForm, Textarea
from core.models import Game
class GameForm(ModelForm):
class Meta:
model = Game
exclude = ['owner', 'date_published']
widgets = {
'description': Textarea(attrs={'cols': 100, 'rows': 15})
}
def __init__(self, *args, **kwargs):
super(GameForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
'{{ heading }}',
'name',
'description',
'link',
HTML("""{% if form.image.value %}<img class="img-responsive" src="{{ MEDIA_URL }}{{ form.image.value }}">
{% endif %}"""),
'image',
'tags',
'group',
'event_name',
'game_file'
),
FormActions(
Submit('save', 'Save'),
Button('cancel', 'Cancel', onclick='history.go(-1);', css_class="btn-default")
)
)
|
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, HTML, Submit, Button, Fieldset
from django.forms import ModelForm, Textarea
from core.models import Game
class GameForm(ModelForm):
class Meta:
model = Game
exclude = ['owner', 'date_published']
widgets = {
'description': Textarea(attrs={'cols': 100, 'rows': 15})
}
def __init__(self, *args, **kwargs):
super(GameForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
'{{ heading }}',
'name',
'description',
HTML("""{% if form.image.value %}<img class="img-responsive" src="{{ MEDIA_URL }}{{ form.image.value }}">
{% endif %}"""),
'image',
'tags',
'group',
'event_name',
'game_file'
),
FormActions(
Submit('save', 'Save'),
Button('cancel', 'Cancel', onclick='history.go(-1);', css_class="btn-default")
)
)
|
Remove unused field from game form
|
Remove unused field from game form
|
Python
|
mit
|
joshsamara/game-website,joshsamara/game-website,joshsamara/game-website
|
51f6272870e4e72d2364b2c2f660457b5c9286ef
|
doc/sample_code/search_forking_pro.py
|
doc/sample_code/search_forking_pro.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append('./../../')
from pyogi.ki2converter import *
from pyogi.kifu import *
if __name__ == '__main__':
for n in range(0, 50000):
n1 = (n // 10000)
n2 = int(n < 10000)
relpath = '~/data/shogi/2chkifu/{0}000{1}/{2:0>5}.KI2'.format(n1, n2, n)
kifile = os.path.expanduser(relpath)
if not os.path.exists(kifile):
continue
ki2converter = Ki2converter()
ki2converter.from_path(kifile)
csa = ki2converter.to_csa()
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import pandas as pd
sys.path.append('./../../')
from pyogi.ki2converter import *
from pyogi.kifu import *
if __name__ == '__main__':
res_table = []
for n in range(0, 50000):
n1 = (n // 10000)
n2 = int(n < 10000)
relpath = '~/data/shogi/2chkifu/{0}000{1}/{2:0>5}.KI2'.format(n1, n2, n)
kifile = os.path.expanduser(relpath)
if not os.path.exists(kifile):
continue
ki2converter = Ki2converter()
ki2converter.from_path(kifile)
csa = ki2converter.to_csa()
if not csa:
continue
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
# Output
# 1. sente forked | gote forked
# 2. (sente won & sente forked) | (gote won & gote forked)
res_table.append(
[res[2] != [] or res[3] != [],
(kifu.sente_win and res[2]!=[]) or
((not kifu.sente_win) and res[3]!=[])])
df = pd.DataFrame(res_table, columns=['fork', 'fork&win'])
pd.crosstab(df.loc[:, 'fork'], df.loc[:, 'fork&win'])
|
Add sum up part using pd.crosstab
|
Add sum up part using pd.crosstab
|
Python
|
mit
|
tosh1ki/pyogi,tosh1ki/pyogi
|
4db93f27d6d4f9b05b33af96bff15108272df6ce
|
src/webapp/public.py
|
src/webapp/public.py
|
import json
from flask import Blueprint, render_template
import database as db
from database.model import Team
bp = Blueprint('public', __name__)
@bp.route("/map")
def map_page():
return render_template("public/map.html")
@bp.route("/map_teams")
def map_teams():
qry = db.session.query(Team).filter_by(confirmed=True).filter_by(deleted=False).filter_by(backup=False)
data = []
for item in qry:
if item.location is not None:
data.append({"lat": item.location.lat,
"lon": item.location.lon,
"name": item.name})
return json.dumps(data)
|
import json
from flask import Blueprint, render_template
import database as db
from database.model import Team
bp = Blueprint('public', __name__)
@bp.route("/map")
def map_page():
return render_template("public/map.html")
@bp.route("/map_teams")
def map_teams():
qry = db.session.query(Team).filter_by(confirmed=True).filter_by(deleted=False).filter_by(backup=False)
data_dict = {}
for item in qry:
if item.location is not None:
ident = "%s%s" % (item.location.lat, item.location.lon)
if ident not in data_dict:
data_dict[ident] = {
"lat": item.location.lat,
"lon": item.location.lon,
"name": item.name
}
else:
data_dict[ident]["name"] += "<br>" + item.name
data = [entry for entry in data_dict.itervalues()]
return json.dumps(data)
|
Add multi team output in markers
|
Add multi team output in markers
Signed-off-by: Dominik Pataky <46f1a0bd5592a2f9244ca321b129902a06b53e03@netdecorator.org>
|
Python
|
bsd-3-clause
|
eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
|
3885e8fd36f419976d0b002c391dc246588929c7
|
admin/metrics/views.py
|
admin/metrics/views.py
|
from django.views.generic import TemplateView
from admin.base.settings import KEEN_CREDENTIALS
from admin.base.utils import OSFAdmin
class MetricsView(OSFAdmin, TemplateView):
template_name = 'metrics/osf_metrics.html'
def get_context_data(self, **kwargs):
kwargs.update(KEEN_CREDENTIALS.copy())
return super(MetricsView, self).get_context_data(**kwargs)
|
from django.views.generic import TemplateView
from django.contrib.auth.mixins import PermissionRequiredMixin
from admin.base.settings import KEEN_CREDENTIALS
from admin.base.utils import OSFAdmin
class MetricsView(OSFAdmin, TemplateView, PermissionRequiredMixin):
template_name = 'metrics/osf_metrics.html'
permission_required = 'admin.view_metrics'
def get_context_data(self, **kwargs):
kwargs.update(KEEN_CREDENTIALS.copy())
return super(MetricsView, self).get_context_data(**kwargs)
|
Add view metrics permission to metrics view
|
Add view metrics permission to metrics view
|
Python
|
apache-2.0
|
sloria/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,sloria/osf.io,erinspace/osf.io,TomBaxter/osf.io,mfraezz/osf.io,chrisseto/osf.io,crcresearch/osf.io,adlius/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,acshi/osf.io,crcresearch/osf.io,cwisecarver/osf.io,icereval/osf.io,cwisecarver/osf.io,aaxelb/osf.io,adlius/osf.io,acshi/osf.io,sloria/osf.io,HalcyonChimera/osf.io,icereval/osf.io,adlius/osf.io,erinspace/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,erinspace/osf.io,binoculars/osf.io,binoculars/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,cslzchen/osf.io,icereval/osf.io,chrisseto/osf.io,laurenrevere/osf.io,hmoco/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,cslzchen/osf.io,aaxelb/osf.io,crcresearch/osf.io,Nesiehr/osf.io,leb2dg/osf.io,pattisdr/osf.io,felliott/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,felliott/osf.io,caseyrollins/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,leb2dg/osf.io,caneruguz/osf.io,chrisseto/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,monikagrabowska/osf.io,felliott/osf.io,hmoco/osf.io,aaxelb/osf.io,mfraezz/osf.io,mfraezz/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,caneruguz/osf.io,saradbowman/osf.io,cwisecarver/osf.io,caneruguz/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,acshi/osf.io,acshi/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,felliott/osf.io,acshi/osf.io,pattisdr/osf.io,Nesiehr/osf.io,mattclark/osf.io,TomBaxter/osf.io,mattclark/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,chennan47/osf.io,baylee-d/osf.io,saradbowman/osf.io,chennan47/osf.io,baylee-d/osf.io,baylee-d/osf.io,mattclark/osf.io
|
d10720d1dd7997b5e1543cb27f2cd3e1088f30f5
|
server/fulltext.py
|
server/fulltext.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
"""
from bottle import route, run, template, request
import urllib2
import urllib
import sys
import os
from whoosh.index import create_in, open_dir
from whoosh.fields import *
from whoosh.qparser import QueryParser, MultifieldParser
from whoosh.query import *
@route('/')
def index():
return template('search')
@route('/epsg',method="POST")
def index():
ix = open_dir("../index")
result = []
with ix.searcher(closereader=False) as searcher:
parser = MultifieldParser(["code","name","area","type"], ix.schema)
query = request.POST.get('fulltext').strip()
myquery = parser.parse(query)
results = searcher.search(myquery, limit = 600)
num_results = len(results)
for r in results:
result.append(r)
return template('results',result=result, query=query,num_results=num_results)
run(host='localhost', port=8080)
|
#!/usr/bin/env python
# encoding: utf-8
"""
"""
from bottle import route, run, template, request
import urllib2
import urllib
import sys
import os
from whoosh.index import create_in, open_dir
from whoosh.fields import *
from whoosh.qparser import QueryParser, MultifieldParser
from whoosh.query import *
@route('/')
def index():
return template('search')
@route('/epsg',method="POST")
def index():
ix = open_dir("../index")
result = []
with ix.searcher(closereader=False) as searcher:
parser = MultifieldParser(["code","name","area","type"], ix.schema)
query = request.POST.get('fulltext').strip()
select = request.POST.get('type').strip()
status = request.POST.get('invalid')
print status, "status"
if status == None:
status = u"Valid"
print status, "status2"
query = query + " type:" + select + " status:" + status # change status from id tu text
print query
myquery = parser.parse(query)
results = searcher.search(myquery, limit = 600)
num_results = len(results)
for r in results:
result.append(r)
return template('results',result=result, query=query,num_results=num_results)
run(host='localhost', port=8080)
|
Add advanced search by select type or status
|
Add advanced search by select type or status
|
Python
|
bsd-2-clause
|
klokantech/epsg.io,dudaerich/epsg.io,dudaerich/epsg.io,klokantech/epsg.io,klokantech/epsg.io,dudaerich/epsg.io,dudaerich/epsg.io,klokantech/epsg.io
|
a01a3f9c07e0e5d93fc664df118c6085668410c1
|
test/test_url_subcommand.py
|
test/test_url_subcommand.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_url_subcommand(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
Fix a test class name
|
Fix a test class name
|
Python
|
mit
|
thombashi/sqlitebiter,thombashi/sqlitebiter
|
4c9e18f39908e9b1a36989b3e4097ca458d94af4
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
import youtube_dl_server as ydl_server
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinxcontrib.httpdomain',
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'youtube-dl-api-server'
copyright = '2013, Jaime Marquínez Ferrándiz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = ydl_server.__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
import youtube_dl_server as ydl_server
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinxcontrib.httpdomain',
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'youtube-dl-api-server'
copyright = '2013-{now:%Y}, Jaime Marquínez Ferrándiz'.format(now=datetime.datetime.now())
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = ydl_server.__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
|
Use the current year in the copyright
|
docs: Use the current year in the copyright
|
Python
|
unlicense
|
jaimeMF/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,jaimeMF/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,jaimeMF/youtube-dl-api-server
|
eaf74f092e73dcb832d624d9f19e9eaee5fbc244
|
pyfakefs/pytest_plugin.py
|
pyfakefs/pytest_plugin.py
|
"""A pytest plugin for using pyfakefs as a fixture
When pyfakefs is installed, the "fs" fixture becomes available.
:Usage:
def my_fakefs_test(fs):
fs.create_file('/var/data/xx1.txt')
assert os.path.exists('/var/data/xx1.txt')
"""
import py
import pytest
from pyfakefs.fake_filesystem_unittest import Patcher
Patcher.SKIPMODULES.add(py) # Ignore pytest components when faking filesystem
@pytest.fixture
def fs(request):
""" Fake filesystem. """
patcher = Patcher()
patcher.setUp()
request.addfinalizer(patcher.tearDown)
return patcher.fs
|
"""A pytest plugin for using pyfakefs as a fixture
When pyfakefs is installed, the "fs" fixture becomes available.
:Usage:
def my_fakefs_test(fs):
fs.create_file('/var/data/xx1.txt')
assert os.path.exists('/var/data/xx1.txt')
"""
import linecache
import py
import pytest
from pyfakefs.fake_filesystem_unittest import Patcher
Patcher.SKIPMODULES.add(py) # Ignore pytest components when faking filesystem
Patcher.SKIPMODULES.add(linecache) # Seems to be used by pytest internally
@pytest.fixture
def fs(request):
""" Fake filesystem. """
patcher = Patcher()
patcher.setUp()
request.addfinalizer(patcher.tearDown)
return patcher.fs
|
Add linecache module to skipped modules for pytest plugin
|
Add linecache module to skipped modules for pytest plugin
- see #381
- fixes the problem under Python 3, but not under Python 2
|
Python
|
apache-2.0
|
mrbean-bremen/pyfakefs,pytest-dev/pyfakefs,mrbean-bremen/pyfakefs,jmcgeheeiv/pyfakefs
|
0770a8e77463ee70851404a37138da050aead5bb
|
pymatgen/core/__init__.py
|
pymatgen/core/__init__.py
|
"""
This package contains core modules and classes for representing structures and
operations on them.
"""
__author__ = "Shyue Ping Ong"
__date__ = "Dec 15, 2010 7:21:29 PM"
from .periodic_table import *
from .composition import *
from .structure import *
from .structure_modifier import *
from .bonds import *
from .lattice import *
from .sites import *
from .operations import *
|
"""
This package contains core modules and classes for representing structures and
operations on them.
"""
__author__ = "Shyue Ping Ong"
__date__ = "Dec 15, 2010 7:21:29 PM"
from .periodic_table import *
from .composition import *
from .structure import *
from .structure_modifier import *
from .bonds import *
from .lattice import *
from .sites import *
from .operations import *
from .units import *
|
Add units to Core import.
|
Add units to Core import.
Former-commit-id: 0f1c678c7da36ebc85827601645f6729a11e5f41 [formerly 80676409b706f3927b463afef6aa844d00aeb107]
Former-commit-id: f99f3956f55a26845ce5ce583545a0413e4f36ce
|
Python
|
mit
|
tallakahath/pymatgen,matk86/pymatgen,aykol/pymatgen,matk86/pymatgen,gpetretto/pymatgen,setten/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,gVallverdu/pymatgen,dongsenfo/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,johnson1228/pymatgen,nisse3000/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,vorwerkc/pymatgen,setten/pymatgen,aykol/pymatgen,gpetretto/pymatgen,setten/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,aykol/pymatgen,mbkumar/pymatgen,Bismarrck/pymatgen,mbkumar/pymatgen,czhengsci/pymatgen,vorwerkc/pymatgen,czhengsci/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,dongsenfo/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,nisse3000/pymatgen,richardtran415/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,gpetretto/pymatgen,xhqu1981/pymatgen,Bismarrck/pymatgen,ndardenne/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen,gmatteo/pymatgen,montoyjh/pymatgen,gVallverdu/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,xhqu1981/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,johnson1228/pymatgen,davidwaroquiers/pymatgen,matk86/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,fraricci/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,mbkumar/pymatgen,czhengsci/pymatgen,Bismarrck/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,blondegeek/pymatgen,setten/pymatgen,montoyjh/pymatgen,blondegeek/pymatgen,blondegeek/pymatgen,dongsenfo/pymatgen,xhqu1981/pymatgen,matk86/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,tschaume/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen
|
7255033298cad9a4a7c51bdceafe84c0536e78ba
|
pytopkapi/infiltration.py
|
pytopkapi/infiltration.py
|
"""Infiltration module.
"""
import numpy as np
from scipy.optimize import fsolve
def green_ampt_cum_infiltration(F, psi, dtheta, K, t):
"""The Green-Ampt cumulative infiltration equation.
"""
tmp = psi*dtheta
# np.log(x) computes ln(x)
return F - tmp*np.log(1 + F/tmp) - K*t
if __name__ == '__main__':
psi = 16.7
dtheta = 0.34
K = 0.65
t = 1
F = K*t # initial guess
print fsolve(green_ampt_cum_infiltration,
F, args=(psi, dtheta, K, t), full_output=True)
|
"""Infiltration module.
"""
import numpy as np
from scipy.optimize import fsolve
def _green_ampt_cum_eq(F, psi, dtheta, K, t):
"""The Green-Ampt cumulative infiltration equation
"""
tmp = psi*dtheta
# np.log(x) computes ln(x)
return F - tmp*np.log(1 + F/tmp) - K*t
def green_ampt_cum_infiltration(psi, dtheta, K, t):
"""Compute the Green-Ampt cumulative infiltration
Compute the potential cumulative infiltration up to time `t`,
using Green-Ampt.
Parameters
----------
psi : array_like
Soil suction head at wetting front.
dtheta : array_like
Ratio of initial effective saturation to effective porosity.
K : array_like
Saturated hydraulic conductivity.
t : array_like
Time since beginning of event
Returns
-------
soln : array_like
Cumulative infiltration up to time `t`.
Raises
------
ValueError - If no solution can be found.
"""
F = K*t # initial guess
soln, infodict, ierr, mesg = fsolve(_green_ampt_cum_eq, F,
args=(psi, dtheta, K, t),
full_output=True)
if ierr == 1:
return soln
else:
raise ValueError(mesg)
def test_basic_green_ampt():
"""Test the Green-Ampt cumulative infiltration solution"""
psi = 16.7
dtheta = 0.34
K = 0.65
t = 1
result = green_ampt_cum_infiltration(psi, dtheta, K, t)
assert np.allclose(result, [3.16641923])
|
Change the API and add a test and documentation
|
ENH: Change the API and add a test and documentation
|
Python
|
bsd-3-clause
|
scottza/PyTOPKAPI,sahg/PyTOPKAPI
|
1223c77fb3ada03d32e6c9da0a08dd43bfc5ad7b
|
docs/test.py
|
docs/test.py
|
import sys, os
if sys.version_info >= (2, 4):
import doctest
else:
raise ImportError("Python 2.4 doctest required")
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def test():
for doc in ['SQLObject.txt']:
doctest.testfile(doc, optionflags=doctest.ELLIPSIS)
if __name__ == '__main__':
test()
|
import sys, os
if sys.version_info >= (2, 4):
import doctest
else:
raise ImportError("Python 2.4 doctest required")
sys.path.insert(
0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def test():
for doc in ['SQLObject.txt']:
doctest.testfile(doc, optionflags=doctest.ELLIPSIS)
if __name__ == '__main__':
test()
|
Make sure checkout is first on sys.path
|
Make sure checkout is first on sys.path
git-svn-id: fe2f45b2405132b4a9af5caedfc153c2e6f542f4@894 95a46c32-92d2-0310-94a5-8d71aeb3d4b3
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
|
bb3ec131261f0619a86f21f549d6b1cb47f2c9ad
|
graph/serializers.py
|
graph/serializers.py
|
from rest_framework import serializers
from measurement.models import Measurement
from threshold_value.models import ThresholdValue
from calendar import timegm
from alarm.models import Alarm
class GraphSeriesSerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField('get_time')
y = serializers.SerializerMethodField('get_value')
class Meta:
fields = ['x', 'y']
def get_time(self, obj):
return int(timegm(obj.time.utctimetuple())) * 1000 # Milliseconds since epoch, UTC
def get_value(self, obj):
return obj.value
class MeasurementGraphSeriesSerializer(GraphSeriesSerializer):
alarm = serializers.SerializerMethodField('get_alarm')
def __init__(self, *args, **kwargs):
self.alarm_dict = kwargs.pop('alarm_dict', None)
super(MeasurementGraphSeriesSerializer, self).__init__(*args, **kwargs)
if not self.alarm_dict:
self.fields.pop('alarm')
def get_alarm(self, obj):
if obj.id in self.alarm_dict:
alarm = self.alarm_dict[obj.id]
serializer = SimpleAlarmSerializer(alarm)
return serializer.data
return None
class Meta(GraphSeriesSerializer.Meta):
model = Measurement
fields = GraphSeriesSerializer.Meta.fields + ['alarm']
class ThresholdValueGraphSeriesSerializer(GraphSeriesSerializer):
class Meta(GraphSeriesSerializer.Meta):
model = ThresholdValue
class SimpleAlarmSerializer(serializers.ModelSerializer):
class Meta:
model = Alarm
fields = ('id', 'time_created', 'is_treated', 'treated_text')
|
from rest_framework import serializers
from measurement.models import Measurement
from threshold_value.models import ThresholdValue
from calendar import timegm
from alarm.models import Alarm
class GraphSeriesSerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField('get_time')
y = serializers.SerializerMethodField('get_value')
class Meta:
fields = ['x', 'y']
def get_time(self, obj):
return int(timegm(obj.time.utctimetuple())) * 1000 # Milliseconds since epoch, UTC
def get_value(self, obj):
return obj.value
class MeasurementGraphSeriesSerializer(GraphSeriesSerializer):
alarm = serializers.SerializerMethodField('get_alarm')
def __init__(self, *args, **kwargs):
self.alarm_dict = kwargs.pop('alarm_dict', None)
super(MeasurementGraphSeriesSerializer, self).__init__(*args, **kwargs)
if not self.alarm_dict:
self.fields.pop('alarm')
def get_alarm(self, obj):
if obj.id in self.alarm_dict:
alarm = self.alarm_dict[obj.id]
serializer = SimpleAlarmSerializer(alarm)
return serializer.data
return None
class Meta(GraphSeriesSerializer.Meta):
model = Measurement
fields = GraphSeriesSerializer.Meta.fields + ['alarm']
class ThresholdValueGraphSeriesSerializer(GraphSeriesSerializer):
class Meta(GraphSeriesSerializer.Meta):
model = ThresholdValue
class SimpleAlarmSerializer(serializers.ModelSerializer):
class Meta:
model = Alarm
fields = ['is_treated']
|
Simplify SimpleAlarmSerializer to improve the performance of the graph_data endpoint
|
Simplify SimpleAlarmSerializer to improve the performance of the graph_data endpoint
|
Python
|
mit
|
sigurdsa/angelika-api
|
dc82d59b739934d093ed0d704583e7edf1278fc3
|
core/management/commands/delete_old_sessions.py
|
core/management/commands/delete_old_sessions.py
|
from datetime import datetime
from django.core.management.base import BaseCommand
from django.contrib.sessions.models import Session
class Command(BaseCommand):
args = '<count count ...>'
help = "Delete old sessions"
def handle(self, *args, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions):
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
|
from datetime import datetime
from django.core.management.base import NoArgsCommand
from django.contrib.sessions.models import Session
class Command(NoArgsCommand):
help = "Delete old sessions"
def handle_noargs(self, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions)[:10000]:
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
|
Add delete old sessions command
|
Add delete old sessions command
|
Python
|
mit
|
QLGu/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,QLGu/djangopackages,pydanny/djangopackages,pydanny/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages
|
591a40b6e1f4ac8b1d21050ccfa10779dc9dbf7c
|
analytic_code.py
|
analytic_code.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 XCG Consulting (www.xcg-consulting.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 XCG Consulting (www.xcg-consulting.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
Add string to display the name of the field Dimension during the import
|
Add string to display the name of the field Dimension during the import
|
Python
|
agpl-3.0
|
xcgd/analytic_structure
|
6782ad40a405f79f07fa1527131634f96944ffd6
|
apps/innovate/views.py
|
apps/innovate/views.py
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html')
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html')
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
Add status codes to the 404/500 error handlers.
|
Add status codes to the 404/500 error handlers.
|
Python
|
bsd-3-clause
|
mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite
|
8d651ed493d2787da478f0c7c120917d3335b4d5
|
email_from_template/utils.py
|
email_from_template/utils.py
|
from . import app_settings
_render_method = None
def get_render_method():
global _render_method
if _render_method is None:
_render_method = from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
return _render_method
_context_processors = None
def get_context_processors():
global _context_processors
if _context_processors is None:
_context_processors = [
from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS
]
return _context_processors
def from_dotted_path(fullpath):
"""
Returns the specified attribute of a module, specified by a string.
``from_dotted_path('a.b.c.d')`` is roughly equivalent to::
from a.b.c import d
except that ``d`` is returned and not entered into the current namespace.
"""
module, attr = fullpath.rsplit('.', 1)
return getattr(__import__(module, {}, {}, (attr,)), attr)
|
from django.utils.functional import memoize
from . import app_settings
def get_render_method():
return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
get_render_method = memoize(get_render_method, {}, 0)
def get_context_processors():
return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]
get_context_processors = memoize(get_context_processors, {}, 0)
def from_dotted_path(fullpath):
"""
Returns the specified attribute of a module, specified by a string.
``from_dotted_path('a.b.c.d')`` is roughly equivalent to::
from a.b.c import d
except that ``d`` is returned and not entered into the current namespace.
"""
module, attr = fullpath.rsplit('.', 1)
return getattr(__import__(module, {}, {}, (attr,)), attr)
|
Use Django's memoize over a custom one.
|
Use Django's memoize over a custom one.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
Python
|
bsd-3-clause
|
playfire/django-email-from-template,lamby/django-email-from-template
|
07d7eddac89d5ac54af62e185801cdbe71720b7c
|
hybridJaccardTest.py
|
hybridJaccardTest.py
|
import argparse
import sys
import hybridJaccard as hj
def main():
"Command line testinterface."
parser = argparse.ArgumentParser()
parser.add_argument('-c','--configFile', help="Configuration file (JSON).", required=False)
parser.add_argument('-i','--input', help="Input file of phrases to test.", required=True)
parser.add_argument('-r','--referenceFile', help="Reference file.", required=False)
args = parser.parse_args()
sm = hj.HybridJaccard(ref_path=args.referenceFile, config_path=args.configFile)
with open("input.txt") as input:
for line in input:
line = line.strip()
match = sm.findBestMatchStringCached(line)
if match is None:
match = "(NONE)"
print(line+" => "+match)
# call main() if this is run as standalone
if __name__ == "__main__":
sys.exit(main())
|
import argparse
import sys
import hybridJaccard as hj
def main():
"Command line testinterface."
parser = argparse.ArgumentParser()
parser.add_argument('-c','--configFile', help="Configuration file (JSON).", required=False)
parser.add_argument('-i','--input', help="Input file of phrases to test.", required=True)
parser.add_argument('-r','--referenceFile', help="Reference file.", required=False)
args = parser.parse_args()
sm = hj.HybridJaccard(ref_path=args.referenceFile, config_path=args.configFile)
with open(args.input) as input:
for line in input:
line = line.strip()
match = sm.findBestMatchStringCached(line)
if match is None:
match = "(NONE)"
print(line+" => "+match)
# call main() if this is run as standalone
if __name__ == "__main__":
sys.exit(main())
|
Read the intput file specified on the command line.
|
Read the intput file specified on the command line.
|
Python
|
apache-2.0
|
usc-isi-i2/hybrid-jaccard
|
fcbb2ec6ebceebea0012971a831f2941d1943708
|
src/knesset/links/managers.py
|
src/knesset/links/managers.py
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
class LinksManager(models.Manager):
def for_model(self, model):
"""
QuerySet for all links for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_query_set().filter(active=True, content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_unicode(model._get_pk_val()))
return qs
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
class LinksManager(models.Manager):
def for_model(self, model):
"""
QuerySet for all links for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_query_set().select_related('link_type').filter(
active=True, content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_unicode(model._get_pk_val()))
return qs
|
Use select related for link_type
|
Use select related for link_type
|
Python
|
bsd-3-clause
|
habeanf/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,navotsil/Open-Knesset,OriHoch/Open-Knesset,noamelf/Open-Knesset,habeanf/Open-Knesset,noamelf/Open-Knesset,daonb/Open-Knesset,Shrulik/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,ofri/Open-Knesset,noamelf/Open-Knesset,MeirKriheli/Open-Knesset,DanaOshri/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,daonb/Open-Knesset,otadmor/Open-Knesset,DanaOshri/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,daonb/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,navotsil/Open-Knesset,otadmor/Open-Knesset,DanaOshri/Open-Knesset,jspan/Open-Knesset,jspan/Open-Knesset,Shrulik/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,navotsil/Open-Knesset,MeirKriheli/Open-Knesset,navotsil/Open-Knesset
|
79dd629be9b858fd7bc73e7d16aecbb25de0d5db
|
fireplace/cards/wog/rogue.py
|
fireplace/cards/wog/rogue.py
|
from ..utils import *
##
# Minions
##
# Spells
class OG_073:
"Thistle Tea"
play = Draw(CONTROLLER).then(Give(CONTROLLER, Copy(Draw.CARD)) * 2)
|
from ..utils import *
##
# Minions
class OG_070:
"Bladed Cultist"
combo = Buff(SELF, "OG_070e")
OG_070e = buff(+1, +1)
class OG_267:
"Southsea Squidface"
deathrattle = Buff(FRIENDLY_WEAPON, "OG_267e")
OG_267e = buff(atk=2)
##
# Spells
class OG_073:
"Thistle Tea"
play = Draw(CONTROLLER).then(Give(CONTROLLER, Copy(Draw.CARD)) * 2)
class OG_176:
"Shadow Strike"
play = Hit(TARGET, 5)
|
Implement Bladed Cultist, Southsea Squidface, and Shadow Strike
|
Implement Bladed Cultist, Southsea Squidface, and Shadow Strike
|
Python
|
agpl-3.0
|
NightKev/fireplace,jleclanche/fireplace,beheh/fireplace
|
b8ad378a796ee867acfa3198e04d47a500dd90d3
|
mla/neuralnet/activations.py
|
mla/neuralnet/activations.py
|
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def leakyrelu(z, a=0.01):
return np.maximum(z * a, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
Add Leaky ReLU activation. Differentiation with autograd package confirmed to work correctly.
|
Add Leaky ReLU activation.
Differentiation with autograd package confirmed to work correctly.
|
Python
|
mit
|
rushter/MLAlgorithms
|
f6cad3a2bfeb4238da359c882fe7cbbaedb5d8b7
|
setuptools/extension.py
|
setuptools/extension.py
|
from distutils.core import Extension as _Extension
from dist import _get_unpatched
_Extension = _get_unpatched(_Extension)
try:
from Pyrex.Distutils.build_ext import build_ext
except ImportError:
have_pyrex = False
else:
have_pyrex = True
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
if not have_pyrex:
# convert .pyx extensions to .c
def __init__(self,*args,**kw):
_Extension.__init__(self,*args,**kw)
sources = []
for s in self.sources:
if s.endswith('.pyx'):
sources.append(s[:-3]+'c')
else:
sources.append(s)
self.sources = sources
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
|
from distutils.core import Extension as _Extension
from setuptools.dist import _get_unpatched
_Extension = _get_unpatched(_Extension)
try:
from Pyrex.Distutils.build_ext import build_ext
except ImportError:
have_pyrex = False
else:
have_pyrex = True
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
if not have_pyrex:
# convert .pyx extensions to .c
def __init__(self,*args,**kw):
_Extension.__init__(self,*args,**kw)
sources = []
for s in self.sources:
if s.endswith('.pyx'):
sources.append(s[:-3]+'c')
else:
sources.append(s)
self.sources = sources
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
|
Fix import that was breaking py3k
|
Fix import that was breaking py3k
--HG--
branch : distribute
extra : rebase_source : 76bf8f9213536189bce76a41e798c44c5f468cbd
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
bd78b8c1bab94b5f048f8bc4895657f1fd36ddfc
|
project_generator/commands/clean.py
|
project_generator/commands/clean.py
|
# Copyright 2014-2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from ..generate import Generator
help = 'Clean generated projects'
def run(args):
if os.path.exists(args.file):
generator = Generator(args.file)
for project in generator.generate(args.project):
project.clean(args.tool)
else:
# not project known by pgen
logging.warning("%s not found." % args.file)
return -1
return 0
def setup(subparser):
subparser.add_argument("-f", "--file", help="YAML projects file", default='projects.yaml')
subparser.add_argument("-p", "--project", required = True, help="Specify which project to be removed")
subparser.add_argument(
"-t", "--tool", required = True, help="Clean project files for specified tool")
|
# Copyright 2014-2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from ..generate import Generator
help = 'Clean generated projects'
def run(args):
if os.path.exists(args.file):
generator = Generator(args.file)
for project in generator.generate(args.project):
project.clean(args.tool)
else:
# not project known by pgen
logging.warning("%s not found." % args.file)
return -1
return 0
def setup(subparser):
subparser.add_argument("-f", "--file", help="YAML projects file", default='projects.yaml')
subparser.add_argument("-p", "--project", required = True, help="Specify which project to be removed")
subparser.add_argument(
"-t", "--tool", help="Clean project files")
|
Clean - tool is not required, as tool_supported are there
|
Clean - tool is not required, as tool_supported are there
|
Python
|
apache-2.0
|
0xc0170/project_generator,sarahmarshy/project_generator,hwfwgrp/project_generator,molejar/project_generator,project-generator/project_generator,ohagendorf/project_generator
|
2bf883741ce763bde729f2930af913c44a807cb5
|
jiraconfig-sample.py
|
jiraconfig-sample.py
|
JIRA = {
"server": "https://example.com/jira/",
"user": "user",
"password": "password"
}
|
import keyring
JIRA = {
"server": "https://example.com/jira/",
"user": "user",
"password": keyring.get_password("system", "user")
}
|
Add keyring to example config
|
Add keyring to example config
|
Python
|
mit
|
mrts/ask-jira
|
235f8061caa667f7c9bc1f424e14326c22932547
|
Examples/Infovis/Python/cone_layout.py
|
Examples/Infovis/Python/cone_layout.py
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
reader.Update()
print reader.GetOutput()
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
Remove errant printout in python cone layout example.
|
ENH: Remove errant printout in python cone layout example.
|
Python
|
bsd-3-clause
|
daviddoria/PointGraphsPhase1,jmerkow/VTK,mspark93/VTK,hendradarwin/VTK,aashish24/VTK-old,Wuteyan/VTK,jmerkow/VTK,berendkleinhaneveld/VTK,msmolens/VTK,ashray/VTK-EVM,msmolens/VTK,hendradarwin/VTK,sumedhasingla/VTK,ashray/VTK-EVM,johnkit/vtk-dev,sumedhasingla/VTK,SimVascular/VTK,sankhesh/VTK,candy7393/VTK,jmerkow/VTK,spthaolt/VTK,candy7393/VTK,mspark93/VTK,mspark93/VTK,spthaolt/VTK,msmolens/VTK,Wuteyan/VTK,gram526/VTK,jeffbaumes/jeffbaumes-vtk,biddisco/VTK,sumedhasingla/VTK,demarle/VTK,aashish24/VTK-old,gram526/VTK,sankhesh/VTK,candy7393/VTK,cjh1/VTK,keithroe/vtkoptix,ashray/VTK-EVM,keithroe/vtkoptix,naucoin/VTKSlicerWidgets,biddisco/VTK,cjh1/VTK,msmolens/VTK,jeffbaumes/jeffbaumes-vtk,ashray/VTK-EVM,arnaudgelas/VTK,jeffbaumes/jeffbaumes-vtk,spthaolt/VTK,spthaolt/VTK,sankhesh/VTK,candy7393/VTK,keithroe/vtkoptix,spthaolt/VTK,SimVascular/VTK,collects/VTK,sankhesh/VTK,gram526/VTK,ashray/VTK-EVM,sumedhasingla/VTK,demarle/VTK,Wuteyan/VTK,jmerkow/VTK,candy7393/VTK,msmolens/VTK,berendkleinhaneveld/VTK,spthaolt/VTK,biddisco/VTK,SimVascular/VTK,collects/VTK,daviddoria/PointGraphsPhase1,jmerkow/VTK,hendradarwin/VTK,demarle/VTK,msmolens/VTK,biddisco/VTK,SimVascular/VTK,sumedhasingla/VTK,Wuteyan/VTK,johnkit/vtk-dev,msmolens/VTK,sankhesh/VTK,cjh1/VTK,aashish24/VTK-old,naucoin/VTKSlicerWidgets,keithroe/vtkoptix,johnkit/vtk-dev,sankhesh/VTK,jeffbaumes/jeffbaumes-vtk,daviddoria/PointGraphsPhase1,demarle/VTK,berendkleinhaneveld/VTK,cjh1/VTK,demarle/VTK,msmolens/VTK,aashish24/VTK-old,keithroe/vtkoptix,daviddoria/PointGraphsPhase1,biddisco/VTK,berendkleinhaneveld/VTK,naucoin/VTKSlicerWidgets,berendkleinhaneveld/VTK,cjh1/VTK,collects/VTK,johnkit/vtk-dev,naucoin/VTKSlicerWidgets,SimVascular/VTK,SimVascular/VTK,johnkit/vtk-dev,gram526/VTK,Wuteyan/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,ashray/VTK-EVM,candy7393/VTK,jmerkow/VTK,keithroe/vtkoptix,arnaudgelas/VTK,sumedhasingla/VTK,naucoin/VTKSlicerWidgets,arnaudgelas/VTK,daviddoria/PointGraphsPhase1,johnkit/vtk-dev,gram526/VTK,gram526/VT
K,jmerkow/VTK,sumedhasingla/VTK,arnaudgelas/VTK,hendradarwin/VTK,spthaolt/VTK,gram526/VTK,cjh1/VTK,biddisco/VTK,collects/VTK,berendkleinhaneveld/VTK,sankhesh/VTK,demarle/VTK,sumedhasingla/VTK,aashish24/VTK-old,aashish24/VTK-old,biddisco/VTK,mspark93/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,mspark93/VTK,candy7393/VTK,candy7393/VTK,jmerkow/VTK,collects/VTK,keithroe/vtkoptix,keithroe/vtkoptix,Wuteyan/VTK,mspark93/VTK,collects/VTK,gram526/VTK,johnkit/vtk-dev,SimVascular/VTK,arnaudgelas/VTK,ashray/VTK-EVM,mspark93/VTK,hendradarwin/VTK,Wuteyan/VTK,mspark93/VTK,berendkleinhaneveld/VTK,SimVascular/VTK,naucoin/VTKSlicerWidgets,demarle/VTK,arnaudgelas/VTK,sankhesh/VTK,demarle/VTK,ashray/VTK-EVM,daviddoria/PointGraphsPhase1
|
72d0ca4e2f4be7969498b226af4243315f2dff0c
|
tests/test_colors.py
|
tests/test_colors.py
|
"""Test imagemagick functions."""
import unittest
from pywal import colors
class TestGenColors(unittest.TestCase):
"""Test the gen_colors functions."""
def test_gen_colors(self):
"""> Generate a colorscheme."""
result = colors.get("tests/test_files/test.jpg")
self.assertEqual(result["colors"]["color0"], "#0D191B")
if __name__ == "__main__":
unittest.main()
|
"""Test imagemagick functions."""
import unittest
from pywal import colors
class TestGenColors(unittest.TestCase):
"""Test the gen_colors functions."""
def test_gen_colors(self):
"""> Generate a colorscheme."""
result = colors.get("tests/test_files/test.jpg")
self.assertEqual(len(result["colors"]["color0"]), 7)
if __name__ == "__main__":
unittest.main()
|
Check color length instead of value since the tests will fail on other versions of imageamgick
|
tests: Check color length instead of value since the tests will fail on other versions of imageamgick
|
Python
|
mit
|
dylanaraps/pywal,dylanaraps/pywal,dylanaraps/pywal
|
323cc3f50fa0bbd072bfe243443adf12e1b25220
|
bluebottle/projects/migrations/0019_auto_20170118_1537.py
|
bluebottle/projects/migrations/0019_auto_20170118_1537.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-18 14:37
from __future__ import unicode_literals
import binascii
import os
from django.db import migrations
def generate_key():
return binascii.hexlify(os.urandom(20)).decode()
def create_auth_token(apps, schema_editor):
Member = apps.get_model('members', 'member')
Token = apps.get_model('authtoken', 'token')
member = Member.objects.create(
email='devteam+accounting@onepercentclub.com',
username='accounting'
)
token = Token.objects.create(
user=member,
key=generate_key()
)
class Migration(migrations.Migration):
dependencies = [
('projects', '0018_merge_20170118_1533'),
('authtoken', '0001_initial'),
]
operations = [
migrations.RunPython(create_auth_token)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-18 14:37
from __future__ import unicode_literals
import binascii
import os
from django.db import migrations
def generate_key():
return binascii.hexlify(os.urandom(20)).decode()
def create_auth_token(apps, schema_editor):
Member = apps.get_model('members', 'member')
Token = apps.get_model('authtoken', 'token')
member = Member.objects.create(
email='devteam+accounting@onepercentclub.com',
username='accounting'
)
token = Token.objects.create(
user=member,
key=generate_key()
)
class Migration(migrations.Migration):
dependencies = [
('projects', '0018_merge_20170118_1533'),
('authtoken', '0001_initial'),
('quotes', '0005_auto_20180717_1017'),
('slides', '0006_auto_20180717_1017'),
]
operations = [
migrations.RunPython(create_auth_token)
]
|
Add dependency on different migrations
|
Add dependency on different migrations
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
23e1766731dbd08d3d6c55d9d1fe2bbf1be42614
|
sncosmo/tests/test_builtins.py
|
sncosmo/tests/test_builtins.py
|
import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
bp = sncosmo.get_bandpass('f087')
|
import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
|
Update tests to cover every Roman WFI filter
|
Update tests to cover every Roman WFI filter
|
Python
|
bsd-3-clause
|
sncosmo/sncosmo,sncosmo/sncosmo,sncosmo/sncosmo
|
d4033694f7686fe1ad48a185ae740c4d966d40d8
|
classes/dnsresolver.py
|
classes/dnsresolver.py
|
import dns
import dns.resolver
import dns.rdatatype
from typing import Union, List
class DNSResolver(dns.resolver.Resolver):
def __init__(self, filename='/etc/resolv.conf', configure=False,
nameservers: Union[str, List[str]] = None):
# Run the dns.resolver.Resolver superclass init call to configure
# the object. Then, depending on the value in configure argument,
# do something with the nameservers argument, which is unique to this
# class object instead.
super(DNSResolver, self).__init__(filename, configure)
if not configure:
if isinstance(nameservers, str):
self.nameservers = [nameservers]
elif isinstance(nameservers, list):
self.nameservers = nameservers
else:
self.nameservers = ['8.8.8.8, 8.8.4.4']
def dns_resolve(domain: str, resolver: DNSResolver = DNSResolver(configure=True)) -> list:
addrs = []
for answer in resolver.query(domain, 'A').response.answer:
for item in answer:
addrs.append(item.address)
for answer in resolver.query(domain, 'AAAA').response.answer:
for item in answer:
addrs.append(item.address)
return addrs
|
import dns
import dns.resolver
import dns.rdatatype
from typing import Union, List
class DNSResolver(dns.resolver.Resolver):
def __init__(self, filename='/etc/resolv.conf', configure=False,
nameservers: Union[str, List[str]] = None):
# Run the dns.resolver.Resolver superclass init call to configure
# the object. Then, depending on the value in configure argument,
# do something with the nameservers argument, which is unique to this
# class object instead.
super(DNSResolver, self).__init__(filename, configure)
if not configure:
if isinstance(nameservers, str):
self.nameservers = [nameservers]
elif isinstance(nameservers, list):
self.nameservers = nameservers
else:
self.nameservers = ['8.8.8.8, 8.8.4.4']
def dns_resolve(domain: str, resolver: DNSResolver = DNSResolver(configure=True)) -> list:
addrs = []
try:
for answer in resolver.query(domain, 'A').response.answer:
for item in answer:
if item.rdtype == dns.rdatatype.A:
addrs.append(item.address)
except dns.resolver.NoAnswer:
pass
try:
for answer in resolver.query(domain, 'AAAA').response.answer:
for item in answer:
if item.rdtype == dns.rdatatype.AAAA:
addrs.append(item.address)
except dns.resolver.NoAnswer:
pass
return addrs
|
Implement rdatatype-aware and NoAnswer-aware DNS handling
|
Implement rdatatype-aware and NoAnswer-aware DNS handling
This will work for CNAME entries because CNAMEs hit by A or AAAA lookups behave like `dig` does - they will trigger a second resultset for the CNAME entry in order to return the IP address.
This also is amended to handle a "NoAnswer" response - i.e. if there are no IPv4 or IPv6 addresses for a given CNAME or records lookup. The list will therefore have all the CNAME-resolved IP addresses as independent strings.
|
Python
|
apache-2.0
|
Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector
|
c81fff4ff4cccc51faf47c7ca9a63cd9eb6a2699
|
projects/tests/factories.py
|
projects/tests/factories.py
|
import factory
from django.contrib.auth.models import User
from accounts.tests.factories import UserFactory
from .. import models
class OrganizationFactory(factory.DjangoModelFactory):
"""Organization factory"""
FACTORY_FOR = models.Organization
name = factory.Sequence(lambda n: 'organization {}'.format(n))
@factory.post_generation
def users(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for user in extracted:
self.users.add(user)
else:
self.users = UserFactory.create_batch(10)
class ProjectFactory(factory.DjangoModelFactory):
"""Project factory"""
FACTORY_FOR = models.Project
name = factory.Sequence(lambda n: 'project{}'.format(n))
url = factory.Sequence(lambda n: 'http://test{}.com'.format(n))
organization = factory.SubFactory(OrganizationFactory)
@factory.sequence
def owner(n):
return User.objects.create_user('user{}'.format(n))
|
import factory
from django.contrib.auth.models import User
from accounts.tests.factories import UserFactory
from .. import models
class OrganizationFactory(factory.DjangoModelFactory):
"""Organization factory"""
FACTORY_FOR = models.Organization
name = factory.Sequence(lambda n: 'organization {}'.format(n))
@factory.post_generation
def users(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for user in extracted:
self.users.add(user)
else:
self.users = UserFactory.create_batch(10)
class ProjectFactory(factory.DjangoModelFactory):
"""Project factory"""
FACTORY_FOR = models.Project
name = factory.Sequence(lambda n: 'project{}'.format(n))
url = factory.Sequence(lambda n: 'http://test{}.com'.format(n))
organization = factory.SubFactory(OrganizationFactory)
run_here = False
@factory.sequence
def owner(n):
return User.objects.create_user('user{}'.format(n))
|
Change project factory default values
|
Change project factory default values
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
1c6fcd2e1ab02fef60e3507ba57cb9224b19d616
|
elephantblog/context_processors.py
|
elephantblog/context_processors.py
|
from django.utils import translation
from feincms.module.page.models import Page
def blog_page(request):
""" Used to get the feincms page navigation within the blog app. """
from feincms.module.page.models import Page
try:
return {'blog_page' : Page.objects.get(slug='blog', language=translation.get_language())}
except:
return {}
|
from feincms.module.page.models import Page
from feincms.translations import short_language_code
def blog_page(request):
""" Used to get the feincms page navigation within the blog app. """
from feincms.module.page.models import Page
return {'blog_page': Page.objects.get(slug='blog', language=short_language_code())}
except:
try:
return {'blog_page': Page.objects.get(slug='blog')}
except:
return {}
|
Handle page module without translations extension too
|
blog_page: Handle page module without translations extension too
|
Python
|
bsd-3-clause
|
matthiask/feincms-elephantblog,feincms/feincms-elephantblog,joshuajonah/feincms-elephantblog,matthiask/feincms-elephantblog,feincms/feincms-elephantblog,michaelkuty/feincms-elephantblog,matthiask/feincms-elephantblog,sbaechler/feincms-elephantblog,joshuajonah/feincms-elephantblog,sbaechler/feincms-elephantblog,sbaechler/feincms-elephantblog,michaelkuty/feincms-elephantblog,joshuajonah/feincms-elephantblog,michaelkuty/feincms-elephantblog
|
c9aa7b60e3e985883854e7aba38838c7a45aa6fa
|
matches/models.py
|
matches/models.py
|
from django.db import models
from wrestlers.models import WrestlingEntity
class Card(models.Model):
date = models.DateField()
def __unicode__(self):
return unicode(self.date)
class Match(models.Model):
card = models.ForeignKey(Card)
participants = models.ManyToManyField(WrestlingEntity)
winner = models.ForeignKey(WrestlingEntity, related_name="won_matches",
null=True, blank=True)
def __unicode__(self):
return " vs. ".join([p.name for p in self.participants.all()])
|
from django.contrib.auth.models import User
from django.db import models
from wrestlers.models import WrestlingEntity
class Review(models.Model):
reviewed_by = models.ForeignKey(User)
reviewed_at = models.DateTimeField()
class Meta:
abstract = True
class Card(models.Model):
date = models.DateField()
def __unicode__(self):
return unicode(self.date)
class Match(Review):
card = models.ForeignKey(Card)
participants = models.ManyToManyField(WrestlingEntity)
winner = models.ForeignKey(WrestlingEntity, related_name="won_matches",
null=True, blank=True)
def __unicode__(self):
return " vs. ".join([p.name for p in self.participants.all()])
|
Add basic Review model and use it for matches.
|
Add basic Review model and use it for matches.
|
Python
|
agpl-3.0
|
OddBloke/moore
|
11238c63240fa19b87fc478916bac3a4bdd86df5
|
django_project/realtime/tasks/test/test_celery_tasks.py
|
django_project/realtime/tasks/test/test_celery_tasks.py
|
# coding=utf-8
import logging
import unittest
from django import test
from timeout_decorator import timeout_decorator
from realtime.app_settings import LOGGER_NAME
from realtime.tasks import check_realtime_broker
from realtime.tasks.realtime.celery_app import app as realtime_app
from realtime.utils import celery_worker_connected
__author__ = 'Rizky Maulana Nugraha <lana.pcfre@gmail.com>'
__date__ = '12/4/15'
LOGGER = logging.getLogger(LOGGER_NAME)
# minutes test timeout
LOCAL_TIMEOUT = 10 * 60
class CeleryTaskTest(test.SimpleTestCase):
@timeout_decorator.timeout(LOCAL_TIMEOUT)
@unittest.skipUnless(
celery_worker_connected(realtime_app, 'inasafe-realtime'),
'Realtime Worker needs to be run')
def test_indicator(self):
"""Test broker connection."""
result = check_realtime_broker.delay()
self.assertTrue(result.get())
|
# coding=utf-8
import logging
import unittest
from django import test
from timeout_decorator import timeout_decorator
from realtime.app_settings import LOGGER_NAME
from realtime.tasks import check_realtime_broker, \
retrieve_felt_earthquake_list
from realtime.tasks.realtime.celery_app import app as realtime_app
from realtime.utils import celery_worker_connected
__author__ = 'Rizky Maulana Nugraha <lana.pcfre@gmail.com>'
__date__ = '12/4/15'
LOGGER = logging.getLogger(LOGGER_NAME)
# minutes test timeout
LOCAL_TIMEOUT = 10 * 60
class CeleryTaskTest(test.SimpleTestCase):
@timeout_decorator.timeout(LOCAL_TIMEOUT)
@unittest.skipUnless(
celery_worker_connected(realtime_app, 'inasafe-realtime'),
'Realtime Worker needs to be run')
def test_indicator(self):
"""Test broker connection."""
result = check_realtime_broker.delay()
self.assertTrue(result.get())
@timeout_decorator.timeout(LOCAL_TIMEOUT)
@unittest.skipUnless(
celery_worker_connected(realtime_app, 'inasafe-django'),
'Realtime Worker needs to be run')
def test_indicator(self):
"""Test broker connection."""
result = retrieve_felt_earthquake_list.delay()
self.assertTrue(result.get())
|
Add unittests for BMKG EQ List Scrapper
|
Add unittests for BMKG EQ List Scrapper
|
Python
|
bsd-2-clause
|
AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django
|
38ce0d6b0433a68787c18691407c815d4eb1fdb2
|
txscrypt/__init__.py
|
txscrypt/__init__.py
|
"""
A Twisted-friendly wrapper for scrypt.
"""
from txscrypt.wrapper import computeKey, verifyPassword
from txscrypt._version import __version__
__all__ = ["computeKey", "verifyPassword"]
|
"""
A Twisted-friendly wrapper for scrypt.
"""
from txscrypt.wrapper import checkPassword, computeKey
from txscrypt._version import __version__
__all__ = ["verifyPassword", "computeKey"]
|
Make checkPassword the only public API, remove verifyPassword
|
Make checkPassword the only public API, remove verifyPassword
|
Python
|
isc
|
lvh/txscrypt
|
f360c61cbe0a895ca3d8efe5be97f08ea7ff5682
|
packages/vic/git/__init__.py
|
packages/vic/git/__init__.py
|
from mykde import Action
class Action(Action):
name = 'git'
description = "Git with helper programs and custom settings"
packages = ['git', 'gitk', 'giggle']
def proceed(self):
# useful aliases
self.call('git config --global alias.ci "commit -a"')
self.call('git config --global alias.co checkout')
self.call('git config --global alias.st status')
self.call('git config --global alias.br branch')
# push only current branch
self.call('git config --global push.default current')
# colorize UI
self.call('git config --global color.ui true')
|
from mykde import Action
class Action(Action):
name = 'git'
description = "Git with helper programs and custom settings"
packages = ['git', 'gitk', 'giggle']
def proceed(self):
# useful aliases
self.call('git config --global alias.ci "commit -a"')
self.call('git config --global alias.co checkout')
self.call('git config --global alias.st status')
self.call('git config --global alias.br branch')
# push only current branch
self.call('git config --global push.default current')
# colorize UI
self.call('git config --global color.ui true')
# do not call pager for content less than one page
self.call('git config --global --add core.pager "less -F -X"')
|
Add one more default option for git.
|
Add one more default option for git.
|
Python
|
bsd-3-clause
|
warvariuc/mykde,warvariuc/mykde
|
b2f1f97000c8d3479e1df6778f0cc85ec0680571
|
garden-watering01/mybuddy.py
|
garden-watering01/mybuddy.py
|
import machine
def setntptime(maxretries=10):
# ntptime is a helper module which gets packaged into the firmware
# Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py
import ntptime
for i in range (maxretries):
try:
ntptime.settime()
break
except:
if i+1 == maxretries:
raise
def deepsleep(sleeptime=15*60*1000):
# configure RTC.ALARM0 to be able to wake the device
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
# set RTC.ALARM0 to fire after some time. Time is given in milliseconds here
rtc.alarm(rtc.ALARM0, sleeptime)
#Make sure you have GPIO16 connected RST to wake from deepSleep.
# put the device to sleep
print ("Going into Sleep now")
machine.deepsleep()
|
import machine
def have_internet():
import urequests
try:
resp = urequests.request("HEAD", "http://jsonip.com/")
return True
except:
return False
def setntptime(maxretries=10):
# ntptime is a helper module which gets packaged into the firmware
# Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py
import ntptime
for i in range (maxretries):
try:
ntptime.settime()
break
except:
if i+1 == maxretries:
raise
def deepsleep(sleeptime=15*60*1000):
# configure RTC.ALARM0 to be able to wake the device
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
# set RTC.ALARM0 to fire after some time. Time is given in milliseconds here
rtc.alarm(rtc.ALARM0, sleeptime)
#Make sure you have GPIO16 connected RST to wake from deepSleep.
# put the device to sleep
print ("Going into Sleep now")
machine.deepsleep()
|
Add a function to check status of internet connectivity
|
Add a function to check status of internet connectivity
|
Python
|
mit
|
fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout
|
8db3ee0d6b73b864a91cd3617342138f05175d9d
|
accounts/models.py
|
accounts/models.py
|
# coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
"""
A user account. Used to store any information related to users.
"""
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
|
# coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
"""
A user account. Used to store any information related to users.
"""
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
def __unicode__(self):
return u'{}'.format(self.user.username)
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
|
Add __unicode__ method to UserAccount model
|
Add __unicode__ method to UserAccount model
|
Python
|
agpl-3.0
|
coders4help/volunteer_planner,alper/volunteer_planner,klinger/volunteer_planner,pitpalme/volunteer_planner,volunteer-planner/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,flindenberg/volunteer_planner,klinger/volunteer_planner,coders4help/volunteer_planner,klinger/volunteer_planner,christophmeissner/volunteer_planner,volunteer-planner/volunteer_planner,coders4help/volunteer_planner,flindenberg/volunteer_planner,volunteer-planner/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,volunteer-planner/volunteer_planner,klinger/volunteer_planner,flindenberg/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner
|
ec1a25c541770a82953c743f13d525a447f3bd2d
|
syntacticframes_project/syntacticframes/management/commands/update_members_and_translations.py
|
syntacticframes_project/syntacticframes/management/commands/update_members_and_translations.py
|
"""
Updates members and translations for all classes
When LVF and LADL mappings change, everything under this change could change.
When a frameset is hidden or shown, everything in that class could change.
When the algorithm changes, everything in VerbeNet could change.
This command ensures that after an algorithmic change, everything is
consistent.
"""
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
print(vn_class.name)
vn_class.update_members_and_translations()
|
"""
Updates members and translations for all classes
When LVF and LADL mappings change, everything under this change could change.
When a frameset is hidden or shown, everything in that class could change.
When the algorithm changes, everything in VerbeNet could change.
This command ensures that after an algorithmic change, everything is
consistent.
"""
from time import gmtime, strftime
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
when = strftime("%d/%m/%Y %H:%M:%S", gmtime())
verb_logger.info("{}: Start full update of verbs (members and translations)".format(when))
for vn_class in VerbNetClass.objects.all():
print(vn_class.name)
vn_class.update_members_and_translations()
when = strftime("%d/%m/%Y %H:%M:%S", gmtime())
verb_logger.info("{}: Ended full update of verbs (members and translations)".format(when))
|
Include time of update start/end
|
Include time of update start/end
|
Python
|
mit
|
aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor
|
743f4affcd89aa3d9fd37774e2e5f8e05525cb04
|
api/sync_wallet.py
|
api/sync_wallet.py
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
response_data = syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK', 'data': response_data }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK' }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
Clean up return value for API
|
Clean up return value for API
|
Python
|
agpl-3.0
|
ripper234/omniwallet,maran/omniwallet,maran/omniwallet,Nevtep/omniwallet,FuzzyBearBTC/omniwallet,FuzzyBearBTC/omniwallet,achamely/omniwallet,curtislacy/omniwallet,habibmasuro/omniwallet,OmniLayer/omniwallet,ripper234/omniwallet,habibmasuro/omniwallet,ripper234/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,curtislacy/omniwallet,OmniLayer/omniwallet,dexX7/omniwallet,arowser/omniwallet,habibmasuro/omniwallet,dexX7/omniwallet,Nevtep/omniwallet,VukDukic/omniwallet,arowser/omniwallet,achamely/omniwallet,FuzzyBearBTC/omniwallet,maran/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,achamely/omniwallet,arowser/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,dexX7/omniwallet,curtislacy/omniwallet
|
d81fe16eda36d3a5fa23d163de27bd46f84c4815
|
app.py
|
app.py
|
from flask import Flask, render_template
import os
app = Flask(__name__)
@app.route('/')
def webprint():
return(render_template('index.html'))
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
from flask import Flask, render_template
import os
app = Flask(__name__)
@app.route('/')
def webprint():
return 'Hello world!'
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Return text message on /
|
Return text message on /
|
Python
|
mit
|
fablabjoinville/groselha,fablabjoinville/groselha,fablabjoinville/groselha,fablabjoinville/groselha
|
cfcee83354f4917e719c3ef4236a2644dc98e153
|
ophyd/__init__.py
|
ophyd/__init__.py
|
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from . import *
# Signals
from .signal import (Signal, EpicsSignal, EpicsSignalRO)
# Positioners
from .positioner import Positioner
from .epics_motor import EpicsMotor
from .pv_positioner import (PVPositioner, PVPositionerPC)
from .pseudopos import (PseudoPositioner, PseudoSingle)
# Devices
from .scaler import EpicsScaler
from .device import (Device, Component, DynamicDeviceComponent)
from .mca import EpicsMCA, EpicsDXP
# Areadetector-related
from .areadetector import *
from ._version import get_versions
from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos,
log_pos_diff, log_pos_mov)
__version__ = get_versions()['version']
del get_versions
|
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from . import *
# Signals
from .signal import (Signal, EpicsSignal, EpicsSignalRO)
# Positioners
from .positioner import Positioner
from .epics_motor import EpicsMotor
from .pv_positioner import (PVPositioner, PVPositionerPC)
from .pseudopos import (PseudoPositioner, PseudoSingle)
# Devices
from .scaler import EpicsScaler
from .device import (Device, Component, DynamicDeviceComponent)
from .ophydobj import StatusBase
from .mca import EpicsMCA, EpicsDXP
# Areadetector-related
from .areadetector import *
from ._version import get_versions
from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos,
log_pos_diff, log_pos_mov)
__version__ = get_versions()['version']
del get_versions
|
Add StatusBase to top-level API.
|
MNT: Add StatusBase to top-level API.
|
Python
|
bsd-3-clause
|
dchabot/ophyd,dchabot/ophyd
|
17c90fd954441c2623495e50a2f89790e1ff5489
|
projects/tests/test_tools.py
|
projects/tests/test_tools.py
|
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.assertIsNone(self.mixin.check_can_access(
MagicMock(user=self.user),
))
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
with self.assertRaises(PermissionDenied):
self.mixin.check_can_access(MagicMock(user=self.user))
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
import sure
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.mixin.check_can_access(
MagicMock(user=self.user),
).should.be.none
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
self.mixin.check_can_access.when\
.called_with(MagicMock(user=self.user))\
.should.throw(PermissionDenied)
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
Use sure in project tools cases
|
Use sure in project tools cases
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
db08c5ae962c2e66c8ad2e668f530d08934200af
|
geometry.py
|
geometry.py
|
from geom2d import *
l1 = []
for i in range(-5, 6):
l1.append(Point(i, i*i))
l2 = []
for el in l1:
l2.append(Point(el.x, -el.y))
print(l1)
print(l2)
# List comprehension
l1c = [Point(i, i*i) for i in range(-5, 6)]
l2c = [Point(el.x, -el.y) for el in l1c]
print("List comprehension")
print(l1c)
print(l2c)
|
from geom2d import *
l1 = list(map(lambda i: Point(i, i*i), range(-5, 6)))
# l2 = list(map(lambda p: Point(p.x, -p.y), l1))
# l2 = list(filter(lambda p: p.x > 0, l1))
l2 = list(filter(lambda p: p.x % 2 == 0, l1))
print(l1)
print(l2)
|
Work with lists in functional way (map, filter)
|
Work with lists in functional way (map, filter)
|
Python
|
apache-2.0
|
maciekp85/python-for-testers
|
2812f11bdc86495dd9ef62b4b45d90335bcbda7d
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
try:
from django import setup
except ImportError:
def setup():
pass
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'selectable',
),
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests():
setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
args = sys.argv[1:] or ['selectable', ]
failures = test_runner.run_tests(args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
try:
from django import setup
except ImportError:
def setup():
pass
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
INSTALLED_APPS=(
'selectable',
),
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests():
setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
args = sys.argv[1:] or ['selectable', ]
failures = test_runner.run_tests(args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Set middleware classes to suppress warning on 1.7+
|
Set middleware classes to suppress warning on 1.7+
|
Python
|
bsd-2-clause
|
mlavin/django-selectable,affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable
|
aa89bed3502e4a94ab41005dd9265bfee58fd784
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
from django.core.management import call_command
if __name__ == '__main__':
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
if hasattr(django, 'setup'):
django.setup()
call_command('test', nomigrations=True)
|
#!/usr/bin/env python
import os
from django.core.management import execute_from_command_line
if __name__ == '__main__':
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
execute_from_command_line(['manage.py', 'test', '--nomigrations'])
|
Use a higher level command line api
|
Use a higher level command line api
|
Python
|
mit
|
henriquebastos/django-test-without-migrations,henriquebastos/django-test-without-migrations
|
e98201ae47f3af4fe8756c69464986dc524206e5
|
corehq/apps/hqwebapp/management/commands/list_waf_allow_patterns.py
|
corehq/apps/hqwebapp/management/commands/list_waf_allow_patterns.py
|
import re
from django.core.management import BaseCommand
from django.urls import get_resolver
from corehq.apps.hqwebapp.decorators import waf_allow
class Command(BaseCommand):
def handle(self, *args, **options):
resolver = get_resolver()
for kind, views in waf_allow.views.items():
print(kind)
print('--------')
patterns = []
for view in views:
if isinstance(view, str):
# waf_allow(kind, hard_code_pattern=r'^/url/pattern/$')
patterns.append(view)
else:
# @waf_allow(kind)
for urlmatch in resolver.reverse_dict.getlist(view):
patterns.append(resolver.regex.pattern + urlmatch[1])
patterns = sorted(_remove_regex_groups(pattern) for pattern in patterns)
for pattern in patterns:
print(pattern)
def _remove_regex_groups(regex_string):
return re.sub(r'\?P<[^>]+>', '', regex_string)
|
import re
from django.core.management import BaseCommand
from django.urls import get_resolver
from corehq.apps.hqwebapp.decorators import waf_allow
class Command(BaseCommand):
def handle(self, *args, **options):
resolver = get_resolver()
for kind, views in waf_allow.views.items():
print(kind)
print('--------')
patterns = []
for view in views:
if isinstance(view, str):
# waf_allow(kind, hard_code_pattern=r'^/url/pattern/$')
patterns.append(view)
else:
# @waf_allow(kind)
for urlmatch in resolver.reverse_dict.getlist(view):
patterns.append(str(resolver.pattern) + urlmatch[1])
patterns = sorted(_remove_regex_groups(pattern) for pattern in patterns)
for pattern in patterns:
print(pattern)
def _remove_regex_groups(regex_string):
return re.sub(r'\?P<[^>]+>', '', regex_string)
|
Fix issue: 'URLResolver' object has no attribute 'regex'
|
Fix issue: 'URLResolver' object has no attribute 'regex'
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
04d85784eeeb619e0e273aa0ffb41f12ffeada43
|
ureport/polls/migrations/0051_auto_20180316_0912.py
|
ureport/polls/migrations/0051_auto_20180316_0912.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-16 09:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('polls', '0050_auto_20170615_1455'),
]
def populate_default_backend(apps, schema_editor):
PollResult = apps.get_model("polls", "PollResult")
PollResult.objects.all().update(backend='rapidpro')
operations = [
migrations.AddField(
model_name='poll',
name='backend',
field=models.CharField(default='rapidpro', max_length=16),
),
migrations.AddField(
model_name='pollresult',
name='backend',
field=models.CharField(null=True, max_length=16),
),
migrations.RunPython(populate_default_backend),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-16 09:12
from __future__ import unicode_literals
from django.db import migrations, models
from ureport.utils import chunk_list
class Migration(migrations.Migration):
dependencies = [
('polls', '0050_auto_20170615_1455'),
]
def populate_default_backend(apps, schema_editor):
PollResult = apps.get_model("polls", "PollResult")
result_ids = PollResult.objects.all().values_list('id', flat=True)
start = time.time()
i = 0
for res_id_batch in chunk_list(result_ids, 1000):
PollResult.objects.filter(id__in=res_id_batch).update(backend='rapidpro')
i += len(res_id_batch)
print "Processed poll results update %d in %ds" % (i, time.time() - start)
operations = [
migrations.AddField(
model_name='poll',
name='backend',
field=models.CharField(default='rapidpro', max_length=16),
),
migrations.AddField(
model_name='pollresult',
name='backend',
field=models.CharField(null=True, max_length=16),
),
migrations.RunPython(populate_default_backend),
]
|
Update pull results default value in batches
|
Update pull results default value in batches
|
Python
|
agpl-3.0
|
rapidpro/ureport,Ilhasoft/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport
|
606feda80b4631f9079021214c7b6078beb9a3f4
|
api/v2/views/maintenance_record.py
|
api/v2/views/maintenance_record.py
|
import django_filters
from rest_framework import filters
from rest_framework.serializers import ValidationError
from core.models import AtmosphereUser, MaintenanceRecord
from core.query import only_current
from api.permissions import CanEditOrReadOnly
from api.v2.serializers.details import MaintenanceRecordSerializer
from api.v2.views.base import AuthOptionalViewSet
class MaintenanceRecordFilterBackend(filters.BaseFilterBackend):
"""
Filter MaintenanceRecords using the request_user and 'query_params'
"""
def filter_queryset(self, request, queryset, view):
request_params = request.query_params
active = request_params.get('active')
if isinstance(active, basestring) and active.lower() == 'true'\
or isinstance(active, bool) and active:
queryset = MaintenanceRecord.active()
return queryset
class MaintenanceRecordViewSet(AuthOptionalViewSet):
"""
API endpoint that allows records to be viewed or edited.
"""
http_method_names = ['get', 'post', 'put', 'patch', 'head', 'options', 'trace']
queryset = MaintenanceRecord.objects.order_by('-start_date')
permission_classes = (CanEditOrReadOnly,)
serializer_class = MaintenanceRecordSerializer
filter_backends = (filters.DjangoFilterBackend, filters.SearchFilter, MaintenanceRecordFilterBackend)
|
import django_filters
from rest_framework import filters
from rest_framework.serializers import ValidationError
from core.models import AtmosphereUser, MaintenanceRecord
from core.query import only_current
from api.permissions import CanEditOrReadOnly
from api.v2.serializers.details import MaintenanceRecordSerializer
from api.v2.views.base import AuthOptionalViewSet
class MaintenanceRecordFilterBackend(filters.BaseFilterBackend):
"""
Filter MaintenanceRecords using the request_user and 'query_params'
"""
def filter_queryset(self, request, queryset, view):
request_params = request.query_params
active = request_params.get('active')
if isinstance(active, basestring) and active.lower() == 'true'\
or isinstance(active, bool) and active:
queryset = MaintenanceRecord.active()
return queryset
class MaintenanceRecordViewSet(AuthOptionalViewSet):
"""
API endpoint that allows records to be viewed or edited.
"""
http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace']
queryset = MaintenanceRecord.objects.order_by('-start_date')
permission_classes = (CanEditOrReadOnly,)
serializer_class = MaintenanceRecordSerializer
filter_backends = (filters.DjangoFilterBackend, filters.SearchFilter, MaintenanceRecordFilterBackend)
|
Add 'DELETE' operation to Maintenance Record
|
[ATMO-1201] Add 'DELETE' operation to Maintenance Record
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
65abd52d1bfd54097ca6bd01b1924e6ffcad8840
|
pytablewriter/_csv_writer.py
|
pytablewriter/_csv_writer.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import dataproperty
from ._text_writer import TextTableWriter
class CsvTableWriter(TextTableWriter):
"""
Concrete class of a table writer for CSV format.
:Examples:
:ref:`example-csv-table-writer`
"""
@property
def support_split_write(self):
return True
def __init__(self):
super(CsvTableWriter, self).__init__()
self.indent_string = u""
self.column_delimiter = u","
self.is_padding = False
self.is_write_header_separator_row = False
def _verify_header(self):
pass
def _write_header(self):
if dataproperty.is_empty_list_or_tuple(self.header_list):
return
super(CsvTableWriter, self)._write_header()
def _get_opening_row_item_list(self):
return []
def _get_value_row_separator_item_list(self):
return []
def _get_closing_row_item_list(self):
return []
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import dataproperty
from ._text_writer import TextTableWriter
class CsvTableWriter(TextTableWriter):
"""
Concrete class of a table writer for CSV format.
:Examples:
:ref:`example-csv-table-writer`
"""
@property
def support_split_write(self):
return True
def __init__(self):
super(CsvTableWriter, self).__init__()
self.indent_string = u""
self.column_delimiter = u","
self.is_padding = False
self.is_write_header_separator_row = False
def _write_header(self):
if dataproperty.is_empty_list_or_tuple(self.header_list):
return
super(CsvTableWriter, self)._write_header()
def _get_opening_row_item_list(self):
return []
def _get_value_row_separator_item_list(self):
return []
def _get_closing_row_item_list(self):
return []
|
Delete redundant lines of code
|
Delete redundant lines of code
|
Python
|
mit
|
thombashi/pytablewriter
|
cdb55b385074d50a98f87027fd46021d663f9df8
|
bin/commands/utils/messages.py
|
bin/commands/utils/messages.py
|
from __future__ import print_function
import sys
def error(message, exit=True):
"""Print an error message and optionally exit."""
assert isinstance(message, str), "message must be a str"
assert isinstance(exit, bool), "exit must be a bool"
print("error:", message, file=sys.stderr)
if exit:
sys.exit(1)
def info(message, quiet=False):
"""Print a simple info message."""
if not quiet:
print(message)
|
from __future__ import print_function
import sys
def error(message, exit=True):
"""Print an error message and optionally exit."""
assert isinstance(message, str), "message must be a str"
assert isinstance(exit, bool), "exit must be a bool"
print("error:", message, file=sys.stderr)
if exit:
sys.exit(1)
def warn(message):
"""Print a simple warning message."""
info('warn: {}'.format(message), False)
def usage(message):
"""Print a simple usage message."""
info('usage: {}'.format(message), False)
def info(message, quiet=False):
"""Print a simple info message."""
if not quiet:
print(message)
|
Add warn and usage message options
|
Add warn and usage message options
|
Python
|
mit
|
Brickstertwo/git-commands
|
3cc1cb9894fdb1b88a84ad8315669ad2f0858fdb
|
cloud_logging.py
|
cloud_logging.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.cloud.logging as glog
import logging
import contextlib
import io
import sys
import os
LOGGING_PROJECT = os.environ.get('LOGGING_PROJECT', '')
def configure(project=LOGGING_PROJECT):
if not project:
print('!! Error: The $LOGGING_PROJECT enviroment '
'variable is required in order to set up cloud logging. '
'Cloud logging is disabled.')
return
logging.basicConfig(level=logging.INFO)
try:
# if this fails, redirect stderr to /dev/null so no startup spam.
with contextlib.redirect_stderr(io.StringIO()):
client = glog.Client(project)
client.setup_logging(logging.INFO)
except:
print('!! Cloud logging disabled')
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.cloud.logging as glog
import logging
import contextlib
import io
import sys
import os
LOGGING_PROJECT = os.environ.get('LOGGING_PROJECT', '')
def configure(project=LOGGING_PROJECT):
if not project:
sys.stderr.write('!! Error: The $LOGGING_PROJECT enviroment '
'variable is required in order to set up cloud logging. '
'Cloud logging is disabled.\n')
return
logging.basicConfig(level=logging.INFO)
try:
# if this fails, redirect stderr to /dev/null so no startup spam.
with contextlib.redirect_stderr(io.StringIO()):
client = glog.Client(project)
client.setup_logging(logging.INFO)
except:
sys.stderr.write('!! Cloud logging disabled\n')
|
Change some errors to go to stderr.
|
Change some errors to go to stderr.
These non-fatal errors violated GTP protocol.
|
Python
|
apache-2.0
|
tensorflow/minigo,tensorflow/minigo,tensorflow/minigo,tensorflow/minigo,tensorflow/minigo,tensorflow/minigo
|
6d13b3b041e3e6cd6089814ad3276a905aa10bc3
|
troposphere/fms.py
|
troposphere/fms.py
|
# Copyright (c) 2012-2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class IEMap(AWSProperty):
props = {
'ACCOUNT': ([basestring], False),
}
class Policy(AWSObject):
resource_type = "AWS::FMS::Policy"
props = {
'DeleteAllPolicyResources': (boolean, False),
'ExcludeMap': (IEMap, False),
'ExcludeResourceTags': (boolean, True),
'IncludeMap': (IEMap, False),
'PolicyName': (basestring, True),
'RemediationEnabled': (boolean, True),
'ResourceTags': (Tags, False),
'ResourceType': (basestring, True),
'ResourceTypeList': ([basestring], True),
'SecurityServicePolicyData': (json_checker, True),
'Tags': (Tags, False),
}
class NotificationChannel(AWSObject):
resource_type = "AWS::FMS::NotificationChannel"
props = {
'SnsRoleName': (basestring, True),
'SnsTopicArn': (basestring, True),
}
|
# Copyright (c) 2012-2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class IEMap(AWSProperty):
props = {
'ACCOUNT': ([basestring], False),
'ORGUNIT': ([basestring], False),
}
class Policy(AWSObject):
resource_type = "AWS::FMS::Policy"
props = {
'DeleteAllPolicyResources': (boolean, False),
'ExcludeMap': (IEMap, False),
'ExcludeResourceTags': (boolean, True),
'IncludeMap': (IEMap, False),
'PolicyName': (basestring, True),
'RemediationEnabled': (boolean, True),
'ResourceTags': (Tags, False),
'ResourceType': (basestring, True),
'ResourceTypeList': ([basestring], True),
'SecurityServicePolicyData': (json_checker, True),
'Tags': (Tags, False),
}
class NotificationChannel(AWSObject):
resource_type = "AWS::FMS::NotificationChannel"
props = {
'SnsRoleName': (basestring, True),
'SnsTopicArn': (basestring, True),
}
|
Update AWS::FMS::Policy per 2020-06-18 changes
|
Update AWS::FMS::Policy per 2020-06-18 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
fe4cc596e65f6dc5ec7f99d40f7346143b695633
|
slackbot.py
|
slackbot.py
|
#! /usr/bin/env python2.7
import requests
class Slackbot(object):
def __init__(self, slack_name, token):
self.slack_name = slack_name
self.token = token
assert self.token, "Token should not be blank"
self.url = self.sb_url()
def sb_url(self):
url = "https://{}.slack.com/".format(self.slack_name)
url += "services/hooks/slackbot"
return url
def say(self, channel, statement):
"""
channel should not be preceded with '#'
"""
assert channel # not blank
if channel[0] == '#':
channel = channel[1:]
nurl = self.url + "?token={}&channel=%23{}".format(self.token, channel)
p = requests.post(nurl, statement)
return p.status_code
|
#! /usr/bin/env python2.7
import requests
class Slackbot(object):
def __init__(self, slack_name, token):
self.slack_name = slack_name
self.token = token
assert self.token, "Token should not be blank"
self.url = self.sb_url()
def sb_url(self):
url = "https://{}.slack.com/".format(self.slack_name)
url += "services/hooks/slackbot"
return url
def say(self, channel, statement):
"""
channel should not be preceded with '#'
"""
assert channel # not blank
if channel[0] == '#':
channel = channel[1:]
nurl = self.url + "?token={}&channel=%23{}".format(self.token, channel)
p = requests.post(nurl, data=statement.encode('utf-8'))
return p.status_code
|
Fix unicode encoding of Slack message posts
|
Fix unicode encoding of Slack message posts
|
Python
|
apache-2.0
|
TheConnMan/destalinator,royrapoport/destalinator,randsleadershipslack/destalinator,royrapoport/destalinator,TheConnMan/destalinator,randsleadershipslack/destalinator,underarmour/destalinator
|
29c977a7f7293f1ce45f393a4c8464bbb9691f9e
|
linkedevents/urls.py
|
linkedevents/urls.py
|
from django.conf.urls import url, include
from django.views.generic import RedirectView
from .api import LinkedEventsAPIRouter
from django.contrib import admin
admin.autodiscover()
api_router = LinkedEventsAPIRouter()
urlpatterns = [
url(r'^(?P<version>(v0.1|v1))/', include(api_router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('allauth.urls')),
url(r'^$', RedirectView.as_view(url='/v1/', permanent=False)),
]
|
from django.core.urlresolvers import reverse
from django.conf.urls import url, include
from django.views.generic import RedirectView
from .api import LinkedEventsAPIRouter
from django.contrib import admin
admin.autodiscover()
api_router = LinkedEventsAPIRouter()
class RedirectToAPIRootView(RedirectView):
permanent = False
def get_redirect_url(self, *args, **kwargs):
return reverse('api-root', kwargs={'version': 'v1'})
urlpatterns = [
url(r'^(?P<version>(v0.1|v1))/', include(api_router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('allauth.urls')),
url(r'^$', RedirectToAPIRootView.as_view()),
]
|
Make redirect-to-API work even with URL prefix
|
Make redirect-to-API work even with URL prefix
|
Python
|
mit
|
aapris/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents
|
63ee144892ed0e740b87cb87895cf07d78b87d1f
|
lib/slack.py
|
lib/slack.py
|
from lib.config import Config
from slackclient import SlackClient
class Tubey():
def __init__(self, **kwargs):
### Cache the client in memory ###
self._client = None
def get_client(self):
### Fetch a cached slack client or create one and return it ###
if self._client is not None:
return self._client
token = Config.get_variable('tubey_credentials', 'bot_oauth_token')
sc = SlackClient(token)
self._client = sc
return self._client
def send_message(self, message):
### Sends message to the user/channel ###
params = {'channel': 'tubeydev', 'text': message}
#client = get_client(self)
self.get_client().api_call("chat.postMessage", **params)
if __name__ == "__main__":
#tubey = Tubey()
#tubey.send_message("This better work")
# params = {'channel': 'tubeydev', 'text': "Hi everybody! I'm a faige!"}
# client.api_call("chat.postMessage", **params)
|
from lib.config import Config
from slackclient import SlackClient
class Tubey():
def __init__(self, **kwargs):
# cache the client in memory
self._client = None
def send_message(self, message):
raise NotImplemented
def get_client(self):
### Fetch a cached slack client or create one and return it ###
if self._client is not None:
return self._client
token = Config.get_variable('tubey_credentials', 'bot_oauth_token')
sc = SlackClient(token)
self._client = sc
return self._client
if __name__ == "__main__":
# params = {'channel': 'tubeydev', 'text': "Hi everybody! I'm a faige!"}
# client.api_call("chat.postMessage", **params)
|
Revert "Implement send message functionality"
|
Revert "Implement send message functionality"
|
Python
|
mit
|
ImShady/Tubey
|
4eab1fb42f58d6203a0862aa9caf304193d3442b
|
libcloud/common/maxihost.py
|
libcloud/common/maxihost.py
|
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
"""
Connection class for the Maxihost driver.
"""
host = 'api.maxihost.com'
responseCls = MaxihostResponse
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds apikey to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
return headers
|
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
"""
Connection class for the Maxihost driver.
"""
host = 'api.maxihost.com'
responseCls = MaxihostResponse
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds apikey to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
headers['Accept']: 'application/vnd.maxihost.v1.1+json'
return headers
|
Add Accept header to use version 1.1
|
Add Accept header to use version 1.1
|
Python
|
apache-2.0
|
ByteInternet/libcloud,andrewsomething/libcloud,ByteInternet/libcloud,Kami/libcloud,apache/libcloud,andrewsomething/libcloud,mistio/libcloud,Kami/libcloud,mistio/libcloud,andrewsomething/libcloud,apache/libcloud,apache/libcloud,ByteInternet/libcloud,Kami/libcloud,mistio/libcloud
|
f576004e7d1352c7e8c1e203ae0a8b6769ce1b08
|
cla_backend/apps/core/views.py
|
cla_backend/apps/core/views.py
|
from django.views import defaults
from sentry_sdk import capture_message
def page_not_found(*args, **kwargs):
capture_message("Page not found", level="error")
return defaults.page_not_found(*args, **kwargs)
|
from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
|
Set some event data on 404 logging
|
Set some event data on 404 logging
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
55c7681304d66ad076372be7aa8f319baef153eb
|
polyaxon/runner/management/commands/clean_project_jobs.py
|
polyaxon/runner/management/commands/clean_project_jobs.py
|
from django.core.management import BaseCommand
from django.db import ProgrammingError
from django.db.models import Q
from projects.models import Project
from runner.schedulers import notebook_scheduler, tensorboard_scheduler
class Command(BaseCommand):
@staticmethod
def _clean():
for project in Project.objects.exclude(Q(tensorboard=None) | Q(notebook=None)):
if project.has_notebook:
notebook_scheduler.stop_notebook(project, update_status=False)
if project.has_tensorboard:
tensorboard_scheduler.stop_tensorboard(project, update_status=False)
def handle(self, *args, **options):
try:
self._clean()
except ProgrammingError:
pass
|
from django.core.management import BaseCommand
from django.db import ProgrammingError
from django.db.models import Q
from projects.models import Project
from runner.schedulers import notebook_scheduler, tensorboard_scheduler
class Command(BaseCommand):
@staticmethod
def _clean():
filters = Q(tensorboard_jobs=None) | Q(notebook_jobs=None)
for project in Project.objects.exclude(filters):
if project.has_notebook:
notebook_scheduler.stop_notebook(project, update_status=False)
if project.has_tensorboard:
tensorboard_scheduler.stop_tensorboard(project, update_status=False)
def handle(self, *args, **options):
try:
self._clean()
except ProgrammingError:
pass
|
Update clean project jobs command
|
Update clean project jobs command
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
1aa121daa3c99849173d5cd4c6a80d6bf94f5186
|
saleor/attribute/__init__.py
|
saleor/attribute/__init__.py
|
class AttributeInputType:
"""The type that we expect to render the attribute's values as."""
DROPDOWN = "dropdown"
MULTISELECT = "multiselect"
FILE = "file"
REFERENCE = "reference"
CHOICES = [
(DROPDOWN, "Dropdown"),
(MULTISELECT, "Multi Select"),
(FILE, "File"),
(REFERENCE, "Reference"),
]
# list of the input types that can be used in variant selection
ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]
class AttributeType:
PRODUCT_TYPE = "product-type"
PAGE_TYPE = "page-type"
CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]
class AttributeEntityType:
"""Type of a reference entity type. Must match the name of the graphql type."""
PAGE = "Page"
PRODUCT = "Product"
CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
|
class AttributeInputType:
"""The type that we expect to render the attribute's values as."""
DROPDOWN = "dropdown"
MULTISELECT = "multiselect"
FILE = "file"
REFERENCE = "reference"
CHOICES = [
(DROPDOWN, "Dropdown"),
(MULTISELECT, "Multi Select"),
(FILE, "File"),
(REFERENCE, "Reference"),
]
# list of the input types that can be used in variant selection
ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]
class AttributeType:
PRODUCT_TYPE = "product-type"
PAGE_TYPE = "page-type"
CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]
class AttributeEntityType:
"""Type of a reference entity type. Must match the name of the graphql type.
After adding new value, `REFERENCE_VALUE_NAME_MAPPING`
and `ENTITY_TYPE_TO_MODEL_MAPPING` in saleor/graphql/attribute/utils.py
must be updated.
"""
PAGE = "Page"
PRODUCT = "Product"
CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
|
Add info about required updates in AttributeEntityType
|
Add info about required updates in AttributeEntityType
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
ceb848021d5323b5bad8518ac7ed850a51fc89ca
|
raco/myrial/myrial_test.py
|
raco/myrial/myrial_test.py
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
json = compile_to_json(query, '', [('A', plan)])
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
Add compile_to_json invocation in Myrial test fixture
|
Add compile_to_json invocation in Myrial test fixture
|
Python
|
bsd-3-clause
|
uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco
|
3f5a6dcd622d7b1c890ced67468ecebd02b1806f
|
mastertickets/db_default.py
|
mastertickets/db_default.py
|
# Created by Noah Kantrowitz on 2007-07-04.
# Copyright (c) 2007 Noah Kantrowitz. All rights reserved.
from trac.db import Table, Column
name = 'mastertickets'
version = 2
tables = [
Table('mastertickets', key=('source','dest'))[
Column('source', type='integer'),
Column('dest', type='integer'),
],
]
def convert_to_int(data):
"""Convert both source and dest in the mastertickets table to ints."""
for row in data['mastertickets'][1]:
for i, (n1, n2) in enumerate(row):
row[i] = [int(n1), int(n2)]
migrations = [
(xrange(1,2), convert_to_int),
]
|
# Created by Noah Kantrowitz on 2007-07-04.
# Copyright (c) 2007 Noah Kantrowitz. All rights reserved.
from trac.db import Table, Column
name = 'mastertickets'
version = 2
tables = [
Table('mastertickets', key=('source','dest'))[
Column('source', type='integer'),
Column('dest', type='integer'),
],
]
def convert_to_int(data):
"""Convert both source and dest in the mastertickets table to ints."""
rows = data['mastertickets'][1]
for i, (n1, n2) in enumerate(rows):
rows[i] = [int(n1), int(n2)]
migrations = [
(xrange(1,2), convert_to_int),
]
|
Fix the migration to actual work.
|
Fix the migration to actual work.
|
Python
|
bsd-3-clause
|
SpamExperts/trac-masterticketsplugin,SpamExperts/trac-masterticketsplugin,SpamExperts/trac-masterticketsplugin
|
a662eded2841b87ccbccdd6dfb21315725d0a0c5
|
python/pyspark_llap/__init__.py
|
python/pyspark_llap/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark_llap.sql.session import HiveWarehouseSession
__all__ = ['HiveWarehouseSession']
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark_llap.sql.session import HiveWarehouseSession
# These are aliases so that importing this module exposes those attributes below directly.
DATAFRAME_TO_STREAM = HiveWarehouseSession.DATAFRAME_TO_STREAM
HIVE_WAREHOUSE_CONNECTOR = HiveWarehouseSession.HIVE_WAREHOUSE_CONNECTOR
STREAM_TO_STREAM = HiveWarehouseSession.STREAM_TO_STREAM
__all__ = [
'HiveWarehouseSession',
'DATAFRAME_TO_STREAM',
'HIVE_WAREHOUSE_CONNECTOR',
'STREAM_TO_STREAM',
]
|
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
|
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
|
Python
|
apache-2.0
|
hortonworks-spark/spark-llap,hortonworks-spark/spark-llap,hortonworks-spark/spark-llap
|
5a7291b9c305445aebe77ef020017ac9cffd35e2
|
pythonparser/test/test_utils.py
|
pythonparser/test/test_utils.py
|
# coding:utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
unicode = type("")
class BytesOnly(bytes):
def __new__(cls, s):
if isinstance(s, unicode):
s = s.encode()
return bytes.__new__(BytesOnly, s)
def __eq__(self, o):
return isinstance(o, bytes) and bytes.__eq__(self, o)
def __ne__(self, o):
return not self == o
class UnicodeOnly(unicode):
def __eq__(self, o):
return isinstance(o, unicode) and unicode.__eq__(self, o)
def __ne__(self, o):
return not self == o
if sys.version_info >= (3,):
LongOnly = int
else:
class LongOnly(long):
def __eq__(self, o):
return isinstance(o, long) and long.__cmp__(self, o) == 0
def __ne__(self, o):
return not self == o
|
# coding:utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
unicode = type("")
class BytesOnly(bytes):
def __new__(cls, s):
if isinstance(s, unicode):
s = s.encode()
return bytes.__new__(BytesOnly, s)
def __eq__(self, o):
return isinstance(o, bytes) and bytes.__eq__(self, o)
def __ne__(self, o):
return not self == o
class UnicodeOnly(unicode):
def __eq__(self, o):
return isinstance(o, unicode) and unicode.__eq__(self, o)
def __ne__(self, o):
return not self == o
try:
class LongOnly(long): # Python 2
def __eq__(self, o):
return isinstance(o, long) and long.__cmp__(self, o) == 0
def __ne__(self, o):
return not self == o
except NameError: # Python 3
LongOnly = int
|
Fix indentation error in LongOnly.__ne__()
|
Fix indentation error in LongOnly.__ne__()
Also follow Python porting best practice [__use feature detection instead of version detection__](https://docs.python.org/3/howto/pyporting.html#use-feature-detection-instead-of-version-detection).
|
Python
|
mit
|
m-labs/pythonparser
|
57c34cae582764b69bb32faa712110a46df69dde
|
chaser/__init__.py
|
chaser/__init__.py
|
__version__ = "0.1"
|
__version__ = "0.1"
import requests
import io
import tarfile
import ccr
def get_source_files(pkgname, workingdir):
"""Download the source tarball and extract it"""
r = requests.get(ccr.getpkgurl(pkgname))
tar = tarfile.open(mode='r', fileobj=io.BytesIO(r.content))
tar.extractall(workingdir)
|
Add initial function for get_source_files
|
Add initial function for get_source_files
|
Python
|
bsd-3-clause
|
rshipp/chaser,rshipp/chaser
|
19e84f0c528fd1c19dba709972f31343284c0a40
|
pymatgen/__init__.py
|
pymatgen/__init__.py
|
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, Geoffroy Hautier, Will Richards, Dan Gunter, Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jun 28, 2012"
__version__ = "2.0.0"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
|
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, Geoffroy Hautier, Will Richards, Dan Gunter, Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jun 28, 2012"
__version__ = "2.0.0"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import file_open_zip_aware as openz
|
Add an alias to file_open_zip_aware as openz.
|
Add an alias to file_open_zip_aware as openz.
Former-commit-id: 97796b7a5593858b2fc15c8009658926afa3eda0 [formerly 1ce26a0b0cbddb49047da0f8bac8214fb298c646]
Former-commit-id: 7bdb412108a247f3ebc9d3d9906f03c222178449
|
Python
|
mit
|
gVallverdu/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,aykol/pymatgen,gpetretto/pymatgen,johnson1228/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,gVallverdu/pymatgen,blondegeek/pymatgen,aykol/pymatgen,vorwerkc/pymatgen,dongsenfo/pymatgen,nisse3000/pymatgen,czhengsci/pymatgen,setten/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,blondegeek/pymatgen,montoyjh/pymatgen,johnson1228/pymatgen,vorwerkc/pymatgen,tallakahath/pymatgen,czhengsci/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,tschaume/pymatgen,matk86/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,tschaume/pymatgen,setten/pymatgen,tallakahath/pymatgen,Bismarrck/pymatgen,setten/pymatgen,johnson1228/pymatgen,Bismarrck/pymatgen,czhengsci/pymatgen,nisse3000/pymatgen,gmatteo/pymatgen,ndardenne/pymatgen,matk86/pymatgen,nisse3000/pymatgen,Bismarrck/pymatgen,xhqu1981/pymatgen,setten/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,ndardenne/pymatgen,montoyjh/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,dongsenfo/pymatgen,aykol/pymatgen,blondegeek/pymatgen,vorwerkc/pymatgen,xhqu1981/pymatgen,mbkumar/pymatgen,gmatteo/pymatgen,mbkumar/pymatgen,johnson1228/pymatgen,nisse3000/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,xhqu1981/pymatgen,matk86/pymatgen,richardtran415/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,dongsenfo/pymatgen,tschaume/pymatgen,fraricci/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,tallakahath/pymatgen,mbkumar/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen
|
b8f67c96febd1f7bc2ce1e87f1df0a468faddb87
|
src/taskmaster/util.py
|
src/taskmaster/util.py
|
"""
taskmaster.util
~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
def import_target(target, default=None):
"""
>>> import_target('foo.bar:blah', 'get_jobs')
<function foo.bar.blah>
>>> import_target('foo.bar', 'get_jobs')
<function foo.bar.get_jobs>
>>> import_target('foo.bar:get_jobs')
<function foo.bar.get_jobs>
"""
if ':' not in target:
target += ':%s' % default
else:
raise ValueError('target must be in form of `path.to.module:function_name`')
mod_path, func_name = target.split(':', 1)
module = __import__(mod_path, {}, {}, [func_name], -1)
callback = getattr(module, func_name)
return callback
|
"""
taskmaster.util
~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
import imp
import sys
from os.path import exists
def import_target(target, default=None):
"""
>>> import_target('foo.bar:blah', 'get_jobs')
<function foo.bar.blah>
>>> import_target('foo.bar', 'get_jobs')
<function foo.bar.get_jobs>
>>> import_target('foo.bar:get_jobs')
<function foo.bar.get_jobs>
>>> import_target('foo/bar.py:get_jobs')
<function get_jobs>
"""
if ':' not in target:
target += ':%s' % default
else:
raise ValueError('target must be in form of `path.to.module:function_name`')
path, func_name = target.split(':', 1)
if exists(path):
module_name = path.rsplit('/', 1)[-1].split('.', 1)[0]
module = imp.new_module(module_name)
module.__file__ = path
try:
execfile(path, module.__dict__)
except IOError, e:
e.strerror = 'Unable to load file (%s)' % e.strerror
raise
sys.modules[module_name] = module
else:
module = __import__(path, {}, {}, [func_name], -1)
callback = getattr(module, func_name)
return callback
|
Allow targets to be specified as files
|
Allow targets to be specified as files
|
Python
|
apache-2.0
|
alex/taskmaster,dcramer/taskmaster
|
b5cb4fe7abaa9fe1a4c387148af6ee494f69bd07
|
astropy/nddata/convolution/tests/test_make_kernel.py
|
astropy/nddata/convolution/tests/test_make_kernel.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from ....tests.compat import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
|
Fix compatibility with Numpy 1.4.1
|
Fix compatibility with Numpy 1.4.1
|
Python
|
bsd-3-clause
|
AustereCuriosity/astropy,MSeifert04/astropy,pllim/astropy,DougBurke/astropy,mhvk/astropy,larrybradley/astropy,kelle/astropy,pllim/astropy,funbaker/astropy,stargaser/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,saimn/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,AustereCuriosity/astropy,larrybradley/astropy,lpsinger/astropy,joergdietrich/astropy,funbaker/astropy,AustereCuriosity/astropy,DougBurke/astropy,joergdietrich/astropy,bsipocz/astropy,larrybradley/astropy,funbaker/astropy,DougBurke/astropy,saimn/astropy,tbabej/astropy,joergdietrich/astropy,AustereCuriosity/astropy,lpsinger/astropy,DougBurke/astropy,stargaser/astropy,StuartLittlefair/astropy,bsipocz/astropy,kelle/astropy,lpsinger/astropy,pllim/astropy,larrybradley/astropy,funbaker/astropy,tbabej/astropy,AustereCuriosity/astropy,astropy/astropy,MSeifert04/astropy,MSeifert04/astropy,kelle/astropy,mhvk/astropy,StuartLittlefair/astropy,mhvk/astropy,astropy/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,mhvk/astropy,saimn/astropy,stargaser/astropy,dhomeier/astropy,saimn/astropy,larrybradley/astropy,dhomeier/astropy,kelle/astropy,astropy/astropy,tbabej/astropy,mhvk/astropy,dhomeier/astropy,pllim/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,tbabej/astropy,saimn/astropy,astropy/astropy,tbabej/astropy,stargaser/astropy,MSeifert04/astropy,joergdietrich/astropy,bsipocz/astropy,pllim/astropy,bsipocz/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,astropy/astropy,dhomeier/astropy,kelle/astropy
|
38b236c9fb0f944b41b6300963fbf5e67d0f3fe7
|
mwstools/requesters/utils.py
|
mwstools/requesters/utils.py
|
import os
from mws.mws import DictWrapper
requesters_dir = os.path.dirname(os.path.abspath(__file__))
responses_dir = os.path.join(requesters_dir, 'responses')
def write_response(response, fname):
with open(os.path.join(responses_dir, fname), 'wb') as f:
if isinstance(response, DictWrapper):
f.write(response.original)
else:
f.write(response.content)
|
import os
from mws.mws import DictWrapper
requesters_dir = os.path.dirname(os.path.abspath(__file__))
responses_dir = os.path.join(requesters_dir, 'responses')
def write_response(response, fname):
return
with open(os.path.join(responses_dir, fname), 'wb') as f:
if isinstance(response, DictWrapper):
f.write(response.original)
else:
f.write(response.content)
|
Write response now returns None since after packaging, the code becomes unusable
|
Write response now returns None since after packaging, the code becomes unusable
|
Python
|
unlicense
|
ziplokk1/python-amazon-mws-tools
|
0a3eb4b966dff69cbe582c60bf4444facb4b683d
|
tcconfig/_tc_command_helper.py
|
tcconfig/_tc_command_helper.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import subprocrunner as spr
from ._common import find_bin_path
from ._const import TcSubCommand
from ._error import NetworkInterfaceNotFoundError
def get_tc_base_command(tc_subcommand):
if tc_subcommand not in TcSubCommand:
raise ValueError("the argument must be a TcSubCommand value")
return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value)
def run_tc_show(subcommand, device, tc_command_output):
from ._network import verify_network_interface
verify_network_interface(device, tc_command_output)
runner = spr.SubprocessRunner(
"{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)
)
if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1:
# reach here if the device does not exist at the system and netiface
# not installed.
raise NetworkInterfaceNotFoundError(target=device)
return runner.stdout
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import subprocrunner as spr
from ._common import find_bin_path
from ._const import TcSubCommand
from ._error import NetworkInterfaceNotFoundError
def get_tc_base_command(tc_subcommand):
if not isinstance(tc_subcommand, TcSubCommand):
raise ValueError("the argument must be a TcSubCommand value")
return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value)
def run_tc_show(subcommand, device, tc_command_output):
from ._network import verify_network_interface
verify_network_interface(device, tc_command_output)
runner = spr.SubprocessRunner(
"{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)
)
if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1:
# reach here if the device does not exist at the system and netiface
# not installed.
raise NetworkInterfaceNotFoundError(target=device)
return runner.stdout
|
Change to avoid a DeprecationWarning
|
Change to avoid a DeprecationWarning
|
Python
|
mit
|
thombashi/tcconfig,thombashi/tcconfig
|
5a3a91fe075aa6d0c29cccb3b9bdfc5b40e3dba9
|
leapp/libraries/stdlib/__init__.py
|
leapp/libraries/stdlib/__init__.py
|
"""
:py:mod:`leapp.libraries.stdlib`
represents a location for functions that otherwise would be defined multiple times across leapp actors
and at the same time, they are really useful for other actors.
"""
import six
import subprocess
import os
def call(args, split=True):
"""
Call an external program, capture and automatically utf-8 decode its ouput.
Then, supress output to stderr and redirect to /dev/null.
:param args: Command to execute
:type args: list
:param split: Split the output on newlines
:type split: bool
:return: stdout output, 'utf-8' decoded, split by lines if split=True
:rtype: unicode/str or [unicode/str] if split=True
"""
r = None
with open(os.devnull, mode='w') as err:
if six.PY3:
r = subprocess.check_output(args, stderr=err, encoding='utf-8')
else:
r = subprocess.check_output(args, stderr=err).decode('utf-8')
if split:
return r.splitlines()
return r
|
"""
:py:mod:`leapp.libraries.stdlib`
represents a location for functions that otherwise would be defined multiple times across leapp actors
and at the same time, they are really useful for other actors.
"""
import six
import subprocess
import os
from leapp.libraries.stdlib import api
def call(args, split=True):
"""
Call an external program, capture and automatically utf-8 decode its ouput.
Then, supress output to stderr and redirect to /dev/null.
:param args: Command to execute
:type args: list
:param split: Split the output on newlines
:type split: bool
:return: stdout output, 'utf-8' decoded, split by lines if split=True
:rtype: unicode/str or [unicode/str] if split=True
"""
r = None
with open(os.devnull, mode='w') as err:
if six.PY3:
r = subprocess.check_output(args, stderr=err, encoding='utf-8')
else:
r = subprocess.check_output(args, stderr=err).decode('utf-8')
if split:
return r.splitlines()
return r
|
Make api directly available in stdlib
|
stdlib: Make api directly available in stdlib
|
Python
|
lgpl-2.1
|
leapp-to/prototype,leapp-to/prototype,leapp-to/prototype,leapp-to/prototype
|
e582ef07d4b9f537e31d31c1546df870a2bd361c
|
tests/plugins/async_plugin/asyncplugin.py
|
tests/plugins/async_plugin/asyncplugin.py
|
from senpy.plugins import AnalysisPlugin
import multiprocessing
class AsyncPlugin(AnalysisPlugin):
def _train(self, process_number):
return process_number
def _do_async(self, num_processes):
with multiprocessing.Pool(processes=num_processes) as pool:
values = pool.map(self._train, range(num_processes))
return values
def activate(self):
self.value = self._do_async(4)
def analyse_entry(self, entry, params):
values = self._do_async(2)
entry.async_values = values
yield entry
|
from senpy.plugins import AnalysisPlugin
import multiprocessing
def _train(process_number):
return process_number
class AsyncPlugin(AnalysisPlugin):
def _do_async(self, num_processes):
pool = multiprocessing.Pool(processes=num_processes)
values = pool.map(_train, range(num_processes))
return values
def activate(self):
self.value = self._do_async(4)
def analyse_entry(self, entry, params):
values = self._do_async(2)
entry.async_values = values
yield entry
|
Fix multiprocessing tests in python2.7
|
Fix multiprocessing tests in python2.7
Closes #28 for python 2.
Apparently, process pools are not contexts in python 2.7.
On the other hand, in py2 you cannot pickle instance methods, so
you have to implement Pool tasks as independent functions.
|
Python
|
apache-2.0
|
gsi-upm/senpy,gsi-upm/senpy,gsi-upm/senpy
|
1a534a3ac6ab1617e9d48e84ce34c0b482730e4d
|
pritunl_node/call_buffer.py
|
pritunl_node/call_buffer.py
|
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
self.stop_waiter()
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
def stop_waiter(self):
if self.waiter:
self.waiter(None)
self.waiter = None
def return_call(self, id, response):
callback = self.call_waiters.pop(id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
|
from constants import *
import collections
import uuid
class CallBuffer():
def __init__(self):
self.waiter = None
self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
self.call_waiters = {}
def wait_for_calls(self, callback):
self.stop_waiter()
calls = []
while True:
try:
calls.append(self.queue.popleft())
except IndexError:
break
if calls:
callback(calls)
return
self.waiter = callback
def cancel_waiter(self):
self.waiter = None
def stop_waiter(self):
if self.waiter:
self.waiter(None)
self.waiter = None
def return_call(self, call_id, response):
callback = self.call_waiters.pop(call_id, None)
if callback:
callback(response)
def create_call(self, command, args, callback=None):
call_id = uuid.uuid4().hex
call = {
'id': call_id,
'command': command,
'args': args,
}
if callback:
self.call_waiters[call_id] = callback
if self.waiter:
self.waiter([call])
self.waiter = None
else:
self.queue.append(call)
return call_id
def cancel_call(self, call_id):
self.call_waiters.pop(call_id, None)
|
Add cancel call to call buffer
|
Add cancel call to call buffer
|
Python
|
agpl-3.0
|
pritunl/pritunl-node,pritunl/pritunl-node
|
716f953069b4fceebe4fec1a1ea2402e77cbb629
|
docs/src/conf.py
|
docs/src/conf.py
|
# -*- coding: utf-8 -*-
import os
import stat
from os.path import join, abspath
from subprocess import call
def prepare(globs, locs):
# RTD defaults the current working directory to where conf.py resides.
# In our case, that means <root>/docs/src/.
cwd = os.getcwd()
root = abspath(join(cwd, '..', '..'))
os.chdir(root)
# Download the PHP binary & composer.phar if necessary
base = 'https://github.com/Erebot/Buildenv/releases/download/1.4.0'
for f in ('php', 'composer.phar'):
call(['curl', '-L', '-z', f, '-o', f, '%s/%s' % (base, f)])
# Make sure the PHP interpreter is executable
os.chmod('./php', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
# Call composer to download/update dependencies as necessary
os.environ['COMPOSER_CACHE_DIR'] = './cache'
call(['./php', 'composer.phar', 'update', '-n', '--ignore-platform-reqs',
'--no-progress'], env=os.environ)
# Load the second-stage configuration file.
os.chdir(cwd)
conf = join(root, 'vendor', 'erebot', 'buildenv', 'sphinx', 'rtd.py')
print "Including the second configuration file (%s)..." % (conf, )
execfile(conf, globs, locs)
prepare(globals(), locals())
|
# -*- coding: utf-8 -*-
import os
import stat
from os.path import join, abspath
from subprocess import call
def prepare(globs, locs):
# RTD defaults the current working directory to where conf.py resides.
# In our case, that means <root>/docs/src/.
cwd = os.getcwd()
root = abspath(join(cwd, '..', '..'))
os.chdir(root)
# Download the PHP binary & composer.phar if necessary
base = 'https://github.com/Erebot/Buildenv/releases/download/1.4.0'
for f in ('php', 'composer.phar'):
call(['curl', '-L', '-z', f, '-o', f, '%s/%s' % (base, f)])
# Make sure the PHP interpreter is executable
os.chmod('./php', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
# Call composer to download/update dependencies as necessary
os.environ['COMPOSER_CACHE_DIR'] = './cache'
call(['./php', 'composer.phar', 'update', '-n', '--ignore-platform-reqs',
'--no-progress'], env=os.environ)
# Load the second-stage configuration file.
os.chdir(cwd)
conf = join(root, 'vendor', 'erebot', 'buildenv', 'sphinx', 'rtd.py')
print "Including the second configuration file (%s)..." % (conf, )
exec(compile(open(conf).read(), conf, 'exec'), globs, locs)
prepare(globals(), locals())
|
Replace execfile with py3 equivalent
|
Replace execfile with py3 equivalent
|
Python
|
mit
|
Erebot/Plop
|
a02739cc7b1384e51f44d86a05af5a9845469fca
|
pygame/__init__.py
|
pygame/__init__.py
|
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
# map our exceptions on pygame's default
error = SDLError
|
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
from pygame.mask import Mask
# map our exceptions on pygame's default
error = SDLError
|
Add Mask to toplevel pygame namespace
|
Add Mask to toplevel pygame namespace
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi
|
409182019048a5cb84499258f6f8daaffb62aeae
|
tests/test_simulation_forward.py
|
tests/test_simulation_forward.py
|
import os
import pytest
import pandas as pd
from glob import glob
import numpy as np
from gypsy import DATA_DIR
from gypsy.forward_simulation import simulate_forwards_df
TEST_FILES = glob(os.path.join(DATA_DIR, 'forward_simulation_files', '*.csv'))
TEST_FILES = [(item) for item in TEST_FILES]
CHART_FILES = glob(os.path.join(DATA_DIR, 'output', 'comparisons*.csv'))
CHART_FILES = [(item) for item in CHART_FILES]
@pytest.mark.parametrize("test_file", TEST_FILES)
def test_compare_forward_simulation(test_file):
input_df = pd.read_csv(test_file)
expected_data_path = os.path.join(
DATA_DIR, 'output',
'comparisons_{}'.format(os.path.basename(test_file))
)
plot_id = str(int(input_df.loc[0, 'PlotID']))
result = simulate_forwards_df(input_df, simulation_choice='yes')[plot_id]
expected = pd.read_csv(expected_data_path, index_col=0)
assert isinstance(result, pd.DataFrame)
assert np.allclose(
expected.values.astype(np.float64), result.values.astype(np.float64),
equal_nan=True
)
# regenerate output files
# result.to_csv(expected_data_path)
|
import os
import pytest
import pandas as pd
from glob import glob
import numpy as np
from gypsy import DATA_DIR
from gypsy.forward_simulation import simulate_forwards_df
TEST_FILES = glob(os.path.join(DATA_DIR, 'forward_simulation_files', '*.csv'))
TEST_FILES = [(item) for item in TEST_FILES]
CHART_FILES = glob(os.path.join(DATA_DIR, 'output', 'comparisons*.csv'))
CHART_FILES = [(item) for item in CHART_FILES]
@pytest.mark.parametrize("test_file", TEST_FILES)
def test_compare_forward_simulation(test_file):
input_df = pd.read_csv(test_file)
expected_data_path = os.path.join(
DATA_DIR, 'output',
'comparisons_{}'.format(os.path.basename(test_file))
)
plot_id = str(int(input_df.loc[0, 'PlotID']))
result = simulate_forwards_df(input_df, simulation_choice='yes')[plot_id]
expected = pd.read_csv(expected_data_path, index_col=0)
assert isinstance(result, pd.DataFrame)
assert np.testing.assert_allclose(
expected.values, result.values,
rtol=0, atol=1e-4,
equal_nan=True
)
# regenerate output files
# result.to_csv(expected_data_path)
|
Revise tests to use np.testing.assert_allclose
|
Revise tests to use np.testing.assert_allclose
this is better - if na values mismatch (e,g, na in result where expected
has a value) this errors and gives a message to that effect. the
previous one just errored and it was very hard to tell why
|
Python
|
mit
|
tesera/pygypsy,tesera/pygypsy
|
0f9cb6eb32ce014cb6ae8d24aefed2347efe68d9
|
src/python/cargo/condor/host.py
|
src/python/cargo/condor/host.py
|
"""
cargo/condor/host.py
Host individual condor jobs.
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
import os
import sys
import cPickle as pickle
def main():
"""
Application entry point.
"""
# make the job identifier obvious
process_number = int(os.environ["CONDOR_PROCESS"])
cluster_number = int(os.environ["CONDOR_CLUSTER"])
identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
open(identifier_path, "w").close()
# load and run the job
job = pickle.load(sys.stdin)
job.run()
if __name__ == "__main__":
main()
|
"""
cargo/condor/host.py
Host individual condor jobs.
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
import os
import sys
import cPickle as pickle
def main():
"""
Application entry point.
"""
# make the job identifier obvious
process_number = int(os.environ["CONDOR_PROCESS"])
cluster_number = int(os.environ["CONDOR_CLUSTER"])
identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
open(identifier_path, "w").close()
# load and run the job
with open("job.pickle") as job_file:
job = pickle.load(job_file)
job.run()
if __name__ == "__main__":
main()
|
Load job from a job file instead of stdin.
|
Load job from a job file instead of stdin.
|
Python
|
mit
|
borg-project/cargo,borg-project/cargo
|
354fb43cc95d68b06b85e8d1fa2426ca663ef8b9
|
common/__init__.py
|
common/__init__.py
|
VERSION = (0, 0, 0)
__version__ = '.'.join(map(str, VERSION))
from django import template
template.add_to_builtins('common.templatetags.common')
template.add_to_builtins('common.templatetags.development')
|
VERSION = (0, 1, 0)
__version__ = '.'.join(map(str, VERSION))
from django import template
template.add_to_builtins('common.templatetags.common')
template.add_to_builtins('common.templatetags.development')
# Add db_name to options for use in model.Meta class
import django.db.models.options as options
options.DEFAULT_NAMES = options.DEFAULT_NAMES + ('db_name',)
|
Add db_name to options for use in model.Meta class
|
Add db_name to options for use in model.Meta class
|
Python
|
bsd-3-clause
|
baskoopmans/djcommon,baskoopmans/djcommon,baskoopmans/djcommon
|
02d184f94e2e5a0521e2ec06e2c10ca644ba2cef
|
python/balcaza/t2wrapper.py
|
python/balcaza/t2wrapper.py
|
from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
|
from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
self.task[flow.name] = NestedWorkflow(flow)
nested = self.task[flow.name]
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
|
Use [] notation in wrapper module for task management
|
Use [] notation in wrapper module for task management
|
Python
|
lgpl-2.1
|
jongiddy/balcazapy,jongiddy/balcazapy,jongiddy/balcazapy
|
3048bf667ec24c93d1c60f08124d68b6d1fc458d
|
src/python/borg/defaults.py
|
src/python/borg/defaults.py
|
"""@author: Bryan Silverthorn <bcs@cargo-cult.org>"""
import os
machine_speed = 1.0
minimum_fake_run_budget = 1800.0 # XXX
proc_poll_period = 1.0
root_log_level = os.environ.get("BORG_LOG_ROOT_LEVEL", "NOTSET")
try:
from borg_site_defaults import *
except ImportError:
pass
|
"""@author: Bryan Silverthorn <bcs@cargo-cult.org>"""
import os
machine_speed = 1.0
proc_poll_period = 1.0
root_log_level = os.environ.get("BORG_LOG_ROOT_LEVEL", "NOTSET")
try:
from borg_site_defaults import *
except ImportError:
pass
|
Remove an ancient configuration setting.
|
Remove an ancient configuration setting.
|
Python
|
mit
|
borg-project/borg
|
525f7fff89e02e54ad2a731533e6b817424594f1
|
tomviz/python/RotationAlign.py
|
tomviz/python/RotationAlign.py
|
# Perform alignment to the estimated rotation axis
#
# Developed as part of the tomviz project (www.tomviz.com).
def transform_scalars(dataset, SHIFT=None, rotation_angle=90.0):
from tomviz import utils
from scipy import ndimage
import numpy as np
data_py = utils.get_array(dataset) # Get data as numpy array.
if data_py is None: #Check if data exists
raise RuntimeError("No data array found!")
data_py_return = np.empty_like(data_py)
ndimage.interpolation.shift(data_py, SHIFT, order=0, output=data_py_return)
rotation_axis = 2 # This operator always assumes the rotation axis is Z
if rotation_angle == []: # If tilt angle not given, assign it to 90 degrees.
rotation_angle = 90
axis1 = (rotation_axis + 1) % 3
axis2 = (rotation_axis + 2) % 3
axes = (axis1, axis2)
shape = utils.rotate_shape(data_py_return, rotation_angle, axes=axes)
data_py_return2 = np.empty(shape, data_py_return.dtype, order='F')
ndimage.interpolation.rotate(
data_py_return, rotation_angle, output=data_py_return2, axes=axes)
utils.set_array(dataset, data_py_return2)
|
# Perform alignment to the estimated rotation axis
#
# Developed as part of the tomviz project (www.tomviz.com).
def transform_scalars(dataset, SHIFT=None, rotation_angle=90.0):
from tomviz import utils
from scipy import ndimage
import numpy as np
data_py = utils.get_array(dataset) # Get data as numpy array.
if data_py is None: #Check if data exists
raise RuntimeError("No data array found!")
if SHIFT is None:
SHIFT = np.zeros(len(data_py.shape), dtype=np.int)
data_py_return = np.empty_like(data_py)
ndimage.interpolation.shift(data_py, SHIFT, order=0, output=data_py_return)
rotation_axis = 2 # This operator always assumes the rotation axis is Z
if rotation_angle == []: # If tilt angle not given, assign it to 90 degrees.
rotation_angle = 90
axis1 = (rotation_axis + 1) % 3
axis2 = (rotation_axis + 2) % 3
axes = (axis1, axis2)
shape = utils.rotate_shape(data_py_return, rotation_angle, axes=axes)
data_py_return2 = np.empty(shape, data_py_return.dtype, order='F')
ndimage.interpolation.rotate(
data_py_return, rotation_angle, output=data_py_return2, axes=axes)
utils.set_array(dataset, data_py_return2)
|
Fix ndimage complaining about shift being of NoneType
|
Fix ndimage complaining about shift being of NoneType
|
Python
|
bsd-3-clause
|
OpenChemistry/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,mathturtle/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz
|
669b95d2092f67bcc220b5fa106064d6c3df6a63
|
rolca_core/urls.py
|
rolca_core/urls.py
|
from __future__ import absolute_import, unicode_literals
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns( # pylint: disable=invalid-name
'',
url(r'^$', 'uploader.views.upload_app', name="upload_app"),
url(r'^potrditev$',
TemplateView.as_view(template_name='uploader/upload_confirm.html'),
name="upload_confirm"),
# url(r'^$', 'uploader.views.upload', name="upload"),
url(r'^seznam$', 'uploader.views.list_select', name="list_select"),
url(r'^seznam/(?P<salon_id>\d+)$', 'uploader.views.list_details',
name="list_datails"),
url(r'^razpisi$',
TemplateView.as_view(template_name="uploader/notices.html"),
name="notices"),
url(r'^razpisi/os$',
TemplateView.as_view(template_name="uploader/notice_os.html"),
name="notice_os"),
url(r'^razpisi/ss$',
TemplateView.as_view(template_name="uploader/notice_ss.html"),
name="notice_ss"),
)
|
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from django.views.generic import TemplateView
from . import views as core_views
urlpatterns = [ # pylint: disable=invalid-name
url(r'^$', core_views.upload_app, name="upload_app"),
url(r'^potrditev$',
TemplateView.as_view(template_name='uploader/upload_confirm.html'),
name="upload_confirm"),
# url(r'^seznam$', 'uploader.views.list_select', name="list_select"),
# url(r'^seznam/(?P<salon_id>\d+)$', 'uploader.views.list_details',
# name="list_datails"),
# url(r'^razpisi$',
# TemplateView.as_view(template_name="uploader/notices.html"),
# name="notices"),
# url(r'^razpisi/os$',
# TemplateView.as_view(template_name="uploader/notice_os.html"),
# name="notice_os"),
# url(r'^razpisi/ss$',
# TemplateView.as_view(template_name="uploader/notice_ss.html"),
# name="notice_ss"),
]
|
Rewrite urlpatterns to new format
|
Rewrite urlpatterns to new format
|
Python
|
apache-2.0
|
dblenkus/rolca,dblenkus/rolca,dblenkus/rolca
|
54046bfb8834f5fc2a93841ae56e2790ae82eecf
|
shared/api.py
|
shared/api.py
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
jobRepo = btr3baseball.JobRepository(jobTable)
dsRepo = btr3baseball.DatasourceRepository()
def main(event, context):
method = event['method']
if 'data' in event:
data = event['data']
else:
data = None
if method == 'submitJob':
return submitJob(data, context)
elif method == 'getJob':
return getJob(data, context)
elif method == 'listDatasources':
return listDatasources(data, context)
elif method == 'getDatasource':
return getDatasource(data, context)
else:
return None
def submitJob(event, context):
# Put initial entry in dynamo db
jobId = jobRepo.createJob(event)
# Put the job ID on the SQS queue
response = queue.send_message(MessageBody=jobId)
# Update the DB entry with sqs message ID for traceability
return jobRepo.updateWithMessageId(jobId, response.get('MessageId'))
def getJob(event, context):
return jobRepo.getJob(event['jobId'])
def listDatasources(event, context):
return dsRepo.listDatasources()
def getDatasource(event, context):
return dsRepo.getDatasource(event['datasourceId'])
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
jobRepo = btr3baseball.JobRepository(jobTable)
dsRepo = btr3baseball.DatasourceRepository()
def main(event, context):
method = event['method']
if 'data' in event:
data = event['data']
else:
data = None
print(data)
if method == 'submitJob':
return submitJob(data, context)
elif method == 'getJob':
return getJob(data, context)
elif method == 'listDatasources':
return listDatasources(data, context)
elif method == 'getDatasource':
return getDatasource(data, context)
else:
return None
def submitJob(event, context):
# Put initial entry in dynamo db
jobId = jobRepo.createJob(event)
# Put the job ID on the SQS queue
response = queue.send_message(MessageBody=jobId)
# Update the DB entry with sqs message ID for traceability
return jobRepo.updateWithMessageId(jobId, response.get('MessageId'))
def getJob(event, context):
return jobRepo.getJob(event['jobId'])
def listDatasources(event, context):
return dsRepo.listDatasources()
def getDatasource(event, context):
return dsRepo.getDatasource(event['datasourceId'])
|
Add debug print of data
|
Add debug print of data
|
Python
|
apache-2.0
|
bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball
|
b44d34f8bc5264d495dc4c2176654b0bd53bfb8a
|
mistral/api/wsgi.py
|
mistral/api/wsgi.py
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral.api import app
from mistral import config
from mistral.engine import rpc
config.parse_args()
transport = rpc.get_transport()
application = app.setup_app(transport=transport)
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral.api import app
from mistral import config
config.parse_args()
application = app.setup_app()
|
Remove transport from WSGI script
|
Remove transport from WSGI script
The setup_app method no longer requires transport as input.
Change-Id: I4caf397a48e30822d423c8cf7d40f2773f9aa951
Closes-Bug: 1443654
|
Python
|
apache-2.0
|
dennybaa/mistral,openstack/mistral,StackStorm/mistral,dennybaa/mistral,openstack/mistral,StackStorm/mistral
|
a16b51bb26761f8c4a30c06da4c711dac24ac3e0
|
mr/preprocessing.py
|
mr/preprocessing.py
|
import numpy as np
from scipy.ndimage.filters import uniform_filter
from scipy.ndimage.fourier import fourier_gaussian
def bandpass(image, lshort, llong, threshold=1):
"""Convolve with a Gaussian to remove short-wavelength noise,
and subtract out long-wavelength variations,
retaining features of intermediate scale."""
if not 2*lshort < llong:
raise ValueError("The smoothing length scale must be more" +
"than twice the noise length scale.")
settings = dict(mode='nearest', cval=0)
boxcar = uniform_filter(image, 2*llong+1, **settings)
gaussian = np.fft.ifftn(fourier_gaussian(np.fft.fftn(image), lshort))
result = gaussian - boxcar
result -= threshold # Features must be this level above the background.
return result.real.clip(min=0.)
def scale_to_gamut(image, original_dtype):
max_value = np.iinfo(original_dtype).max
scaled = (max_value/image.max()*image.clip(min=0.))
return scaled.astype(original_dtype)
|
import numpy as np
from scipy.ndimage.filters import uniform_filter
from scipy.ndimage.fourier import fourier_gaussian
import warnings
first_run = True
try:
import pyfftw
except ImportError:
fftn = np.fft.fftn
ifftn = np.fft.ifftn
else:
def _maybe_align(a):
global planned
if first_run:
warnings.warn("FFTW is configuring itself. This will take " +
"several sections, but subsequent calls will run " +
"*much* faster.", UserWarning)
planned = False
return pyfftw.n_byte_align(a, a.dtype.alignment)
fftn = lambda a: pyfftw.interfaces.numpy_fft.fftn(_maybe_align(a))
ifftn = lambda a: pyfftw.interfaces.numpy_fft.ifftn(_maybe_align(a))
def bandpass(image, lshort, llong, threshold=1):
"""Convolve with a Gaussian to remove short-wavelength noise,
and subtract out long-wavelength variations,
retaining features of intermediate scale."""
if not 2*lshort < llong:
raise ValueError("The smoothing length scale must be more" +
"than twice the noise length scale.")
settings = dict(mode='nearest', cval=0)
boxcar = uniform_filter(image, 2*llong+1, **settings)
gaussian = ifftn(fourier_gaussian(fftn(image), lshort))
result = gaussian - boxcar
result -= threshold # Features must be this level above the background.
return result.real.clip(min=0.)
def scale_to_gamut(image, original_dtype):
max_value = np.iinfo(original_dtype).max
scaled = (max_value/image.max()*image.clip(min=0.))
return scaled.astype(original_dtype)
|
Add optional dependence on FFTW for faster bandpass
|
ENH: Add optional dependence on FFTW for faster bandpass
|
Python
|
bsd-3-clause
|
daniorerio/trackpy,daniorerio/trackpy
|
47b031db83f5cb90f786029a6ffbdb7a599145db
|
timepiece/context_processors.py
|
timepiece/context_processors.py
|
from django.conf import settings
from timepiece import models as timepiece
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
default_famfamfam_url = settings.STATIC_URL + 'images/icons/'
famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', default_famfamfam_url)
context = {
'FAMFAMFAM_URL': famfamfam_url,
}
return context
def quick_search(request):
return {
'quick_search_form': QuickSearchForm(),
}
def active_entries(request):
active_entries = timepiece.Entry.objects.filter(
end_time__isnull=True,
).exclude(
user=request.user,
).select_related('user', 'project', 'activity')
return {
'active_entries': active_entries,
}
def extra_nav(request):
context = {
'extra_nav': getattr(settings, 'EXTRA_NAV', {})
}
return context
|
from django.conf import settings
from timepiece import models as timepiece
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
default_famfamfam_url = settings.STATIC_URL + 'images/icons/'
famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', default_famfamfam_url)
context = {
'FAMFAMFAM_URL': famfamfam_url,
}
return context
def quick_search(request):
return {
'quick_search_form': QuickSearchForm(),
}
def active_entries(request):
active_entries = None
if request.user.is_authenticated():
active_entries = timepiece.Entry.objects.filter(
end_time__isnull=True,
).exclude(
user=request.user,
).select_related('user', 'project', 'activity')
return {
'active_entries': active_entries,
}
def extra_nav(request):
context = {
'extra_nav': getattr(settings, 'EXTRA_NAV', {})
}
return context
|
Apply active_entries fix from payroll-reports branch
|
Apply active_entries fix from payroll-reports branch
|
Python
|
mit
|
gaga3966/django-timepiece,josesanch/django-timepiece,BocuStudio/django-timepiece,dannybrowne86/django-timepiece,josesanch/django-timepiece,BocuStudio/django-timepiece,gaga3966/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,gaga3966/django-timepiece,dannybrowne86/django-timepiece,caktus/django-timepiece,dannybrowne86/django-timepiece,caktus/django-timepiece,josesanch/django-timepiece,arbitrahj/django-timepiece
|
943e920603d5507a37c1b0c835c598972f0f2cff
|
github/models.py
|
github/models.py
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
cache.add('github',
requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20').json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
return self.events()[:3]
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
return self.events()[:3]
|
Check github response before parsing
|
Check github response before parsing
|
Python
|
agpl-3.0
|
City-of-Helsinki/devheldev,City-of-Helsinki/devheldev,terotic/devheldev,terotic/devheldev,City-of-Helsinki/devheldev,terotic/devheldev
|
fba4fdf426b0a29ca06deb67587c2bd804adb017
|
tbgxmlutils/xmlutils.py
|
tbgxmlutils/xmlutils.py
|
#!/usr/bin/env python
from xml.dom import minidom
import xml.etree.ElementTree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
if parent is None:
handle = ET.Element(k)
else:
handle = ET.SubElement(parent, k)
if txt: handle.text = unicode(txt)
try:
for k, v in attrs.iteritems(): handle.attrib[k] = v
except AttributeError:
pass
return handle
def etree2xml(e, encoding='UTF-8'):
return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)
def pretty(xml=None, fn=None):
if fn is not None:
xml = minidom.parse(fn)
elif not isinstance(xml, minidom.Document):
xml = minidom.parseString(xml)
return xml.toprettyxml(indent=' ')
def xml_fn_to_json(fn):
fh = open(fn, 'r')
json = xmltodict.parse(fh.read())
return json
|
#!/usr/bin/env python
from xml.dom import minidom
import lxml.etree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
if parent is None:
handle = ET.Element(k)
else:
handle = ET.SubElement(parent, k)
if txt: handle.text = unicode(txt)
try:
for k, v in attrs.iteritems(): handle.attrib[k] = v
except AttributeError:
pass
return handle
def etree2xml(e, encoding='UTF-8'):
return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)
def pretty(xml=None, fn=None):
if fn is not None:
xml = minidom.parse(fn)
elif not isinstance(xml, minidom.Document):
xml = minidom.parseString(xml)
return xml.toprettyxml(indent=' ')
def xml_fn_to_json(fn):
fh = open(fn, 'r')
json = xmltodict.parse(fh.read())
return json
|
Use lxml instead of elementtree.
|
Use lxml instead of elementtree.
|
Python
|
mit
|
Schwarzschild/TBGXMLUtils
|
ac3f56f4ed0826600b9adbbf8dfe3b99ce508ac6
|
migrations/versions/0334_broadcast_message_number.py
|
migrations/versions/0334_broadcast_message_number.py
|
"""
Revision ID: 0334_broadcast_message_number
Revises: 0333_service_broadcast_provider
Create Date: 2020-12-04 15:06:22.544803
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0334_broadcast_message_number'
down_revision = '0333_service_broadcast_provider'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("create sequence broadcast_provider_message_number_seq")
op.create_table(
'broadcast_provider_message_number',
sa.Column(
'broadcast_provider_message_number',
sa.Integer(),
server_default=sa.text("nextval('broadcast_provider_message_number_seq')"),
nullable=False
),
sa.Column('broadcast_provider_message_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['broadcast_provider_message_id'], ['broadcast_provider_message.id'], ),
sa.PrimaryKeyConstraint('broadcast_provider_message_number')
)
op.execute(
"""
INSERT INTO
broadcast_provider_message_number (broadcast_provider_message_id)
SELECT
id
FROM
broadcast_provider_message
"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('broadcast_provider_message_number')
op.execute("drop sequence broadcast_provider_message_number_seq")
# ### end Alembic commands ###
|
"""
Revision ID: 0334_broadcast_message_number
Revises: 0333_service_broadcast_provider
Create Date: 2020-12-04 15:06:22.544803
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0334_broadcast_message_number'
down_revision = '0333_service_broadcast_provider'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("create sequence broadcast_provider_message_number_seq")
op.create_table(
'broadcast_provider_message_number',
sa.Column(
'broadcast_provider_message_number',
sa.Integer(),
server_default=sa.text("nextval('broadcast_provider_message_number_seq')"),
nullable=False
),
sa.Column('broadcast_provider_message_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['broadcast_provider_message_id'], ['broadcast_provider_message.id'], ),
sa.PrimaryKeyConstraint('broadcast_provider_message_number')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('broadcast_provider_message_number')
op.execute("drop sequence broadcast_provider_message_number_seq")
# ### end Alembic commands ###
|
Delete unneeded code form migration
|
Delete unneeded code form migration
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
6f3b0c997f7207279bf836edc94db1ac19d2ce1d
|
src/rabird/core/logging.py
|
src/rabird/core/logging.py
|
'''
@date 2013-5-9
@author Hong-She Liang <starofrainnight@gmail.com>
'''
import sys
import os
# Import the global logging unit, not our logging .
global_logging = __import__('logging')
def load_default_config():
arguments = {
'level': None,
'filename': None,
'filemode': None,
'format': None,
'datefmt': None,
'style': None,
}
for k in list(arguments.keys()):
try:
envionment_text = 'PYTHON_LOGGING_{}'.format(k.upper())
arguments[k] = os.environ[envionment_text]
except ValueError:
pass
except KeyError:
pass
# Remove all arguments that is None value.
keys = list(arguments.keys())
for k in keys:
if arguments[k] is None:
del arguments[k]
# Set default level to logging.INFO .
if 'level' not in list(arguments.keys()):
arguments['level'] = global_logging.INFO
global_logging.basicConfig(**arguments)
# Added console handler only there have filename argument.
if 'filename' in list(arguments.keys()):
global_logging.getLogger().addHandler(global_logging.StreamHandler(sys.stdout))
|
'''
@date 2013-5-9
@author Hong-She Liang <starofrainnight@gmail.com>
'''
import sys
import os
# Import the global logging unit, not our logging .
global_logging = __import__('logging')
def load_default_config():
arguments = {
'level': None,
'filename': None,
'filemode': None,
'format': None,
'datefmt': None,
'style': None,
}
for k in list(arguments.keys()):
try:
envionment_text = 'PYTHON_LOGGING_%s' % k.upper()
arguments[k] = os.environ[envionment_text]
except ValueError:
pass
except KeyError:
pass
# Remove all arguments that is None value.
keys = list(arguments.keys())
for k in keys:
if arguments[k] is None:
del arguments[k]
# Set default level to logging.INFO .
if 'level' not in list(arguments.keys()):
arguments['level'] = global_logging.INFO
global_logging.basicConfig(**arguments)
# Added console handler only there have filename argument.
if 'filename' in list(arguments.keys()):
global_logging.getLogger().addHandler(global_logging.StreamHandler(sys.stdout))
|
Use old style string format method to avoid formatting warning
|
Use old style string format method to avoid formatting warning
|
Python
|
apache-2.0
|
starofrainnight/rabird.core
|
2de7427d06ff33bf8bdfe0424e07b3fb34621b07
|
shop/user/views.py
|
shop/user/views.py
|
# -*- coding: utf-8 -*-
"""User views."""
from flask import Blueprint, render_template
from flask_login import login_required
blueprint = Blueprint('user', __name__, url_prefix='/users', static_folder='../static')
@blueprint.route('/')
@login_required
def members():
"""List members."""
return render_template('users/members.html')
|
# -*- coding: utf-8 -*-
"""User views."""
from flask import Blueprint, render_template
from flask_login import login_required
blueprint = Blueprint(
'user', __name__,
url_prefix='/users', static_folder='../static'
)
@blueprint.route('/')
@login_required
def members():
"""List members."""
return render_template('users/members.html')
|
Clean up code a bit
|
Clean up code a bit
|
Python
|
bsd-3-clause
|
joeirimpan/shop,joeirimpan/shop,joeirimpan/shop
|
ff59a35d5ea90169e34d65bd9ec3a6177e1faebd
|
thinglang/execution/stack.py
|
thinglang/execution/stack.py
|
class StackFrame(object):
def __init__(self, instance):
self.instance = instance
self.data = {}
self.idx = 0
self.return_value = None
def __setitem__(self, key, value):
print('\tSET<{}> {}: {}'.format(self.idx, key, value))
self.data[key] = (self.idx, value)
def __getitem__(self, item):
print('\tGET<{}> {}: {}'.format(self.idx, item, self.data[item][1]))
return self.data[item][1]
def __contains__(self, item):
return item in self.data
def __iter__(self):
for key, value in self.data.items():
yield key, value
def enter(self):
print('\tINCR<{}> -> <{}>'.format(self.idx, self.idx + 1))
self.idx += 1
def exit(self):
print('\tDECR<{}> -> <{}>'.format(self.idx, self.idx - 1))
self.data = {
key: value for key, value in self.data.items() if value[1] != self.idx
}
self.idx -= 1
class StackFrameTerminator(object):
def __init__(self, target_arg=None):
self.target_arg = target_arg
class StackScopeTerminator(object):
pass
|
class StackFrame(object):
def __init__(self, instance):
self.instance = instance
self.data = {}
self.idx = 0
self.return_value = None
def __setitem__(self, key, value):
print('\tSET<{}> {}: {}'.format(self.idx, key, value))
self.data[key] = (self.idx, value)
def __getitem__(self, item):
print('\tGET<{}> {}: {}'.format(self.idx, item, self.data[item][1]))
return self.data[item][1]
def __contains__(self, item):
return item in self.data
def __iter__(self):
for key, value in self.data.items():
yield key, value
def enter(self):
print('\tINCR<{}> -> <{}>'.format(self.idx, self.idx + 1))
self.idx += 1
def exit(self):
assert self.idx > 0, 'Cannot exit lowest stack segment'
print('\tDECR<{}> -> <{}>'.format(self.idx, self.idx - 1))
self.data = {
key: value for key, value in self.data.items() if value[1] != self.idx
key: value for key, value in self.data.items() if value[0] != self.idx
}
self.idx -= 1
class StackFrameTerminator(object):
def __init__(self, target_arg=None):
self.target_arg = target_arg
class StackScopeTerminator(object):
pass
|
Add index assertion during segment exit and fix segment cleanup logic
|
Add index assertion during segment exit and fix segment cleanup logic
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
f024e340a6a443bb765b67bbdb811fa44fd3d19b
|
tests/test_resources.py
|
tests/test_resources.py
|
from flask import json
from helper import TestCase
from models import db, Major
class StudentsTestCase(TestCase):
def setUp(self):
super(StudentsTestCase, self).setUp()
with self.appx.app_context():
db.session.add(Major(id=1, university_id=1, name='Major1'))
db.session.add(Major(id=2, university_id=1, name='Major2'))
db.session.commit()
def test_students_patch(self):
headers = {
'Authorization': 'Bearer ' + self.jwt,
'Content-Type': 'application/json'
}
data = {
'graduation_year': 2018,
'gender': 'm',
'majors': [1, 2]
}
rv = self.app.patch('/students/0', headers=headers, data=json.dumps(data))
self.assertEqual(rv.status_code, 200)
|
from flask import json
from helper import TestCase
from models import db, Major, Student
class StudentsTestCase(TestCase):
def setUp(self):
super(StudentsTestCase, self).setUp()
with self.appx.app_context():
db.session.add(Major(id=1, university_id=1, name='Major1'))
db.session.add(Major(id=2, university_id=1, name='Major2'))
db.session.commit()
def test_students_patch(self):
headers = {
'Authorization': 'Bearer ' + self.jwt,
'Content-Type': 'application/json'
}
data = {
'graduation_year': 2018,
'gender': 'm',
'majors': [1, 2]
}
rv = self.app.patch('/students/0', headers=headers, data=json.dumps(data))
self.assertEqual(rv.status_code, 200)
with self.appx.app_context():
student = Student.query.get(0)
self.assertEqual(student.graduation_year, data['graduation_year'])
self.assertEqual(student.gender, data['gender'])
self.assertEqual(student.majors_list, data['majors'])
|
Improve testing of student patching
|
Improve testing of student patching
|
Python
|
agpl-3.0
|
SCUEvals/scuevals-api,SCUEvals/scuevals-api
|
938043259eefdec21994489d68b1cf737618ba34
|
test/test_conversion.py
|
test/test_conversion.py
|
import unittest
from src import conversion
class TestNotationConverter(unittest.TestCase):
"""Tests for NotationConverter class"""
def test_alg_search_good_input_a5(self):
"""Input with 'a5'"""
actual_result = main.TileLine('w').line
expected_result = ' '
self.assertEqual(actual_result, expected_result)
|
"""Tests for conversion module"""
import unittest
from src import conversion
class TestNotationConverter(unittest.TestCase):
"""Tests for NotationConverter class"""
def test_alg_search_good_input_a5(self):
"""Input with 'a5'"""
n_con = conversion.NotationConverter()
actual_result = n_con.alg_search('a5')
expected_result = ('a5', 'qr5', 'qr4')
self.assertEqual(actual_result, expected_result)
def test_alg_search_good_input_f7(self):
"""Input with 'f7'"""
n_con = conversion.NotationConverter()
actual_result = n_con.alg_search('f7')
expected_result = ('f7', 'kb7', 'kb2')
self.assertEqual(actual_result, expected_result)
def test_alg_search_nonexistant(self):
"""Input which does not exist"""
n_con = conversion.NotationConverter()
self.assertRaises(LookupError, n_con.alg_search, 'f99')
def test_desc_search_good_white(self):
"""Input with good value"""
n_con = conversion.NotationConverter()
actual_result = n_con.desc_search('qn3', 'white')
expected_result = ('b3', 'qn3', 'qn6')
self.assertEqual(actual_result, expected_result)
def test_desc_search_good_black(self):
"""Input with good value"""
n_con = conversion.NotationConverter()
actual_result = n_con.desc_search('qn6', 'black')
expected_result = ('b3', 'qn3', 'qn6')
self.assertEqual(actual_result, expected_result)
def test_desc_search_nonexistant(self):
"""Input with good value"""
n_con = conversion.NotationConverter()
self.assertRaises(LookupError, n_con.desc_search, 'qn333', 'white')
|
Add tests for NotationConverter methods
|
Add tests for NotationConverter methods
|
Python
|
mit
|
blairck/chess_notation
|
b6cfa50e127d3f74247ab148219ef6336e445cca
|
InvenTree/InvenTree/ready.py
|
InvenTree/InvenTree/ready.py
|
import sys
def canAppAccessDatabase():
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'flush',
'loaddata',
'dumpdata',
'makemirations',
'migrate',
'check',
'dbbackup',
'mediabackup',
'dbrestore',
'mediarestore',
'shell',
'createsuperuser',
'wait_for_db',
'prerender',
'collectstatic',
'makemessages',
'compilemessages',
'test',
]
for cmd in excluded_commands:
if cmd in sys.argv:
return False
return True
|
import sys
def canAppAccessDatabase():
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'flush',
'loaddata',
'dumpdata',
'makemirations',
'migrate',
'check',
'dbbackup',
'mediabackup',
'dbrestore',
'mediarestore',
'shell',
'createsuperuser',
'wait_for_db',
'prerender',
'collectstatic',
'makemessages',
'compilemessages',
]
for cmd in excluded_commands:
if cmd in sys.argv:
return False
return True
|
Allow data operations to run for 'test'
|
Allow data operations to run for 'test'
|
Python
|
mit
|
inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
|
fef28556bc4d105feb44345782c632b8d3befa3f
|
server/acre/settings/dev.py
|
server/acre/settings/dev.py
|
from .base import *
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['RDS_DB_NAME'],
'HOST': os.environ['RDS_HOSTNAME'],
'PORT': os.environ['RDS_PORT'],
'USER': os.environ['RDS_USERNAME'],
'PASSWORD': os.environ['RDS_PASSWORD'],
}
}
ALLOWED_HOSTS = [".us-east-2.elasticbeanstalk.com", "localhost"]
|
from .base import *
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['RDS_DB_NAME'],
'HOST': os.environ['RDS_HOSTNAME'],
'PORT': os.environ['RDS_PORT'],
'USER': os.environ['RDS_USERNAME'],
'PASSWORD': os.environ['RDS_PASSWORD'],
}
}
ALLOWED_HOSTS = [".acre.one", ".us-east-2.elasticbeanstalk.com", "localhost"]
|
Add acre.one to allowed host
|
Add acre.one to allowed host
|
Python
|
mit
|
yizhang7210/Acre,yizhang7210/Acre,yizhang7210/Acre,yizhang7210/Acre
|
1636fe834830ebb6644d17f908f893a3c2a41e33
|
tests/test_sentences.py
|
tests/test_sentences.py
|
#
import pytest
from sdsc import sentencesegmenter
@pytest.mark.parametrize("sentence,expected",
(
# 1
("This is a simple ##@command-2## sentence. This one too.",
["This is a simple ##@command-2## sentence", "This one too"]),
# 2
("This is not a test in one go. openSUSE is not written with a capital letter.",
["This is not a test in one go",
"openSUSE is not written with a capital letter"]),
# 3
("This is a sentence, e.g. for me.",
["This is a sentence, e.g. for me"]),
# 4
("E. g. this is a sentence.",
["E. g. this is a sentence"]),
# 5
("An above average chance stands e.g. Michael. Marta is also on the list.",
["An above average chance stands e.g. Michael",
"Marta is also on the list"]),
# Add more entries here:
))
def test_sentencesegmenter(sentence, expected):
"""checks whether sentencesegmenter behaves sane"""
sentences = sentencesegmenter(sentence)
assert sentences == expected
|
#
import pytest
from sdsc import sentencesegmenter
@pytest.mark.parametrize("sentence,expected",
(
# 0 - a single simple sentence
("This is a simple sentence.",
["This is a simple sentence"]),
# 1 - two simple sentences
("This is a simple ##@command-2## sentence. This one is too.",
["This is a simple ##@command-2## sentence", "This one is too"]),
# 2 - lowercase letter starts second sentence
("This is not a test in one go. openSUSE is not written with a capital letter.",
["This is not a test in one go",
"openSUSE is not written with a capital letter"]),
# 3 - abbreviation in the middle of the sentence
("This is a sentence, e.g. for me.",
["This is a sentence, e.g. for me"]),
# 4 - abbreviation at the start of the sentence
("E. g. this is a sentence.",
["E. g. this is a sentence"]),
# 5 - abbreviation in the middle of sentence before a capital letter
("An above average chance stands e.g. Michael. Marta is also on the list.",
["An above average chance stands e.g. Michael",
"Marta is also on the list"]),
# 6 - sentences with parentheses around them
("(We speak in circles. We dance in code.)",
["We speak in circles",
"We dance in code"]),
# 6 - sentences with parentheses around them
("We speak in circles. (We dance in code.)",
["We speak in circles",
"We dance in code"]),
))
def test_sentencesegmenter(sentence, expected):
"""checks whether sentencesegmenter behaves sanely"""
sentences = sentencesegmenter(sentence)
assert sentences == expected
|
Expand the sentence segmentation tests a little()
|
Expand the sentence segmentation tests a little()
|
Python
|
lgpl-2.1
|
sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker
|
dd7513f4146679d11aff6d528f11927131dc692f
|
feder/monitorings/factories.py
|
feder/monitorings/factories.py
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
Add description and template to MonitoringFactory
|
Add description and template to MonitoringFactory
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
b17e39436bde57558c1a9d6e70330a51dd1d0d19
|
website/addons/osffiles/utils.py
|
website/addons/osffiles/utils.py
|
from website.addons.osffiles.exceptions import FileNotFoundError
def get_versions(filename, node):
"""Return file versions for a :class:`NodeFile`.
:raises: FileNotFoundError if file does not exists for the node.
"""
try:
return node.files_versions[filename.replace('.', '_')]
except KeyError:
raise FileNotFoundError('{0!r} not found for node {1!r}'.format(
filename, node._id
))
def get_latest_version_number(filename, node):
"""Return the current version number (0-indexed) for a NodeFile.
:raises: FileNotFoundError if file does not exists for the node.
"""
versions = get_versions(filename, node)
return len(versions) - 1
|
from website.addons.osffiles.exceptions import FileNotFoundError
def get_versions(filename, node):
"""Return IDs for a file's version records.
:param str filename: The name of the file.
:param Node node: The node which has the requested file.
:return: List of ids (strings) for :class:`NodeFile` records.
:raises: FileNotFoundError if file does not exists for the node.
"""
try:
return node.files_versions[filename.replace('.', '_')]
except KeyError:
raise FileNotFoundError('{0!r} not found for node {1!r}'.format(
filename, node._id
))
def get_latest_version_number(filename, node):
"""Return the current version number (0-indexed) for a file.
:param str filename: The name of the file.
:param Node node: The node which has the requested file.
:raises: FileNotFoundError if file does not exists for the node.
"""
versions = get_versions(filename, node)
return len(versions) - 1
|
Clarify documentation for get_versions and get_latest_version_number.
|
Clarify documentation for get_versions and get_latest_version_number.
|
Python
|
apache-2.0
|
bdyetton/prettychart,Johnetordoff/osf.io,caneruguz/osf.io,ZobairAlijan/osf.io,brandonPurvis/osf.io,arpitar/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,fabianvf/osf.io,caseyrygt/osf.io,dplorimer/osf,MerlinZhang/osf.io,zkraime/osf.io,zkraime/osf.io,hmoco/osf.io,lamdnhan/osf.io,cosenal/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,caseyrollins/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,cldershem/osf.io,cldershem/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,mluo613/osf.io,rdhyee/osf.io,cosenal/osf.io,ticklemepierce/osf.io,Ghalko/osf.io,saradbowman/osf.io,emetsger/osf.io,cwisecarver/osf.io,mattclark/osf.io,petermalcolm/osf.io,CenterForOpenScience/osf.io,TomHeatwole/osf.io,reinaH/osf.io,pattisdr/osf.io,doublebits/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,kushG/osf.io,jmcarp/osf.io,zachjanicki/osf.io,abought/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,kushG/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,jolene-esposito/osf.io,TomBaxter/osf.io,doublebits/osf.io,Ghalko/osf.io,binoculars/osf.io,amyshi188/osf.io,wearpants/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,kushG/osf.io,zkraime/osf.io,kwierman/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,jeffreyliu3230/osf.io,ticklemepierce/osf.io,barbour-em/osf.io,TomBaxter/osf.io,lamdnhan/osf.io,jnayak1/osf.io,billyhunt/osf.io,acshi/osf.io,adlius/osf.io,AndrewSallans/osf.io,mfraezz/osf.io,leb2dg/osf.io,mluo613/osf.io,icereval/osf.io,Nesiehr/osf.io,AndrewSallans/osf.io,njantrania/osf.io,chrisseto/osf.io,doublebits/osf.io,chennan47/osf.io,HarryRybacki/osf.io,caseyrygt/osf.io,samanehsan/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,dplorimer/osf,sloria/osf.io,RomanZWang/osf.io,bdyetton/prettychart,DanielSBrown/osf.io,haoyuchen1992/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,zamattiac/osf.io,jmcarp/osf.io,laurenrevere/osf.io,haoyuchen1992/osf.io,amyshi188/osf.io,we
arpants/osf.io,ckc6cz/osf.io,doublebits/osf.io,fabianvf/osf.io,jnayak1/osf.io,barbour-em/osf.io,caseyrollins/osf.io,SSJohns/osf.io,lyndsysimon/osf.io,reinaH/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,abought/osf.io,arpitar/osf.io,felliott/osf.io,zachjanicki/osf.io,revanthkolli/osf.io,alexschiller/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,kch8qx/osf.io,binoculars/osf.io,crcresearch/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,abought/osf.io,rdhyee/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,sbt9uc/osf.io,GaryKriebel/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,njantrania/osf.io,samchrisinger/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,jinluyuan/osf.io,caseyrygt/osf.io,adlius/osf.io,barbour-em/osf.io,kwierman/osf.io,jnayak1/osf.io,aaxelb/osf.io,erinspace/osf.io,jeffreyliu3230/osf.io,adlius/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,jolene-esposito/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,billyhunt/osf.io,GaryKriebel/osf.io,jnayak1/osf.io,Ghalko/osf.io,revanthkolli/osf.io,acshi/osf.io,fabianvf/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,hmoco/osf.io,jmcarp/osf.io,lamdnhan/osf.io,samanehsan/osf.io,mluke93/osf.io,bdyetton/prettychart,erinspace/osf.io,pattisdr/osf.io,TomHeatwole/osf.io,dplorimer/osf,GageGaskins/osf.io,SSJohns/osf.io,brandonPurvis/osf.io,danielneis/osf.io,njantrania/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,leb2dg/osf.io,samanehsan/osf.io,SSJohns/osf.io,zachjanicki/osf.io,kch8qx/osf.io,cldershem/osf.io,caneruguz/osf.io,hmoco/osf.io,kwierman/osf.io,baylee-d/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,himanshuo/osf.io,samanehsan/osf.io,MerlinZhang/osf.io,felliott/osf.io,mluke93/osf.io,ckc6cz/osf.io,doublebits/osf.io,samchrisinger/osf.io,aaxelb/osf.io,RomanZWang/osf.io,arpitar/osf.io,monikagrabowska/osf.io,jinluyuan/osf.io,revanthkolli/osf.io,acshi/osf.io,icereval/osf.io,jinluyuan/osf.io,himanshuo/osf.io,adlius/o
sf.io,chrisseto/osf.io,cosenal/osf.io,caneruguz/osf.io,billyhunt/osf.io,petermalcolm/osf.io,alexschiller/osf.io,cwisecarver/osf.io,leb2dg/osf.io,sbt9uc/osf.io,cosenal/osf.io,lamdnhan/osf.io,mluke93/osf.io,aaxelb/osf.io,ckc6cz/osf.io,mluo613/osf.io,zamattiac/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,felliott/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,acshi/osf.io,arpitar/osf.io,abought/osf.io,icereval/osf.io,crcresearch/osf.io,zachjanicki/osf.io,njantrania/osf.io,crcresearch/osf.io,jolene-esposito/osf.io,GageGaskins/osf.io,zkraime/osf.io,binoculars/osf.io,chennan47/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,acshi/osf.io,jolene-esposito/osf.io,TomHeatwole/osf.io,kushG/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,KAsante95/osf.io,sloria/osf.io,revanthkolli/osf.io,kwierman/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,cldershem/osf.io,mluke93/osf.io,wearpants/osf.io,sloria/osf.io,jinluyuan/osf.io,emetsger/osf.io,emetsger/osf.io,chrisseto/osf.io,kch8qx/osf.io,mfraezz/osf.io,zamattiac/osf.io,GageGaskins/osf.io,aaxelb/osf.io,laurenrevere/osf.io,GaryKriebel/osf.io,mattclark/osf.io,KAsante95/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,dplorimer/osf,mfraezz/osf.io,mluo613/osf.io,lyndsysimon/osf.io,reinaH/osf.io,reinaH/osf.io,fabianvf/osf.io,himanshuo/osf.io,KAsante95/osf.io,danielneis/osf.io,alexschiller/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,leb2dg/osf.io,samchrisinger/osf.io,alexschiller/osf.io,himanshuo/osf.io,Nesiehr/osf.io,emetsger/osf.io,GageGaskins/osf.io,alexschiller/osf.io,chennan47/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,KAsante95/osf.io,GaryKriebel/osf.io,bdyetton/prettychart,amyshi188/osf.io,HarryRybacki/osf.io,sbt9uc/osf.io,samchrisinger/osf.io,felliott/osf.io,caseyrygt/osf.io,hmoco/osf.io,chrisseto/osf.io,baylee-d/osf.io,cslzchen/osf.io,petermalcolm/osf.io,erinspace/osf.io,cwisecarver/osf.io,danielneis/osf.io,lyndsysimon/osf.io,baylee-d/osf.io,KAsante95/osf.io,wearpants/osf.io
|
0f9418eed089938e0094f40cc15682ef59e041a1
|
__init__.py
|
__init__.py
|
# -*- coding: utf8 -*-
# Plugin module: wires Gravatar avatar support into a running PyBossa app.
import default_settings
from flask.ext.plugins import Plugin
from flask import current_app as app
from pybossa_gravatar.gravatar import Gravatar
from pybossa.model.user import User
from sqlalchemy import event

__plugin__ = "PyBossaGravatar"
__version__ = "0.1.0"

# Shared Gravatar helper; bound to the application in setup().
gravatar = Gravatar()


class PyBossaGravatar(Plugin):
    """A PyBossa plugin for Gravatar integration."""

    def setup(self):
        """Setup the plugin.

        Loads config defaults, binds the shared Gravatar instance to the
        current app, and registers the SQLAlchemy event listener.
        """
        self.load_config()
        gravatar.init_app(app)
        self.setup_event_listener()

    def load_config(self):
        """Configure the plugin.

        Copies every UPPER_CASE name from default_settings into the app
        config for keys that are not already set.  NOTE(review): a key set
        to a falsy value (0, '', False) is also overwritten — confirm that
        is intended.
        """
        settings = [key for key in dir(default_settings) if key.isupper()]
        for s in settings:
            if not app.config.get(s):
                app.config[s] = getattr(default_settings, s)

    def setup_event_listener(self):
        """Setup event listener."""
        @event.listens_for(User, 'before_insert')
        def add_user_event(mapper, conn, target):
            """Set gravatar by default for new users."""
            # update_repo=False: the user row is about to be inserted by
            # this same flush, so no separate repository save is needed.
            gravatar.set(target, update_repo=False)
|
# -*- coding: utf8 -*-
# Plugin module: wires Gravatar avatar support into a running PyBossa app.
import default_settings
from flask.ext.plugins import Plugin
from flask import current_app as app
from flask import redirect, request, url_for
from pybossa_gravatar.gravatar import Gravatar
from pybossa.model.user import User
from sqlalchemy import event
from flask.ext.login import current_user

__plugin__ = "PyBossaGravatar"
__version__ = "0.1.0"

# Shared Gravatar helper; bound to the application in setup().
gravatar = Gravatar()


class PyBossaGravatar(Plugin):
    """A PyBossa plugin for Gravatar integration."""

    def setup(self):
        """Setup the plugin.

        Loads config defaults, binds the shared Gravatar instance to the
        current app, and registers the event listener and URL rule.
        """
        self.load_config()
        gravatar.init_app(app)
        self.setup_event_listener()
        self.setup_url_rule()

    def load_config(self):
        """Copy UPPER_CASE defaults into app.config for unset keys."""
        settings = [key for key in dir(default_settings) if key.isupper()]
        for s in settings:
            if not app.config.get(s):
                app.config[s] = getattr(default_settings, s)

    def setup_event_listener(self):
        """Register a listener that gives every new user a Gravatar."""
        @event.listens_for(User, 'before_insert')
        def add_user_event(mapper, conn, target):
            """Set gravatar by default for new users."""
            gravatar.set(target, update_repo=False)

    def setup_url_rule(self):
        """Register the /account/set-gravatar endpoint."""
        # BUG FIX: the view is a plain function registered on the app, not
        # a method — it must not take ``self``; Flask calls route handlers
        # with no positional arguments, so the old signature raised a
        # TypeError on every request.  ``url_for`` was also used without
        # being imported (NameError on the anonymous-user branch).
        @app.route('/account/set-gravatar')
        def set_gravatar():
            """Set gravatar for the current user."""
            if current_user.is_anonymous():
                return redirect(url_for('account.signin'))
            gravatar.set(current_user)
            # A Flask view must return a response; send the user back to
            # the page they came from (fall back to the site root).
            return redirect(request.referrer or '/')
|
Add URL rule to set Gravatar for current user
|
Add URL rule to set Gravatar for current user
|
Python
|
bsd-3-clause
|
alexandermendes/pybossa-gravatar
|
8d8863fe178b085c6ce7500996f9c2d2c8f159f6
|
umibukela/csv_export.py
|
umibukela/csv_export.py
|
from collections import OrderedDict
def form_questions(form):
    """Return an OrderedDict mapping each exportable question pathstr to ''.

    ``form`` is presumably a parsed XLSForm/KoBo-style form definition with
    a top-level 'children' list — TODO confirm against the caller.
    """
    d = OrderedDict()
    children = form['children']
    for child in children:
        # Top-level question: has a pathstr and is not a control element.
        # NOTE(review): this branch does not skip nodes whose type is
        # 'group', so a group carrying its own 'pathstr' is included here
        # — confirm whether that is intended.
        if 'pathstr' in child and 'control' not in child:
            d.update({child['pathstr']: ''})
        elif 'children' in child:
            # Grouped questions: include each nested child with a pathstr.
            for minor in child['children']:
                if 'pathstr' in minor:
                    d.update({minor['pathstr']: ''})
    # Strip personally identifying / bookkeeping fields from the export.
    if 'Contact_number' in d:
        del d['Contact_number']
    if 'Full_name' in d:
        del d['Full_name']
    if 'Monitor_name' in d:
        del d['Monitor_name']
    if 'phonenumber' in d:
        del d['phonenumber']
    if 'capturer' in d:
        del d['capturer']
    if 'surveyor' in d:
        del d['surveyor']
    if 'Monitor_Name' in d:
        del d['Monitor_Name']
    if 'phone_number' in d:
        del d['phone_number']
    return d
def export_row(answer, fields):
    """Fill *fields* in place from one submission and return it.

    Each key of ``fields`` is looked up in ``answer.answers``; keys with no
    answer are removed from ``fields`` so the row matches the data present.
    """
    obj = answer.answers
    # BUG FIX: iterate over a snapshot of the keys.  The loop deletes
    # entries from ``fields``, and mutating a dict while iterating its live
    # keys() view raises RuntimeError on Python 3.
    for k in list(fields):
        try:
            fields[k] = obj[k]
        except KeyError:
            del fields[k]
    return fields
|
from collections import OrderedDict
def form_questions(form):
    """Return an OrderedDict mapping each exportable question pathstr to ''.

    Top-level questions must carry a 'pathstr', must not be control
    elements, and must not be groups; questions nested one level inside a
    group are included via the group's 'children' list.
    """
    questions = OrderedDict()
    for child in form['children']:
        is_question = ('pathstr' in child
                       and 'control' not in child
                       and child['type'] != 'group')
        if is_question:
            questions[child['pathstr']] = ''
        elif 'children' in child:
            for minor in child['children']:
                if 'pathstr' in minor:
                    questions[minor['pathstr']] = ''
    # Strip personally identifying / bookkeeping fields from the export.
    excluded = ('Contact_number', 'Full_name', 'Monitor_name', 'phonenumber',
                'capturer', 'surveyor', 'Monitor_Name', 'phone_number')
    for key in excluded:
        if key in questions:
            del questions[key]
    return questions
def export_row(answer, fields):
    """Fill *fields* in place from one submission and return it.

    Each key of ``fields`` is looked up in ``answer.answers``; keys with no
    answer are removed from ``fields`` so the row matches the data present.
    """
    obj = answer.answers
    # BUG FIX: iterate over a snapshot of the keys.  The loop deletes
    # entries from ``fields``, and mutating a dict while iterating its live
    # keys() view raises RuntimeError on Python 3.
    for k in list(fields):
        try:
            fields[k] = obj[k]
        except KeyError:
            del fields[k]
    return fields
|
Make sure correct type is excluded
|
Make sure correct type is excluded
|
Python
|
mit
|
Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.