commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
65ecd11b4d4689108eabd464377afdb20ff95240
|
rest_framework_simplejwt/utils.py
|
rest_framework_simplejwt/utils.py
|
from __future__ import unicode_literals
from calendar import timegm
from datetime import datetime
from django.conf import settings
from django.utils import six
from django.utils.functional import lazy
from django.utils.timezone import is_aware, make_aware, utc
def make_utc(dt):
if settings.USE_TZ and not is_aware(dt):
return make_aware(dt, timezone=utc)
return dt
def aware_utcnow():
return make_utc(datetime.utcnow())
def datetime_to_epoch(dt):
return timegm(dt.utctimetuple())
def datetime_from_epoch(ts):
return make_utc(datetime.utcfromtimestamp(ts))
def format_lazy(s, *args, **kwargs):
return s.format(*args, **kwargs)
format_lazy = lazy(format_lazy, six.text_type)
|
from __future__ import unicode_literals
from calendar import timegm
from datetime import datetime
from django.conf import settings
from django.utils import six
from django.utils.functional import lazy
from django.utils.timezone import is_naive, make_aware, utc
def make_utc(dt):
if settings.USE_TZ and is_naive(dt):
return make_aware(dt, timezone=utc)
return dt
def aware_utcnow():
return make_utc(datetime.utcnow())
def datetime_to_epoch(dt):
return timegm(dt.utctimetuple())
def datetime_from_epoch(ts):
return make_utc(datetime.utcfromtimestamp(ts))
def format_lazy(s, *args, **kwargs):
return s.format(*args, **kwargs)
format_lazy = lazy(format_lazy, six.text_type)
|
Use is_naive here for clarity
|
Use is_naive here for clarity
|
Python
|
mit
|
davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt
|
eb33d70bfda4857fbd76616cf3bf7fb7d7feec71
|
spoj/00005/palin.py
|
spoj/00005/palin.py
|
#!/usr/bin/env python3
def next_palindrome(k):
palin = list(k)
n = len(k)
mid = n // 2
# case 1: forward right
just_copy = False
for i in range(mid, n):
mirrored = n - 1 - i
if k[i] < k[mirrored]:
just_copy = True
if just_copy:
palin[i] = palin[mirrored]
# case 2: backward left
if not just_copy:
i = (n - 1) // 2
while i >= 0 and k[i] == '9':
i -= 1
if i >= 0:
palin[i] = str(int(k[i]) + 1)
for j in range(i + 1, mid):
palin[j] = '0'
for j in range(mid, n):
mirrored = n - 1 - j
palin[j] = palin[mirrored]
else:
# case 3: "99...9" -> "100..01"
palin = ['0'] * (n + 1)
palin[0] = palin[-1] = '1'
return ''.join(palin)
if __name__ == '__main__':
t = int(input())
for _ in range(t):
k = input()
print(next_palindrome(k))
|
#!/usr/bin/env python3
def next_palindrome(k):
palin = list(k)
n = len(k)
mid = n // 2
# case 1: forward right
just_copy = False
for i in range(mid, n):
mirrored = n - 1 - i
if k[i] < k[mirrored]:
just_copy = True
if just_copy:
palin[i] = palin[mirrored]
# case 2: backward left
if not just_copy:
i = (n - 1) // 2
while i >= 0 and k[i] == '9':
i -= 1
if i >= 0:
palin[i] = str(int(k[i]) + 1)
for j in range(i + 1, (n + 1) // 2):
palin[j] = '0'
for j in range((n + 1) // 2, n):
mirrored = n - 1 - j
palin[j] = palin[mirrored]
else:
# case 3: "99...9" -> "100..01"
palin = ['0'] * (n + 1)
palin[0] = palin[-1] = '1'
return ''.join(palin)
if __name__ == '__main__':
t = int(input())
for _ in range(t):
k = input()
print(next_palindrome(k))
|
Fix bug in ranges (to middle)
|
Fix bug in ranges (to middle)
- in SPOJ palin
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com>
|
Python
|
mit
|
mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming
|
e43596395507c4606909087c0e77e84c1a232811
|
damn/__init__.py
|
damn/__init__.py
|
"""
**damn** (aka *digital audio for music nerds*) is an easy-to-use python package
for digital audio signal processing, analysis and synthesis.
"""
__version__ = '0.0.0'
|
"""
**damn** (aka *digital audio for music nerds*) is an easy-to-use python package
for digital audio signal processing, analysis and synthesis.
"""
__author__ = 'Romain Clement'
__copyright__ = 'Copyright 2014, Romain Clement'
__credits__ = []
__license__ = 'MIT'
__version__ = "0.0.0"
__maintainer__ = 'Romain Clement'
__email__ = 'contact@romainclement.com'
__status__ = 'Development'
|
Add meta information for damn package
|
[DEV] Add meta information for damn package
|
Python
|
mit
|
rclement/yodel,rclement/yodel
|
7c4a8d1249becb11727002c4eb2cd2f58c712244
|
zou/app/utils/emails.py
|
zou/app/utils/emails.py
|
from flask_mail import Message
from zou.app import mail, app
def send_email(subject, body, recipient_email, html=None):
"""
Send an email with given subject and body to given recipient.
"""
if html is None:
html = body
with app.app_context():
message = Message(
sender="Kitsu Bot <no-reply@cg-wire.com>",
body=body,
html=html,
subject=subject,
recipients=[recipient_email]
)
mail.send(message)
|
from flask_mail import Message
from zou.app import mail, app
def send_email(subject, body, recipient_email, html=None):
"""
Send an email with given subject and body to given recipient.
"""
if html is None:
html = body
with app.app_context():
mail_default_sender = app.config["MAIL_DEFAULT_SENDER"]
message = Message(
sender="Kitsu Bot <%s>" % mail_default_sender,
body=body,
html=html,
subject=subject,
recipients=[recipient_email]
)
mail.send(message)
|
Fix configuration of email default sender
|
Fix configuration of email default sender
|
Python
|
agpl-3.0
|
cgwire/zou
|
a5cc18bab108f83ab45073272fa467fc62a2649b
|
run_python_tests.py
|
run_python_tests.py
|
#!/usr/bin/python
import os
import optparse
import sys
import unittest
USAGE = """%prog SDK_PATH TEST_PATH
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation.
TEST_PATH Path to package containing test modules.
WEBTEST_PATH Path to the webtest library."""
def main(sdk_path, test_path, webtest_path):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
sys.path.append(webtest_path)
suite = unittest.loader.TestLoader().discover(test_path,
pattern="*test.py")
return unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) != 3:
print 'Error: Exactly 3 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
WEBTEST_PATH = args[2]
sys.exit(not main(SDK_PATH, TEST_PATH, WEBTEST_PATH))
|
#!/usr/bin/python
import os
import optparse
import sys
import unittest
USAGE = """%prog SDK_PATH TEST_PATH
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation.
TEST_PATH Path to package containing test modules.
WEBTEST_PATH Path to the webtest library."""
def main(sdk_path, test_path, webtest_path):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
sys.path.append(webtest_path)
suite = unittest.loader.TestLoader().discover(test_path,
pattern="*test.py")
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) != 3:
print 'Error: Exactly 3 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
WEBTEST_PATH = args[2]
main(SDK_PATH, TEST_PATH, WEBTEST_PATH)
|
Revert "Python tests now return an error code on fail."
|
Revert "Python tests now return an error code on fail."
|
Python
|
bsd-3-clause
|
pquochoang/samples,jiayliu/apprtc,todotobe1/apprtc,jan-ivar/adapter,82488059/apprtc,shelsonjava/apprtc,procandi/apprtc,4lejandrito/adapter,overtakermtg/samples,mvenkatesh431/samples,JiYou/apprtc,martin7890/samples,Zauberstuhl/adapter,TribeMedia/samples,jiayliu/apprtc,bpyoung92/apprtc,aadebuger/docker-apprtc,xdumaine/adapter,mulyoved/samples,b-cuts/samples,jan-ivar/adapter,dengshaodong/docker-apprtc,smayoorans/samples,juberti/samples,martin7890/samples,Edward-Shawn/samples,mauricionr/samples,ralic/samples,jan-ivar/samples,keshwans/samples,harme199497/adapter,fetterov/samples,dengshaodong/docker-apprtc,fitraditya/samples,shines/adapter,Edward-Shawn/samples,dengshaodong/docker-apprtc,mvenkatesh431/apprtc,smadhusu/AppRTC,shelsonjava/apprtc,smbale/samples,JiYou/apprtc,arnauorriols/apprtc,shelsonjava/apprtc,taylor-b/samples,YouthAndra/apprtc,bpyoung92/apprtc,kod3r/samples,myself659/samples,leehz/samples,procandi/apprtc,Roarz/samples,mauricionr/samples,b-cuts/samples,4lejandrito/adapter,procandi/apprtc,EmreAkkoyun/sample,TheKnarf/apprtc,diddie06/webrtc,bemasc/samples,MahmoudFouad/samples,shelsonjava/samples,procandi/samples,volkanh/volkanh.github.io,virajs/apprtc,dengshaodong/docker-apprtc,Zauberstuhl/adapter,YouthAndra/apprtc,tsruban/samples,fetterov/samples,pquochoang/samples,akashrchoksi/newone,virajs/apprtc,tsruban/samples,virajs/samples,mvenkatesh431/apprtc,virajs/samples,pquochoang/samples,mvenkatesh431/samples,dushmis/webrtc,juberti/samples,overtakermtg/samples,jjrasche/cell-based-RC-control,bemasc/samples,guoweis/webrtc,mvenkatesh431/apprtc,TribeMedia/apprtc,samdutton/apprtc,shelsonjava/apprtc,bemasc/samples,bpyoung92/apprtc,webrtc/samples,jarl-alejandro/apprtc,virajs/apprtc,jjrasche/cell-based-RC-control,shines/adapter,fitraditya/samples,jiayliu/apprtc,mauricionr/samples,shelsonjava/samples,procandi/samples,Acidburn0zzz/adapter,bbandaru/samples,JiYou/apprtc,mulyoved/samples,diddie06/webrtc,samdutton/apprtc,calebboyd/adapter,xdumaine/samples,aadebuger/docker-apprtc,smadhusu/AppRTC,leehz/samples,todotobe1/samples,taylor-b/samples,YouthAndra/apprtc,82488059/apprtc,guoweis/webrtc,smayoorans/samples,mvenkatesh431/apprtc,martin7890/samples,webrtc/apprtc,jarl-alejandro/apprtc,samdutton/apprtc,smbale/samples,knightsofaa/webrtc,jjrasche/cell-based-RC-control,aadebuger/docker-apprtc,xdumaine/adapter,arnauorriols/apprtc,keshwans/samples,samdutton/apprtc,shelsonjava/samples,smadhusu/AppRTC,TheKnarf/apprtc,smadhusu/AppRTC,TribeMedia/samples,volkanh/volkanh.github.io,harme199497/adapter,dajise/samples,mvenkatesh431/samples,YouthAndra/apprtc,samdutton/apprtc,overtakermtg/samples,MahmoudFouad/samples,Acidburn0zzz/adapter,oliverhuangchao/samples,Acidburn0zzz/adapter,fippo/webrtc,TribeMedia/samples,todotobe1/apprtc,myself659/samples,Zauberstuhl/adapter,arnauorriols/apprtc,kod3r/samples,akashrchoksi/newone,xdumaine/samples,todotobe1/apprtc,JiYou/apprtc,oliverhuangchao/samples,shelsonjava/apprtc,mvenkatesh431/apprtc,bpyoung92/apprtc,procandi/apprtc,fippo/apprtc,askdaddy/samples,Edward-Shawn/samples,todotobe1/apprtc,harme199497/adapter,dengshaodong/docker-apprtc,webrtc/adapter,todotobe1/samples,dushmis/webrtc,webrtc/apprtc,JiYou/apprtc,calebboyd/adapter,smbale/samples,TheKnarf/apprtc,TribeMedia/apprtc,calebboyd/adapter,82488059/apprtc,jarl-alejandro/apprtc,fippo/apprtc,b-cuts/samples,82488059/apprtc,kod3r/samples,jan-ivar/samples,jiayliu/apprtc,samdutton/webrtc,TribeMedia/apprtc,xdumaine/samples,82488059/apprtc,fippo/apprtc,virajs/apprtc,bbandaru/samples,fippo/webrtc,smadhusu/AppRTC,todotobe1/apprtc,TribeMedia/apprtc,aadebuger/docker-apprtc,shaohung001/samples,leehz/samples,webrtc/apprtc,virajs/apprtc,fitraditya/samples,oliverhuangchao/samples,dajise/samples,jiayliu/apprtc,aadebuger/docker-apprtc,procandi/samples,webrtc/samples,dajise/samples,arnauorriols/apprtc,todotobe1/samples,volkanh/volkanh.github.io,mulyoved/samples,tsruban/samples,samdutton/webrtc,TheKnarf/apprtc,4lejandrito/adapter,jjrasche/cell-based-RC-control,webrtc/apprtc,bbandaru/samples,YouthAndra/apprtc,Roarz/samples,jarl-alejandro/apprtc,webrtc/apprtc,virajs/samples,MahmoudFouad/samples,askdaddy/samples,knightsofaa/webrtc,TribeMedia/apprtc,guoweis/webrtc,bpyoung92/apprtc,akashrchoksi/newone,fetterov/samples,shaohung001/samples,myself659/samples,webrtc/adapter,fippo/apprtc,smayoorans/samples,ralic/samples,shines/adapter,fippo/apprtc,arnauorriols/apprtc,TheKnarf/apprtc,EmreAkkoyun/sample,jarl-alejandro/apprtc,procandi/apprtc,ralic/samples,Roarz/samples,EmreAkkoyun/sample
|
49e95022577eb40bcf9e1d1c9f95be7269fd0e3b
|
scripts/update_acq_stats.py
|
scripts/update_acq_stats.py
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from mica.stats import update_acq_stats
update_acq_stats.main()
import os
table_file = mica.stats.acq_stats.table_file
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
Fix reference to acq table file in script
|
Fix reference to acq table file in script
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
8a6144fc3918856cb2259f65f9ee5cc9cfaf1fdc
|
locustfile.py
|
locustfile.py
|
from locust import HttpLocust, TaskSet, task
class UserBehavior(TaskSet):
tasks = []
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
self.client.get("")
@task
def select_scene(self):
# Get url
self.client.get()
@task
def render_preview(self):
self.client.get()
@task
def render_full(self):
self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
from locust import HttpLocust, TaskSet, task
from bs4 import BeautifulSoup
from requests import Session
import random
class UserBehavior(TaskSet):
def on_start(self):
pass
@task
def index(self):
self.client.get("/")
@task
def move_map(self):
lat = random.uniform(-1, 1)
lon = random.uniform(-1, 1)
response = self.client.post(
url="/ajax",
data={'lat': lat, 'lng': lng,}
)
self.client.get("")
@task
def select_scene(self):
# Get url
soup = BeautifulSoup(self.client.get(""))
self.client.get()
@task
def render_preview(self):
self.client.get()
@task
def render_full(self):
self.client.get()
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 1000
max_wait = 5000
|
Add random functionality to map move.
|
Add random functionality to map move.
|
Python
|
mit
|
recombinators/snapsat,recombinators/snapsat,recombinators/snapsat
|
3b41e2166adde50f36f8f7ea389c80b76b83acaf
|
test/test_wavedrom.py
|
test/test_wavedrom.py
|
import subprocess
from utils import *
@all_files_in_dir('wavedrom_0')
def test_wavedrom_0(datafiles):
with datafiles.as_cwd():
subprocess.check_call(['python3', 'wavedrom-test.py'])
@all_files_in_dir('wavedrom_1')
def test_wavedrom_1(datafiles):
with datafiles.as_cwd():
for s in get_simulators():
subprocess.check_call(['runSVUnit', '-s', s, '-w'])
expect_testrunner_pass('run.log')
|
import subprocess
from utils import *
@all_files_in_dir('wavedrom_0')
def test_wavedrom_0(datafiles):
with datafiles.as_cwd():
subprocess.check_call(['python3', 'wavedrom-test.py'])
@all_files_in_dir('wavedrom_1')
@all_available_simulators()
def test_wavedrom_1(datafiles, simulator):
with datafiles.as_cwd():
subprocess.check_call(['runSVUnit', '-s', simulator, '-w'])
expect_testrunner_pass('run.log')
|
Update wavedrom tests to get simulators via fixture
|
Update wavedrom tests to get simulators via fixture
|
Python
|
apache-2.0
|
nosnhojn/svunit-code,svunit/svunit,nosnhojn/svunit-code,svunit/svunit,svunit/svunit,nosnhojn/svunit-code
|
9d041287e5e0d1950d5dcda23f6f68522d287282
|
tests/test_machine.py
|
tests/test_machine.py
|
import rml.machines
def test_machine_load_elements():
lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM')
assert len(lattice) == 173
for element in lattice.get_elements():
assert element.get_pv_name('readback')
|
import rml.machines
def test_machine_load_elements():
lattice = rml.machines.get_elements(machine='SRI21', elem_type='BPM')
assert len(lattice) == 173
for element in lattice.get_elements():
assert isinstance(element.get_pv_name('readback', 'x'), str)
assert isinstance(element.get_pv_name('readback', 'y'), str)
|
Test if pvs are loaded correctly from the database
|
Test if pvs are loaded correctly from the database
|
Python
|
apache-2.0
|
willrogers/pml,willrogers/pml,razvanvasile/RML
|
4a3df7842ab8f305ece134aa223801007d55c4f9
|
timm/utils/metrics.py
|
timm/utils/metrics.py
|
""" Eval metrics and related
Hacked together by / Copyright 2020 Ross Wightman
"""
class AverageMeter:
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
return [correct[:k].view(-1).float().sum(0) * 100. / batch_size for k in topk]
|
""" Eval metrics and related
Hacked together by / Copyright 2020 Ross Wightman
"""
class AverageMeter:
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.reshape(1, -1).expand_as(pred))
return [correct[:k].reshape(-1).float().sum(0) * 100. / batch_size for k in topk]
|
Fix topn metric view regression on PyTorch 1.7
|
Fix topn metric view regression on PyTorch 1.7
|
Python
|
apache-2.0
|
rwightman/pytorch-image-models,rwightman/pytorch-image-models
|
362c8dacda35bac24aa83e4fcaa2f6bac37150fd
|
tests/test_mw_util.py
|
tests/test_mw_util.py
|
"""Unit tests for cat2cohort."""
import unittest
from mw_util import str2cat
class TestMWutil(unittest.TestCase):
"""Test methods from mw_util."""
pass
|
"""Unit tests for cat2cohort."""
import unittest
from mw_util import str2cat
class TestMWutil(unittest.TestCase):
"""Test methods from mw_util."""
def test_str2cat(self):
"""Test str2cat."""
values = [
('A', 'Category:A'),
('Category:B', 'Category:B'),
]
for value, expected in values:
self.assertEqual(str2cat(value), expected)
|
Add unit test for str2cat method.
|
Add unit test for str2cat method.
|
Python
|
mit
|
Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics
|
ebf52caf6ee09ef1f15cb88815a1fb8008899c79
|
tests/test_reactjs.py
|
tests/test_reactjs.py
|
# -*- coding: utf-8 -*-
import dukpy
class TestReactJS(object):
def test_hello_world(self):
jsx = dukpy.jsx_compile('var react_hello = <h1>Hello, world!</h1>;')
jsi = dukpy.JSInterpreter()
result = jsi.evaljs([
'''
var React = require('react/react'),
ReactDOM = require('react/react-dom-server');
''',
jsx,
'ReactDOM.renderToStaticMarkup(react_hello, null);'
])
assert result == '<h1>Hello, world!</h1>'
|
# -*- coding: utf-8 -*-
import dukpy
class TestReactJS(object):
def test_hello_world(self):
jsx = dukpy.jsx_compile('var react_hello = <h1>Hello, world!</h1>;')
jsi = dukpy.JSInterpreter()
result = jsi.evaljs([
'''
var React = require('react/react'),
ReactDOM = require('react/react-dom-server');
''',
jsx,
'ReactDOM.renderToStaticMarkup(react_hello, null);'
])
assert result == '<h1>Hello, world!</h1>', res
def test_jsx_mixed(self):
code = '''
var React = require('react/react'),
ReactDOM = require('react/react-dom-server');
ReactDOM.renderToStaticMarkup(<h1>Hello, world!</h1>, null);
'''
jsx = dukpy.jsx_compile(code)
res = dukpy.evaljs(jsx)
assert res == '<h1>Hello, world!</h1>', res
def test_react_binding(self):
code = '''
var React = require('react/react'),
ReactDOM = require('react/react-dom-server');
var HelloWorld = React.createClass({
render: function() {
return (
<div className="helloworld">
Hello {this.props.data.name}
</div>
);
}
});
ReactDOM.renderToStaticMarkup(<HelloWorld data={dukpy.data}/>, null);
'''
jsx = dukpy.jsx_compile(code)
res = dukpy.evaljs(jsx, data={'id': 1, 'name': "Alessandro"})
assert res == '<div class="helloworld">Hello Alessandro</div>', res
|
Add tests for a React Component
|
Add tests for a React Component
|
Python
|
mit
|
amol-/dukpy,amol-/dukpy,amol-/dukpy
|
484e5693b2f3e0bc8c238cd64afeaad17bfa6673
|
skimage/viewer/qt/QtCore.py
|
skimage/viewer/qt/QtCore.py
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
def pyqtSignal(*args, **kwargs):
pass
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects for buildbot (which doesn't have Qt, but imports viewer).
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def pyqtSignal(*args, **kwargs):
pass
|
Add attributes to Mock object to fix Travis build
|
Add attributes to Mock object to fix Travis build
|
Python
|
bsd-3-clause
|
ajaybhat/scikit-image,chintak/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,SamHames/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,jwiggins/scikit-image,rjeli/scikit-image,blink1073/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,paalge/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,chriscrosscutler/scikit-image,oew1v07/scikit-image,chintak/scikit-image,pratapvardhan/scikit-image,youprofit/scikit-image,paalge/scikit-image,emon10005/scikit-image,emon10005/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,pratapvardhan/scikit-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,blink1073/scikit-image,bsipocz/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,almarklein/scikit-image,youprofit/scikit-image,WarrenWeckesser/scikits-image,dpshelio/scikit-image,newville/scikit-image,Hiyorimi/scikit-image,juliusbierk/scikit-image,paalge/scikit-image,oew1v07/scikit-image,ajaybhat/scikit-image,Midafi/scikit-image,michaelaye/scikit-image,chintak/scikit-image,robintw/scikit-image,michaelpacer/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,newville/scikit-image,keflavich/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,bennlich/scikit-image,rjeli/scikit-image,Britefury/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image
|
2a32fc912a5839f627a216918e4671e6547ee53b
|
tests/utils/driver.py
|
tests/utils/driver.py
|
import os
from importlib import import_module
from .testdriver import TestDriver
class Driver(TestDriver):
drivers = {}
def __new__(cls, type, *args, **kwargs):
if type not in cls.drivers:
try:
mod = import_module('onitu.drivers.{}.tests.driver'.
format(type))
except ImportError:
raise KeyError("No such driver {}".format(repr(type)))
cls.drivers[type] = mod.Driver
return cls.drivers[type](*args, **kwargs)
class LocalStorageDriver(TestDriver):
def __new__(cls, *args, **kwargs):
return Driver('local_storage', *args, **kwargs)
class TargetDriver(Driver):
def __new__(cls, *args, **kwargs):
type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage')
return Driver(type, *args, **kwargs)
|
import os
import pkg_resources
from .testdriver import TestDriver
class Driver(TestDriver):
drivers = {}
def __new__(cls, name, *args, **kwargs):
entry_points = pkg_resources.iter_entry_points('onitu.tests')
tests_modules = {e.name: e for e in entry_points}
if name not in tests_modules:
raise ImportError(
"Cannot import tests for driver {}".format(name)
)
try:
tests = tests_modules[name].load()
except ImportError as e:
raise ImportError(
"Error importing tests for driver {}: {}".format(name, e)
)
try:
driver = tests.Driver
except ImportError:
raise ImportError(
"Tests for driver {} don't expose a"
"Driver class".format(name)
)
cls.drivers[name] = driver
return driver(*args, **kwargs)
class LocalStorageDriver(TestDriver):
def __new__(cls, *args, **kwargs):
return Driver('local_storage', *args, **kwargs)
class TargetDriver(Driver):
def __new__(cls, *args, **kwargs):
type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage')
return Driver(type, *args, **kwargs)
|
Load tests helpers using entry_points
|
Load tests helpers using entry_points
|
Python
|
mit
|
onitu/onitu,onitu/onitu,onitu/onitu
|
86f6191867141d7a7a165b227255d7b4406eb4f4
|
accounts/utils.py
|
accounts/utils.py
|
"""
Utility functions for the accounts app.
"""
from django.core.exceptions import ObjectDoesNotExist
def get_user_city(user):
"""Return the user's city. If unavailable, return an empty string."""
# If the profile is absent (i.e. superuser), return None.
try:
city = user.common_profile.city
except ObjectDoesNotExist:
city = ''
return city
def get_user_gender(user):
"""Return the user's city. If unavailable, return an empty string."""
# If either the profile (i.e. superuser) or the college
# (i.e. non-student) are absent, return an empty string.
try:
gender = user.common_profile.college.gender
except (ObjectDoesNotExist, AttributeError):
gender = ''
return gender
|
"""
Utility functions for the accounts app.
"""
from django.core.exceptions import ObjectDoesNotExist
def get_user_city(user):
"""Return the user's city. If unavailable, return an empty string."""
# If the profile is absent (i.e. superuser), return None.
try:
city = user.common_profile.city
except (ObjectDoesNotExist, AttributeError):
city = ''
return city
def get_user_gender(user):
"""Return the user's city. If unavailable, return an empty string."""
# If either the profile (i.e. superuser) or the college
# (i.e. non-student) are absent, return an empty string.
try:
gender = user.common_profile.college.gender
except (ObjectDoesNotExist, AttributeError):
gender = ''
return gender
|
Fix crash on non-logged in users.
|
Fix crash on non-logged in users.
|
Python
|
agpl-3.0
|
osamak/student-portal,osamak/student-portal,osamak/student-portal,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,osamak/student-portal,enjaz/enjaz
|
6795e112e4f7037449850a361ab6b2f85fc2a66e
|
service/settings/staging.py
|
service/settings/staging.py
|
from service.settings.production import *
ALLOWED_HOSTS = [
'fantastic-doodle--staging.herokuapp.com',
]
|
from service.settings.production import *
ALLOWED_HOSTS = [
'fantastic-doodle--staging.herokuapp.com',
'.herokuapp.com',
]
|
Add .herokuapp.com to ALLOWED_HOSTS to support review apps
|
Add .herokuapp.com to ALLOWED_HOSTS to support review apps
|
Python
|
unlicense
|
Mystopia/fantastic-doodle
|
3800c095f58e9bc2ca8c580537ea576049bbfe2d
|
sell/urls.py
|
sell/urls.py
|
from django.conf.urls import url
from sell import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
]
|
from django.conf.urls import url
from sell import views
urlpatterns = [
url(r'^$', views.index),
url(r'^personal/$', views.personal_data),
url(r'^books/$', views.books),
url(r'^summary/$', views.summary),
]
|
Remove unnecessary URL name in Sell app
|
Remove unnecessary URL name in Sell app
|
Python
|
agpl-3.0
|
m4tx/egielda,m4tx/egielda,m4tx/egielda
|
c47c043e76ac037456b8e966a5f9d60a151e3120
|
elodie/geolocation.py
|
elodie/geolocation.py
|
from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
if not path.exists('./config.ini'):
return None
config = ConfigParser()
config.read('./config.ini')
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
|
from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__)))
if not path.exists(config_file):
return None
config = ConfigParser()
config.read(config_file)
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
|
Use absolute path for config file so it works with apps like Hazel
|
Use absolute path for config file so it works with apps like Hazel
|
Python
|
apache-2.0
|
zserg/elodie,zingo/elodie,jmathai/elodie,jmathai/elodie,zingo/elodie,zserg/elodie,zserg/elodie,jmathai/elodie,zserg/elodie,jmathai/elodie,zingo/elodie
|
4fd47cf73d59cb9e9d83cea12026878f65df858a
|
numscons/core/allow_undefined.py
|
numscons/core/allow_undefined.py
|
import os
from subprocess import Popen, PIPE
def get_darwin_version():
p = Popen(["sw_vers", "-productVersion"], stdout = PIPE, stderr = PIPE)
st = p.wait()
if st:
raise RuntimeError(
"Could not execute sw_vers -productVersion to get version")
verstring = p.stdout.next()
a, b, c = verstring.split(".")
try:
major = int(a)
minor = int(b)
micro = int(c)
return major, minor, micro
except ValueError:
raise ValueError("Could not parse version string %s" % verstring)
def get_darwin_allow_undefined():
"""Return the list of flags to allow undefined symbols in a shared library.
On MAC OS X, takes MACOSX_DEPLOYMENT_TARGET into account."""
major, minor, micro = get_darwin_version()
if major == 10:
if minor < 3:
flag = ["-Wl,-undefined", "-Wl,suppress"]
else:
try:
deptarget = os.environ['MACOSX_DEPLOYMENT_TARGET']
ma, mi = deptarget.split(".")
if mi < 3:
flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress']
else:
flag = ['-Wl,-undefined', '-Wl,dynamic_lookup']
except KeyError:
flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress']
else:
# Non existing mac os x ? Just set to empty list
flag = []
return flag
|
"""This module handle platform specific link options to allow undefined symbols
in shared libraries and dynamically loaded libraries."""
import os
from subprocess import Popen, PIPE
def get_darwin_version():
p = Popen(["sw_vers", "-productVersion"], stdout = PIPE, stderr = PIPE)
st = p.wait()
if st:
raise RuntimeError(
"Could not execute sw_vers -productVersion to get version")
verstring = p.stdout.next()
a, b, c = verstring.split(".")
try:
major = int(a)
minor = int(b)
micro = int(c)
return major, minor, micro
except ValueError:
raise ValueError("Could not parse version string %s" % verstring)
def get_darwin_allow_undefined():
"""Return the list of flags to allow undefined symbols in a shared library.
On MAC OS X, takes MACOSX_DEPLOYMENT_TARGET into account."""
major, minor, micro = get_darwin_version()
if major == 10:
if minor < 3:
flag = ["-Wl,-undefined", "-Wl,suppress"]
else:
try:
deptarget = os.environ['MACOSX_DEPLOYMENT_TARGET']
ma, mi = deptarget.split(".")
if mi < 3:
flag = ['-Wl,-flat_namespace', '-Wl,-undefined',
'-Wl,suppress']
else:
flag = ['-Wl,-undefined', '-Wl,dynamic_lookup']
except KeyError:
flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress']
else:
# Non existing mac os x ? Just set to empty list
flag = []
return flag
|
Add docstring + fix missing import in allow_udnefined module.
|
Add docstring + fix missing import in allow_udnefined module.
|
Python
|
bsd-3-clause
|
cournape/numscons,cournape/numscons,cournape/numscons
|
4e0ec0fdf791fc9af1e83171b54054bd53d5536b
|
django_evolution/compat/apps.py
|
django_evolution/compat/apps.py
|
try:
from django.apps.registry import apps
get_apps = apps.get_apps
cache = None
except ImportError:
from django.db.models.loading import cache
get_apps = cache.get_apps
apps = None
def get_app(app_label, emptyOK=False):
"""Return the app with the given label.
This returns the app from the app registry on Django >= 1.7, and from
the old-style cache on Django < 1.7.
The ``emptyOK`` argument is ignored for Django >= 1.7.
"""
if apps:
return apps.get_app(app_label)
else:
return apps.get_app(app_label, emptyOK)
__all__ = ['get_app', 'get_apps']
|
try:
from django.apps.registry import apps
# Django >= 1.7
get_apps = apps.get_apps
cache = None
except ImportError:
from django.db.models.loading import cache
# Django < 1.7
get_apps = cache.get_apps
apps = None
def get_app(app_label, emptyOK=False):
"""Return the app with the given label.
This returns the app from the app registry on Django >= 1.7, and from
the old-style cache on Django < 1.7.
The ``emptyOK`` argument is ignored for Django >= 1.7.
"""
if apps:
return get_app(app_label)
else:
return get_app(app_label, emptyOK)
__all__ = ['get_app', 'get_apps']
|
Fix the new get_app compatibility function.
|
Fix the new get_app compatibility function.
The get_app compatibility function was trying to call get_apps() on
the apps variable, instead of calling the extracted version that was
pre-computed. Now it uses the correct versions.
|
Python
|
bsd-3-clause
|
beanbaginc/django-evolution
|
82ae5e5cf3da57af771aa688ec7d951879423578
|
big_o/test/test_complexities.py
|
big_o/test/test_complexities.py
|
import unittest
import numpy as np
from numpy.testing import assert_array_almost_equal
from big_o import complexities
class TestComplexities(unittest.TestCase):
def test_compute(self):
x = np.linspace(10, 100, 100)
y = 3.0 * x + 2.0
linear = complexities.Linear()
linear.fit(x, y)
assert_array_almost_equal(linear.compute(x), y, 10)
def test_not_fitted(self):
linear = complexities.Linear()
self.assertRaises(complexities.NotFittedError, linear.compute, 100)
def test_str_includes_units(self):
x = np.linspace(10, 100, 100)
y = 3.0 * x + 2.0
linear = complexities.Linear()
linear.fit(x, y)
linear_str = str(linear)
assert '(sec)' in linear_str
|
import unittest
import numpy as np
from numpy.testing import assert_array_almost_equal
from big_o import complexities
class TestComplexities(unittest.TestCase):
def test_compute(self):
desired = [
(lambda x: 2.+x*0., complexities.Constant),
(lambda x: 5.*x+3., complexities.Linear),
(lambda x: 8.1*x**2.+0.9, complexities.Quadratic),
(lambda x: 1.0*x**3+11.0, complexities.Cubic),
(lambda x: 5.2*x**2.5, complexities.Polynomial),
(lambda x: 8.5*np.log(x)+99.0, complexities.Logarithmic),
(lambda x: 1.7*x*np.log(x)+2.74, complexities.Linearithmic),
(lambda x: 3.14**x, complexities.Exponential)
]
x = np.linspace(10, 100, 100)
for f, class_ in desired:
y = f(x)
complexity = class_()
complexity.fit(x, y)
assert_array_almost_equal(complexity.compute(x), y, 10, "compute() failed to match expected values for class %r" % class_)
def test_not_fitted(self):
linear = complexities.Linear()
self.assertRaises(complexities.NotFittedError, linear.compute, 100)
def test_str_includes_units(self):
x = np.linspace(10, 100, 100)
y = 3.0 * x + 2.0
linear = complexities.Linear()
linear.fit(x, y)
linear_str = str(linear)
assert '(sec)' in linear_str
|
Add compute test cases for all complexity classes
|
Add compute test cases for all complexity classes
|
Python
|
bsd-3-clause
|
pberkes/big_O
|
219c474860ca7674070ef19fa95f0282b7c92399
|
mpages/admin.py
|
mpages/admin.py
|
from django.contrib import admin
from .models import Page, PageRead, Tag
class PageAdmin(admin.ModelAdmin):
search_fields = ["title"]
list_display = ["title", "parent", "updated"]
prepopulated_fields = {"slug": ("title",)}
readonly_fields = ["updated"]
ordering = ["parent", "title"]
filter_horizontal = ("tags",)
save_on_top = True
fieldsets = (
(
None,
{
"fields": (
("content",),
("title", "parent"),
("slug", "updated"),
("tags",),
)
},
),
)
admin.site.register(Page, PageAdmin)
admin.site.register(PageRead)
admin.site.register(Tag)
|
from django.contrib import admin
from .models import Page, PageRead, Tag
class PageAdmin(admin.ModelAdmin):
search_fields = ["title"]
list_display = ["title", "parent", "updated"]
prepopulated_fields = {"slug": ("title",)}
readonly_fields = ["updated"]
ordering = ["parent", "title"]
filter_horizontal = ("tags",)
save_on_top = True
fieldsets = (
(
None,
{
"fields": (
("content",),
("title", "parent"),
("slug", "updated"),
("tags",),
)
},
),
)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "parent":
kwargs["queryset"] = Page.objects.order_by("title")
return super(PageAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
admin.site.register(Page, PageAdmin)
admin.site.register(PageRead)
admin.site.register(Tag)
|
Order parents in Admin select field
|
Order parents in Admin select field
|
Python
|
bsd-3-clause
|
ahernp/DMCM,ahernp/DMCM,ahernp/DMCM
|
f76783ddb616c74e22feb003cb12952375cad658
|
corehq/apps/hqwebapp/encoders.py
|
corehq/apps/hqwebapp/encoders.py
|
import json
import datetime
from django.utils.encoding import force_unicode
from django.utils.functional import Promise
class LazyEncoder(json.JSONEncoder):
"""Taken from https://github.com/tomchristie/django-rest-framework/issues/87
This makes sure that ugettext_lazy refrences in a dict are properly evaluated
"""
def default(self, obj):
if isinstance(obj, Promise):
return force_unicode(obj)
return super(LazyEncoder, self).default(obj)
|
import json
import datetime
from decimal import Decimal
from django.utils.encoding import force_unicode
from django.utils.functional import Promise
class DecimalEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Decimal):
return str(obj)
return super(DecimalEncoder, self).default(obj)
class LazyEncoder(DecimalEncoder):
"""Taken from https://github.com/tomchristie/django-rest-framework/issues/87
This makes sure that ugettext_lazy refrences in a dict are properly evaluated
"""
def default(self, obj):
if isinstance(obj, Promise):
return force_unicode(obj)
return super(LazyEncoder, self).default(obj)
|
Fix for json encoding Decimal values
|
Fix for json encoding Decimal values
|
Python
|
bsd-3-clause
|
SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
991973e554758e7a9881453d7668925902e610b9
|
tests.py
|
tests.py
|
#!/usr/bin/env python
import unittest
import git_mnemonic as gm
class GitMnemonicTests(unittest.TestCase):
def test_encode(self):
self.assertTrue(gm.encode("master"))
def test_decode(self):
self.assertTrue(gm.decode("bis alo ama aha"))
def test_invertible(self):
once = gm.encode("master")
self.assertEquals(gm.encode(gm.decode(once)), once)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
#!/usr/bin/env python
import unittest
import git_mnemonic as gm
class GitMnemonicTests(unittest.TestCase):
def test_encode(self):
self.assertTrue(gm.encode("master"))
def test_decode(self):
self.assertTrue(gm.decode("bis alo ama aha"))
def test_invertible(self):
once = gm.encode("master")
self.assertEquals(gm.encode(gm.decode(once)), once)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(GitMnemonicTests)
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
import sys
sys.exit(1)
|
Make unittest test runner work in older pythons
|
Make unittest test runner work in older pythons
|
Python
|
mit
|
glenjamin/git-mnemonic
|
cb08d25f49b8b4c5177c8afdd9a69330992ee854
|
tests/replay/test_replay.py
|
tests/replay/test_replay.py
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import pytest
from cookiecutter import replay, main, exceptions
def test_get_replay_file_name():
"""Make sure that replay.get_file_name generates a valid json file path."""
assert replay.get_file_name('foo', 'bar') == 'foo/bar.json'
@pytest.fixture(params=[
{'no_input': True},
{'extra_context': {}},
{'no_input': True, 'extra_context': {}},
])
def invalid_kwargs(request):
return request.param
def test_raise_on_invalid_mode(invalid_kwargs):
with pytest.raises(exceptions.InvalidModeException):
main.cookiecutter('foo', replay=True, **invalid_kwargs)
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import pytest
from cookiecutter import replay, main, exceptions
def test_get_replay_file_name():
"""Make sure that replay.get_file_name generates a valid json file path."""
assert replay.get_file_name('foo', 'bar') == 'foo/bar.json'
@pytest.fixture(params=[
{'no_input': True},
{'extra_context': {}},
{'no_input': True, 'extra_context': {}},
])
def invalid_kwargs(request):
return request.param
def test_raise_on_invalid_mode(invalid_kwargs):
with pytest.raises(exceptions.InvalidModeException):
main.cookiecutter('foo', replay=True, **invalid_kwargs)
def test_main_does_not_invoke_dump_but_load(mocker):
mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config')
mock_gen_context = mocker.patch('cookiecutter.main.generate_context')
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
mock_replay_dump = mocker.patch('cookiecutter.main.dump')
mock_replay_load = mocker.patch('cookiecutter.main.load')
main.cookiecutter('foobar', replay=True)
assert not mock_prompt.called
assert not mock_gen_context.called
assert not mock_replay_dump.called
assert mock_replay_load.called
assert mock_gen_files.called
def test_main_does_not_invoke_load_but_dump(mocker):
mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config')
mock_gen_context = mocker.patch('cookiecutter.main.generate_context')
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
mock_replay_dump = mocker.patch('cookiecutter.main.dump')
mock_replay_load = mocker.patch('cookiecutter.main.load')
main.cookiecutter('foobar', replay=False)
assert mock_prompt.called
assert mock_gen_context.called
assert mock_replay_dump.called
assert not mock_replay_load.called
assert mock_gen_files.called
|
Add tests for a correct behaviour in cookiecutter.main for replay
|
Add tests for a correct behaviour in cookiecutter.main for replay
|
Python
|
bsd-3-clause
|
christabor/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,cguardia/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,michaeljoseph/cookiecutter,moi65/cookiecutter,terryjbates/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,agconti/cookiecutter,cguardia/cookiecutter,christabor/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,stevepiercy/cookiecutter,takeflight/cookiecutter,pjbull/cookiecutter,benthomasson/cookiecutter,agconti/cookiecutter,benthomasson/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,audreyr/cookiecutter,moi65/cookiecutter,dajose/cookiecutter,hackebrot/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,willingc/cookiecutter
|
9547988a1a9ef8faf22d9bfa881f4e542637fd46
|
utils.py
|
utils.py
|
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
establish_connection()
|
import xmlrpclib
import cPickle
import subprocess
from time import sleep
p = None
s = None
def start_plot_server():
global p
if p is None:
p = subprocess.Popen(["python", "plot_server.py"])
def stop_plot_server():
if p is not None:
p.terminate()
sleep(0.01)
p.kill()
def plot_server_alive():
global s
try:
s.alive()
except Exception, e:
if str(e).endswith("Connection refused"):
return False
else:
raise
return True
def establish_connection():
global s
if s is not None:
return
s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True)
if not plot_server_alive():
start_plot_server()
print "waiting for the plot server to start up..."
while not plot_server_alive():
sleep(0.05)
print " done."
def plot(vert, triangles):
establish_connection()
print "plotting using mayavi..."
v = cPickle.dumps(vert)
t = cPickle.dumps(triangles)
s.plot(v, t)
print " done."
|
Establish connection only when needed
|
Establish connection only when needed
|
Python
|
bsd-3-clause
|
certik/mhd-hermes,certik/mhd-hermes
|
f3b9cc6392e4c271ae11417357ecdc196f1c3ae7
|
python_scripts/extractor_python_readability_server.py
|
python_scripts/extractor_python_readability_server.py
|
#!/usr/bin/python
import sys
import os
import glob
#sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py"))
sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/"))
sys.path.append(os.path.dirname(__file__) )
from thrift.transport import TSocket
from thrift.server import TServer
#import thrift_solr
import ExtractorService
import sys
import readability
import readability
def extract_with_python_readability( raw_content ):
doc = readability.Document( raw_content )
return [ u'' + doc.short_title(),
u'' + doc.summary() ]
class ExtractorHandler:
def extract_html( self, raw_html ):
#print raw_html
#raw_html = raw_html.encode( 'utf-8' )
ret = extract_with_python_readability( raw_html )
#print ret[1]
return ret
handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
server = TServer.TThreadPoolServer(processor, listening_socket)
print ("[Server] Started")
server.serve()
|
#!/usr/bin/python
import sys
import os
import glob
#sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py"))
sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/"))
sys.path.append(os.path.dirname(__file__) )
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated
#import thrift_solr
import ExtractorService
import sys
import readability
import readability
def extract_with_python_readability( raw_content ):
doc = readability.Document( raw_content )
return [ u'' + doc.short_title(),
u'' + doc.summary() ]
class ExtractorHandler:
def extract_html( self, raw_html ):
#print raw_html
#raw_html = raw_html.encode( 'utf-8' )
ret = extract_with_python_readability( raw_html )
#print ret[1]
return ret
handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
tfactory = TTransport.TBufferedTransportFactory()
#pfactory = TBinaryProtocol.TBinaryProtocolFactory()
pfactory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory()
server = TServer.TThreadPoolServer(processor, listening_socket, tfactory, pfactory)
print ("[Server] Started")
server.serve()
|
Use the TBinaryProtocolAccelerated protocol instead of TBinaryProtocol to improve performance.
|
Use the TBinaryProtocolAccelerated protocol instead of TBinaryProtocol to improve performance.
|
Python
|
agpl-3.0
|
AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud
|
3ce1b928f36c314ab07c334843b2db96626f469e
|
kyokai/asphalt.py
|
kyokai/asphalt.py
|
"""
Asphalt framework mixin for Kyokai.
"""
import logging
import asyncio
from functools import partial
from typing import Union
from asphalt.core import Component, resolve_reference, Context
from typeguard import check_argument_types
from kyokai.app import Kyokai
from kyokai.protocol import KyokaiProtocol
from kyokai.context import HTTPRequestContext
logger = logging.getLogger("Kyokai")
class KyoukaiComponent(Component):
def __init__(self, app: Union[str, Kyokai], ip: str = '0.0.0.0', port: int = 4444, **cfg):
assert check_argument_types()
if not isinstance(app, Kyokai):
self.app = resolve_reference(app)
else:
self.app = app
self.ip = ip
self.port = port
self._extra_cfg = cfg
# Set HTTPRequestContext's `cfg` val to the extra config.
HTTPRequestContext.cfg = self._extra_cfg
self.server = None
self.app.reconfigure(cfg)
def get_protocol(self, ctx: Context):
return KyokaiProtocol(self.app, ctx)
async def start(self, ctx: Context):
"""
Starts a Kyokai server.
"""
protocol = self.get_protocol(ctx)
self.server = await asyncio.get_event_loop().create_server(protocol, self.ip, self.port)
logger.info("Kyokai serving on {}:{}.".format(self.ip, self.port))
|
"""
Asphalt framework mixin for Kyokai.
"""
import logging
import asyncio
from functools import partial
from typing import Union
from asphalt.core import Component, resolve_reference, Context
from typeguard import check_argument_types
from kyokai.app import Kyokai
from kyokai.protocol import KyokaiProtocol
from kyokai.context import HTTPRequestContext
logger = logging.getLogger("Kyokai")
class KyoukaiComponent(Component):
def __init__(self, app: Union[str, Kyokai], ip: str = '0.0.0.0', port: int = 4444, **cfg):
assert check_argument_types()
if not isinstance(app, Kyokai):
self.app = resolve_reference(app)
else:
self.app = app
self.ip = ip
self.port = port
self._extra_cfg = cfg
# Set HTTPRequestContext's `cfg` val to the extra config.
HTTPRequestContext.cfg = self._extra_cfg
self.server = None
self.app.reconfigure(cfg)
def get_protocol(self, ctx: Context):
return KyokaiProtocol(self.app, ctx)
async def start(self, ctx: Context):
"""
Starts a Kyokai server.
"""
protocol = partial(self.get_protocol, ctx)
self.server = await asyncio.get_event_loop().create_server(protocol, self.ip, self.port)
logger.info("Kyokai serving on {}:{}.".format(self.ip, self.port))
|
Make this into a partial to get the protocol correctly.
|
Make this into a partial to get the protocol correctly.
|
Python
|
mit
|
SunDwarf/Kyoukai
|
b352c3e1f5e8812d29f2e8a1bca807bea5da8cc4
|
test/test_hx_launcher.py
|
test/test_hx_launcher.py
|
import pytest_twisted
from hendrix.ux import main
from hendrix.options import HendrixOptionParser
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
cls.things_written = whatever
class MockStdOut(object):
@classmethod
def write(cls, whatever):
HendrixOptionParser.print_help(MockFile)
assert MockFile.things_written == whatever
mocker.patch('sys.stdout', new=MockStdOut)
main([])
|
from hendrix.options import HendrixOptionParser
from hendrix.ux import main
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
cls.things_written = whatever
class MockStdOut(object):
@classmethod
def write(cls, whatever):
HendrixOptionParser.print_help(MockFile)
assert MockFile.things_written == whatever
mocker.patch('sys.stdout', new=MockStdOut)
main([])
|
Test for the hx launcher.
|
Test for the hx launcher.
|
Python
|
mit
|
hangarunderground/hendrix,hendrix/hendrix,hangarunderground/hendrix,hendrix/hendrix,jMyles/hendrix,hendrix/hendrix,jMyles/hendrix,hangarunderground/hendrix,hangarunderground/hendrix,jMyles/hendrix
|
ad21c9255f6246944cd032ad50082c0aca46fcb3
|
neurokernel/tools/mpi.py
|
neurokernel/tools/mpi.py
|
#!/usr/bin/env python
"""
MPI utilities.
"""
from mpi4py import MPI
import twiggy
class MPIOutput(twiggy.outputs.Output):
"""
Output messages to a file via MPI I/O.
"""
def __init__(self, name, format, comm,
mode=MPI.MODE_CREATE | MPI.MODE_WRONLY,
close_atexit=True):
self.filename = name
self._format = format if format is not None else self._noop_format
self.comm = comm
self.mode = mode
super(MPIOutput, self).__init__(format, close_atexit)
def _open(self):
self.file = MPI.File.Open(self.comm, self.filename,
self.mode)
def _close(self):
self.file.Close()
def _write(self, x):
self.file.Iwrite_shared(x)
|
#!/usr/bin/env python
"""
MPI utilities.
"""
from mpi4py import MPI
import twiggy
class MPIOutput(twiggy.outputs.Output):
"""
Output messages to a file via MPI I/O.
"""
def __init__(self, name, format, comm,
mode=MPI.MODE_CREATE | MPI.MODE_WRONLY,
close_atexit=True):
self.filename = name
self._format = format if format is not None else self._noop_format
self.comm = comm
self.mode = mode
super(MPIOutput, self).__init__(format, close_atexit)
def _open(self):
self.file = MPI.File.Open(self.comm, self.filename,
self.mode)
def _close(self):
self.file.Close()
def _write(self, x):
self.file.Iwrite_shared(x)
# This seems to be necessary to prevent some log lines from being lost:
self.file.Sync()
|
Call MPIOutput.file.Sync() in MPIOutput.file._write() to prevent log lines from intermittently being lost.
|
Call MPIOutput.file.Sync() in MPIOutput.file._write() to prevent log lines from intermittently being lost.
|
Python
|
bsd-3-clause
|
cerrno/neurokernel
|
4485b65722645d6c9617b5ff4aea6d62ee8a9adf
|
bumblebee_status/modules/contrib/optman.py
|
bumblebee_status/modules/contrib/optman.py
|
"""Displays currently active gpu by optimus-manager
Requires the following packages:
* optimus-manager
"""
import subprocess
import core.module
import core.widget
class Module(core.module.Module):
def __init__(self, config, theme):
super().__init__(config, theme, core.widget.Widget(self.output))
self.__gpumode = ""
def output(self, _):
return "GPU: {}".format(self.__gpumode)
def update(self):
cmd = ["optimus-manager", "--print-mode"]
output = (
subprocess.Popen(cmd, stdout=subprocess.PIPE)
.communicate()[0]
.decode("utf-8")
.lower()
)
if "intel" in output:
self.__gpumode = "Intel"
elif "nvidia" in output:
self.__gpumode = "Nvidia"
elif "amd" in output:
self.__gpumode = "AMD"
|
"""Displays currently active gpu by optimus-manager
Requires the following packages:
* optimus-manager
"""
import core.module
import core.widget
import util.cli
class Module(core.module.Module):
def __init__(self, config, theme):
super().__init__(config, theme, core.widget.Widget(self.output))
self.__gpumode = ""
def output(self, _):
return "GPU: {}".format(self.__gpumode)
def update(self):
cmd = "optimus-manager --print-mode"
output = util.cli.execute(cmd).strip()
if "intel" in output:
self.__gpumode = "Intel"
elif "nvidia" in output:
self.__gpumode = "Nvidia"
elif "amd" in output:
self.__gpumode = "AMD"
|
Use the existing util.cli module
|
Use the existing util.cli module
|
Python
|
mit
|
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
|
3307bfb7075a527dc7805da2ff735f461f5fc02f
|
employees/models.py
|
employees/models.py
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Role(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=100)
weight = models.PositiveSmallIntegerField(default=1)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = "categories"
ordering = ['weight']
class Employee(AbstractUser):
role = models.ForeignKey(Role, null=True, blank=True)
skype_id = models.CharField(max_length=200, null=True, blank=True)
last_month_score = models.PositiveIntegerField(default=0)
current_month_score = models.PositiveIntegerField(default=0)
level = models.PositiveIntegerField(default=0)
total_score = models.PositiveIntegerField(default=0)
avatar = models.ImageField(upload_to='avatar', null=True, blank=True)
categories = models.ManyToManyField(Category)
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Role(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=100)
weight = models.PositiveSmallIntegerField(default=1)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = "categories"
ordering = ['weight']
class Employee(AbstractUser):
role = models.ForeignKey(Role, null=True, blank=True)
skype_id = models.CharField(max_length=200, null=True, blank=True)
last_month_score = models.PositiveIntegerField(default=0)
current_month_score = models.PositiveIntegerField(default=0)
level = models.PositiveIntegerField(default=0)
total_score = models.PositiveIntegerField(default=0)
avatar = models.ImageField(upload_to='avatar', null=True, blank=True)
categories = models.ManyToManyField(Category, blank=True)
|
Change categories field to non required.
|
Change categories field to non required.
|
Python
|
mit
|
neosergio/allstars
|
b8bc10e151f12e2bfe2c03765a410a04325a3233
|
satchmo/product/templatetags/satchmo_product.py
|
satchmo/product/templatetags/satchmo_product.py
|
from django import template
from django.conf import settings
from django.core import urlresolvers
from django.template import Context, Template
from django.utils.translation import get_language, ugettext_lazy as _
from satchmo.configuration import config_value
from satchmo.product.models import Category
from satchmo.shop.templatetags import get_filter_args
register = template.Library()
def is_producttype(product, ptype):
"""Returns True if product is ptype"""
if ptype in product.get_subtypes():
return "true"
else:
return ""
register.filter('is_producttype', is_producttype)
def product_images(product, args=""):
args, kwargs = get_filter_args(args,
keywords=('include_main', 'maximum'),
boolargs=('include_main'),
intargs=('maximum'),
stripquotes=True)
q = product.productimage_set
if kwargs.get('include_main', True):
q = q.all()
else:
main = product.main_image
q = q.exclude(id = main.id)
maximum = kwargs.get('maximum', -1)
if maximum>-1:
q = list(q)[:maximum]
return q
register.filter('product_images', product_images)
def smart_attr(product, key):
"""Run the smart_attr function on the spec'd product
"""
return product.smart_attr(key)
register.filter('smart_attr', smart_attr)
|
from django import template
from django.conf import settings
from django.core import urlresolvers
from django.template import Context, Template
from django.utils.translation import get_language, ugettext_lazy as _
from satchmo.configuration import config_value
from satchmo.product.models import Category
from satchmo.shop.templatetags import get_filter_args
register = template.Library()
def is_producttype(product, ptype):
"""Returns True if product is ptype"""
if ptype in product.get_subtypes():
return True
else:
return False
register.filter('is_producttype', is_producttype)
def product_images(product, args=""):
args, kwargs = get_filter_args(args,
keywords=('include_main', 'maximum'),
boolargs=('include_main'),
intargs=('maximum'),
stripquotes=True)
q = product.productimage_set
if kwargs.get('include_main', True):
q = q.all()
else:
main = product.main_image
q = q.exclude(id = main.id)
maximum = kwargs.get('maximum', -1)
if maximum>-1:
q = list(q)[:maximum]
return q
register.filter('product_images', product_images)
def smart_attr(product, key):
"""Run the smart_attr function on the spec'd product
"""
return product.smart_attr(key)
register.filter('smart_attr', smart_attr)
|
Change the is_producttype template tag to return a boolean rather than a string.
|
Change the is_producttype template tag to return a boolean rather than a string.
--HG--
extra : convert_revision : svn%3Aa38d40e9-c014-0410-b785-c606c0c8e7de/satchmo/trunk%401200
|
Python
|
bsd-3-clause
|
Ryati/satchmo,ringemup/satchmo,Ryati/satchmo,dokterbob/satchmo,twidi/satchmo,twidi/satchmo,dokterbob/satchmo,ringemup/satchmo
|
b4247769fcaa67d09e0f38d1283cf4f28ddc350e
|
cookiecutter/extensions.py
|
cookiecutter/extensions.py
|
# -*- coding: utf-8 -*-
"""Jinja2 extensions."""
import json
from jinja2.ext import Extension
class JsonifyExtension(Extension):
"""Jinja2 extension to convert a python object to json."""
def __init__(self, environment):
"""Initilize extension with given environment."""
super(JsonifyExtension, self).__init__(environment)
def jsonify(obj):
return json.dumps(obj, sort_keys=True, indent=4)
environment.filters['jsonify'] = jsonify
|
# -*- coding: utf-8 -*-
"""Jinja2 extensions."""
import json
from jinja2.ext import Extension
class JsonifyExtension(Extension):
"""Jinja2 extension to convert a Python object to JSON."""
def __init__(self, environment):
"""Initialize the extension with the given environment."""
super(JsonifyExtension, self).__init__(environment)
def jsonify(obj):
return json.dumps(obj, sort_keys=True, indent=4)
environment.filters['jsonify'] = jsonify
|
Fix typo and improve grammar in doc string
|
Fix typo and improve grammar in doc string
|
Python
|
bsd-3-clause
|
michaeljoseph/cookiecutter,dajose/cookiecutter,audreyr/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,hackebrot/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter
|
42ec5ed6d56fcc59c99d175e1c9280d00cd3bef1
|
tests/test_published_results.py
|
tests/test_published_results.py
|
""" To test if the new code produces the same precision values on the published results."""
from __future__ import division, print_function
import pytest
import numpy as np
import eniric.Qcalculator as Q
import eniric.IOmodule as IO
from bin.prec_1 import calc_prec1
# For python2.X compatibility
file_error_to_catch = getattr(__builtins__, 'FileNotFoundError', IOError)
path = "data/Published_Results/resampled/"
@pytest.mark.xfail(raises=file_error_to_catch) # Data file may not exist
def test_presicion_1():
""" New precision 1 test that works."""
published_results = {1: 3.8, 5: 9.1, 10: 20.7}
path = "data/resampled/"
for vsini in [1, 5, 10]:
# name = "Spectrum_M0-PHOENIX-ACES_Yband_vsini{0}.0_R100k_res3.txt".format(vsini)
__, p1 = calc_prec1("M0", "Y", vsini, "100k", 3, resampled_dir=path)
assert np.round(p1, 1).value == published_results[vsini]
|
""" To test if the new code produces the same precision values on the published results."""
from __future__ import division, print_function
import pytest
import numpy as np
import eniric.Qcalculator as Q
import eniric.IOmodule as IO
from bin.prec_1 import calc_prec1
# For python2.X compatibility
file_error_to_catch = getattr(__builtins__, 'FileNotFoundError', IOError)
path = "data/Published_Results/resampled/"
@pytest.mark.xfail(raises=file_error_to_catch) # Data file may not exist
def test_presicion_1():
""" New precision 1 test that works."""
published_results = {1: 3.8, 5: 9.1, 10: 20.7}
path = "data/resampled/"
for vsini in [1, 5, 10]:
# name = "Spectrum_M0-PHOENIX-ACES_Yband_vsini{0}.0_R100k_res3.txt".format(vsini)
__, p1 = calc_prec1("M0", "Y", vsini, "100k", 3, resampled_dir=path)
# assert np.round(p1, 1).value == published_results[vsini]
assert np.round(100 * p1, 1).value == published_results[vsini] # With incorect normalization
|
Add known offset for known bad calibration.
|
Add known offset for known bad calibration.
Former-commit-id: afa3d6a66e32bbcc2b20f00f7e63fba5cb45882e [formerly 0470ca22b8a24205d2eb1c66caee912c990da0b3] [formerly c23210f4056c27e61708da2f2440bce3eda151a8 [formerly 5c0a6b9c0fefd2b88b9382d4a6ed98d9eac626df]]
Former-commit-id: 8bfdaa1f7940b26aee05f20e801616f4a8d1d55d [formerly 1c85db5b2b87b73dfb28a1db171ff79a69e3a24a]
Former-commit-id: d02a26b263c5c59776a35fc130e5c96b7ac30f5d
|
Python
|
mit
|
jason-neal/eniric,jason-neal/eniric
|
f3df3b2b8e1167e953457a85f2297d28b6a39729
|
examples/Micro.Blog/microblog.py
|
examples/Micro.Blog/microblog.py
|
from getpass import getpass
from bessie import BaseClient
import config
class MicroBlogApi(BaseClient):
endpoints = config.available_endpoints
separator = '/'
base_url='https://micro.blog'
def __init__(self, path='', token=''):
self.token = token
super(self.__class__, self).__init__(path, token=token)
# override method from BaseClient to inject Authorization header
def _prepare_request(self):
super(self.__class__, self)._prepare_request()
self.request.headers['Authorization'] = 'Token {}'.format(self.token)
if __name__ == '__main__':
token = getpass('Token... ')
mba = MicroBlogApi(token=token)
# GET - https://micro.blog/posts/all
posts = mba.posts.all.get()
print(posts.status_code, posts.reason)
print(posts.json())
|
from getpass import getpass
from bessie import BaseClient
import config
class MicroBlogApi(BaseClient):
endpoints = config.available_endpoints
separator = '/'
base_url='https://micro.blog'
def __init__(self, path='', path_params=None, token=''):
self.token = token
super(self.__class__, self).__init__(path, path_params, token=token)
# override method from BaseClient to inject Authorization header
def _prepare_request(self):
super(self.__class__, self)._prepare_request()
self.request.headers['Authorization'] = 'Token {}'.format(self.token)
if __name__ == '__main__':
token = getpass('Token... ')
mba = MicroBlogApi(token=token)
# GET - https://micro.blog/posts/all
posts = mba.posts.all.get()
print(posts.status_code, posts.reason)
print(posts.json())
|
Include path_params in override constructor
|
Include path_params in override constructor
|
Python
|
mit
|
andymitchhank/bessie
|
c9980756dcee82cc570208e73ec1a2112aea0155
|
tvtk/tests/test_scene.py
|
tvtk/tests/test_scene.py
|
""" Tests for the garbage collection of Scene objects.
"""
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
import weakref
import gc
from traits.etsconfig.api import ETSConfig
from tvtk.pyface.scene import Scene
from tvtk.tests.common import restore_gc_state
class TestScene(unittest.TestCase):
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene_garbage_collected(self):
# given
scene_collected = []
scene_weakref = None
def scene_collected_callback(weakref):
scene_collected.append(True)
def do():
scene = Scene()
reference = weakref.ref(scene, scene_collected_callback)
scene.close()
return reference
# when
with restore_gc_state():
gc.disable()
scene_weakref = do()
# The Scene should have been collected.
self.assertTrue(scene_collected[0])
if __name__ == "__main__":
unittest.main()
|
""" Tests for the garbage collection of Scene objects.
"""
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
import weakref
import gc
from traits.etsconfig.api import ETSConfig
from tvtk.pyface.scene import Scene
from tvtk.tests.common import restore_gc_state
class TestScene(unittest.TestCase):
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene_garbage_collected(self):
# given
scene_collected = []
scene_weakref = None
def scene_collected_callback(weakref):
scene_collected.append(True)
def do():
scene = Scene()
reference = weakref.ref(scene, scene_collected_callback)
scene.close()
return reference
# when
with restore_gc_state():
gc.disable()
scene_weakref = do()
# The Scene should have been collected.
self.assertTrue(scene_collected[0])
self.assertIsNone(scene_weakref())
if __name__ == "__main__":
unittest.main()
|
Add weakref assertion in test case
|
Add weakref assertion in test case
|
Python
|
bsd-3-clause
|
alexandreleroux/mayavi,dmsurti/mayavi,dmsurti/mayavi,alexandreleroux/mayavi,liulion/mayavi,liulion/mayavi
|
74b2883c3371304e8f5ea95b0454fb006d85ba3d
|
mapentity/urls.py
|
mapentity/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')[1:]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
|
from django.conf import settings
from django.conf.urls import patterns, url
from . import app_settings
from .views import (map_screenshot, convert, history_delete,
serve_secure_media, JSSettings)
_MEDIA_URL = settings.MEDIA_URL.replace(app_settings['ROOT_URL'], '')
if _MEDIA_URL.startswith('/'):
_MEDIA_URL = _MEDIA_URL[1:]
if _MEDIA_URL.endswith('/'):
_MEDIA_URL = _MEDIA_URL[:-1]
urlpatterns = patterns(
'',
url(r'^%s(?P<path>.*?)$' % _MEDIA_URL, serve_secure_media),
url(r'^map_screenshot/$', map_screenshot, name='map_screenshot'),
url(r'^convert/$', convert, name='convert'),
url(r'^history/delete/$', history_delete, name='history_delete'),
# See default value in app_settings.JS_SETTINGS.
# Will be overriden, most probably.
url(r'^api/settings.json$', JSSettings.as_view(), name='js_settings'),
)
|
Remove leading and trailing slash of MEDIA_URL
|
Remove leading and trailing slash of MEDIA_URL
Conflicts:
mapentity/static/mapentity/Leaflet.label
|
Python
|
bsd-3-clause
|
Anaethelion/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity
|
6953b831c3c48a3512a86ca9e7e92edbf7a62f08
|
tests/integration/test_sqs.py
|
tests/integration/test_sqs.py
|
import os
from asyncaws import SQS
from tornado.testing import AsyncTestCase, gen_test
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
class TestSQS(AsyncTestCase):
sqs = SQS(aws_key_id, aws_key_secret, aws_region, async=False)
@gen_test(timeout=60)
def test_create_queue(self):
queue_url = self.sqs.create_queue(
"test-queue", {"MessageRetentionPeriod": 60})
self.assertIsInstance(queue_url, str)
self.assertTrue(queue_url.startswith('http'))
get_attr_result = self.sqs.get_queue_attributes(
queue_url, ['MessageRetentionPeriod'])
self.assertIsInstance(get_attr_result, dict)
self.assertEqual(get_attr_result['MessageRetentionPeriod'], '60')
add_perm_result = self.sqs.add_permission(
queue_url, ['637085312181'], ["SendMessage"], "test-permission-id")
self.assertIsInstance(add_perm_result, str)
delete_result = self.sqs.delete_queue(queue_url)
self.assertIsInstance(delete_result, str)
|
import os
from asyncaws import SQS
from tornado.testing import AsyncTestCase, gen_test
from random import randint
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
aws_test_account_id = "637085312181"
class TestSQS(AsyncTestCase):
@classmethod
def setUpClass(cls):
cls.sqs = SQS(aws_key_id, aws_key_secret, aws_region, async=False)
cls.queue_name = "test-queue-%s" % randint(1000, 9999)
cls.queue_url = cls.sqs.create_queue(
cls.queue_name, {"MessageRetentionPeriod": 60})
@classmethod
def tearDownClass(cls):
cls.sqs.delete_queue(cls.queue_url)
@gen_test
def test_queue_actions(self):
self.assertTrue(self.queue_url.startswith('http'))
get_attr_result = self.sqs.get_queue_attributes(
self.queue_url, ['MessageRetentionPeriod'])
self.assertIsInstance(get_attr_result, dict)
self.assertEqual(get_attr_result['MessageRetentionPeriod'], '60')
add_perm_result = self.sqs.add_permission(
self.queue_url, [aws_test_account_id], ["SendMessage"], "test-permission-id")
self.assertIsInstance(add_perm_result, str)
|
Add correct setUp/tearDown methods for integration sqs test
|
Add correct setUp/tearDown methods for integration sqs test
|
Python
|
mit
|
MA3STR0/AsyncAWS
|
7dd17cc10f7e0857ab3017177d6c4abeb115ff07
|
south/models.py
|
south/models.py
|
from django.db import models
from south.db import DEFAULT_DB_ALIAS
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
from django.db import models
from south.db import DEFAULT_DB_ALIAS
# If we detect Django 1.7 or higher, then exit
# Placed here so it's guaranteed to be imported on Django start
import django
if django.VERSION[0] > 1 or (django.VERSION[0] == 1 and django.VERSION[1] > 6):
raise RuntimeError("South does not support Django 1.7 or higher. Please use native Django migrations.")
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
Add explicit version check for Django 1.7 or above
|
Add explicit version check for Django 1.7 or above
|
Python
|
apache-2.0
|
smartfile/django-south,smartfile/django-south
|
fe85f1f135d2a7831afee6c8ab0bad394beb8aba
|
src/ais.py
|
src/ais.py
|
class MonsterAI(object):
def __init__(self, level):
self.owner = None
self.level = level
def take_turn(self):
self.owner.log.log_begin_turn(self.owner.oid)
self._take_turn()
def _take_turn(self):
raise NotImplementedError('Subclass this before usage please.')
class TestMonster(MonsterAI):
def _take_turn(self):
enemies = self.level.get_objects_outside_faction(self.owner.faction)
if len(enemies) > 0:
distances = {self.owner.distance_to(e): e for e in enemies}
closest_distance = min(distances)
closest_enemy = distances[closest_distance]
if closest_distance <= 1.5:
self.owner.fighter.attack(closest_enemy)
else:
self.owner.move_towards(closest_enemy.x, closest_enemy.y, self.level)
|
from src.constants import *
class MonsterAI(object):
def __init__(self, level):
self.owner = None
self.level = level
def take_turn(self):
self.owner.log.log_begin_turn(self.owner.oid)
self._take_turn()
def _take_turn(self):
raise NotImplementedError('Subclass this before usage please.')
class TestMonster(MonsterAI):
def _take_turn(self):
enemies = self.level.get_objects_outside_faction(self.owner.faction)
if len(enemies) > 0:
# Identify the closest enemy
distances = {self.owner.distance_to(e): e for e in enemies}
closest_distance = min(distances)
closest_enemy = distances[closest_distance]
# Inspect inventory for usable items
if self.owner.inventory is not None:
usable = self.owner.inventory.get_usable_items()
throwing_items = [i for i in usable if i.item.can_use(self.owner, closest_enemy, self.level)]
else:
throwing_items = []
# Attack if adjacent
if closest_distance <= 1.5:
self.owner.fighter.attack(closest_enemy)
# Throw if you have a throwing item
if len(throwing_items) > 0:
throwing_items[0].item.use(self.owner, closest_enemy, self.level)
else:
self.owner.move_towards(closest_enemy.x, closest_enemy.y, self.level)
|
Add throwing item usage to test AI
|
Add throwing item usage to test AI
Unforutnately the item isn't evicted from the inventory on usage,
so the guy with the throwing item can kill everybody, but it's
working - he does throw it!
|
Python
|
mit
|
MoyTW/RL_Arena_Experiment
|
fe78335e4f469e22f9a1de7a1e5ddd52021a7f0f
|
linesep.py
|
linesep.py
|
STARTER = -1
SEPARATOR = 0
TERMINATOR = 1
def readlines(fp, sep, mode=TERMINATOR, retain=True, size=512):
if mode < 0:
return _readlines_start(fp, sep, retain, size)
elif mode == 0:
return _readlines_sep(fp, sep, size)
else:
return _readlines_term(fp, sep, retain, size)
def _readlines_start(fp, sep, retain=True, size=512):
# Omits empty leading entry
entries = _readlines_sep(fp, sep, size=size)
e = next(entries)
if e:
yield e
for e in entries:
if retain:
e = sep + e
yield e
def _readlines_sep(fp, sep, size=512):
buff = ''
for chunk in iter(lambda: fp.read(size), ''):
buff += chunk
lines = buff.split(sep)
buff = lines.pop()
for l in lines:
yield l
yield buff
def _readlines_term(fp, sep, retain=True, size=512):
# Omits empty trailing entry
buff = ''
for chunk in iter(lambda: fp.read(size), ''):
buff += chunk
lines = buff.split(sep)
buff = lines.pop()
for l in lines:
if retain:
l += sep
yield l
if buff:
yield buff
|
def read_begun(fp, sep, retain=True, size=512):
# Omits empty leading entry
entries = read_separated(fp, sep, size=size)
e = next(entries)
if e:
yield e
for e in entries:
if retain:
e = sep + e
yield e
def read_separated(fp, sep, size=512):
buff = ''
for chunk in iter(lambda: fp.read(size), ''):
buff += chunk
lines = buff.split(sep)
buff = lines.pop()
for l in lines:
yield l
yield buff
def read_terminated(fp, sep, retain=True, size=512):
# Omits empty trailing entry
buff = ''
for chunk in iter(lambda: fp.read(size), ''):
buff += chunk
lines = buff.split(sep)
buff = lines.pop()
for l in lines:
if retain:
l += sep
yield l
if buff:
yield buff
|
Use three public functions instead of one
|
Use three public functions instead of one
|
Python
|
mit
|
jwodder/linesep
|
e9ae6b7f92ee0a4585adc11e695cc15cbe425e23
|
morepath/app.py
|
morepath/app.py
|
from .interfaces import IRoot, IApp
from .publish import publish
from .request import Request
from .traject import Traject
from comparch import ClassRegistry, Lookup, ChainClassLookup
known_apps = {}
class App(IApp, ClassRegistry):
def __init__(self, name='', parent=None):
super(App, self).__init__()
self.name = name
self.root_model = None
self.root_obj = None
self.child_apps = {}
self.parent = parent
self.traject = Traject()
if self.parent is not None:
parent.add_child(self)
def add_child(self, app):
self.child_apps[app.name] = app
self.traject.register(app.name, lambda: app, conflicting=True)
def class_lookup(self):
if self.parent is None:
return ChainClassLookup(self, global_app)
return ChainClassLookup(self, self.parent.class_lookup())
def __call__(self, environ, start_response):
# XXX do caching lookup where?
lookup = Lookup(self.class_lookup())
request = Request(environ)
request.lookup = lookup
response = publish(request, self, lookup)
return response(environ, start_response)
global_app = App()
# XXX this shouldn't be here but be the root of the global app
class Root(IRoot):
pass
root = Root()
|
from .interfaces import IRoot, IApp
from .publish import publish
from .request import Request
from .traject import Traject
from comparch import ClassRegistry, Lookup, ChainClassLookup
known_apps = {}
class App(IApp, ClassRegistry):
def __init__(self, name='', parent=None):
super(App, self).__init__()
self.name = name
self.root_model = None
self.root_obj = None
self.child_apps = {}
self.parent = parent
self.traject = Traject()
if self.parent is not None:
parent.add_child(self)
def add_child(self, app):
self.child_apps[app.name] = app
self.traject.register(app.name, lambda: app, conflicting=True)
def class_lookup(self):
if self.parent is None:
return ChainClassLookup(self, global_app)
return ChainClassLookup(self, self.parent.class_lookup())
def __call__(self, environ, start_response):
# XXX do caching lookup where?
lookup = Lookup(self.class_lookup())
request = Request(environ)
request.lookup = lookup
response = publish(request, self, lookup)
return response(environ, start_response)
global_app = App()
|
Remove root that wasn't used.
|
Remove root that wasn't used.
|
Python
|
bsd-3-clause
|
faassen/morepath,morepath/morepath,taschini/morepath
|
a7938ed9ec814fa9cf53272ceb65e84d11d50dc1
|
moto/s3/urls.py
|
moto/s3/urls.py
|
from __future__ import unicode_literals
from moto.compat import OrderedDict
from .responses import S3ResponseInstance
url_bases = [
"https?://s3(.*).amazonaws.com",
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3(.*).amazonaws.com"
]
url_paths = OrderedDict([
# subdomain bucket
('{0}/$', S3ResponseInstance.bucket_response),
# subdomain key of path-based bucket
('{0}/(?P<key_or_bucket_name>.+)', S3ResponseInstance.ambiguous_response),
# path-based bucket + key
('{0}/(?P<bucket_name_path>[a-zA-Z0-9\-_./]+)/(?P<key_name>.+)', S3ResponseInstance.key_response),
])
|
from __future__ import unicode_literals
from .responses import S3ResponseInstance
url_bases = [
"https?://s3(.*).amazonaws.com",
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3(.*).amazonaws.com"
]
url_paths = {
# subdomain bucket
'{0}/$': S3ResponseInstance.bucket_response,
# subdomain key of path-based bucket
'{0}/(?P<key_or_bucket_name>[^/]+)/?$': S3ResponseInstance.ambiguous_response,
# path-based bucket + key
'{0}/(?P<bucket_name_path>[a-zA-Z0-9\-_./]+)/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
|
Fix s3 url regex to ensure path-based bucket and key does not catch.
|
Fix s3 url regex to ensure path-based bucket and key does not catch.
|
Python
|
apache-2.0
|
william-richard/moto,kefo/moto,botify-labs/moto,2rs2ts/moto,dbfr3qs/moto,im-auld/moto,william-richard/moto,william-richard/moto,Affirm/moto,kefo/moto,botify-labs/moto,Brett55/moto,ZuluPro/moto,ZuluPro/moto,okomestudio/moto,spulec/moto,whummer/moto,william-richard/moto,kefo/moto,kefo/moto,ZuluPro/moto,dbfr3qs/moto,heddle317/moto,Brett55/moto,whummer/moto,mrucci/moto,gjtempleton/moto,rocky4570/moto,spulec/moto,whummer/moto,tootedom/moto,Brett55/moto,heddle317/moto,gjtempleton/moto,IlyaSukhanov/moto,botify-labs/moto,2rs2ts/moto,spulec/moto,william-richard/moto,okomestudio/moto,ZuluPro/moto,okomestudio/moto,whummer/moto,gjtempleton/moto,Affirm/moto,rocky4570/moto,silveregg/moto,2rs2ts/moto,spulec/moto,botify-labs/moto,okomestudio/moto,dbfr3qs/moto,heddle317/moto,whummer/moto,rocky4570/moto,Affirm/moto,dbfr3qs/moto,Brett55/moto,Brett55/moto,dbfr3qs/moto,spulec/moto,2rs2ts/moto,gjtempleton/moto,botify-labs/moto,botify-labs/moto,spulec/moto,whummer/moto,kefo/moto,Brett55/moto,Affirm/moto,braintreeps/moto,ZuluPro/moto,heddle317/moto,gjtempleton/moto,Affirm/moto,rocky4570/moto,okomestudio/moto,rocky4570/moto,Affirm/moto,heddle317/moto,2rs2ts/moto,dbfr3qs/moto,rocky4570/moto,ZuluPro/moto,william-richard/moto,riccardomc/moto,okomestudio/moto
|
429c2548835aef1cb1655229ee11f42ccf189bd1
|
shopping_list.py
|
shopping_list.py
|
shopping_list = []
def show_help():
print("What should we pick up at the store?")
print("Enter DONE to stop. Enter HELP for this help. Enter SHOW to see your current list.")
|
shopping_list = []
def show_help():
print("What should we pick up at the store?")
print("Enter DONE to stop. Enter HELP for this help. Enter SHOW to see your current list.")
def add_to_list(item):
shopping_list.append(item)
print("Added! List has {} items.".format(len(shopping_list)))
|
Add an item to the shopping list.
|
Add an item to the shopping list.
|
Python
|
mit
|
adityatrivedi/shopping-list
|
39ce4e74a6b7115a35260fa2722ace1792cb1780
|
python/count_triplets.py
|
python/count_triplets.py
|
#!/bin/python3
import math
import os
import random
import re
import sys
from collections import Counter
def countTriplets(arr, r):
potential_triplets_with_middle = Counter()
potential_triplets_with_end = Counter()
total_triplets = 0
for num in arr:
# num completed potential_triplets_with_end[num] triplets
if potential_triplets_with_end[num]:
total_triplets += potential_triplets_with_end[num]
# num can be the middle number in potential_triplets_with_middle[num] triplets
if potential_triplets_with_middle[num]:
potential_triplets_with_end[num * r] += potential_triplets_with_middle[num]
# num can be the begining of a triplet
potential_triplets_with_middle[num * r] += 1
print("num", num, " middle", potential_triplets_with_middle, " end", potential_triplets_with_end, " total", total_triplets)
return total_triplets
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nr = input().rstrip().split()
n = int(nr[0])
r = int(nr[1])
arr = list(map(int, input().rstrip().split()))
ans = countTriplets(arr, r)
fptr.write(str(ans) + '\n')
fptr.close()
|
#!/bin/python3
import math
import os
import random
import re
import sys
from collections import Counter
def countTriplets(arr, r):
potential_triplets_with_middle = Counter()
potential_triplets_with_end = Counter()
total_triplets = 0
for num in arr:
# num completed potential_triplets_with_end[num] triplets
if potential_triplets_with_end[num]:
total_triplets += potential_triplets_with_end[num]
# num can be the middle number in
# potential_triplets_with_middle[num] triplets
if potential_triplets_with_middle[num]:
potential_triplets_with_end[num * r] += \
potential_triplets_with_middle[num]
# num can be the begining of a triplet
potential_triplets_with_middle[num * r] += 1
return total_triplets
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nr = input().rstrip().split()
n = int(nr[0])
r = int(nr[1])
arr = list(map(int, input().rstrip().split()))
ans = countTriplets(arr, r)
fptr.write(str(ans) + '\n')
fptr.close()
|
Remove debug output and pycodestyle
|
Remove debug output and pycodestyle
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
5dd78f614e5882bc2a3fcae24117a26ee34371ac
|
register-result.py
|
register-result.py
|
#!/usr/bin/env python
import json
import socket
import sys
if len(sys.argv) < 4:
print("Error: Usage <register-result> <client> <name> <output> <status> <ttl>")
sys.exit(128)
check_client = sys.argv[1]
check_name = sys.argv[2]
check_output = sys.argv[3]
check_status = int(sys.argv[4])
check_ttl = int(sys.argv[5]) if len(sys.argv) > 5 else 90000
# Our result dict
result = dict()
result['source'] = check_client
result['name'] = check_name
result['output'] = check_output
result['status'] = check_status
result['ttl'] = check_ttl
# TCP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 3030)
sock.connect(server_address)
print (json.dumps(result))
socket.sendall(json.dumps(result))
|
#!/usr/bin/env python
import json
import socket
import sys
if len(sys.argv) < 4:
print("Error: Usage <register-result> <client> <name> <output> <status> <ttl>")
sys.exit(128)
check_client = sys.argv[1]
check_name = sys.argv[2]
check_output = sys.argv[3]
check_status = int(sys.argv[4])
check_ttl = int(sys.argv[5]) if len(sys.argv) > 5 else 90000
# Our result dict
result = dict()
result['source'] = check_client
result['name'] = check_name
result['output'] = check_output
result['status'] = check_status
result['ttl'] = check_ttl
# TCP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 3030)
sock.connect(server_address)
sock.sendall(json.dumps(result))
print (json.dumps(result))
|
Fix mistake with socket constructor
|
Fix mistake with socket constructor
|
Python
|
mit
|
panubo/docker-monitor,panubo/docker-monitor,panubo/docker-monitor
|
5e57dce84ffe7be7e699af1e2be953d5a65d8435
|
tests/test_module.py
|
tests/test_module.py
|
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2014 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/pathos/browser/dill/LICENSE
import sys
import dill
import test_mixins as module
module.a = 1234
pik_mod = dill.dumps(module)
module.a = 0
# remove module
del sys.modules[module.__name__]
del module
module = dill.loads(pik_mod)
assert module.a == 1234
assert module.double_add(1, 2, 3) == 2 * module.fx
|
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2014 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/pathos/browser/dill/LICENSE
import sys
import dill
import test_mixins as module
cached = (module.__cached__ if hasattr(module, "__cached__")
else module.__file__ + "c")
module.a = 1234
pik_mod = dill.dumps(module)
module.a = 0
# remove module
del sys.modules[module.__name__]
del module
module = dill.loads(pik_mod)
assert hasattr(module, "a") and module.a == 1234
assert module.double_add(1, 2, 3) == 2 * module.fx
# clean up
import os
os.remove(cached)
if os.path.exists("__pycache__") and not os.listdir("__pycache__"):
os.removedirs("__pycache__")
|
Add code to clean up
|
Add code to clean up
|
Python
|
bsd-3-clause
|
wxiang7/dill,mindw/dill
|
66a6223ca2c512f3f39ecb4867547a440611713b
|
nisl/__init__.py
|
nisl/__init__.py
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
#from . import check_build
#from .base import clone
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Sklearn could not be found, please install it properly to use nisl.'
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
Add an error message when trying to load nisl without having Numpy, Scipy and Sklearn installed.
|
Add an error message when trying to load nisl without having Numpy, Scipy and Sklearn installed.
|
Python
|
bsd-3-clause
|
abenicho/isvr
|
910e1a1762dac1d62c8a6749286c436d6c2b28d9
|
UM/Operations/RemoveSceneNodeOperation.py
|
UM/Operations/RemoveSceneNodeOperation.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.Selection import Selection
from UM.Application import Application
## An operation that removes a SceneNode from the scene.
class RemoveSceneNodeOperation(Operation.Operation):
## Initialises the RemoveSceneNodeOperation.
#
# \param node The node to remove.
def __init__(self, node):
super().__init__()
self._node = node
self._parent = node.getParent()
## Undoes the operation, putting the node back in the scene.
def undo(self):
self._node.setParent(self._parent) # Hanging it back under its original parent puts it back in the scene.
## Redo the operation, removing the node again.
def redo(self):
self._node.setParent(None)
# Hack to ensure that the _onchanged is triggered correctly.
# We can't do it the right way as most remove changes don't need to trigger
# a reslice (eg; removing hull nodes don't need to trigger reslice).
try:
Application.getInstance().getBackend().forceSlice()
except:
pass
if Selection.isSelected(self._node): # Also remove the selection.
Selection.remove(self._node)
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.Selection import Selection
from UM.Application import Application
## An operation that removes a SceneNode from the scene.
class RemoveSceneNodeOperation(Operation.Operation):
## Initialises the RemoveSceneNodeOperation.
#
# \param node The node to remove.
def __init__(self, node):
super().__init__()
self._node = node
self._parent = node.getParent()
## Undoes the operation, putting the node back in the scene.
def undo(self):
self._node.setParent(self._parent) # Hanging it back under its original parent puts it back in the scene.
## Redo the operation, removing the node again.
def redo(self):
old_parent = self._parent
self._node.setParent(None)
if old_parent and old_parent.callDecoration("isGroup"):
old_parent.callDecoration("recomputeConvexHull")
# Hack to ensure that the _onchanged is triggered correctly.
# We can't do it the right way as most remove changes don't need to trigger
# a reslice (eg; removing hull nodes don't need to trigger reslice).
try:
Application.getInstance().getBackend().forceSlice()
except:
pass
if Selection.isSelected(self._node): # Also remove the selection.
Selection.remove(self._node)
|
Update convex hull of the group when removing a node from the group
|
Update convex hull of the group when removing a node from the group
CURA-2573
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
a17f711a6e055a9de4674e4c35570a2c6d6f0335
|
ttysend.py
|
ttysend.py
|
from __future__ import print_function
import sys
import os
import fcntl
import termios
import argparse
class RootRequired(Exception):
"""Our standard exception."""
pass
def send(data, tty):
"""Send each char of data to tty."""
if(os.getuid() != 0):
raise RootRequired('Only root can send input to other TTYs.')
for c in data:
fcntl.ioctl(tty, termios.TIOCSTI, c)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('tty', type=argparse.FileType('w'),
help='display a square of a given number')
group = parser.add_mutually_exclusive_group()
group.add_argument('-n', action='store_true',
help='Do not print the trailing newline character.')
group.add_argument('--stdin', action='store_true',
help='Read input from stdin.')
args, data = parser.parse_known_args()
# Prepare data
if args.stdin:
data = sys.stdin.read()
else:
data = ' '.join(data)
# Send data
try:
send(data, args.tty)
except RootRequired, e:
sys.exit(print('ERROR:', e, file=sys.stderr))
# Handle trailing newline
if data[-1][-1] != '\n' and not args.n:
send('\n', args.tty)
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import fcntl
import termios
import argparse
class RootRequired(Exception):
"""Our standard exception."""
pass
def send(data, tty):
if len(data):
# Handle trailing newline
if data[-1][-1] != '\n':
data += '\n'
send_raw(data, tty)
def send_raw(data, tty):
"""Send each char of data to tty."""
if(os.getuid() != 0):
raise RootRequired('Only root can send input to other TTYs.')
for c in data:
fcntl.ioctl(tty, termios.TIOCSTI, c)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('tty', type=argparse.FileType('w'),
help='display a square of a given number')
group = parser.add_mutually_exclusive_group()
group.add_argument('-n', action='store_true',
help='Do not force a trailing newline character.')
group.add_argument('--stdin', action='store_true',
help='Read input from stdin.')
args, data = parser.parse_known_args()
# Prepare data
if args.stdin:
data = sys.stdin.read()
else:
data = ' '.join(data)
# Send data
try:
if args.n:
send_raw(data, args.tty)
else:
send(data, args.tty)
except RootRequired, e:
sys.exit(print('ERROR:', e, file=sys.stderr))
|
Move newline handling to a function.
|
Move newline handling to a function.
Allows library users to choose to force trailing newlines.
|
Python
|
mit
|
RichardBronosky/ttysend
|
782c1b8379d38f99de413398919aa797af0df645
|
plot_s_curve.py
|
plot_s_curve.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
from numpy import array, log
import sys
x = []
y = []
infile = open(sys.argv[1])
for line in infile:
data = line.replace('\n','').split()
print(data)
try :
x.append(float(data[0]))
y.append(float(data[1]))
except ValueError:
pass
#x = array(x)
#y = array(y)
figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
#plt.plot(log(x),log(y))
plt.plot(x,y,"o")
plt.ylabel('$\log T$')
plt.xlabel('$\log \Sigma$')
plt.grid()
plt.show()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
from numpy import array, log
import sys
import os
import matplotlib.animation as animation
fig = plt.figure()
inpath = sys.argv[1]
if os.path.isfile(inpath):
print('Visiting {}'.format(inpath))
filenames = [inpath]
else:
_filenames = os.listdir(inpath)
_filenames.sort()
filesnames = [inpath + '/' + fname for fname in _filesnames if '_tot.dat' in fname]
print('Visiting all files of {}'.format(inpath))
axline, = plt.plot(0, 0, 'o')
def draw_once(filename):
x = []
y = []
if not 'tot.dat' in filename:
return ([0], [0])
else:
print('Visiting {}'.format(filename))
outfile = filename.replace('.dat', '.png')
for line in open(filename):
data = line.replace('\n', '').split()
try :
print (data)
xData = float(data[0])
yData = float(data[1])
x.append(xData)
y.append(yData)
except ValueError:
pass
axline.set_xdata(x)
axline.set_ydata(y)
return axline,
def init():
print('Initialisation')
plt.ylabel('$\log T$')
plt.xlabel('$\log \Sigma$')
plt.xlim(1.8, 4)
plt.ylim(6, 8)
plt.grid()
if len(filenames) > 1:
ani = animation.FuncAnimation(fig, draw_once, filenames, init_func=init, interval=10)
else:
init()
draw_once(filenames[0])
plt.show()
# x, y = draw_once(filenames[2])
# plt.plot(x, y, 'o')
|
Use animation if dirname is provided
|
Use animation if dirname is provided
|
Python
|
mit
|
M2-AAIS/BAD
|
3053219149f7dac7ab073fc24488116b1b280b77
|
money_rounding.py
|
money_rounding.py
|
def get_price_without_vat(price_to_show, vat_percent):
raise NotImplementedError()
def get_price_without_vat_from_other_valuta(conversion_rate, origin_price,
origin_vat, other_vat):
raise NotImplementedError()
|
def show_pretty_price(value):
raise NotImplementedError()
|
Use function described in readme
|
Use function described in readme
|
Python
|
mit
|
coolshop-com/coolshop-application-assignment
|
ea7200bc9774f69562b37f177ad18ca606998dfa
|
perfrunner/utils/debug.py
|
perfrunner/utils/debug.py
|
import glob
import shutil
from optparse import OptionParser
from perfrunner.helpers.remote import RemoteHelper
from perfrunner.settings import ClusterSpec
def get_options():
usage = '%prog -c cluster'
parser = OptionParser(usage)
parser.add_option('-c', dest='cluster_spec_fname',
help='path to the cluster specification file',
metavar='cluster.spec')
options, args = parser.parse_args()
if not options.cluster_spec_fname:
parser.error('Please specify a cluster specification')
return options, args
def main():
options, args = get_options()
cluster_spec = ClusterSpec()
cluster_spec.parse(options.cluster_spec_fname, args)
remote = RemoteHelper(cluster_spec, test_config=None, verbose=False)
remote.collect_info()
for hostname in cluster_spec.yield_hostnames():
for fname in glob.glob('{}/*.zip'.format(hostname)):
shutil.move(fname, '{}.zip'.format(hostname))
if __name__ == '__main__':
main()
|
import glob
import os.path
import shutil
from optparse import OptionParser
from perfrunner.helpers.remote import RemoteHelper
from perfrunner.settings import ClusterSpec
def get_options():
usage = '%prog -c cluster'
parser = OptionParser(usage)
parser.add_option('-c', dest='cluster_spec_fname',
help='path to the cluster specification file',
metavar='cluster.spec')
options, args = parser.parse_args()
if not options.cluster_spec_fname:
parser.error('Please specify a cluster specification')
return options, args
def main():
options, args = get_options()
cluster_spec = ClusterSpec()
cluster_spec.parse(options.cluster_spec_fname, args)
remote = RemoteHelper(cluster_spec, test_config=None, verbose=False)
remote.collect_info()
for hostname in cluster_spec.yield_hostnames():
for fname in glob.glob('{}/*.zip'.format(hostname)):
shutil.move(fname, '{}.zip'.format(hostname))
if cluster_spec.backup is not None:
logs = os.path.join(cluster_spec.backup, 'logs')
if os.path.exists(logs):
shutil.make_archive('tools', 'zip', logs)
if __name__ == '__main__':
main()
|
Archive logs from the tools
|
Archive logs from the tools
Change-Id: I184473d20cc2763fbc97c993bfcab36b80d1c864
Reviewed-on: http://review.couchbase.org/76571
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
Python
|
apache-2.0
|
couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner
|
22e82e3fb6949efe862216feafaedb2da9b19c62
|
filehandler.py
|
filehandler.py
|
import csv
import sys
import urllib
from scheduleitem import ScheduleItem
from team import Team
def read(uri):
"""Open a File or a Web URL"""
if uri.startswith('http://') or uri.startswith('https://'):
return open_url(uri)
else:
return open_file(uri)
def open_url(url):
"""Return the games file data as an array"""
try:
with urllib.request.urlopen(url) as response:
return response.read()
except urllib.HTTPError as e:
msg = "Could Not Open URL {}.\nThe Code is: {} "
print(msg.format(url, e.code))
sys.exit(1)
except urllib.URLError as e:
msg = "Could Not Open URL {}.\nThe Reason is: {} "
print(msg.format(url.url, e.reason))
sys.exit(1)
def open_file(uri):
"""Return the games file data as an array"""
try:
with open(uri, 'r') as f:
return f.read()
except IOError:
msg = "Could not open file: `{}`"
print(msg.format(uri))
sys.exit(1)
def load_schedules(games_file):
with open(games_file, 'r') as f:
return [ScheduleItem.from_str(line) for line in f.readlines()]
def load_teams_data(data_file):
with open(data_file, 'r') as csv_file:
reader = csv.reader(csv_file)
# Skip the header row
next(reader)
return [Team(row[0], row[2], row[3]) for row in reader]
|
import csv
import sys
import urllib.error
import urllib.request
from scheduleitem import ScheduleItem
from team import Team
def read(uri):
"""Open a File or a Web URL"""
if uri.startswith('http://') or uri.startswith('https://'):
return open_url(uri)
else:
return open_local_file(uri)
def open_url(url):
"""Return the game file data."""
with urllib.request.urlopen(url) as response:
if response.status != 200:
msg = 'Status {}. Could Not Open URL {}. Reason: {}'
raise urllib.error.HTTPError(
msg.format(response.status, url, response.msg)
)
encoding = sys.getdefaultencoding()
return [line.decode(encoding) for line in response.readlines()]
def open_local_file(uri):
"""Return the games file data as an array"""
with open(uri, 'r') as f:
return f.readlines()
def load_schedules(uri):
data = read(uri)
return [ScheduleItem.from_str(line) for line in data]
def load_teams_data(data_file):
with open(data_file, 'r') as csv_file:
reader = csv.reader(csv_file)
next(reader) # Skip the header row
return [Team(row[0], row[2], row[3]) for row in reader]
|
Update file handlers to use Python3 urllib
|
Update file handlers to use Python3 urllib
|
Python
|
mit
|
brianjbuck/robie
|
7b4b2fcbcb9a95c07f09b71305afa0c5ce95fe99
|
tenant_schemas/routers.py
|
tenant_schemas/routers.py
|
from django.conf import settings
class TenantSyncRouter(object):
"""
A router to control which applications will be synced,
depending if we are syncing the shared apps or the tenant apps.
"""
def allow_syncdb(self, db, model):
# the imports below need to be done here else django <1.5 goes crazy
# https://code.djangoproject.com/ticket/20704
from django.db import connection
from tenant_schemas.utils import get_public_schema_name, app_labels
if connection.schema_name == get_public_schema_name():
if model._meta.app_label not in app_labels(settings.SHARED_APPS):
return False
else:
if model._meta.app_label not in app_labels(settings.TENANT_APPS):
return False
return None
|
from django.conf import settings
class TenantSyncRouter(object):
"""
A router to control which applications will be synced,
depending if we are syncing the shared apps or the tenant apps.
"""
def allow_migrate(self, db, model):
# the imports below need to be done here else django <1.5 goes crazy
# https://code.djangoproject.com/ticket/20704
from django.db import connection
from tenant_schemas.utils import get_public_schema_name, app_labels
if connection.schema_name == get_public_schema_name():
if model._meta.app_label not in app_labels(settings.SHARED_APPS):
return False
else:
if model._meta.app_label not in app_labels(settings.TENANT_APPS):
return False
return None
def allow_syncdb(self, db, model):
# allow_syncdb was changed to allow_migrate in django 1.7
return self.allow_migrate(db, model)
|
Add database router allow_migrate() for Django 1.7
|
Add database router allow_migrate() for Django 1.7
|
Python
|
mit
|
goodtune/django-tenant-schemas,Mobytes/django-tenant-schemas,kajarenc/django-tenant-schemas,honur/django-tenant-schemas,mcanaves/django-tenant-schemas,ArtProcessors/django-tenant-schemas,goodtune/django-tenant-schemas,ArtProcessors/django-tenant-schemas,bernardopires/django-tenant-schemas,bernardopires/django-tenant-schemas,pombredanne/django-tenant-schemas
|
b3acf639f310019d042bbe24e653a6f79c240858
|
setup.py
|
setup.py
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=cythonize(extensions)
)
|
from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
Remove the importing of the "cythonize" function.
|
Remove the importing of the "cythonize" function.
|
Python
|
mit
|
PeithVergil/cython-example
|
d63d391d5b9ee221c0cd67e030895f4598430f08
|
onetime/models.py
|
onetime/models.py
|
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
def __unicode__(self):
return '%s (%s)' % (self.key, self.user.username)
def is_valid(self):
if self.usage_left is not None and self.usage_left <= 0:
return False
if self.expires is not None and self.expires < datetime.now():
return False
return True
def update_usage(self):
if self.usage_left is not None:
self.usage_left -= 1
self.save()
|
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
class Key(models.Model):
user = models.ForeignKey(User)
key = models.CharField(max_length=40)
created = models.DateTimeField(auto_now_add=True)
usage_left = models.IntegerField(null=True, default=1)
expires = models.DateTimeField(null=True)
next = models.CharField(null=True, max_length=200)
def __unicode__(self):
return '%s (%s)' % (self.key, self.user.username)
def is_valid(self):
if self.usage_left is not None and self.usage_left <= 0:
return False
if self.expires is not None and self.expires < datetime.now():
return False
return True
def update_usage(self):
if self.usage_left is not None and self.usage_left > 0:
self.usage_left -= 1
self.save()
|
Update key usage when the usage_left counter is still greater than zero.
|
Update key usage when the usage_left counter is still greater than zero.
|
Python
|
bsd-3-clause
|
uploadcare/django-loginurl,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
|
c8a0f4f439c2123c9b7f9b081f91d75b1f9a8a13
|
dmoj/checkers/linecount.py
|
dmoj/checkers/linecount.py
|
from re import split as resplit
from typing import Callable, Union
from dmoj.result import CheckerResult
from dmoj.utils.unicode import utf8bytes
verdict = u"\u2717\u2713"
def check(process_output: bytes, judge_output: bytes, point_value: float, feedback: bool = True,
match: Callable[[bytes, bytes], bool] = lambda p, j: p.strip() == j.strip(),
**kwargs) -> Union[CheckerResult, bool]:
process_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(process_output))))
judge_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(judge_output))))
if len(process_lines) > len(judge_lines):
return False
if not judge_lines:
return True
if isinstance(match, str):
match = eval(match)
cases = [verdict[0]] * len(judge_lines)
count = 0
for i, (process_line, judge_line) in enumerate(zip(process_lines, judge_lines)):
if match(process_line, judge_line):
cases[i] = verdict[1]
count += 1
return CheckerResult(count == len(judge_lines), point_value * (1.0 * count / len(judge_lines)),
''.join(cases) if feedback else "")
check.run_on_error = True # type: ignore
|
from re import split as resplit
from typing import Callable, Union
from dmoj.result import CheckerResult
from dmoj.utils.unicode import utf8bytes
verdict = u"\u2717\u2713"
def check(process_output: bytes, judge_output: bytes, point_value: float, feedback: bool = True,
**kwargs) -> Union[CheckerResult, bool]:
process_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(process_output))))
judge_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(judge_output))))
if len(process_lines) > len(judge_lines):
return False
if not judge_lines:
return True
cases = [verdict[0]] * len(judge_lines)
count = 0
for i, (process_line, judge_line) in enumerate(zip(process_lines, judge_lines)):
if process_line.strip() == judge_line.strip():
cases[i] = verdict[1]
count += 1
return CheckerResult(count == len(judge_lines), point_value * (1.0 * count / len(judge_lines)),
''.join(cases) if feedback else "")
check.run_on_error = True # type: ignore
|
Remove the match param to fix RCE.
|
Remove the match param to fix RCE.
|
Python
|
agpl-3.0
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
973641c7d68f4b1505541a06ec46901b412ab56b
|
tests/test_constraints.py
|
tests/test_constraints.py
|
import unittest
import numpy as np
from constraints import (generate_constraints_function,
generate_constraint_gradients_function, )
from robot_arm import RobotArm
class TestConstraintFunctions(unittest.TestCase):
def setUp(self):
self.lengths = (3, 2, 2,)
self.destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
self.theta = (np.pi, np.pi / 2, 0,)
self.thetas = np.ones((3 * 5,))
self.robot_arm = RobotArm(self.lengths, self.destinations, self.theta)
self.constraints_func = generate_constraints_function(self.robot_arm)
self.constraint_gradients_func = generate_constraint_gradients_function(self.robot_arm)
def test_constraints_func_return_type(self):
constraints = self.constraints_func(self.thetas)
self.assertEqual(constraints.shape, (2 * 5,))
def test_constraint_gradients_func_return_type(self):
constraint_gradients = self.constraint_gradients_func(self.thetas)
self.assertEqual(constraint_gradients.shape, (3 * 5, 2 * 5))
# print(np.array2string(constraint_gradients, max_line_width=np.inf))
|
import unittest
import numpy as np
from constraints import (generate_constraints_function,
generate_constraint_gradients_function, )
from robot_arm import RobotArm
class TestConstraintFunctions(unittest.TestCase):
def setUp(self):
self.lengths = (3, 2, 2,)
self.destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
self.theta = (np.pi, np.pi / 2, 0,)
self.thetas = np.ones((3 * 5,))
self.robot_arm = RobotArm(self.lengths, self.destinations, self.theta)
self.constraints_func = generate_constraints_function(self.robot_arm)
self.constraint_gradients_func = generate_constraint_gradients_function(self.robot_arm)
def test_constraints_func_return_type(self):
constraints = self.constraints_func(self.thetas)
self.assertEqual(constraints.shape, (2 * 5,))
def test_constraint_gradients_func_return_type(self):
constraint_gradients = self.constraint_gradients_func(self.thetas)
self.assertEqual(constraint_gradients.shape, (3 * 5, 2 * 5))
# print(np.array2string(constraint_gradients, max_line_width=np.inf))
def test_licq(self):
constraint_gradients = self.constraint_gradients_func(self.thetas)
rank = np.linalg.matrix_rank(constraint_gradients)
self.assertEqual(rank, 2 * 5)
|
Test LICQ condition of constraint gradient
|
Test LICQ condition of constraint gradient
|
Python
|
mit
|
JakobGM/robotarm-optimization
|
c8c9c42f14c742c6fcb180b7a3cc1bab1655ac46
|
projections/simpleexpr.py
|
projections/simpleexpr.py
|
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
res = ma.masked_array(np.full(tuple(df.values())[0].shape, res,
dtype=np.float32))
return res
|
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
arrays = filter(lambda v: isinstance(v, np.ndarray), df.values())
res = ma.masked_array(np.full(tuple(arrays)[0].shape, res,
dtype=np.float32))
return res
|
Improve determination of array shape for constant expressions
|
Improve determination of array shape for constant expressions
When Evaluating a constant expression, I only used to look at the first
column in the df dictionary. But that could also be a constant or
expression. So look instead at all columns and find the first numpy
array.
|
Python
|
apache-2.0
|
ricardog/raster-project,ricardog/raster-project,ricardog/raster-project,ricardog/raster-project,ricardog/raster-project
|
632180274abe4cf91f65cf0e84f817dc7124e293
|
zerver/migrations/0108_fix_default_string_id.py
|
zerver/migrations/0108_fix_default_string_id.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from __future__ import unicode_literals
from django.db import migrations
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
Add imports needed for new migration.
|
mypy: Add imports needed for new migration.
|
Python
|
apache-2.0
|
eeshangarg/zulip,Galexrt/zulip,shubhamdhama/zulip,punchagan/zulip,Galexrt/zulip,tommyip/zulip,brainwane/zulip,tommyip/zulip,zulip/zulip,brainwane/zulip,punchagan/zulip,eeshangarg/zulip,rht/zulip,timabbott/zulip,rht/zulip,zulip/zulip,andersk/zulip,synicalsyntax/zulip,brainwane/zulip,rht/zulip,eeshangarg/zulip,rishig/zulip,andersk/zulip,timabbott/zulip,amanharitsh123/zulip,synicalsyntax/zulip,timabbott/zulip,rht/zulip,jackrzhang/zulip,rishig/zulip,mahim97/zulip,Galexrt/zulip,kou/zulip,brockwhittaker/zulip,kou/zulip,eeshangarg/zulip,andersk/zulip,dhcrzf/zulip,hackerkid/zulip,brockwhittaker/zulip,verma-varsha/zulip,showell/zulip,brainwane/zulip,kou/zulip,shubhamdhama/zulip,tommyip/zulip,andersk/zulip,brainwane/zulip,eeshangarg/zulip,hackerkid/zulip,brainwane/zulip,eeshangarg/zulip,zulip/zulip,andersk/zulip,brainwane/zulip,verma-varsha/zulip,jackrzhang/zulip,timabbott/zulip,zulip/zulip,synicalsyntax/zulip,timabbott/zulip,rht/zulip,synicalsyntax/zulip,Galexrt/zulip,zulip/zulip,dhcrzf/zulip,rht/zulip,showell/zulip,rishig/zulip,synicalsyntax/zulip,mahim97/zulip,kou/zulip,mahim97/zulip,verma-varsha/zulip,showell/zulip,Galexrt/zulip,Galexrt/zulip,jackrzhang/zulip,eeshangarg/zulip,synicalsyntax/zulip,hackerkid/zulip,punchagan/zulip,tommyip/zulip,shubhamdhama/zulip,jackrzhang/zulip,amanharitsh123/zulip,tommyip/zulip,jackrzhang/zulip,amanharitsh123/zulip,jackrzhang/zulip,dhcrzf/zulip,rishig/zulip,dhcrzf/zulip,dhcrzf/zulip,mahim97/zulip,shubhamdhama/zulip,shubhamdhama/zulip,rishig/zulip,kou/zulip,hackerkid/zulip,mahim97/zulip,zulip/zulip,tommyip/zulip,kou/zulip,showell/zulip,shubhamdhama/zulip,punchagan/zulip,rishig/zulip,brockwhittaker/zulip,punchagan/zulip,andersk/zulip,synicalsyntax/zulip,amanharitsh123/zulip,dhcrzf/zulip,kou/zulip,rht/zulip,brockwhittaker/zulip,zulip/zulip,rishig/zulip,verma-varsha/zulip,punchagan/zulip,hackerkid/zulip,showell/zulip,brockwhittaker/zulip,hackerkid/zulip,amanharitsh123/zulip,mahim97/zulip,showell/zulip,Galexrt/zulip,hackerkid/zulip,shubhamdhama/zulip,verma-varsha/zulip,amanharitsh123/zulip,brockwhittaker/zulip,timabbott/zulip,verma-varsha/zulip,showell/zulip,punchagan/zulip,tommyip/zulip,jackrzhang/zulip,andersk/zulip,timabbott/zulip,dhcrzf/zulip
|
91ef89371f7ba99346ba982a3fdb7fc2105a9840
|
superdesk/users/__init__.py
|
superdesk/users/__init__.py
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from .users import RolesResource, UsersResource
from .services import DBUsersService, RolesService, is_admin # noqa
import superdesk
def init_app(app):
endpoint_name = 'users'
service = DBUsersService(endpoint_name, backend=superdesk.get_backend())
UsersResource(endpoint_name, app=app, service=service)
endpoint_name = 'roles'
service = RolesService(endpoint_name, backend=superdesk.get_backend())
RolesResource(endpoint_name, app=app, service=service)
superdesk.privilege(name='users', label='User Management', description='User can manage users.')
superdesk.privilege(name='roles', label='Roles Management', description='User can manage roles.')
# Registering with intrinsic privileges because: A user should be allowed to update their own profile.
superdesk.intrinsic_privilege(resource_name='users', method=['PATCH'])
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from .users import RolesResource, UsersResource
from .services import UsersService, DBUsersService, RolesService, is_admin # noqa
import superdesk
def init_app(app):
endpoint_name = 'users'
service = DBUsersService(endpoint_name, backend=superdesk.get_backend())
UsersResource(endpoint_name, app=app, service=service)
endpoint_name = 'roles'
service = RolesService(endpoint_name, backend=superdesk.get_backend())
RolesResource(endpoint_name, app=app, service=service)
superdesk.privilege(name='users', label='User Management', description='User can manage users.')
superdesk.privilege(name='roles', label='Roles Management', description='User can manage roles.')
# Registering with intrinsic privileges because: A user should be allowed to update their own profile.
superdesk.intrinsic_privilege(resource_name='users', method=['PATCH'])
|
Make UsersResource reusable for LDAP
|
Make UsersResource reusable for LDAP
|
Python
|
agpl-3.0
|
ioanpocol/superdesk-core,plamut/superdesk-core,akintolga/superdesk-core,ancafarcas/superdesk-core,ancafarcas/superdesk-core,nistormihai/superdesk-core,superdesk/superdesk-core,sivakuna-aap/superdesk-core,superdesk/superdesk-core,mdhaman/superdesk-core,petrjasek/superdesk-core,mdhaman/superdesk-core,mugurrus/superdesk-core,mugurrus/superdesk-core,mdhaman/superdesk-core,superdesk/superdesk-core,ioanpocol/superdesk-core,sivakuna-aap/superdesk-core,marwoodandrew/superdesk-core,plamut/superdesk-core,superdesk/superdesk-core,petrjasek/superdesk-core,ioanpocol/superdesk-core,marwoodandrew/superdesk-core,hlmnrmr/superdesk-core,akintolga/superdesk-core,nistormihai/superdesk-core,hlmnrmr/superdesk-core,mugurrus/superdesk-core,petrjasek/superdesk-core,petrjasek/superdesk-core
|
05e19922a5a0f7268ce1a34e25e5deb8e9a2f5d3
|
sfmtools.py
|
sfmtools.py
|
""" Utility functions for PhotoScan processing """
import os, sys
import PhotoScan
def align_and_clean_photos(chunk):
ncameras = len(chunk.cameras)
for frame in chunk.frames:
frame.matchPhotos()
chunk.alignCameras()
for camera in chunk.cameras:
if camera.transform is None:
chunk.remove(camera)
naligned = len(chunk.cameras)
print('%d/%d cameras aligned' % (naligned, ncameras))
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
for chunk in PhotoScan.app.document.chunks:
filename = ''.join([pathname, chunk.label.split(' '), '.', formatstring])
exported = chunk.exportDem(filename, format=formatstring, dx=resolution, dy=resolution)
if not exported:
print('Export failed:', chunk.label)
nexported -= 1
print('%d/%d DEMs exported' % (nexported, nchunks))
|
""" Utility functions for PhotoScan processing """
import os, sys
import PhotoScan
def align_and_clean_photos(chunk):
ncameras = len(chunk.cameras)
for frame in chunk.frames:
frame.matchPhotos()
chunk.alignCameras()
for camera in chunk.cameras:
if camera.transform is None:
chunk.remove(camera)
naligned = len(chunk.cameras)
print('%d/%d cameras aligned' % (naligned, ncameras))
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
if pathname[-1:] is not '/':
pathname = ''.join(pathname, '/')
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
for chunk in PhotoScan.app.document.chunks:
filename = ''.join([pathname, chunk.label.split(' '), '.', formatstring])
exported = chunk.exportDem(filename, format=formatstring, dx=resolution, dy=resolution)
if not exported:
print('Export failed:', chunk.label)
nexported -= 1
print('%d/%d DEMs exported' % (nexported, nchunks))
|
Check for trailing slash in path
|
Check for trailing slash in path
|
Python
|
mit
|
rmsare/sfmtools
|
0bd84e74a30806f1e317288aa5dee87b4c669790
|
shcol/config.py
|
shcol/config.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2015, Sebastian Linke
# Released under the Simplified BSD license
# (see LICENSE file for details).
"""
Constants that are used by `shcol` in many places. This is meant to modified (if
needed) only *before* running `shcol`, since most of these constants are only
read during initialization of the `shcol`-package.
"""
import logging
import os
import sys
ENCODING = 'utf-8'
ERROR_STREAM = sys.stderr
INPUT_STREAM = sys.stdin
LINE_WIDTH = None
LINESEP = '\n'
LOGGER = logging.getLogger('shol')
MAKE_UNIQUE = False
ON_WINDOWS = 'windows' in os.getenv('os', '').lower()
PY_VERSION = sys.version_info[:2]
SORT_ITEMS = False
SPACING = 2
STARTER = os.path.join('bin', 'shcol' + ('.bat' if ON_WINDOWS else ''))
TERMINAL_STREAM = sys.stdout
UNICODE_TYPE = type(u'')
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2015, Sebastian Linke
# Released under the Simplified BSD license
# (see LICENSE file for details).
"""
Constants that are used by `shcol` in many places. This is meant to modified (if
needed) only *before* running `shcol`, since most of these constants are only
read during initialization of the `shcol`-package.
"""
import logging
import os
import sys
ERROR_STREAM = sys.stderr
INPUT_STREAM = sys.stdin
LINE_WIDTH = None
LINESEP = '\n'
LOGGER = logging.getLogger('shol')
MAKE_UNIQUE = False
ON_WINDOWS = 'windows' in os.getenv('os', '').lower()
PY_VERSION = sys.version_info[:2]
SORT_ITEMS = False
SPACING = 2
STARTER = os.path.join('bin', 'shcol' + ('.bat' if ON_WINDOWS else ''))
TERMINAL_STREAM = sys.stdout
UNICODE_TYPE = type(u'')
ENCODING = TERMINAL_STREAM.encoding or 'utf-8'
|
Use output stream's encoding (if any). Blindly using UTF-8 would break output on Windows terminals.
|
Use output stream's encoding (if any).
Blindly using UTF-8 would break output on Windows terminals.
|
Python
|
bsd-2-clause
|
seblin/shcol
|
49a275a268fba520252ee864c39934699c053d13
|
csunplugged/resources/views/barcode_checksum_poster.py
|
csunplugged/resources/views/barcode_checksum_poster.py
|
"""Module for generating Barcode Checksum Poster resource."""
from PIL import Image
from utils.retrieve_query_parameter import retrieve_query_parameter
def resource_image(request, resource):
"""Create a image for Barcode Checksum Poster resource.
Args:
request: HTTP request object (QueryDict).
resource: Object of resource data (Resource).
Returns:
A list of Pillow image objects.
"""
# Retrieve parameters
parameter_options = valid_options()
barcode_length = retrieve_query_parameter(request, "barcode_length", parameter_options["barcode_length"])
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image = Image.open(image_path.format(barcode_length))
return image
def subtitle(request, resource):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Args:
request: HTTP request object (QueryDict).
resource: Object of resource data (Resource).
Returns:
Text for subtitle (str).
"""
barcode_length = retrieve_query_parameter(request, "barcode_length")
paper_size = retrieve_query_parameter(request, "paper_size")
return "{} digits - {}".format(barcode_length, paper_size)
def valid_options():
"""Provide dictionary of all valid parameters.
This excludes the header text parameter.
Returns:
All valid options (dict).
"""
return {
"barcode_length": ["12", "13"],
"paper_size": ["a4", "letter"],
}
|
"""Module for generating Barcode Checksum Poster resource."""
from PIL import Image
from utils.retrieve_query_parameter import retrieve_query_parameter
def resource(request, resource):
"""Create a image for Barcode Checksum Poster resource.
Args:
request: HTTP request object (QueryDict).
resource: Object of resource data (Resource).
Returns:
A dictionary for the resource page.
"""
# Retrieve parameters
parameter_options = valid_options()
barcode_length = retrieve_query_parameter(request, "barcode_length", parameter_options["barcode_length"])
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image = Image.open(image_path.format(barcode_length))
return {"type": "image", "data": image}
def subtitle(request, resource):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Args:
request: HTTP request object (QueryDict).
resource: Object of resource data (Resource).
Returns:
Text for subtitle (str).
"""
barcode_length = retrieve_query_parameter(request, "barcode_length")
paper_size = retrieve_query_parameter(request, "paper_size")
return "{} digits - {}".format(barcode_length, paper_size)
def valid_options():
"""Provide dictionary of all valid parameters.
This excludes the header text parameter.
Returns:
All valid options (dict).
"""
return {
"barcode_length": ["12", "13"],
"paper_size": ["a4", "letter"],
}
|
Update barcode resource to new resource specification
|
Update barcode resource to new resource specification
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
12f3bb8c82b97496c79948d323f7076b6618293a
|
saleor/graphql/scalars.py
|
saleor/graphql/scalars.py
|
from graphene.types import Scalar
from graphql.language import ast
class AttributesFilterScalar(Scalar):
@staticmethod
def coerce_filter(value):
if isinstance(value, tuple) and len(value) == 2:
return ":".join(value)
serialize = coerce_filter
parse_value = coerce_filter
@staticmethod
def parse_literal(node):
if isinstance(node, ast.StringValue):
splitted = node.value.split(":")
if len(splitted) == 2:
return tuple(splitted)
|
from graphene.types import Scalar
from graphql.language import ast
class AttributesFilterScalar(Scalar):
@staticmethod
def parse_literal(node):
if isinstance(node, ast.StringValue):
splitted = node.value.split(":")
if len(splitted) == 2:
return tuple(splitted)
@staticmethod
def parse_value(value):
if isinstance(value, basestring):
splitted = value.split(":")
if len(splitted) == 2:
return tuple(splitted)
@staticmethod
def serialize(value):
if isinstance(value, tuple) and len(value) == 2:
return ":".join(value)
|
Fix parsing attributes filter values in GraphQL API
|
Fix parsing attributes filter values in GraphQL API
|
Python
|
bsd-3-clause
|
KenMutemi/saleor,KenMutemi/saleor,jreigel/saleor,itbabu/saleor,maferelo/saleor,maferelo/saleor,jreigel/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,car3oon/saleor,itbabu/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,car3oon/saleor,car3oon/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,itbabu/saleor,tfroehlich82/saleor,tfroehlich82/saleor,KenMutemi/saleor,mociepka/saleor,UITools/saleor,mociepka/saleor
|
5b8ff4276fbe92d5ccd5fa63fecccc5ff7d571a9
|
quokka/core/tests/test_models.py
|
quokka/core/tests/test_models.py
|
# coding: utf-8
from . import BaseTestCase
from ..models import Channel
class TestCoreModels(BaseTestCase):
def setUp(self):
# Create method was not returning the created object with
# the create() method
self.channel, new = Channel.objects.get_or_create(
title=u'Monkey Island',
description=u'The coolest pirate history ever',
)
def tearDown(self):
self.channel.delete()
def test_channel_fields(self):
self.assertEqual(self.channel.title, u'Monkey Island')
self.assertEqual(self.channel.slug, u'monkey-island')
self.assertEqual(self.channel.description,
u'The coolest pirate history ever')
|
# coding: utf-8
from . import BaseTestCase
from ..models import Channel
class TestChannel(BaseTestCase):
def setUp(self):
# Create method was not returning the created object with
# the create() method
self.parent, new = Channel.objects.get_or_create(
title=u'Father',
)
self.channel, new = Channel.objects.get_or_create(
title=u'Monkey Island',
description=u'The coolest pirate history ever',
parent=self.parent,
tags=['tag1', 'tag2'],
)
def tearDown(self):
self.channel.delete()
def test_channel_fields(self):
self.assertEqual(self.channel.title, u'Monkey Island')
self.assertEqual(self.channel.slug, u'monkey-island')
self.assertEqual(self.channel.long_slug, u'father/monkey-island')
self.assertEqual(self.channel.mpath, u',father,monkey-island,')
self.assertEqual(self.channel.description,
u'The coolest pirate history ever')
self.assertEqual(self.channel.tags, ['tag1', 'tag2'])
self.assertEqual(self.channel.parent, self.parent)
self.assertEqual(unicode(self.channel), u'father/monkey-island')
def test_get_ancestors(self):
self.assertEqual(list(self.channel.get_ancestors()), [self.channel,
self.parent])
def test_get_ancestors_slug(self):
self.assertEqual(self.channel.get_ancestors_slugs(),
[u'father/monkey-island', u'father'])
def test_get_children(self):
self.assertEqual(list(self.parent.get_children()), [self.channel])
def test_get_descendants(self):
self.assertEqual(list(self.parent.get_descendants()),
[self.parent, self.channel])
def test_absolute_urls(self):
self.assertEqual(self.channel.get_absolute_url(),
'/father/monkey-island/')
self.assertEqual(self.parent.get_absolute_url(),
'/father/')
def test_get_canonical_url(self):
self.assertEqual(self.channel.get_canonical_url(),
'/father/monkey-island/')
self.assertEqual(self.parent.get_canonical_url(),
'/father/')
|
Add more core tests / Rename test
|
Add more core tests / Rename test
|
Python
|
mit
|
romulocollopy/quokka,felipevolpone/quokka,lnick/quokka,ChengChiongWah/quokka,felipevolpone/quokka,wushuyi/quokka,wushuyi/quokka,cbeloni/quokka,felipevolpone/quokka,CoolCloud/quokka,ChengChiongWah/quokka,lnick/quokka,romulocollopy/quokka,Ckai1991/quokka,cbeloni/quokka,CoolCloud/quokka,alexandre/quokka,felipevolpone/quokka,fdumpling/quokka,fdumpling/quokka,romulocollopy/quokka,CoolCloud/quokka,maurobaraldi/quokka,maurobaraldi/quokka,romulocollopy/quokka,Ckai1991/quokka,fdumpling/quokka,cbeloni/quokka,ChengChiongWah/quokka,lnick/quokka,ChengChiongWah/quokka,Ckai1991/quokka,wushuyi/quokka,lnick/quokka,fdumpling/quokka,CoolCloud/quokka,alexandre/quokka,Ckai1991/quokka,maurobaraldi/quokka,maurobaraldi/quokka,wushuyi/quokka,cbeloni/quokka
|
3037562643bc1ddaf081a6fa9c757aed4101bb53
|
robots/urls.py
|
robots/urls.py
|
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns(
'robots.views',
url(r'^$', 'rules_list', name='robots_rule_list'),
)
|
from django.conf.urls import url
from robots.views import rules_list
urlpatterns = [
url(r'^$', rules_list, name='robots_rule_list'),
]
|
Fix warnings about URLconf in Django 1.9
|
Fix warnings about URLconf in Django 1.9
* django.conf.urls.patterns will be removed in Django 1.10
* Passing a dotted path and not a view function will be deprecated in
Django 1.10
|
Python
|
bsd-3-clause
|
jezdez/django-robots,jezdez/django-robots,jscott1971/django-robots,jscott1971/django-robots,jazzband/django-robots,jazzband/django-robots
|
76243416f36a932c16bee93cc753de3d71168f0b
|
manager/__init__.py
|
manager/__init__.py
|
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
db= SQLAlchemy(app)
login = LoginManager()
login.init_app(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
'bootswatch/sandstone/bootstrap.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
|
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
db= SQLAlchemy(app)
login = LoginManager()
login.init_app(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
'bootswatch/sandstone/bootstrap.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
from manager.models import users
|
Add user table to module init
|
Add user table to module init
|
Python
|
mit
|
hreeder/ignition,hreeder/ignition,hreeder/ignition
|
aba5ae9736b064fd1e3541de3ef36371d92fc875
|
RandoAmisSecours/admin.py
|
RandoAmisSecours/admin.py
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2013 Rémi Duraffort
# This file is part of RandoAmisSecours.
#
# RandoAmisSecours is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RandoAmisSecours is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with RandoAmisSecours. If not, see <http://www.gnu.org/licenses/>
from django.contrib import admin
from models import *
admin.site.register(FriendRequest)
admin.site.register(Outing)
admin.site.register(Profile)
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2013 Rémi Duraffort
# This file is part of RandoAmisSecours.
#
# RandoAmisSecours is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RandoAmisSecours is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with RandoAmisSecours. If not, see <http://www.gnu.org/licenses/>
from django.contrib import admin
from RandoAmisSecours.models import *
admin.site.register(FriendRequest)
admin.site.register(Outing)
admin.site.register(Profile)
|
Fix import when using python3.3
|
Fix import when using python3.3
|
Python
|
agpl-3.0
|
ivoire/RandoAmisSecours,ivoire/RandoAmisSecours
|
b9e1b34348444c4c51c8fd30ff7882552e21939b
|
temba/msgs/migrations/0094_auto_20170501_1641.py
|
temba/msgs/migrations/0094_auto_20170501_1641.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-01 16:41
from __future__ import unicode_literals
from django.db import migrations, models
import temba.utils.models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0093_populate_translatables'),
]
operations = [
migrations.RemoveField(
model_name='broadcast',
name='language_dict',
),
migrations.RemoveField(
model_name='broadcast',
name='media_dict',
),
migrations.RemoveField(
model_name='broadcast',
name='text',
),
migrations.AlterField(
model_name='broadcast',
name='base_language',
field=models.CharField(help_text='The language used to send this to contacts without a language',
max_length=4),
),
migrations.AlterField(
model_name='broadcast',
name='translations',
field=temba.utils.models.TranslatableField(help_text='The localized versions of the message text',
max_length=640, verbose_name='Translations'),
),
migrations.RenameField(
model_name='broadcast',
old_name='translations',
new_name='text',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-01 16:41
from __future__ import unicode_literals
from django.db import migrations, models
import temba.utils.models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0093_populate_translatables'),
]
operations = [
migrations.AlterField(
model_name='broadcast',
name='base_language',
field=models.CharField(help_text='The language used to send this to contacts without a language',
max_length=4),
),
migrations.AlterField(
model_name='broadcast',
name='translations',
field=temba.utils.models.TranslatableField(help_text='The localized versions of the message text',
max_length=640, verbose_name='Translations'),
),
migrations.RemoveField(
model_name='broadcast',
name='language_dict',
),
migrations.RemoveField(
model_name='broadcast',
name='media_dict',
),
migrations.RemoveField(
model_name='broadcast',
name='text',
),
migrations.RenameField(
model_name='broadcast',
old_name='translations',
new_name='text',
),
]
|
Change order of operations within migration so breaking schema changes come last
|
Change order of operations within migration so breaking schema changes come last
|
Python
|
agpl-3.0
|
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro
|
3c9da01bee3d157e344f3ad317b777b3977b2e4d
|
account_invoice_start_end_dates/models/account_move.py
|
account_invoice_start_end_dates/models/account_move.py
|
# Copyright 2019 Akretion France <https://akretion.com/>
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, models
from odoo.exceptions import UserError
class AccountMove(models.Model):
_inherit = "account.move"
def action_post(self):
for move in self:
for line in move.line_ids:
if line.product_id and line.product_id.must_have_dates:
if not line.start_date or not line.end_date:
raise UserError(
_(
"Missing Start Date and End Date for invoice "
"line with Product '%s' which has the "
"property 'Must Have Start and End Dates'."
)
% (line.product_id.display_name)
)
return super(AccountMove, self).action_post()
|
# Copyright 2019 Akretion France <https://akretion.com/>
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, models
from odoo.exceptions import UserError
class AccountMove(models.Model):
_inherit = "account.move"
def action_post(self):
for move in self:
for line in move.line_ids:
if line.product_id and line.product_id.must_have_dates:
if not line.start_date or not line.end_date:
raise UserError(
_(
"Missing Start Date and End Date for invoice "
"line with Product '%s' which has the "
"property 'Must Have Start and End Dates'."
)
% (line.product_id.display_name)
)
return super().action_post()
|
Use super() instead of super(classname, self)
|
Use super() instead of super(classname, self)
|
Python
|
agpl-3.0
|
OCA/account-closing,OCA/account-closing
|
d0919465239399f1ab6d65bbd8c42b1b9657ddb6
|
scripts/utils.py
|
scripts/utils.py
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
from collections import OrderedDict
import json
import os
json_load_params = {
'object_pairs_hook': OrderedDict
}
def patch_files_filter(files):
"""Filters all file names that can not be among the content of a patch."""
for i in files:
if i != 'files.js':
yield i
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file, **json_load_params)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
from collections import OrderedDict
import json
import os
json_load_params = {
'object_pairs_hook': OrderedDict
}
def patch_files_filter(files):
"""Filters all file names that can not be among the content of a patch."""
for i in files:
if i != 'files.js':
yield i
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn, json_kwargs=json_load_params):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file, **json_kwargs)
def json_store(fn, obj, dirs=[''], json_kwargs=json_dump_params):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_kwargs)
file.write('\n')
|
Allow to override the JSON loading and dumping parameters.
|
scripts: Allow to override the JSON loading and dumping parameters.
|
Python
|
unlicense
|
VBChunguk/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap,thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap
|
b0254fd4090c0d17f60a87f3fe5fe28c0382310e
|
scripts/v0to1.py
|
scripts/v0to1.py
|
#!/usr/bin/env python
import sys
import h5py
infiles = sys.argv[1:]
for infile in infiles:
with h5py.File(infile, 'a') as h5:
print(infile)
if 'format-version' in h5.attrs and h5.attrs['format-version'] < 1:
if 'matrix' in h5 and not 'pixels' in h5:
print('renaming matrix --> pixels')
h5['pixels'] = h5['matrix']
if 'scaffolds' in h5 and not 'chroms' in h5:
print('renaming scaffolds --> chroms')
h5['chroms'] = h5['scaffolds']
h5.attrs['format-version'] = 1
|
#!/usr/bin/env python
import sys
import h5py
infiles = sys.argv[1:]
for infile in infiles:
with h5py.File(infile, 'a') as h5:
print(infile)
if 'format-version' in h5.attrs and h5.attrs['format-version'] < 1:
if 'matrix' in h5 and not 'pixels' in h5:
print('renaming matrix --> pixels')
h5['pixels'] = h5['matrix']
del h5['matrix']
if 'scaffolds' in h5 and not 'chroms' in h5:
print('renaming scaffolds --> chroms')
h5['chroms'] = h5['scaffolds']
del h5['scaffolds']
h5.attrs['format-version'] = 1
|
Drop old names from v0
|
Drop old names from v0
|
Python
|
bsd-3-clause
|
mirnylab/cooler
|
43350965e171e6a3bfd89af3dd192ab5c9281b3a
|
vumi/blinkenlights/tests/test_message20110818.py
|
vumi/blinkenlights/tests/test_message20110818.py
|
from twisted.trial.unittest import TestCase
import vumi.blinkenlights.message20110818 as message
import time
class TestMessage(TestCase):
def test_to_dict(self):
now = time.time()
datapoint = ("vumi.w1.a_metric", now, 1.5)
msg = message.MetricMessage()
msg.append(datapoint)
self.assertEqual(msg.to_dict(), {
'datapoints': [datapoint],
})
def test_from_dict(self):
now = time.time()
datapoint = ("vumi.w1.a_metric", now, 1.5)
msgdict = {"datapoints": [datapoint]}
msg = message.MetricMessage.from_dict(msgdict)
self.assertEqual(msg._datapoints, [datapoint])
|
from twisted.trial.unittest import TestCase
import vumi.blinkenlights.message20110818 as message
import time
class TestMessage(TestCase):
def test_to_dict(self):
now = time.time()
datapoint = ("vumi.w1.a_metric", now, 1.5)
msg = message.MetricMessage()
msg.append(datapoint)
self.assertEqual(msg.to_dict(), {
'datapoints': [datapoint],
})
def test_from_dict(self):
now = time.time()
datapoint = ("vumi.w1.a_metric", now, 1.5)
msgdict = {"datapoints": [datapoint]}
msg = message.MetricMessage.from_dict(msgdict)
self.assertEqual(msg._datapoints, [datapoint])
def test_extend(self):
now = time.time()
datapoint = ("vumi.w1.a_metric", now, 1.5)
msg = message.MetricMessage()
msg.extend([datapoint, datapoint, datapoint])
self.assertEqual(msg._datapoints, [
datapoint, datapoint, datapoint])
|
Add test for extend method.
|
Add test for extend method.
|
Python
|
bsd-3-clause
|
TouK/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi,TouK/vumi,harrissoerja/vumi,harrissoerja/vumi
|
159d09e18dc3b10b7ba3c104a2761f300d50ff28
|
organizer/models.py
|
organizer/models.py
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField()
link = models.URLField()
startup = models.ForeignKey(Startup)
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
Add options to NewsLink model fields.
|
Ch03: Add options to NewsLink model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Verbose name documentation:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#verbose-name
https://docs.djangoproject.com/en/1.8/topics/db/models/#verbose-field-names
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
The 255 character limit of the URLField is based on RFC 3986.
https://tools.ietf.org/html/rfc3986
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
761fbb68f72ff8f425ad40670ea908b4959d3292
|
specchio/main.py
|
specchio/main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
from watchdog.observers import Observer
from specchio.handlers import SpecchioEventHandler
from specchio.utils import init_logger, logger
def main():
"""Main function for specchio
Example: specchio test/ user@host:test/
:return: None
"""
if len(sys.argv) == 3:
src_path = sys.argv[1].strip()
dst_ssh, dst_path = sys.argv[2].strip().split(":")
event_handler = SpecchioEventHandler(
src_path=src_path, dst_ssh=dst_path, dst_path=dst_path
)
init_logger()
logger.info("Initialize Specchio")
observer = Observer()
observer.schedule(event_handler, src_path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
else:
print """Specchio is a tool that can help you rsync your file,
it use `.gitignore` in git to discern which file is ignored.
Usage: specchio src/ user@host:dst"""
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
from watchdog.observers import Observer
from specchio.handlers import SpecchioEventHandler
from specchio.utils import init_logger, logger
def main():
"""Main function for specchio
Example: specchio test/ user@host:test/
:return: None
"""
if len(sys.argv) == 3:
src_path = sys.argv[1].strip()
dst_ssh, dst_path = sys.argv[2].strip().split(":")
event_handler = SpecchioEventHandler(
src_path=src_path, dst_ssh=dst_path, dst_path=dst_path
)
init_logger()
logger.info("Initialize Specchio")
observer = Observer()
observer.schedule(event_handler, src_path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
else:
print """Usage: specchio src/ user@host:dst/"""
|
Fix the output when there is wrong usage
|
Fix the output when there is wrong usage
|
Python
|
mit
|
brickgao/specchio
|
34960807eac1818a8167ff015e941c42be8827da
|
checkenv.py
|
checkenv.py
|
from colorama import Fore
from pkgutil import iter_modules
def check_import(packagename):
"""
Checks that a package is present. Returns true if it is available, and
false if not available.
"""
if packagename in (name for _, name, _ in iter_modules()):
return True
else:
return False
packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml',
'pandas_summary', 'environment_kernels', 'hypothesis']
try:
for pkg in packages:
assert check_import(pkg)
print(Fore.GREEN + 'All packages found; environment checks passed.')
except AssertionError:
print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.")
|
from colorama import Fore, Style
from pkgutil import iter_modules
def check_import(packagename):
"""
Checks that a package is present. Returns true if it is available, and
false if not available.
"""
if packagename in (name for _, name, _ in iter_modules()):
return True
else:
return False
packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml',
'pandas_summary', 'environment_kernels', 'hypothesis']
try:
for pkg in packages:
assert check_import(pkg)
print(Fore.GREEN + 'All packages found; environment checks passed.')
except AssertionError:
print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.")
Style.RESET_ALL
|
Reset colors at the end
|
Reset colors at the end
|
Python
|
mit
|
ericmjl/data-testing-tutorial,ericmjl/data-testing-tutorial
|
dfa752590c944fc07253c01c3d99b640a46dae1d
|
jinja2_time/jinja2_time.py
|
jinja2_time/jinja2_time.py
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
datetime_format='%Y-%m-%d',
)
def _now(self, timezone, datetime_format):
datetime_format = datetime_format or self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(datetime_format='%Y-%m-%d')
def _datetime(self, timezone, operator, offset, datetime_format):
d = arrow.now(timezone)
# Parse replace kwargs from offset and include operator
replace_params = {}
for param in offset.split(','):
interval, value = param.split('=')
replace_params[interval] = float(operator + value)
d = d.replace(**replace_params)
if datetime_format is None:
datetime_format = self.environment.datetime_format
return d.strftime(datetime_format)
def _now(self, timezone, datetime_format):
if datetime_format is None:
datetime_format = self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
node = parser.parse_expression()
if parser.stream.skip_if('comma'):
datetime_format = parser.parse_expression()
else:
datetime_format = nodes.Const(None)
if isinstance(node, nodes.Add):
call_method = self.call_method(
'_datetime',
[node.left, nodes.Const('+'), node.right, datetime_format],
lineno=lineno,
)
elif isinstance(node, nodes.Sub):
call_method = self.call_method(
'_datetime',
[node.left, nodes.Const('-'), node.right, datetime_format],
lineno=lineno,
)
else:
call_method = self.call_method(
'_now',
[node, datetime_format],
lineno=lineno,
)
return nodes.Output([call_method], lineno=lineno)
|
Implement parser method for optional offset
|
Implement parser method for optional offset
|
Python
|
mit
|
hackebrot/jinja2-time
|
d68f28581cd3c3f57f7c41adbd65676887a51136
|
opps/channels/tests/test_forms.py
|
opps/channels/tests/test_forms.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from opps.channels.models import Channel
from opps.channels.forms import ChannelAdminForm
class ChannelFormTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.parent = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
def test_init(self):
"""
Test successful init without data
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(isinstance(form.instance, Channel))
self.assertEqual(form.instance.pk, self.parent.pk)
self.assertEqual(int(form.fields['slug'].widget.attrs['maxlength']), 150)
def test_readonly_slug(self):
"""
Check readonly field slug
"""
form = ChannelAdminForm(instance=self.parent)
self.assertTrue(form.fields['slug'].widget.attrs['readonly'])
form_2 = ChannelAdminForm()
self.assertNotIn('readonly', form_2.fields['slug'].widget.attrs)
|
Add test check readonly field slug of channel
|
Add test check readonly field slug of channel
|
Python
|
mit
|
jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps
|
b97115679929dfe4f69618f756850617f265048f
|
service/pixelated/config/site.py
|
service/pixelated/config/site.py
|
from twisted.web.server import Site, Request
class AddCSPHeaderRequest(Request):
CSP_HEADER_VALUES = "default-src 'self'; style-src 'self' 'unsafe-inline'"
def process(self):
self.setHeader('Content-Security-Policy', self.CSP_HEADER_VALUES)
self.setHeader('X-Content-Security-Policy', self.CSP_HEADER_VALUES)
self.setHeader('X-Webkit-CSP', self.CSP_HEADER_VALUES)
self.setHeader('X-Frame-Options', 'SAMEORIGIN')
self.setHeader('X-XSS-Protection', '1; mode=block')
self.setHeader('X-Content-Type-Options', 'nosniff')
if self.isSecure():
self.setHeader('Strict-Transport-Security', 'max-age=31536000; includeSubDomains')
Request.process(self)
class PixelatedSite(Site):
requestFactory = AddCSPHeaderRequest
@classmethod
def enable_csp_requests(cls):
cls.requestFactory = AddCSPHeaderRequest
@classmethod
def disable_csp_requests(cls):
cls.requestFactory = Site.requestFactory
|
from twisted.web.server import Site, Request
class AddSecurityHeadersRequest(Request):
CSP_HEADER_VALUES = "default-src 'self'; style-src 'self' 'unsafe-inline'"
def process(self):
self.setHeader('Content-Security-Policy', self.CSP_HEADER_VALUES)
self.setHeader('X-Content-Security-Policy', self.CSP_HEADER_VALUES)
self.setHeader('X-Webkit-CSP', self.CSP_HEADER_VALUES)
self.setHeader('X-Frame-Options', 'SAMEORIGIN')
self.setHeader('X-XSS-Protection', '1; mode=block')
self.setHeader('X-Content-Type-Options', 'nosniff')
if self.isSecure():
self.setHeader('Strict-Transport-Security', 'max-age=31536000; includeSubDomains')
Request.process(self)
class PixelatedSite(Site):
requestFactory = AddSecurityHeadersRequest
@classmethod
def enable_csp_requests(cls):
cls.requestFactory = AddSecurityHeadersRequest
@classmethod
def disable_csp_requests(cls):
cls.requestFactory = Site.requestFactory
|
Rename class to match intent
|
Rename class to match intent
|
Python
|
agpl-3.0
|
pixelated-project/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated/pixelated-user-agent
|
4b245b9a859552adb9c19fafc4bdfab5780782f2
|
d1_common_python/src/d1_common/__init__.py
|
d1_common_python/src/d1_common/__init__.py
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""d1_common
Shared code for DataONE Python libraries
"""
__version__ = "2.1.0"
__all__ = [
'const',
'exceptions',
'upload',
'xmlrunner',
'types.exceptions',
'types.dataoneTypes',
'types.dataoneErrors',
'ext.mimeparser',
]
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""d1_common
Shared code for DataONE Python libraries
"""
__version__ = "2.1.0"
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
Add logging NullHandler to prevent "no handler found" errors
|
Add logging NullHandler to prevent "no handler found" errors
This fixes the issue where "no handler found" errors would be printed by
the library if library clients did not set up logging.
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
af8a96e08029e2dc746cfa1ecbd7a6d02be1c374
|
InvenTree/company/forms.py
|
InvenTree/company/forms.py
|
"""
Django Forms for interacting with Company app
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from InvenTree.forms import HelperForm
from .models import Company
from .models import SupplierPart
from .models import SupplierPriceBreak
class EditCompanyForm(HelperForm):
""" Form for editing a Company object """
class Meta:
model = Company
fields = [
'name',
'description',
'website',
'address',
'phone',
'email',
'contact',
'is_customer',
'is_supplier',
'notes'
]
class CompanyImageForm(HelperForm):
""" Form for uploading a Company image """
class Meta:
model = Company
fields = [
'image'
]
class EditSupplierPartForm(HelperForm):
""" Form for editing a SupplierPart object """
class Meta:
model = SupplierPart
fields = [
'part',
'supplier',
'SKU',
'description',
'manufacturer',
'MPN',
'URL',
'note',
'base_cost',
'multiple',
'packaging',
'lead_time'
]
class EditPriceBreakForm(HelperForm):
""" Form for creating / editing a supplier price break """
class Meta:
model = SupplierPriceBreak
fields = [
'part',
'quantity',
'cost'
]
|
"""
Django Forms for interacting with Company app
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from InvenTree.forms import HelperForm
from .models import Company
from .models import SupplierPart
from .models import SupplierPriceBreak
class EditCompanyForm(HelperForm):
""" Form for editing a Company object """
class Meta:
model = Company
fields = [
'name',
'description',
'website',
'address',
'phone',
'email',
'contact',
'is_customer',
'is_supplier',
'notes'
]
class CompanyImageForm(HelperForm):
""" Form for uploading a Company image """
class Meta:
model = Company
fields = [
'image'
]
class EditSupplierPartForm(HelperForm):
""" Form for editing a SupplierPart object """
class Meta:
model = SupplierPart
fields = [
'part',
'supplier',
'SKU',
'description',
'manufacturer',
'MPN',
'URL',
'note',
'base_cost',
'multiple',
'packaging',
'lead_time'
]
class EditPriceBreakForm(HelperForm):
""" Form for creating / editing a supplier price break """
class Meta:
model = SupplierPriceBreak
fields = [
'part',
'quantity',
'cost',
'currency',
]
|
Add option to edit currency
|
Add option to edit currency
|
Python
|
mit
|
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree
|
824c46b7d3953e1933a72def4edf058a577487ea
|
byceps/services/attendance/transfer/models.py
|
byceps/services/attendance/transfer/models.py
|
"""
byceps.services.attendance.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from attr import attrib, attrs
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
@attrs(slots=True) # Not yet frozen b/c models are not immutable.
class Attendee:
user = attrib(type=User)
seat = attrib(type=Seat)
checked_in = attrib(type=bool)
|
"""
byceps.services.attendance.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
@dataclass # Not yet frozen b/c models are not immutable.
class Attendee:
user: User
seat: Seat
checked_in: bool
|
Use `dataclass` instead of `attr` for attendance model
|
Use `dataclass` instead of `attr` for attendance model
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
7d52ee6030b2e59a6b6cb6dce78686e8d551281b
|
examples/horizontal_boxplot.py
|
examples/horizontal_boxplot.py
|
"""
Horizontal boxplot with observations
====================================
_thumb: .7, .37
"""
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="ticks")
# Initialize the figure
f, ax = plt.subplots(figsize=(7, 6))
ax.set_xscale("log")
# Load the example planets dataset
planets = sns.load_dataset("planets")
# Plot the orbital period with horizontal boxes
sns.boxplot(x="distance", y="method", data=planets,
whis=np.inf, palette="vlag")
# Add in points to show each observation
sns.swarmplot(x="distance", y="method", data=planets,
size=2, color=".3", linewidth=0)
# Make the quantitative axis logarithmic
ax.xaxis.grid(True)
ax.set(ylabel="")
sns.despine(trim=True, left=True)
|
"""
Horizontal boxplot with observations
====================================
_thumb: .7, .37
"""
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="ticks")
# Initialize the figure with a logarithmic x axis
f, ax = plt.subplots(figsize=(7, 6))
ax.set_xscale("log")
# Load the example planets dataset
planets = sns.load_dataset("planets")
# Plot the orbital period with horizontal boxes
sns.boxplot(x="distance", y="method", data=planets,
whis=np.inf, palette="vlag")
# Add in points to show each observation
sns.swarmplot(x="distance", y="method", data=planets,
size=2, color=".3", linewidth=0)
# Tweak the visual presentation
ax.xaxis.grid(True)
ax.set(ylabel="")
sns.despine(trim=True, left=True)
|
Fix comments in horizontal boxplot example
|
Fix comments in horizontal boxplot example
|
Python
|
bsd-3-clause
|
mwaskom/seaborn,phobson/seaborn,arokem/seaborn,lukauskas/seaborn,anntzer/seaborn,arokem/seaborn,sauliusl/seaborn,mwaskom/seaborn,phobson/seaborn,petebachant/seaborn,anntzer/seaborn,lukauskas/seaborn
|
ca4be3892ec0c1b5bc337a9fae10503b5f7f765a
|
bika/lims/browser/validation.py
|
bika/lims/browser/validation.py
|
from Products.Archetypes.browser.validation import InlineValidationView as _IVV
from Acquisition import aq_inner
from Products.CMFCore.utils import getToolByName
import json
SKIP_VALIDATION_FIELDTYPES = ('image', 'file', 'datetime', 'reference')
class InlineValidationView(_IVV):
def __call__(self, uid, fname, value):
'''Validate a given field. Return any error messages.
'''
res = {'errmsg': ''}
if value not in self.request:
return json.dumps(res)
rc = getToolByName(aq_inner(self.context), 'reference_catalog')
instance = rc.lookupObject(uid)
# make sure this works for portal_factory items
if instance is None:
instance = self.context
field = instance.getField(fname)
if field and field.type not in SKIP_VALIDATION_FIELDTYPES:
return super(InlineValidationView, self).__call__(uid, fname, value)
self.request.response.setHeader('Content-Type', 'application/json')
return json.dumps(res)
|
from Products.Archetypes.browser.validation import InlineValidationView as _IVV
from Acquisition import aq_inner
from Products.CMFCore.utils import getToolByName
import json
SKIP_VALIDATION_FIELDTYPES = ('image', 'file', 'datetime', 'reference')
class InlineValidationView(_IVV):
def __call__(self, uid, fname, value):
'''Validate a given field. Return any error messages.
'''
res = {'errmsg': ''}
rc = getToolByName(aq_inner(self.context), 'reference_catalog')
instance = rc.lookupObject(uid)
# make sure this works for portal_factory items
if instance is None:
instance = self.context
field = instance.getField(fname)
if field and field.type not in SKIP_VALIDATION_FIELDTYPES:
return super(InlineValidationView, self).__call__(uid, fname, value)
self.request.response.setHeader('Content-Type', 'application/json')
return json.dumps(res)
|
Revert "Inline Validation fails silently if request is malformed"
|
Revert "Inline Validation fails silently if request is malformed"
This reverts commit 723e4eb603568d3a60190d8d292cc335a74b79d5.
|
Python
|
agpl-3.0
|
labsanmartin/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,rockfruit/bika.lims,veroc/Bika-LIMS,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS
|
6949339cda8c60b74341f854d9a00aa8abbfe4d5
|
test/level_sets_measure_test.py
|
test/level_sets_measure_test.py
|
__author__ = 'intsco'
import cPickle
from engine.pyIMS.image_measures.level_sets_measure import measure_of_chaos_dict
from unittest import TestCase
import unittest
from os.path import join, realpath, dirname
class MeasureOfChaosDictTest(TestCase):
def setUp(self):
self.rows, self.cols = 65, 65
self.input_fn = join(dirname(realpath(__file__)), 'data/measure_of_chaos_dict_test_input.pkl')
with open(self.input_fn) as f:
self.input_data = cPickle.load(f)
def testMOCBoundaries(self):
for img_d in self.input_data:
if len(img_d) > 0:
assert 0 <= measure_of_chaos_dict(img_d, self.rows, self.cols) <= 1
def testEmptyInput(self):
# print measure_of_chaos_dict({}, self.cols, self.cols)
self.assertRaises(Exception, measure_of_chaos_dict, {}, self.cols, self.cols)
self.assertRaises(Exception, measure_of_chaos_dict, None, self.cols, self.cols)
self.assertRaises(Exception, measure_of_chaos_dict, (), self.cols, self.cols)
self.assertRaises(Exception, measure_of_chaos_dict, [], self.cols, self.cols)
def testMaxInputDictKeyVal(self):
max_key_val = self.rows * self.cols - 1
self.assertRaises(Exception, measure_of_chaos_dict, {max_key_val + 10: 1}, self.rows, self.cols)
if __name__ == '__main__':
unittest.main()
|
import unittest
import numpy as np
from ..image_measures.level_sets_measure import measure_of_chaos, _nan_to_zero
class MeasureOfChaosTest(unittest.TestCase):
def test__nan_to_zero_with_ge_zero(self):
ids = (
np.zeros(1),
np.ones(range(1, 10)),
np.arange(1024 * 1024)
)
for id_ in ids:
before = id_.copy()
_nan_to_zero(id_)
np.testing.assert_array_equal(before, id_)
def test__nan_to_zero_with_negatives(self):
negs = (
np.array([-1]),
-np.arange(1, 1024 * 1024 + 1).reshape((1024, 1024)),
np.linspace(0, -20, 201)
)
for neg in negs:
sh = neg.shape
_nan_to_zero(neg)
np.testing.assert_array_equal(neg, np.zeros(sh))
if __name__ == '__main__':
unittest.main()
|
Implement first tests for _nan_to_zero
|
Implement first tests for _nan_to_zero
- Remove outdated dict test class
- write some test methods
|
Python
|
apache-2.0
|
andy-d-palmer/pyIMS,alexandrovteam/pyImagingMSpec
|
12f4b26d98c3ba765a11efeca3b646b5e9d0a0fb
|
running.py
|
running.py
|
import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser
t = '1984-06-02T19:05:00.000Z'
# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at
def convert_time_to_unix(time):
parsed_time = dateutil.parser.parse(time)
time_in_unix = parsed_time.strftime('%s')
return time_in_unix
unix_run_time = convert_time_to_unix(run_time)
darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/42.3601,-71.0589," + unix_run_time + "?exclude=currently,flags").read()
print(darksky_request)
class getWeather:
def __init__(self, date, time):
self.date = date
self.time = time
def goodbye(self, date):
print("my name is " + date)
|
import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser
t = '1984-06-02T19:05:00.000Z'
# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at
def convert_time_to_unix(time):
parsed_time = dateutil.parser.parse(time)
time_in_unix = parsed_time.strftime('%s')
return time_in_unix
unix_run_time = convert_time_to_unix(run_time)
darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/" + str(tcx.latitude) + "," + str(tcx.longitude) + "," + unix_run_time + "?exclude=currently,flags").read()
print(darksky_request)
class getWeather:
def __init__(self, date, time):
self.date = date
self.time = time
def goodbye(self, date):
print("my name is " + date)
|
Use TCX coordinates to fetch local weather
|
Use TCX coordinates to fetch local weather
|
Python
|
mit
|
briansuhr/slowburn
|
5a7b13e26e94d03bc92600d9c24b3b2e8bc4321c
|
dstar_lib/aprsis.py
|
dstar_lib/aprsis.py
|
import aprslib
import logging
import nmea
class AprsIS:
logger = None
def __init__(self, callsign, password):
self.logger = logging.getLogger(__name__)
self.aprs_connection = aprslib.IS(callsign, password)
self.aprs_connection.connect()
def send_beacon(self, callsign, sfx, message, gpgga):
position = nmea.gpgga_get_position(gpgga)
aprs_frame = callsign+'>APK'+sfx+',DSTAR*:!'+position['lat'] + position['lat_coord'] + '\\'+position['long']+position['long_coord']+'a/A=' + position['height'] + message
self.logger.info("Sending APRS Frame: " + aprs_frame)
try:
self.aprs_connection.sendall(aprs.Frame(aprs_frame))
except:
self.logger.info("Invalid aprs frame: " + aprs_frame)
|
import aprslib
import logging
import nmea
class AprsIS:
logger = None
def __init__(self, callsign, password):
self.logger = logging.getLogger(__name__)
self.aprs_connection = aprslib.IS(callsign, password)
self.aprs_connection.connect()
def send_beacon(self, callsign, sfx, message, gpgga):
position = nmea.gpgga_get_position(gpgga)
aprs_frame = callsign+'>APK'+sfx+',DSTAR*:!'+position['lat'] + position['lat_coord'] + '\\'+position['long']+position['long_coord']+'a/A=' + position['height'] + message
self.logger.info("Sending APRS Frame: " + aprs_frame)
try:
self.aprs_connection.sendall(aprs_frame)
self.logger.info("APRS Beacon sent!")
except Exception, e:
self.logger.info("Invalid aprs frame [%s] - %s" % (aprs_frame, str(e))
|
Fix an issue with the new aprslib
|
Fix an issue with the new aprslib
|
Python
|
mit
|
elielsardanons/dstar_sniffer,elielsardanons/dstar_sniffer
|
132b148ca8701ee867b7a08432a3595a213ce470
|
cedexis/radar/tests/test_cli.py
|
cedexis/radar/tests/test_cli.py
|
import unittest
import types
import cedexis.radar.cli
class TestCommandLineInterface(unittest.TestCase):
def test_main(self):
self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType))
|
import unittest
from unittest.mock import patch, MagicMock, call
import types
from pprint import pprint
import cedexis.radar.cli
class TestCommandLineInterface(unittest.TestCase):
def test_main(self):
self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType))
@patch('logging.getLogger')
@patch('argparse.ArgumentParser')
@patch('cedexis.radar.run_session')
@patch('time.sleep')
def test_config_file_with_cli_params(self, mock_sleep, mock_run_session,
mock_ArgumentParser, mock_getLogger):
args = make_default_args()
args.continuous = True
args.max_runs = 3
args.repeat_delay = 60
mock_parser = MagicMock()
mock_parser.parse_args.return_value = args
mock_ArgumentParser.return_value = mock_parser
cedexis.radar.cli.main()
# Assert
# print(mock_run_session.call_args)
self.assertEqual(
mock_run_session.call_args_list,
[
call(1, 12345, 'sandbox', False, None, None, False, None),
call(1, 12345, 'sandbox', False, None, None, False, None),
call(1, 12345, 'sandbox', False, None, None, False, None)
])
# print(mock_sleep.call_args)
self.assertEqual(mock_sleep.call_args_list, [call(60),call(60)])
def make_default_args():
args = lambda: None
args.zone_id = 1
args.customer_id = 12345
args.api_key = 'sandbox'
args.secure = False
args.config_file = 'some config file path'
args.tracer = None
args.provider_id = None
args.report_server = None
args.max_runs = None
args.repeat_delay = None
return args
|
Add unit test for overrides
|
Add unit test for overrides
|
Python
|
mit
|
cedexis/cedexis.radar
|
70f167d3d5a7540fb3521b82ec70bf7c6db09a99
|
tests/test_contrib.py
|
tests/test_contrib.py
|
from __future__ import print_function
import cooler.contrib.higlass as cch
import h5py
import os.path as op
testdir = op.realpath(op.dirname(__file__))
def test_data_retrieval():
data_file = op.join(testdir, 'data', 'dixon2012-h1hesc-hindiii-allreps-filtered.1000kb.multires.cool')
f = h5py.File(data_file, 'r')
data = cch.get_data(f, 0, 0, 3276799999, 0, 3276799999)
assert(data['genome_start1'].iloc[0] == 0.)
assert(data['genome_start2'].iloc[0] == 0.)
data = cch.get_data(f, 4, 0, 256000000, 0, 256000000)
assert(data['genome_start1'].iloc[-1] > 255000000)
assert(data['genome_start1'].iloc[-1] < 256000000)
#print("ge1", data['genome_end1'])
|
from __future__ import print_function
import cooler.contrib.higlass as cch
import cooler.contrib.recursive_agg_onefile as ra
import h5py
import os.path as op
testdir = op.realpath(op.dirname(__file__))
def test_data_retrieval():
data_file = op.join(testdir, 'data', 'dixon2012-h1hesc-hindiii-allreps-filtered.1000kb.multires.cool')
f = h5py.File(data_file, 'r')
data = cch.get_data(f, 0, 0, 3276799999, 0, 3276799999)
assert(data['genome_start1'].iloc[0] == 0.)
assert(data['genome_start2'].iloc[0] == 0.)
data = cch.get_data(f, 4, 0, 256000000, 0, 256000000)
assert(data['genome_start1'].iloc[-1] > 255000000)
assert(data['genome_start1'].iloc[-1] < 256000000)
#print("ge1", data['genome_end1'])
def test_recursive_agg():
infile = op.join(testdir, 'data', 'GM12878-MboI-matrix.2000kb.cool')
outfile = '/tmp/bla.cool'
chunksize = int(10e6)
n_zooms = 2
n_cpus = 8
ra.aggregate(infile, outfile, n_zooms, chunksize, n_cpus)
ra.balance(outfile, n_zooms, chunksize, n_cpus)
|
Add test for recursive agg
|
Add test for recursive agg
|
Python
|
bsd-3-clause
|
mirnylab/cooler
|
27a0165d45f52114ebb65d59cf8e4f84f3232881
|
tests/test_lattice.py
|
tests/test_lattice.py
|
import rml.lattice
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
|
import rml.lattice
import rml.element
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice()
element_length = 1.5
e = rml.element.Element('dummy', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() = element_length
|
Test simple lattice with one element.
|
Test simple lattice with one element.
|
Python
|
apache-2.0
|
willrogers/pml,willrogers/pml,razvanvasile/RML
|
7591189527ad05be62a561afadf70b217d725b1f
|
scrapi/processing/osf/__init__.py
|
scrapi/processing/osf/__init__.py
|
from scrapi.processing.osf import crud
from scrapi.processing.osf import collision
from scrapi.processing.base import BaseProcessor
class OSFProcessor(BaseProcessor):
NAME = 'osf'
def process_normalized(self, raw_doc, normalized):
if crud.is_event(normalized):
crud.create_event(normalized)
return
report_hash = collision.generate_report_hash_list(normalized)
resource_hash = collision.generate_resource_hash_list(normalized)
report = collision.detect_collisions(report_hash)
resource = collision.detect_collisions(resource_hash)
if not resource:
resource = crud.create_resource(normalized, resource_hash)
elif not crud.is_claimed(resource):
crud.update_resource(normalized, resource)
if not report:
crud.create_report(normalized, resource, report_hash)
else:
crud.update_report(normalized, report)
|
from scrapi.processing.osf import crud
from scrapi.processing.osf import collision
from scrapi.processing.base import BaseProcessor
class OSFProcessor(BaseProcessor):
NAME = 'osf'
def process_normalized(self, raw_doc, normalized):
if crud.is_event(normalized):
crud.create_event(normalized)
return
normalized['collisionCategory'] = crud.get_collision_cat(normalized['source'])
report_norm = normalized
resource_norm = crud.clean_report(normalized)
report_hash = collision.generate_report_hash_list(report_norm)
resource_hash = collision.generate_resource_hash_list(resource_norm)
report = collision.detect_collisions(report_hash)
resource = collision.detect_collisions(resource_hash)
if not resource:
resource = crud.create_resource(resource_norm, resource_hash)
elif not crud.is_claimed(resource):
crud.update_resource(resource_norm, resource)
if not report:
crud.create_report(report_norm, resource, report_hash)
else:
crud.update_report(report_norm, report)
|
Make sure to keep certain report fields out of resources
|
Make sure to keep certain report fields out of resources
|
Python
|
apache-2.0
|
alexgarciac/scrapi,erinspace/scrapi,icereval/scrapi,mehanig/scrapi,felliott/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,mehanig/scrapi,ostwald/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,jeffreyliu3230/scrapi
|
166e0980fc20b507763395297e8a67c7dcb3a3da
|
examples/neural_network_inference/onnx_converter/small_example.py
|
examples/neural_network_inference/onnx_converter/small_example.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from onnx_coreml import convert
# Step 0 - (a) Define ML Model
class small_model(nn.Module):
def __init__(self):
super(small_model, self).__init__()
self.fc1 = nn.Linear(768, 256)
self.fc2 = nn.Linear(256, 10)
def forward(self, x):
y = F.relu(self.fc1(x))
y = F.softmax(self.fc2(y))
return y
# Step 0 - (b) Create model or Load from dist
model = small_model()
dummy_input = torch.randn(768)
# Step 1 - PyTorch to ONNX model
torch.onnx.export(model, dummy_input, './small_model.onnx')
# Step 2 - ONNX to CoreML model
mlmodel = convert(model='./small_model.onnx', target_ios='13')
# Save converted CoreML model
mlmodel.save('small_model.mlmodel')
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from onnx_coreml import convert
# Step 0 - (a) Define ML Model
class small_model(nn.Module):
def __init__(self):
super(small_model, self).__init__()
self.fc1 = nn.Linear(768, 256)
self.fc2 = nn.Linear(256, 10)
def forward(self, x):
y = F.relu(self.fc1(x))
y = F.softmax(self.fc2(y))
return y
# Step 0 - (b) Create model or Load from dist
model = small_model()
dummy_input = torch.randn(768)
# Step 1 - PyTorch to ONNX model
torch.onnx.export(model, dummy_input, './small_model.onnx')
# Step 2 - ONNX to CoreML model
mlmodel = convert(model='./small_model.onnx', minimum_ios_deployment_target='13')
# Save converted CoreML model
mlmodel.save('small_model.mlmodel')
|
Update the example with latest interface
|
Update the example with latest interface
Update the example with the latest interface of the function "convert"
|
Python
|
bsd-3-clause
|
apple/coremltools,apple/coremltools,apple/coremltools,apple/coremltools
|
967ea6b083437cbe6c87b173567981e1ae41fefc
|
project/wsgi/tomodev.py
|
project/wsgi/tomodev.py
|
"""
WSGI config for project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings.tomodev")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.handlers.wsgi import WSGIHandler
application = WSGIHandler()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
"""
WSGI config for project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import site
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings.tomodev")
base_path = os.path.abspath("../..")
site.addsitedir(base_path)
site.addsitedir(os.path.join(base_path, 'virtualenv/lib/python2.6/site-packages'))
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.handlers.wsgi import WSGIHandler
application = WSGIHandler()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Set Python path inside WSGI application
|
Set Python path inside WSGI application
|
Python
|
agpl-3.0
|
ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo
|
2198ae847cb257d210c043bb08d52206df749a24
|
Jeeves/jeeves.py
|
Jeeves/jeeves.py
|
import discord
import asyncio
import random
import configparser
import json
def RunBot(config_file):
config = configparser.ConfigParser()
config.read(config_file)
client = discord.Client()
@client.event
async def on_ready():
print('------')
print('Logged in as %s (%s)' % (client.user.name, client.user.id))
print('------')
@client.event
async def on_message(message):
if message.channel.id == "123410749765713920":
if message.content.startswith('-knugen'):
await client.send_message(message.channel, random.choice(knugenLinks))
client.run(config['Bot']['token'])
if __name__ == "__main__":
print("Please use the start.py script in the root directory instead")
|
import discord
import asyncio
import random
import configparser
import json
def RunBot(config_file):
config = configparser.ConfigParser()
config.read(config_file)
client = discord.Client()
@client.event
async def on_ready():
print('------')
print('Logged in as %s (%s)' % (client.user.name, client.user.id))
print('------')
@client.event
async def on_message(message):
if message.channel.id == "123410749765713920":
if message.content.startswith('-knugen'):
with open('config/data.json') as data_file:
data = json.loads(data_file.read())
await client.send_message(message.channel, random.choice(data['knugenLinks']))
client.run(config['Bot']['token'])
if __name__ == "__main__":
print("Please use the start.py script in the root directory instead")
|
Change knugen command to use array in config/data.json instead of hardcoded array.
|
Change knugen command to use array in config/data.json instead of hardcoded array.
|
Python
|
mit
|
havokoc/MyManJeeves
|
74728ef66fd13bfd7ad01f930114c2375e752d13
|
examples/skel.py
|
examples/skel.py
|
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
print spyral.widgets
spyral.widgets.register('Testing', 'a')
print spyral.widgets.Testing(1,2,3)
print spyral.widgets.TextInputWidget
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
Remove some accidentally committed code.
|
Remove some accidentally committed code.
|
Python
|
lgpl-2.1
|
platipy/spyral
|
b881247b182a45774ed494146904dcf2b1826d5e
|
sla_bot.py
|
sla_bot.py
|
import discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
await client.send_message(message.channel, 'Hello world!')
client.run('paste_token_here')
|
import asyncio
import discord
from discord.ext import commands
bot = commands.Bot(command_prefix='!', description='test')
@bot.event
async def on_ready():
print('Logged in as')
print(bot.user.name)
print(bot.user.id)
print('------')
@bot.command()
async def test():
await bot.say('Hello World!')
bot.run('paste_token_here')
|
Switch to Bot object instead of Client
|
Switch to Bot object instead of Client
Better reflects examples in discord.py project
|
Python
|
mit
|
EsqWiggles/SLA-bot,EsqWiggles/SLA-bot
|
e4d746ba6c5b842529c9dafb31a90bdd31fee687
|
performanceplatform/__init__.py
|
performanceplatform/__init__.py
|
# Namespace package: https://docs.python.org/2/library/pkgutil.html
try:
import pkg_resources
pkg_resources.declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
__import__('pkg_resources').declare_namespace(__name__)
|
Fix namespacing for PyPi installs
|
Fix namespacing for PyPi installs
See https://github.com/alphagov/performanceplatform-client/pull/5
|
Python
|
mit
|
alphagov/performanceplatform-collector,alphagov/performanceplatform-collector,alphagov/performanceplatform-collector
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.