commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
cc7253020251bc96d7d7f22a991b094a60bbc104
|
startServers.py
|
startServers.py
|
import sys
import time
import subprocess
import psutil
def startServer(command):
if sys.platform.startswith('win'):
return psutil.Popen(command, creationflags=subprocess.CREATE_NEW_CONSOLE)
else:
linuxCommand = 'xterm -hold -e "%s"' % command
return psutil.Popen(linuxCommand, shell=True)
def main(baseCommand, startingPort, count):
servers = {}
for i in range(1,count + 1):
command = baseCommand + ' ' + str(startingPort + i)
servers[i] = {
'command': command,
'process': startServer(command),
}
time.sleep(3)
while True:
for i, server in servers.iteritems():
if not server['process'].is_running():
servers[i]['process'] = startServer(servers[i]['command'])
if __name__ == '__main__':
print sys.argv
main(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]))
|
import sys
import time
import subprocess
def main(baseCommand, startingPort, count):
procs = []
for i in range(1,count + 1):
command = baseCommand + ' ' + str(startingPort + i)
if sys.platform.startswith('win'):
process = subprocess.Popen(command, creationflags=subprocess.CREATE_NEW_CONSOLE)
else:
linuxCommand = 'xterm -hold -e "%s"' % command
process = subprocess.Popen(linuxCommand, shell=True)
procs.append(process)
time.sleep(3)
try:
input('Enter to exit from Python script...')
except:
pass
if __name__ == '__main__':
print sys.argv
main(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]))
|
Revert "keep servers running for fun and profit"
|
Revert "keep servers running for fun and profit"
This reverts commit c574ba41fb609db7a2c75340363fe1a1dcc31399.
|
Python
|
mit
|
IngenuityEngine/coren_proxy,IngenuityEngine/coren_proxy
|
6ac172843dc78ae6af87f00b260ef70f8965b3b7
|
start_server.py
|
start_server.py
|
#!/usr/bin/env python3
# tsuserver3, an Attorney Online server
#
# Copyright (C) 2016 argoneus <argoneuscze@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from server.tsuserver import TsuServer3
# Idiotproof setup
def check_pyyaml():
try:
import yaml
except ModuleNotFoundError:
print("Couldn't import PyYAML. Installing it for you...")
import pip
pip.main(["install", "--user", "pyyaml"])
def main():
server = TsuServer3()
server.start()
if __name__ == '__main__':
check_pyyaml()
main()
|
#!/usr/bin/env python3
# tsuserver3, an Attorney Online server
#
# Copyright (C) 2016 argoneus <argoneuscze@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Idiotproof setup
def check_pyyaml():
try:
import yaml
except ModuleNotFoundError:
print("Couldn't import PyYAML. Installing it for you...")
try:
import pip
pip.main(["install", "--user", "pyyaml"])
except ModuleNotFoundError:
print("Couldn't install it for you, because you don't have pip.")
def main():
from server.tsuserver import TsuServer3
server = TsuServer3()
server.start()
if __name__ == '__main__':
check_pyyaml()
main()
|
Handle case where pip is not found
|
Handle case where pip is not found
|
Python
|
agpl-3.0
|
Attorney-Online-Engineering-Task-Force/tsuserver3,Mariomagistr/tsuserver3
|
e54fa97cb44557454655efd24380da5223a1c5ae
|
tests/random_object_id/random_object_id_test.py
|
tests/random_object_id/random_object_id_test.py
|
import contextlib
import re
import sys
import mock
from six.moves import cStringIO
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
|
import contextlib
import re
import sys
import mock
import six
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
old_out = sys.stdout
try:
sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
|
Change how StringIO is imported
|
Change how StringIO is imported
|
Python
|
mit
|
mxr/random-object-id
|
80da397eb882622bc0bf1641bc4ee4e5813cf655
|
lopypi/pypi.py
|
lopypi/pypi.py
|
import re
from urlparse import urlsplit
from bs4 import BeautifulSoup
import requests
from urlparse import urldefrag, urljoin
class PyPI(object):
def __init__(self, index="http://pypi.python.org/simple"):
self._index = index
def list_packages(self):
resp = requests.get(self._index)
soup = BeautifulSoup(package_list)
for link in soup.find_all("a"):
yield link.text
def list_files(self, package):
package_uri = "%s/%s/" % (self._index, package)
resp = requests.get(package_uri)
soup = BeautifulSoup(resp.content)
for link in soup.find_all("a"):
# filter out links to home and download pages
if "rel" in link.attrs:
continue
filename = link.text
# filter out links to other web pages
if urlsplit(filename).scheme:
continue
file_uri, frag = urldefrag(link.attrs['href'])
file_uri = urljoin(package_uri, file_uri)
mo = re.match(r"^md5=([a-fA-F0-9]{32})$", frag)
md5 = mo.group(1) if mo else ""
yield dict(filename=filename,
remote_uri=file_uri,
md5=md5)
|
import re
from urlparse import urlsplit
from bs4 import BeautifulSoup
import requests
from urlparse import urldefrag, urljoin
class PyPI(object):
def __init__(self, index="http://pypi.python.org/simple"):
self._index = index
def list_packages(self):
resp = requests.get(self._index)
soup = BeautifulSoup(resp.content)
for link in soup.find_all("a"):
yield link.text
def list_files(self, package):
package_uri = "%s/%s/" % (self._index, package)
resp = requests.get(package_uri)
soup = BeautifulSoup(resp.content)
for link in soup.find_all("a"):
# filter out links to home and download pages
if "rel" in link.attrs:
continue
filename = link.text
# filter out links to other web pages
if urlsplit(filename).scheme:
continue
file_uri, frag = urldefrag(link.attrs['href'])
file_uri = urljoin(package_uri, file_uri)
mo = re.match(r"^md5=([a-fA-F0-9]{32})$", frag)
md5 = mo.group(1) if mo else ""
yield dict(filename=filename,
remote_uri=file_uri,
md5=md5)
|
Replace reference to previously factored out variable
|
Replace reference to previously factored out variable
|
Python
|
mit
|
bwhmather/LoPyPI,bwhmather/LoPyPI
|
4b6ae0eb113689515ba38e85c33a2ba40e58a163
|
src/minerva/storage/trend/engine.py
|
src/minerva/storage/trend/engine.py
|
from contextlib import closing
from operator import contains
from functools import partial
from minerva.util import k, identity
from minerva.directory import EntityType
from minerva.storage import Engine
from minerva.storage.trend import TableTrendStore
class TrendEngine(Engine):
@staticmethod
def store_cmd(package, filter_package=k(identity)):
"""
Return a function to bind a data source to the store command.
:param package: A DataPackageBase subclass instance
:param filter_package: (TableTrendStore) -> (DataPackage) -> DataPackage
:return: function that binds a data source to the store command
:rtype: (data_source) -> (conn) -> None
"""
def bind_data_source(data_source):
def execute(conn):
entity_type_name = package.entity_type_name()
with closing(conn.cursor()) as cursor:
entity_type = EntityType.get_by_name(entity_type_name)(
cursor
)
trend_store = TableTrendStore.get(
data_source, entity_type, package.granularity
)(cursor)
trend_store.store(
filter_package(trend_store)(package)
).run(conn)
return execute
return bind_data_source
def filter_existing_trends(trend_store):
existing_trend_names = {trend.name for trend in trend_store.trends}
def f(package):
return package.filter_trends(partial(contains, existing_trend_names))
return f
|
from contextlib import closing
from operator import contains
from functools import partial
from minerva.util import k, identity
from minerva.directory import EntityType
from minerva.storage import Engine
from minerva.storage.trend import TableTrendStore
class TrendEngine(Engine):
@staticmethod
def store_cmd(package, transform_package=k(identity)):
"""
Return a function to bind a data source to the store command.
:param package: A DataPackageBase subclass instance
:param transform_package: (TableTrendStore) -> (DataPackage) -> DataPackage
:return: function that binds a data source to the store command
:rtype: (data_source) -> (conn) -> None
"""
def bind_data_source(data_source):
def execute(conn):
entity_type_name = package.entity_type_name()
with closing(conn.cursor()) as cursor:
entity_type = EntityType.get_by_name(entity_type_name)(
cursor
)
trend_store = TableTrendStore.get(
data_source, entity_type, package.granularity
)(cursor)
trend_store.store(
transform_package(trend_store)(package)
).run(conn)
return execute
return bind_data_source
def filter_existing_trends(trend_store):
existing_trend_names = {trend.name for trend in trend_store.trends}
def f(package):
return package.filter_trends(partial(contains, existing_trend_names))
return f
|
Rename parameter filter_package to a more appropriate transform_package
|
Rename parameter filter_package to a more appropriate transform_package
|
Python
|
agpl-3.0
|
hendrikx-itc/minerva,hendrikx-itc/minerva
|
1db5ed3fa2fbb724c480bbf52c1d40c390dc857f
|
examples/example1.py
|
examples/example1.py
|
import fte.encoder
regex = '^(a|b)+$'
fixed_slice = 512
input_plaintext = 'test'
fteObj = fte.encoder.RegexEncoder(regex, fixed_slice)
ciphertext = fteObj.encode(input_plaintext)
output_plaintext = fteObj.decode(ciphertext)
print 'regex='+regex
print 'fixed_slice='+str(fixed_slice)
print 'input_plaintext='+input_plaintext
print 'ciphertext='+ciphertext[:16]+'...'+ciphertext[-16:]
print 'output_plaintext='+output_plaintext[0]
|
import regex2dfa
import fte.encoder
regex = '^(a|b)+$'
fixed_slice = 512
input_plaintext = 'test'
dfa = regex2dfa.regex2dfa(regex)
fteObj = fte.encoder.DfaEncoder(dfa, fixed_slice)
ciphertext = fteObj.encode(input_plaintext)
[output_plaintext, remainder] = fteObj.decode(ciphertext)
print 'input_plaintext='+input_plaintext
print 'ciphertext='+ciphertext[:16]+'...'+ciphertext[-16:]
print 'output_plaintext='+output_plaintext
|
Update example code to represent current FTE API and usage.
|
Update example code to represent current FTE API and usage.
|
Python
|
apache-2.0
|
kpdyer/libfte,kpdyer/libfte
|
f38b117316039042f3c00c73bbb7ceaeb0f2e6e1
|
src/python/pants/core_tasks/noop.py
|
src/python/pants/core_tasks/noop.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.task.noop_exec_task import NoopExecTask
class NoopCompile(NoopExecTask):
"""A no-op that provides a product type that can be used to force scheduling."""
@classmethod
def product_types(cls):
return ['ran_compile']
class NoopTest(NoopExecTask):
"""A no-op that provides a product type that can be used to force scheduling."""
@classmethod
def product_types(cls):
return ['ran_tests']
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.task.noop_exec_task import NoopExecTask
class NoopCompile(NoopExecTask):
"""A no-op that provides a product type that can be used to force scheduling.
:API: public
"""
@classmethod
def product_types(cls):
return ['ran_compile']
class NoopTest(NoopExecTask):
"""A no-op that provides a product type that can be used to force scheduling.
:API: public
"""
@classmethod
def product_types(cls):
return ['ran_tests']
|
Add public api markers for core_tasks
|
Add public api markers for core_tasks
The following modules were reviewed and all api's were left as private. As
far as I can tell these modules are not currently used by plugins.
* pants.core_tasks.bash_completion.py
* pants.core_tasks.changed_target_tasks.py
* pants.core_tasks.clean.py
* pants.core_tasks.deferred_sources_mapper.py
* pants.core_tasks.explain_options_task.py
* pants.core_tasks.invalidate.py
* pants.core_tasks.list_goals.py
* pants.core_tasks.pantsd_kill.py
* pants.core_tasks.register.py
* pants.core_tasks.reporting_server_kill.py
* pants.core_tasks.reporting_server_run.py
* pants.core_tasks.roots.py
* pants.core_tasks.run_prep_command.py
* pants.core_tasks.targets_help.py
* pants.core_tasks.what_changed.py
Testing Done:
CI green: https://travis-ci.org/pantsbuild/pants/builds/111549646
Bugs closed: 2710, 2955
Reviewed at https://rbcommons.com/s/twitter/r/3490/
|
Python
|
apache-2.0
|
manasapte/pants,twitter/pants,fkorotkov/pants,jsirois/pants,pantsbuild/pants,peiyuwang/pants,pombredanne/pants,cevaris/pants,fkorotkov/pants,mateor/pants,baroquebobcat/pants,gmalmquist/pants,peiyuwang/pants,fkorotkov/pants,wisechengyi/pants,fkorotkov/pants,UnrememberMe/pants,wisechengyi/pants,ericzundel/pants,ericzundel/pants,landism/pants,ericzundel/pants,cevaris/pants,tdyas/pants,pombredanne/pants,jsirois/pants,gmalmquist/pants,kwlzn/pants,baroquebobcat/pants,gmalmquist/pants,15Dkatz/pants,manasapte/pants,peiyuwang/pants,peiyuwang/pants,twitter/pants,ericzundel/pants,pombredanne/pants,kwlzn/pants,baroquebobcat/pants,jsirois/pants,baroquebobcat/pants,twitter/pants,pombredanne/pants,foursquare/pants,baroquebobcat/pants,lahosken/pants,mateor/pants,tdyas/pants,lahosken/pants,UnrememberMe/pants,fkorotkov/pants,manasapte/pants,foursquare/pants,twitter/pants,lahosken/pants,benjyw/pants,lahosken/pants,wisechengyi/pants,ity/pants,pombredanne/pants,foursquare/pants,ity/pants,mateor/pants,landism/pants,fkorotkov/pants,gmalmquist/pants,ericzundel/pants,wisechengyi/pants,lahosken/pants,mateor/pants,UnrememberMe/pants,15Dkatz/pants,15Dkatz/pants,kwlzn/pants,foursquare/pants,dbentley/pants,dbentley/pants,wisechengyi/pants,pantsbuild/pants,tdyas/pants,benjyw/pants,lahosken/pants,wisechengyi/pants,landism/pants,cevaris/pants,pantsbuild/pants,gmalmquist/pants,UnrememberMe/pants,cevaris/pants,lahosken/pants,benjyw/pants,tdyas/pants,pombredanne/pants,ericzundel/pants,tdyas/pants,baroquebobcat/pants,dbentley/pants,UnrememberMe/pants,benjyw/pants,mateor/pants,UnrememberMe/pants,benjyw/pants,baroquebobcat/pants,ity/pants,foursquare/pants,pantsbuild/pants,manasapte/pants,landism/pants,kwlzn/pants,landism/pants,pantsbuild/pants,landism/pants,twitter/pants,ity/pants,wisechengyi/pants,15Dkatz/pants,peiyuwang/pants,kwlzn/pants,gmalmquist/pants,twitter/pants,cevaris/pants,foursquare/pants,dbentley/pants,twitter/pants,ity/pants,benjyw/pants,mateor/pants,foursquare/pants,manasapte/pants,pantsbuild/pants,cevaris/pants,dbentley/pants,cevaris/pants,fkorotkov/pants,lahosken/pants,mateor/pants,ity/pants,UnrememberMe/pants,15Dkatz/pants,ericzundel/pants,pantsbuild/pants,twitter/pants,UnrememberMe/pants,peiyuwang/pants,dbentley/pants,gmalmquist/pants,ericzundel/pants,landism/pants,peiyuwang/pants,manasapte/pants,tdyas/pants,foursquare/pants,wisechengyi/pants,tdyas/pants,dbentley/pants,landism/pants,benjyw/pants,manasapte/pants,mateor/pants,15Dkatz/pants,ity/pants,peiyuwang/pants,tdyas/pants,UnrememberMe/pants,wisechengyi/pants,baroquebobcat/pants,twitter/pants,fkorotkov/pants,kwlzn/pants,15Dkatz/pants,15Dkatz/pants,baroquebobcat/pants,foursquare/pants,kwlzn/pants,tdyas/pants,pombredanne/pants
|
a4184edab35890673b8b6a67e68a73e6ab7f0b89
|
tests/runtests.py
|
tests/runtests.py
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', )
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', extra_tests)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
Add back in running of extra tests
|
Add back in running of extra tests
|
Python
|
bsd-3-clause
|
jmagnusson/wtforms,cklein/wtforms,Xender/wtforms,pawl/wtforms,Aaron1992/wtforms,pawl/wtforms,subyraman/wtforms,wtforms/wtforms,skytreader/wtforms,hsum/wtforms,Aaron1992/wtforms,crast/wtforms
|
52ddec80be8e2c90807a7b07425a6f260c9e86e0
|
src/zeit/retresco/tests/test_tag.py
|
src/zeit/retresco/tests/test_tag.py
|
# coding: utf8
import unittest
class TagTest(unittest.TestCase):
"""Testing ..tag.Tag."""
def test_from_code_generates_a_tag_object_equal_to_its_source(self):
from ..tag import Tag
tag = Tag(u'Vipraschül', 'Person')
self.assertEqual(tag, Tag.from_code(tag.code))
|
# coding: utf8
import zeit.cms.interfaces
import zeit.retresco.testing
class TagTest(zeit.retresco.testing.FunctionalTestCase):
"""Testing ..tag.Tag."""
def test_from_code_generates_a_tag_object_equal_to_its_source(self):
from ..tag import Tag
tag = Tag(u'Vipraschül', 'Person')
self.assertEqual(tag, Tag.from_code(tag.code))
def test_uniqueId_from_tag_can_be_adapted_to_tag(self):
from ..tag import Tag
tag = Tag(u'Vipraschül', 'Person')
self.assertEqual(tag, zeit.cms.interfaces.ICMSContent(tag.uniqueId))
|
Test that adapter in `zeit.cms` handles unicode escaped uniqueId correctly.
|
ZON-3199: Test that adapter in `zeit.cms` handles unicode escaped uniqueId correctly.
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.retresco
|
0b6e0e09abd007dad504693ca8cae4c7b0222765
|
gamernews/apps/threadedcomments/views.py
|
gamernews/apps/threadedcomments/views.py
|
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
comment_id, blob_id = request.GET['c']
comment = Comment.objects.get( pk=comment_id )
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
|
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
blob_id = request.GET['c']
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
|
Remove name, url and email from comment form
|
Remove name, url and email from comment form
|
Python
|
mit
|
underlost/GamerNews,underlost/GamerNews
|
716c0c4ab08266ce42f65afc0cd4bd8e0ed191e0
|
table_parser.py
|
table_parser.py
|
#!/usr/bin/python
import sys
import latex_table
import table_to_file
if __name__ == "__main__":
# Parse arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("input", help="the LaTeX input file to be parsed")
# Add two mutually exclusive arguments: grouped/ungrouped
parser_grouped = parser.add_mutually_exclusive_group()
parser_grouped.add_argument("--grouped", help="group systematics", action="store_true")
parser_grouped.add_argument("--ungrouped", help="do *not* group systematics", action="store_false")
# Add optional arguments for file output
parser.add_argument("--json", dest="json_file", help="output a JSON file")
parser.add_argument("--tex", dest="tex_file", help="output a LaTeX file")
args = parser.parse_args()
if args.grouped:
print "Grouping systematics is not yet implemented"
sys.exit(1)
table = latex_table.readFromLatex(args.input)
rows = table.getRows()
columns = table.getColumns()
dict = table.getEntries()
if args.json_file:
table_to_file.storeJSON(table, args.json_file)
if args.tex_file:
print "Printing to TEX file not yet implemented"
table_to_file.storeTEX(table, args.tex_file)
sys.exit(1)
for row in rows:
for column in columns:
print "%s %s %s" % (row, column, dict[row][column])
|
#!/usr/bin/python
import sys
import latex_table
import table_to_file
if __name__ == "__main__":
# Parse arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("input", help="the LaTeX input file to be parsed")
# Add two mutually exclusive arguments: grouped/ungrouped
parser_grouped = parser.add_mutually_exclusive_group()
parser_grouped.add_argument("--grouped", help="group systematics", action="store_true")
parser_grouped.add_argument("--ungrouped", help="do *not* group systematics", action="store_false")
# Add optional arguments for file output
parser.add_argument("--json", dest="json_file", help="output a JSON file")
parser.add_argument("--tex", dest="tex_file", help="output a LaTeX file")
args = parser.parse_args()
if args.grouped:
print "Grouping systematics is not yet implemented"
sys.exit(1)
table = latex_table.readFromLatex(args.input)
rows = table.getRows()
columns = table.getColumns()
dict = table.getEntries()
if args.json_file:
table_to_file.storeJSON(table, args.json_file)
if args.tex_file:
table_to_file.storeTEX(table, args.tex_file)
for row in rows:
for column in columns:
print "%s %s %s" % (row, column, dict[row][column])
|
Remove exit statement and error message for tex output
|
Remove exit statement and error message for tex output
|
Python
|
mit
|
knutzk/parse_latex_table
|
a0fa76a7aeb3dba3b358abeab95fc03a90a0e8b6
|
members/views.py
|
members/views.py
|
from django.shortcuts import render
def homepage(request):
return render(request, "index.html", {})
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
|
Add view for searching users and return json format
|
Add view for searching users and return json format
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
4e31496e1d9e0b2af2ce8aa4bb58baa86f352521
|
flake8_docstrings.py
|
flake8_docstrings.py
|
# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import io
import pep8
import pep257
__version__ = '0.2.2'
class pep257Checker(object):
"""flake8 needs a class to check python file."""
name = 'pep257'
version = __version__
STDIN_NAMES = set(['stdin', '-', '(none)', None])
def __init__(self, tree, filename='(none)', builtins=None):
self.tree = tree
self.filename = filename
self.source = self.load_source()
self.checker = pep257.PEP257Checker()
def run(self):
"""Use directly check() api from pep257."""
for error in self.checker.check_source(self.source, self.filename):
# Ignore AllError, Environment error.
if isinstance(error, pep257.Error):
# NOTE(sigmavirus24): Fixes GitLab#3
message = '%s %s' % (error.code, error.short_desc)
yield (error.line, 0, message, type(self))
def load_source(self):
if self.filename in self.STDIN_NAMES:
self.filename = 'stdin'
self.source = pep8.stdin_get_value()
else:
with io.open(self.filename, encoding='utf-8') as fd:
self.source = fd.read()
|
# -*- coding: utf-8 -*-
"""Implementation of pep257 integration with Flake8.
pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import io
import pep8
import pep257
__version__ = '0.2.2'
class pep257Checker(object):
"""Flake8 needs a class to check python file."""
name = 'flake8-docstrings'
version = __version__ + ', pep257: {0}'.format(pep257.__version__)
STDIN_NAMES = set(['stdin', '-', '(none)', None])
def __init__(self, tree, filename='(none)', builtins=None):
"""Placeholder."""
self.tree = tree
self.filename = filename
self.source = self.load_source()
self.checker = pep257.PEP257Checker()
def run(self):
"""Use directly check() api from pep257."""
for error in self.checker.check_source(self.source, self.filename):
# Ignore AllError, Environment error.
if isinstance(error, pep257.Error):
# NOTE(sigmavirus24): Fixes GitLab#3
message = '%s %s' % (error.code, error.short_desc)
yield (error.line, 0, message, type(self))
def load_source(self):
"""Load the source for the specified file."""
if self.filename in self.STDIN_NAMES:
self.filename = 'stdin'
self.source = pep8.stdin_get_value()
else:
with io.open(self.filename, encoding='utf-8') as fd:
self.source = fd.read()
|
Fix up a couple of minor issues
|
Fix up a couple of minor issues
|
Python
|
mit
|
PyCQA/flake8-docstrings
|
e5ef9ca9c089ce1da4ff363d0c5a5090785ae0c5
|
test_scraper.py
|
test_scraper.py
|
from scraper import search_CL
from scraper import read_search_results
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
from scraper import search_CL
from scraper import read_search_results
from scraper import parse_source
from scraper import extract_listings
import bs4
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100, maxAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_parse_source():
test_body, test_encoding = read_search_results()
test_parse = parse_source(test_body, test_encoding)
assert isinstance(test_parse, bs4.BeautifulSoup)
def test_extract_listings():
test_body, test_encoding = read_search_results()
test_parse = parse_source(test_body, test_encoding)
for row in extract_listings(test_parse):
print type(row)
assert isinstance(row, bs4.element.Tag)
|
Add test for extract listings that asserts each listing is a bs4.element.Tag
|
Add test for extract listings that asserts each listing is a bs4.element.Tag
|
Python
|
mit
|
jefrailey/basic-scraper
|
aca158817c21b8baeeb64d7290d61c32a79124f9
|
tests/test_heat_demand.py
|
tests/test_heat_demand.py
|
"""
Test the electricity demand
SPDX-FileCopyrightText: Uwe Krien <krien@uni-bremen.de>
SPDX-FileCopyrightText: Patrik Schönfeldt
SPDX-License-Identifier: MIT
"""
import numpy as np
from demandlib.examples import heat_demand_example
def test_heat_example():
"""Test the results of the heat example."""
ann_demands_per_type = {'efh': 25000,
'mfh': 80000,
'ghd': 140000}
demands = heat_demand_example.heat_example(
ann_demands_per_type=ann_demands_per_type,
testmode=True).sum()
for key in ann_demands_per_type:
assert np.isclose(demands[key], ann_demands_per_type[key])
|
"""
Test the electricity demand
SPDX-FileCopyrightText: Uwe Krien <krien@uni-bremen.de>
SPDX-FileCopyrightText: Patrik Schönfeldt
SPDX-License-Identifier: MIT
"""
import numpy as np
from demandlib.examples import heat_demand_example
def test_heat_example():
"""Test the results of the heat example."""
ann_demands_per_type = {'efh': 25000,
'mfh': 80000,
'ghd': 140000}
demands = heat_demand_example.heat_example(
ann_demands_per_type=ann_demands_per_type,
testmode=True).sum()
for key in ann_demands_per_type:
assert np.isclose(demands[key], ann_demands_per_type[key], rtol=1e-04)
|
Increase tollerance for heat demand test
|
Increase tollerance for heat demand test
|
Python
|
mit
|
oemof/demandlib
|
0060a32b58c7769ac97ac894cbaf6a2eaa1b389f
|
mmiisort/main.py
|
mmiisort/main.py
|
from isort import SortImports
import mothermayi.colors
import mothermayi.errors
import mothermayi.files
def plugin():
return {
'name' : 'isort',
'pre-commit' : pre_commit,
}
def do_sort(filename):
results = SortImports(filename)
return getattr(results, 'in_lines', None) and results.in_lines != results.out_lines
def get_status(had_changes):
return mothermayi.colors.red('unsorted') if had_changes else mothermayi.colors.green('sorted')
def pre_commit(config, staged):
python_files = list(mothermayi.files.python_source(staged))
if not python_files:
return
changes = [do_sort(filename) for filename in python_files]
messages = [get_status(had_change) for had_change in changes]
lines = [" {0:<30} ... {1:<10}".format(filename, message) for filename, message in zip(python_files, messages)]
result = "\n".join(lines)
if any(changes):
raise mothermayi.errors.FailHook(result)
return result
|
from isort import SortImports
import mothermayi.colors
import mothermayi.errors
import mothermayi.files
def plugin():
return {
'name' : 'isort',
'pre-commit' : pre_commit,
}
def do_sort(filename):
results = SortImports(filename, check=True)
return results.incorrectly_sorted
def get_status(had_changes):
return mothermayi.colors.red('unsorted') if had_changes else mothermayi.colors.green('sorted')
def pre_commit(config, staged):
python_files = list(mothermayi.files.python_source(staged))
if not python_files:
return
changes = [do_sort(filename) for filename in python_files]
messages = [get_status(had_change) for had_change in changes]
lines = [" {0:<30} ... {1:<10}".format(filename, message) for filename, message in zip(python_files, messages)]
result = "\n".join(lines)
if any(changes):
raise mothermayi.errors.FailHook(result)
return result
|
Leverage isort's check mode to make our logic simpler
|
Leverage isort's check mode to make our logic simpler
This avoids having to check for in_lines or compare against the
out_lines by just asking for a check and using the results
|
Python
|
mit
|
EliRibble/mothermayi-isort
|
ed76f648f60f96216377e4f12fea7043eaed904b
|
tests/helpers.py
|
tests/helpers.py
|
import virtualbox
def list_machines():
vbox = virtualbox.vb_get_manager()
for machine in vbox.getArray(vbox, "Machines"):
print "Machine '%s' logs in '%s'" % (
machine.name,
machine.logFolder
)
|
import unittest
import virtualbox
class VirtualboxTestCase(unittest.TestCase):
def setUp(self):
self.vbox = virtualbox.vb_get_manager()
def assertMachineExists(self, name, msg=None):
try:
self.vbox.findMachine(name)
except Exception as e:
if msg:
self.fail(msg)
else:
self.fail(e.message)
def assertMachineDoesNotExist(self, name, msg=None):
self.assertRaisesRegexp(Exception, "Could not find a registered machine", self.vbox.findMachine, name)
def list_machines():
vbox = virtualbox.vb_get_manager()
for machine in vbox.getArray(vbox, "Machines"):
print "Machine '%s' logs in '%s'" % (
machine.name,
machine.logFolder
)
|
Create a basic VirtualBoxTestCase with helper assertions
|
Create a basic VirtualBoxTestCase with helper assertions
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
990a3266739e5a4d763dd585f7cb722c0fe2b0f5
|
astroplpython/function/statistic/Maximum.py
|
astroplpython/function/statistic/Maximum.py
|
'''
Created on Feb 6, 2015
@author: thomas
'''
class Maximum (object):
@staticmethod
def calculate (measurement_list):
import numpy as np
'''
Find the maximum measurement value for any list of
measured values.
'''
x = []
for val in measurement_list:
x.append(val.x)
return measurement_list[np.argmax(x)]
def __init__(self, ndarray):
'''
Constructor
'''
|
'''
Created on Feb 6, 2015
@author: thomas
'''
class Maximum (object):
@staticmethod
def calculate (measurement_list):
import numpy as np
'''
Find the maximum measurement value for any list of
measured values.
'''
x = []
for val in measurement_list:
x.append(val.x)
return measurement_list[np.argmax(x)]
|
Remove initializer..this is a 'static' class which
|
Remove initializer..this is a 'static' class which
we are using functional approach with, e.g. no instances
if we can help it..
|
Python
|
mit
|
brianthomas/astroplpython,brianthomas/astroplpython
|
554ef995f8c4ba42d00482480bf291bac2fd96e1
|
utils/database.py
|
utils/database.py
|
import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [__import__("time").time(), ""]
}
failed = False
try:
user = self[channel][nick]
except KeyError:
failed = True
self[channel][nick] = temp
if not failed:
del temp['seen']
user.update(temp)
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
|
import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [__import__("time").time(), ""]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
|
Reduce code to a simpler form that checks if a user is already in the DB
|
Reduce code to a simpler form that checks if a user is already in the DB
|
Python
|
mit
|
wolfy1339/Python-IRC-Bot
|
12f3cc403f6ba0be957d1fb18253fb7529009764
|
moss/plotting.py
|
moss/plotting.py
|
import matplotlib.pyplot as plt
def grid_axes_labels(f, xlabel=None, ylabel=None, **kws):
axes = f.axes
plt.setp(axes.flat, xlabel="", ylabel="")
if xlabel is not None:
for ax in axes[-1]:
ax.set_xlabel(xlabel, **kws)
if ylabel is not None:
for ax in axes[0]:
ax.set_ylabel(ylabel, **kws)
|
import matplotlib.pyplot as plt
def grid_axes_labels(axes, xlabel=None, ylabel=None, **kws):
plt.setp(axes.flat, xlabel="", ylabel="")
if xlabel is not None:
for ax in axes[-1]:
ax.set_xlabel(xlabel, **kws)
if ylabel is not None:
for ax in axes[0]:
ax.set_ylabel(ylabel, **kws)
|
Use matrix of axes not figure
|
Use matrix of axes not figure
|
Python
|
bsd-3-clause
|
mwaskom/moss,mwaskom/moss
|
02e03748e66ebf516a4a9b24f52563362e6bb895
|
command_line/scale_down_images.py
|
command_line/scale_down_images.py
|
from __future__ import division
def nproc():
from libtbx.introspection import number_of_processors
return number_of_processors(return_value_if_unknown=-1)
def joiner(args):
from dials.util.scale_down_image import scale_down_image
scale_down_image(*args)
def scale_down_images(in_template, out_template, start, end, scale_factor):
from multiprocessing import Pool
jobs = [(in_template % j, out_template % j, scale_factor) for j in
range(start, end + 1)]
pool = Pool(processes=nproc())
result = pool.map_async(joiner, jobs)
result.get()
return result
if __name__ == '__main__':
import sys
if len(sys.argv) != 6:
raise RuntimeError, '%s in_\%04d.cbf out_\%04d.cbf start end scale' % \
sys.argv[0]
in_template = sys.argv[1]
out_template = sys.argv[2]
start = int(sys.argv[3])
end = int(sys.argv[4])
scale_factor = float(sys.argv[5])
scale_down_images(in_template, out_template, start, end, scale_factor)
|
from __future__ import division
def nproc():
from libtbx.introspection import number_of_processors
return number_of_processors(return_value_if_unknown=-1)
def joiner(args):
from dials.util.scale_down_image import scale_down_image
scale_down_image(*args)
print args[1]
def scale_down_images(in_template, out_template, start, end, scale_factor):
from multiprocessing import Pool
jobs = [(in_template % j, out_template % j, scale_factor) for j in
range(start, end + 1)]
pool = Pool(processes=nproc())
result = pool.map_async(joiner, jobs)
result.get()
return result
if __name__ == '__main__':
import sys
if len(sys.argv) != 6:
raise RuntimeError, '%s in_\%04d.cbf out_\%04d.cbf start end scale' % \
sys.argv[0]
in_template = sys.argv[1]
out_template = sys.argv[2]
start = int(sys.argv[3])
end = int(sys.argv[4])
scale_factor = float(sys.argv[5])
scale_down_images(in_template, out_template, start, end, scale_factor)
|
Print out file name after writing
|
Print out file name after writing
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
49ce9aa1bdd3479c31b8aa2e606b1768a444aea2
|
irrigator_pro/farms/templatetags/today_filters.py
|
irrigator_pro/farms/templatetags/today_filters.py
|
from django import template
from datetime import date, datetime, timedelta
register = template.Library()
@register.filter(expects_localtime=True)
def is_today(value):
if isinstance(value, datetime):
value = value.date()
return value == date.today()
@register.filter(expects_localtime=True)
def is_past(value):
if isinstance(value, datetime):
value = value.date()
return value < date.today()
@register.filter(expects_localtime=True)
def is_future(value):
if isinstance(value, datetime):
value = value.date()
return value > date.today()
@register.filter(expects_localtime=True)
def compare_today(value):
if isinstance(value, datetime):
value = value.date()
return value - date.today()
|
from django import template
from datetime import date, datetime, timedelta
register = template.Library()
@register.filter(expects_localtime=True)
def is_today(value):
if isinstance(value, datetime):
value = value.date()
return value == date.today()
@register.filter(expects_localtime=True)
def is_past(value):
if isinstance(value, datetime):
value = value.date()
return value < date.today()
@register.filter(expects_localtime=True)
def is_future(value):
if isinstance(value, datetime):
value = value.date()
return value > date.today()
@register.filter(expects_localtime=True)
def compare_today(value):
if isinstance(value, datetime):
value = value.date()
return value - date.today()
@register.filter(expects_locattime=True)
def today_in_season(season):
start_date = season.season_start_date
end_date = season.season_end_date
return (start_date <= date.today() <= end_date)
|
Add new filter to determine if today is within the time period for a season.
|
Add new filter to determine if today is within the time period for a season.
|
Python
|
mit
|
warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro
|
0e0b96d0d800716102204cfdca7317ccb92cee95
|
pytextql/util.py
|
pytextql/util.py
|
# -*- coding: utf-8 -*-
import csv
import itertools
def grouper(iterable, n):
"""
Slice up `iterable` into iterables of `n` items.
:param iterable: Iterable to splice.
:param n: Number of items per slice.
:returns: iterable of iterables
"""
it = iter(iterable)
while True:
chunk = itertools.islice(it, n)
try:
first = next(chunk)
except StopIteration:
return
yield itertools.chain([first], chunk)
class UnicodeCSVReader(object):
"""
An extremely minimal wrapper around csv.reader to assist in
reading Unicode data.
"""
def __init__(self, *args, **kwargs):
self.encoding = kwargs.pop('encoding', 'utf8')
self.pad_to = kwargs.pop('pad_to', 0)
self.pad_with = kwargs.pop('pad_with', '')
self.reader = csv.reader(*args, **kwargs)
def next(self):
row = self.reader.next()
padding = [self.pad_with] * (self.pad_to - len(row))
return [unicode(c, self.encoding) for c in row] + padding
def __iter__(self):
return self
@property
def dialect(self):
return self.reader.dialect
@property
def line_num(self):
return self.reader.line_num
|
# -*- coding: utf-8 -*-
import csv
import itertools
def grouper(iterable, n):
"""
Slice up `iterable` into iterables of `n` items.
:param iterable: Iterable to splice.
:param n: Number of items per slice.
:returns: iterable of iterables
"""
it = iter(iterable)
while True:
chunk = itertools.islice(it, n)
try:
first = next(chunk)
except StopIteration:
return
yield itertools.chain([first], chunk)
class UnicodeCSVReader(object):
"""
An extremely minimal wrapper around csv.reader to assist in
reading Unicode data.
"""
def __init__(self, *args, **kwargs):
self.encoding = kwargs.pop('encoding', 'utf8')
self.pad_to = kwargs.pop('pad_to', 0)
self.pad_with = kwargs.pop('pad_with', '')
self.reader = csv.reader(*args, **kwargs)
def next(self):
row = self.reader.next()
padding = [self.pad_with] * (self.pad_to - len(row))
return [unicode(c, self.encoding) for c in row] + padding
def __iter__(self):
return self
@property
def dialect(self):
return self.reader.dialect
@property
def line_num(self):
return self.reader.line_num
class UnicodeCSVWriter(object):
def __init__(self, *args, **kwargs):
self.encoding = kwargs.pop('encoding', 'utf8')
self.writer = csv.writer(*args, **kwargs)
def writerow(self, row):
self.writer.writerow([
column.encode(self.encoding) for column in row
])
def writerows(self, rows):
for row in rows:
self.writerow(row)
|
Add a simple UnicodeCSVWriter, probably flawed.
|
Add a simple UnicodeCSVWriter, probably flawed.
|
Python
|
mit
|
TkTech/pytextql
|
c67acb72d5ddea8a1e4fb8a12aa3a6913629e0cb
|
Lib/setup.py
|
Lib/setup.py
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('scipy',parent_package,top_path)
#config.add_subpackage('cluster')
#config.add_subpackage('fftpack')
#config.add_subpackage('integrate')
#config.add_subpackage('interpolate')
#config.add_subpackage('io')
config.add_subpackage('lib')
config.add_subpackage('linalg')
#config.add_subpackage('linsolve')
#config.add_subpackage('maxentropy')
config.add_subpackage('misc')
#config.add_subpackage('montecarlo')
config.add_subpackage('optimize')
#config.add_subpackage('sandbox')
#config.add_subpackage('signal')
#config.add_subpackage('sparse')
config.add_subpackage('special')
config.add_subpackage('stats')
#config.add_subpackage('ndimage')
#config.add_subpackage('weave')
config.make_svn_version_py() # installs __svn_version__.py
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('scipy',parent_package,top_path)
config.add_subpackage('cluster')
config.add_subpackage('fftpack')
config.add_subpackage('integrate')
config.add_subpackage('interpolate')
config.add_subpackage('io')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('linsolve')
config.add_subpackage('maxentropy')
config.add_subpackage('misc')
#config.add_subpackage('montecarlo')
config.add_subpackage('optimize')
config.add_subpackage('sandbox')
config.add_subpackage('signal')
config.add_subpackage('sparse')
config.add_subpackage('special')
config.add_subpackage('stats')
config.add_subpackage('ndimage')
config.add_subpackage('weave')
config.make_svn_version_py() # installs __svn_version__.py
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Fix problem with __all__ variable and update weave docs a bit. Update compiler_cxx too.
|
Fix problem with __all__ variable and update weave docs a bit. Update compiler_cxx too.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@2022 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
Python
|
bsd-3-clause
|
lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor
|
1441654c46e08b7286999b6887e59c56fa238ff7
|
python/piling-up.py
|
python/piling-up.py
|
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
largest_cube, cube_sizes = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
return (None, cube_sizes)
elif(cube_sizes[0] > cube_sizes[-1]):
largest_cube = cube_sizes.popleft()
return (largest_cube, cube_sizes)
else:
largest_cube = cube_sizes.pop()
return (largest_cube, cube_sizes)
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
from collections import deque
def isVerticallyStackable(pile):
vertical_stack = []
while pile:
largest_cube = remove_largest_cube_from_pile(pile)
if vertical_stack == []:
vertical_stack.append(largest_cube)
else:
top_of_stack = vertical_stack[-1]
if(top_of_stack < largest_cube):
return False
vertical_stack.append(largest_cube)
return True
def remove_largest_cube_from_pile(cube_sizes):
if(cube_sizes == []):
return None
elif(cube_sizes[0] > cube_sizes[-1]):
return cube_sizes.popleft()
else:
return cube_sizes.pop()
num_test_cases = int(input())
for i in range(num_test_cases):
num_cubes = int(input())
pile = deque(map(int, input().strip().split(" ")))
if(isVerticallyStackable(pile)):
print("Yes")
else:
print("No")
|
Remove returned pile b/c mutating directly
|
Remove returned pile b/c mutating directly
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
bb229be50e37bb710c32541cec7b159da9508335
|
tests/functional/subcommands/test_subcommands.py
|
tests/functional/subcommands/test_subcommands.py
|
import subprocess
def test_subcommand():
"""
Test that a command from the example project is registered.
"""
output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)
assert b'testcommand' in output
def test_subcommand_group():
"""
Test that a command group is registered.
"""
output = subprocess.check_output(['textx', 'testgroup'],
stderr=subprocess.STDOUT)
assert b'groupcommand1' in output
assert b'groupcommand2' in output
|
import sys
import pytest
import subprocess
if (3, 6) <= sys.version_info < (3, 8):
pytest.skip("Temporary workaround for Travis problems", allow_module_level=True)
def test_subcommand():
"""
Test that a command from the example project is registered.
"""
output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)
assert b'testcommand' in output
def test_subcommand_group():
"""
Test that a command group is registered.
"""
output = subprocess.check_output(['textx', 'testgroup'],
stderr=subprocess.STDOUT)
assert b'groupcommand1' in output
assert b'groupcommand2' in output
|
Add workaround for Travis CI problems
|
Add workaround for Travis CI problems
|
Python
|
mit
|
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
|
b0e5dff69b9e40b916ad8a6655624de7fa85d247
|
chmvh_website/team/migrations/0002_auto_20161024_2338.py
|
chmvh_website/team/migrations/0002_auto_20161024_2338.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 23:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('team', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='teammember',
options={'ordering': ('order',)},
),
migrations.AddField(
model_name='teammember',
name='order',
field=models.PositiveSmallIntegerField(default=0),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 23:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('team', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='teammember',
name='order',
field=models.PositiveSmallIntegerField(default=0),
),
migrations.AlterModelOptions(
name='teammember',
options={'ordering': ('order',)},
),
]
|
Change order of migration operations.
|
Change order of migration operations.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
85fce5f5ab57b6c2144c92ec0d9b185740d7dc91
|
pyinform/__init__.py
|
pyinform/__init__.py
|
# Copyright 2016 ELIFE. All rights reserved.
# Use of this source code is governed by a MIT
# license that can be found in the LICENSE file.
from ctypes import CDLL
def get_libpath():
"""
Get the library path of the the distributed inform binary.
"""
import os
import re
from os.path import dirname, abspath, realpath, join
libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$")
root = dirname(abspath(realpath(__file__)))
libdir = None
major, minor, revision = 0, 0, 0
for _, dirnames, _ in os.walk(root):
for dirname in dirnames:
match = libre.match(dirname)
if match:
a, b, c = tuple(int(x) for x in match.group(1,2,3))
if (major, minor, revision) < (a,b,c):
major, minor, revision = a, b, c
libdir = join(root, match.group())
break
break
if libdir is None:
raise ImportError("cannot find libinform")
else:
return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision)
_inform = CDLL(get_libpath())
|
# Copyright 2016 ELIFE. All rights reserved.
# Use of this source code is governed by a MIT
# license that can be found in the LICENSE file.
from ctypes import CDLL
def get_libpath():
"""
Get the library path of the the distributed inform binary.
"""
import os
import re
from os.path import dirname, abspath, realpath, join
from platform import system
libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$")
root = dirname(abspath(realpath(__file__)))
libdir = None
major, minor, revision = 0, 0, 0
for _, dirnames, _ in os.walk(root):
for dirname in dirnames:
match = libre.match(dirname)
if match:
a, b, c = tuple(int(x) for x in match.group(1,2,3))
if (major, minor, revision) < (a,b,c):
major, minor, revision = a, b, c
libdir = join(root, match.group())
break
break
if libdir is None:
raise ImportError("cannot find libinform")
if system() is 'Windows':
return "{}/lib/inform.dll".format(libdir)
else:
return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision)
_inform = CDLL(get_libpath())
|
Resolve the library on windows
|
Resolve the library on windows
|
Python
|
mit
|
ELIFE-ASU/PyInform
|
4551732c93b248e669b63d8ea6a9705c52b69dc3
|
projects/urls.py
|
projects/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
url(r'^archive/$', 'projects_archive', name='projects_archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
url(r'^edit_status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
url(r'^archive/$', 'projects_archive', name='projects_archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
Add url for project_status_edit option
|
Add url for project_status_edit option
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
1cc68fee10975f85ca5a2e2a63b972314a1b62d9
|
tests/test_redis_storage.py
|
tests/test_redis_storage.py
|
import unittest
import datetime
import hiro
import redis
from sifr.span import Minute, Day
from sifr.storage import MemoryStorage, RedisStorage
class RedisStorageTests(unittest.TestCase):
def setUp(self):
self.redis = redis.Redis()
self.redis.flushall()
def test_incr_simple_minute(self):
span = Minute(datetime.datetime.now(), ["minute_span"])
storage = RedisStorage(self.redis)
storage.incr(span)
storage.incr(span)
self.assertEqual(storage.get(span), 2)
def test_incr_unique_minute(self):
red = redis.Redis()
span = Minute(datetime.datetime.now(), ["minute_span"])
storage = RedisStorage(red)
storage.incr_unique(span, "1")
storage.incr_unique(span, "1")
storage.incr_unique(span, "2")
self.assertEqual(storage.get_unique(span), 2)
def test_tracker_minute(self):
span = Minute(datetime.datetime.now(), ["minute_span"])
storage = RedisStorage(self.redis)
storage.track(span, "1", 3)
storage.track(span, "1", 3)
storage.track(span, "2", 3)
storage.track(span, "3", 3)
self.assertEqual(storage.enumerate(span), set(["1", "2", "3"]))
|
import unittest
import datetime
import hiro
import redis
from sifr.span import Minute, Day
from sifr.storage import MemoryStorage, RedisStorage
class RedisStorageTests(unittest.TestCase):
def setUp(self):
self.redis = redis.Redis()
self.redis.flushall()
def test_incr_simple_minute(self):
span = Minute(datetime.datetime.now(), ["minute_span"])
storage = RedisStorage(self.redis)
storage.incr(span)
storage.incr(span)
self.assertEqual(storage.get(span), 2)
def test_incr_unique_minute(self):
red = redis.Redis()
span = Minute(datetime.datetime.now(), ["minute_span"])
storage = RedisStorage(red)
storage.incr_unique(span, "1")
storage.incr_unique(span, "1")
storage.incr_unique(span, "2")
self.assertEqual(storage.get_unique(span), 2)
def test_tracker_minute(self):
span = Minute(datetime.datetime.now(), ["minute_span"])
storage = RedisStorage(self.redis)
storage.track(span, "1")
storage.track(span, "1")
storage.track(span, "2")
storage.track(span, "3")
self.assertEqual(storage.enumerate(span), set(["1", "2", "3"]))
|
Remove old extra argument from track tests
|
Remove old extra argument from track tests
|
Python
|
mit
|
alisaifee/sifr,alisaifee/sifr
|
9f1783ac694d91b287dcb5840f54fb3df746a963
|
bot/action/core/action.py
|
bot/action/core/action.py
|
from bot.api.api import Api
from bot.multithreading.scheduler import SchedulerApi
from bot.storage import Config, State, Cache
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache, scheduler: SchedulerApi):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.scheduler = scheduler
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
super().setup(*args)
self.for_each(lambda action: action.setup(*args))
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
|
from bot.api.api import Api
from bot.multithreading.scheduler import SchedulerApi
from bot.storage import Config, State, Cache
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache, scheduler: SchedulerApi):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.scheduler = scheduler
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
def pre_shutdown(self):
pass
def shutdown(self):
self.pre_shutdown()
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
super().setup(*args)
self.for_each(lambda action: action.setup(*args))
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def shutdown(self):
self.for_each(lambda action: action.shutdown())
super().shutdown()
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
|
Add shutdown callback support to Action
|
Add shutdown callback support to Action
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
2bcc941b015c443c64f08a13012e8caf70028754
|
ideascube/search/migrations/0001_initial.py
|
ideascube/search/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import ideascube.search.models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Search',
fields=[
('rowid', models.IntegerField(serialize=False, primary_key=True)),
('model', models.CharField(max_length=64)),
('model_id', models.IntegerField()),
('public', models.BooleanField(default=True)),
('text', ideascube.search.models.SearchField()),
],
options={
'db_table': 'idx',
'managed': False,
},
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from ideascube.search.utils import create_index_table
class CreateSearchModel(migrations.CreateModel):
def database_forwards(self, *_):
# Don't run the parent method, we create the table our own way
create_index_table()
class Migration(migrations.Migration):
dependencies = [
]
operations = [
CreateSearchModel(
name='Search',
fields=[],
options={
'db_table': 'idx',
'managed': False,
},
),
]
|
Fix the initial search migration
|
Fix the initial search migration
There is no point in creating the model in this way, that's just not how
it's used: instead we want to use the FTS4 extension from SQLite.
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
fc818ccd0d83ff6b37b38e5e9d03abcae408b503
|
froide/problem/templatetags/problemreport_tags.py
|
froide/problem/templatetags/problemreport_tags.py
|
from collections import defaultdict
from django import template
from ..models import ProblemReport
from ..forms import ProblemReportForm
register = template.Library()
@register.inclusion_tag('problem/message_toolbar_item.html')
def render_problem_button(message):
if not hasattr(message, 'problemreports'):
# Get all problem reports for all messages
request = message.request
reports = ProblemReport.objects.filter(message__in=request.messages)
message_reports = defaultdict(list)
for report in reports:
message_reports[report.message_id].append(report)
for message in request.messages:
message.problemreports = message_reports[message.id]
message.problemreports_count = len(message.problemreports)
message.problemreports_unresolved_count = len([
r for r in message.problemreports if not r.resolved
])
message.problemreports_form = ProblemReportForm(message=message)
return {
'message': message
}
|
from collections import defaultdict
from django import template
from ..models import ProblemReport
from ..forms import ProblemReportForm
register = template.Library()
@register.inclusion_tag('problem/message_toolbar_item.html')
def render_problem_button(message):
if not hasattr(message, 'problemreports'):
# Get all problem reports for all messages
request = message.request
reports = ProblemReport.objects.filter(message__in=request.messages)
message_reports = defaultdict(list)
for report in reports:
message_reports[report.message_id].append(report)
for mes in request.messages:
mes.problemreports = message_reports[mes.id]
mes.problemreports_count = len(mes.problemreports)
mes.problemreports_unresolved_count = len([
r for r in mes.problemreports if not r.resolved
])
mes.problemreports_form = ProblemReportForm(message=mes)
return {
'message': message
}
|
Fix overriding variable in problem report tag
|
Fix overriding variable in problem report tag
|
Python
|
mit
|
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide
|
af5eae0b477c73c1c8d1bbce646d94858d157142
|
whip/web.py
|
whip/web.py
|
#!/usr/bin/env python
import socket
from flask import Flask, abort, make_response, request
from whip.db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
key = socket.inet_aton(ip)
except socket.error:
abort(400)
dt = request.args.get('datetime')
if dt:
dt = dt.encode('ascii')
else:
dt = None # account for empty parameter value
info_as_json = db.lookup(key, dt)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
#!/usr/bin/env python
import socket
from flask import Flask, abort, make_response, request
from whip.db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
key = socket.inet_aton(ip)
except socket.error:
abort(400)
dt = request.args.get('datetime')
if dt:
dt = dt.encode('ascii')
else:
dt = None # account for empty parameter value
info_as_json = db.lookup(key, dt)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
|
Return empty responses (not HTTP 404) in REST API for missing data
|
Return empty responses (not HTTP 404) in REST API for missing data
|
Python
|
bsd-3-clause
|
wbolster/whip
|
a5b73a7ded0e277662308e0b4d38ac0429c404fb
|
django_facebook/models.py
|
django_facebook/models.py
|
from django.db import models
class FacebookProfileModel(models.Model):
'''
Abstract class to add to your profile model.
NOTE: If you don't use this this abstract class, make sure you copy/paste
the fields in.
'''
about_me = models.TextField(blank=True, null=True)
facebook_id = models.IntegerField(blank=True, null=True)
facebook_name = models.CharField(max_length=255, blank=True, null=True)
facebook_profile_url = models.TextField(blank=True, null=True)
website_url = models.TextField(blank=True, null=True)
blog_url = models.TextField(blank=True, null=True)
image = models.ImageField(blank=True, null=True, upload_to='profile_images')
date_of_birth = models.DateField(blank=True, null=True)
class Meta:
abstract = True
|
from django.db import models
from django.contrib.auth.models import User
class FacebookProfileModel(models.Model):
'''
Abstract class to add to your profile model.
NOTE: If you don't use this this abstract class, make sure you copy/paste
the fields in.
'''
user = models.OneToOneField(User)
about_me = models.TextField(blank=True, null=True)
facebook_id = models.IntegerField(blank=True, null=True)
facebook_name = models.CharField(max_length=255, blank=True, null=True)
facebook_profile_url = models.TextField(blank=True, null=True)
website_url = models.TextField(blank=True, null=True)
blog_url = models.TextField(blank=True, null=True)
image = models.ImageField(blank=True, null=True, upload_to='profile_images')
date_of_birth = models.DateField(blank=True, null=True)
def __unicode__(self):
return self.user.__unicode__()
class Meta:
abstract = True
|
Add reference to user model and __unicode__() method to FacebookProfileModel
|
Add reference to user model and __unicode__() method to FacebookProfileModel
|
Python
|
bsd-3-clause
|
pjdelport/Django-facebook,QLGu/Django-facebook,Shekharrajak/Django-facebook,VishvajitP/Django-facebook,troygrosfield/Django-facebook,QLGu/Django-facebook,cyrixhero/Django-facebook,rafaelgontijo/Django-facebook-fork,fyndsi/Django-facebook,jcpyun/Django-facebook,abendleiter/Django-facebook,danosaure/Django-facebook,cyrixhero/Django-facebook,andriisoldatenko/Django-facebook,Fiedzia/Django-facebook,sitsbeyou/Django-facebook,tuxos/Django-facebook,takeshineshiro/Django-facebook,troygrosfield/Django-facebook,takeshineshiro/Django-facebook,abhijo89/Django-facebook,selwin/Django-facebook,fivejjs/Django-facebook,ganescoo/Django-facebook,selwin/Django-facebook,PeterWangPo/Django-facebook,fyndsi/Django-facebook,Shekharrajak/Django-facebook,abendleiter/Django-facebook,abhijo89/Django-facebook,selwin/Django-facebook,javipalanca/Django-facebook,rafaelgontijo/Django-facebook-fork,pjdelport/Django-facebook,sitsbeyou/Django-facebook,Fiedzia/Django-facebook,Shekharrajak/Django-facebook,PeterWangPo/Django-facebook,ganescoo/Django-facebook,javipalanca/Django-facebook,andriisoldatenko/Django-facebook,sitsbeyou/Django-facebook,christer155/Django-facebook,troygrosfield/Django-facebook,VishvajitP/Django-facebook,tuxos/Django-facebook,jcpyun/Django-facebook,javipalanca/Django-facebook,VishvajitP/Django-facebook,tuxos/Django-facebook,fyndsi/Django-facebook,danosaure/Django-facebook,christer155/Django-facebook,andriisoldatenko/Django-facebook,cyrixhero/Django-facebook,jcpyun/Django-facebook,pjdelport/Django-facebook,rafaelgontijo/Django-facebook-fork,ganescoo/Django-facebook,christer155/Django-facebook,fivejjs/Django-facebook,QLGu/Django-facebook,danosaure/Django-facebook,takeshineshiro/Django-facebook,abendleiter/Django-facebook,andriisoldatenko/Django-facebook,PeterWangPo/Django-facebook,abhijo89/Django-facebook,Fiedzia/Django-facebook,fivejjs/Django-facebook
|
3122736e0eccd4d4b1f003faa1db6ec05710883f
|
addstr.py
|
addstr.py
|
#!/usr/bin/python
import argparse
from dx.dex import Dex
from sha1 import update_signature
from adler32 import update_checksum
def main():
parser = argparse.ArgumentParser(description="Parse and reconstruct dex file")
parser.add_argument('target',help='Target DEX file')
parser.add_argument('string',help='String to be added')
args = parser.parse_args()
dex = Dex(args.target)
dex.add_string(args.string)
dex.save("out2.dex")
update_signature("out2.dex")
update_checksum("out2.dex")
print "Done"
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import argparse
from dx.dex import Dex
from dx.hash import update_signature, update_checksum
def main():
parser = argparse.ArgumentParser(description="Parse and reconstruct dex file")
parser.add_argument('target',help='Target DEX file')
parser.add_argument('string',help='String to be added')
args = parser.parse_args()
dex = Dex(args.target)
dex.add_string(args.string)
dex.save("out2.dex")
update_signature("out2.dex")
update_checksum("out2.dex")
print "Done"
if __name__ == '__main__':
main()
|
Fix attempted import from non-existent module.
|
Fix attempted import from non-existent module.
|
Python
|
bsd-3-clause
|
strazzere/dexterity,strazzere/dexterity,rchiossi/dexterity,strazzere/dexterity,rchiossi/dexterity,rchiossi/dexterity
|
edd50431f9c99bcbc765cc85786ead60ba8ba6e4
|
admin/base/migrations/0002_groups.py
|
admin/base/migrations/0002_groups.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
group, created = Group.objects.get_or_create(name='nodes_and_users')
if created:
logger.info('nodes_and_users group created')
try:
group = Group.objects.get(name='prereg_group')
group.name = 'prereg'
group.save()
logger.info('prereg_group renamed to prereg')
except Group.DoesNotExist:
group, created = Group.objects.get_or_create(name='prereg')
if created:
logger.info('prereg group created')
class Migration(migrations.Migration):
dependencies = [
('base', '0001_groups'),
]
operations = [
migrations.RunPython(add_groups),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
group, created = Group.objects.get_or_create(name='nodes_and_users')
if created:
logger.info('nodes_and_users group created')
try:
group = Group.objects.get(name='prereg_group')
group.name = 'prereg'
group.save()
logger.info('prereg_group renamed to prereg')
except Group.DoesNotExist:
group, created = Group.objects.get_or_create(name='prereg')
if created:
logger.info('prereg group created')
def remove_groups(*args):
Group.objects.filter(name='nodes_and_users').delete()
group = Group.objects.get(name='prereg')
group.name = 'prereg_group'
group.save()
class Migration(migrations.Migration):
dependencies = [
('base', '0001_groups'),
]
operations = [
migrations.RunPython(add_groups, remove_groups),
]
|
Add reverse migration for new groups
|
Add reverse migration for new groups
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,binoculars/osf.io,acshi/osf.io,chrisseto/osf.io,acshi/osf.io,crcresearch/osf.io,aaxelb/osf.io,erinspace/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,erinspace/osf.io,baylee-d/osf.io,mfraezz/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,saradbowman/osf.io,acshi/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,chennan47/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,felliott/osf.io,hmoco/osf.io,cwisecarver/osf.io,pattisdr/osf.io,chrisseto/osf.io,adlius/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,adlius/osf.io,icereval/osf.io,baylee-d/osf.io,cslzchen/osf.io,felliott/osf.io,caneruguz/osf.io,TomBaxter/osf.io,caneruguz/osf.io,baylee-d/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,caseyrollins/osf.io,hmoco/osf.io,felliott/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,crcresearch/osf.io,mattclark/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,hmoco/osf.io,saradbowman/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,icereval/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,laurenrevere/osf.io,sloria/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,leb2dg/osf.io,acshi/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,mattclark/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,felliott/osf.io,icereval/osf.io,crcresearch/osf.io,mattclark/osf.io,acshi/osf.io,sloria/osf.io,caneruguz/osf.io,mfraezz/osf.io,binoculars/osf.io,adlius/osf.io,aaxelb/osf.io,caneruguz/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,erinspace/osf.io,hmoco/osf.io,chrisseto/osf.io,Nesiehr/osf.io,chennan47/osf.io,adlius/osf.io
|
eef28c81f19d7e5eb72635cc2e6bf3b74331c743
|
quilt/patch.py
|
quilt/patch.py
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from quilt.utils import Process
class Patch(object):
def __init__(self, cwd, patch_file, backup=False, prefix=None):
cmd = ["patch"]
if backup:
cmd.append("--backup")
if prefix:
cmd.append("--prefix")
cmd.append(prefix)
cmd.append("-i")
cmd.append(patch_file)
Process(cmd).run(cwd=cwd)
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import os
from quilt.utils import Process
class Patch(object):
def __init__(self, cwd, patch_file, backup=False, prefix=None):
cmd = ["patch"]
if backup:
cmd.append("--backup")
if prefix:
cmd.append("--prefix")
if not prefix[-1] == os.sep:
prefix += os.sep
cmd.append(prefix)
cmd.append("-i")
cmd.append(patch_file)
Process(cmd).run(cwd=cwd)
|
Patch parameter --prefix does need a path seperator
|
Patch parameter --prefix does need a path seperator
The --prefix parameter of the patch command needs a path seperator at
the end to store the backup in a directory.
|
Python
|
mit
|
vadmium/python-quilt,bjoernricks/python-quilt
|
a37ac8daad8eee1f044d3e19a80a172138460ec3
|
google_analytics/models.py
|
google_analytics/models.py
|
from django.db import models
from django.conf import settings
from django.contrib.sites.admin import SiteAdmin
from django.contrib.sites.models import Site
from django.contrib import admin
if getattr(settings, 'GOOGLE_ANALYTICS_MODEL', False):
class Analytic(models.Model):
site = models.ForeignKey(Site, unique=True)
analytics_code = models.CharField(blank=True, max_length=100)
def __unicode__(self):
return u"%s" % (self.analytics_code)
class AnalyticAdmin(admin.ModelAdmin):
list_display = ('site', 'analytics_code',)
admin.site.register(Analytic, AnalyticAdmin)
|
from django.contrib import admin
from django.contrib.sites.models import Site
from django.db import models
class Analytic(models.Model):
site = models.ForeignKey(Site, unique=True)
analytics_code = models.CharField(blank=True, max_length=100)
|
Fix django version problem with new menu options in admin app.
|
Fix django version problem with new menu options in admin app.
|
Python
|
agpl-3.0
|
OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server
|
2b99108a817a642c86be06a14ac8d71cdc339555
|
scripts/speak.py
|
scripts/speak.py
|
#!/usr/bin/env python
import rospy
from sound_play.msg import SoundRequest
from sound_play.libsoundplay import SoundClient
from std_msgs.msg import String
class ChatbotSpeaker:
def __init__(self):
rospy.init_node('chatbot_speaker')
self._client = SoundClient()
rospy.Subscriber('chatbot_responses', String, self._response_callback)
rospy.spin()
def _response_callback(self, data):
self._client.say(data.data)
def main():
speaker = ChatbotSpeaker()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import os
import rospy
from sound_play.msg import SoundRequest
from sound_play.libsoundplay import SoundClient
from std_msgs.msg import String
import urllib
tts_cmd = (
'wget -q -U "Mozilla/5.0" -O -
"http://translate.google.com/translate_tts?tl=en-uk&q={}" > /tmp/speech.mp3'
)
sox_cmd = 'sox /tmp/speech.mp3 /tmp/speech.wav'
class ChatbotSpeaker:
def __init__(self):
rospy.init_node('chatbot_speaker')
self._client = SoundClient()
rospy.Subscriber('chatbot_responses', String, self._response_callback)
rospy.spin()
def _response_callback(self, data):
query = urllib.quote(data.data)
os.system(tts_cmd.format(query))
os.system(sox_cmd)
self._client.playWave('/tmp/speech.wav')
def main():
speaker = ChatbotSpeaker()
if __name__ == '__main__':
main()
|
Use Google Translate API to get a female TTS
|
Use Google Translate API to get a female TTS
|
Python
|
mit
|
jstnhuang/chatbot
|
11be4b77e84c721ef8de583b0dcf1035367d4b25
|
libtmux/__about__.py
|
libtmux/__about__.py
|
__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.0'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016-2018 Tony Narlock'
|
__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.0'
__description__ = 'scripting library / orm for tmux'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.python.org/pypi/libtmux'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016-2018 Tony Narlock'
|
Add __pypi__ url to metadata
|
Add __pypi__ url to metadata
|
Python
|
bsd-3-clause
|
tony/libtmux
|
f3fef8dab576ef5d7a4120a4041ade326868f0ca
|
flexget/plugins/ui/execute.py
|
flexget/plugins/ui/execute.py
|
import logging
from flask import render_template, request, Response, redirect, flash
from flask import Module, escape
from flexget.webui import register_plugin, manager, BufferQueue
from Queue import Empty
from flask.helpers import jsonify
execute = Module(__name__, url_prefix='/execute')
log = logging.getLogger('ui.execute')
bufferqueue = BufferQueue()
@execute.route('/', methods=['POST', 'GET'])
def index():
context = {'help': manager.parser.get_help()}
if request.method == 'POST':
options = manager.parser.parse_args(request.form.get('options', ''))[0]
if manager.parser.error_msg:
flash(escape(manager.parser.error_msg), 'error')
context['options'] = request.form['options']
else:
flash('Manual execution started.', 'success')
from flexget.webui import executor
executor.execute(options=options, output=bufferqueue)
return render_template('execute.html', **context)
@execute.route('/progress.json')
def progress():
'''
Gives takes messages from the queue and exports them to JSON.
'''
result = {'items': []}
try:
while 1:
item = bufferqueue.get_nowait()
if item != '\n':
result['items'].append(item)
except Empty:
pass
return jsonify(result)
register_plugin(execute, menu='Execute')
|
import logging
from flask import render_template, request, Response, redirect, flash
from flask import Module, escape
from flexget.webui import register_plugin, manager, BufferQueue
from Queue import Empty
from flask.helpers import jsonify
execute = Module(__name__, url_prefix='/execute')
log = logging.getLogger('ui.execute')
bufferqueue = BufferQueue()
@execute.route('/', methods=['POST', 'GET'])
def index():
context = {'help': manager.parser.get_help()}
if request.method == 'POST':
options = manager.parser.parse_args(request.form.get('options', ''))[0]
if manager.parser.error_msg:
flash(escape(manager.parser.error_msg), 'error')
context['options'] = request.form['options']
else:
flash('Manual execution started.', 'success')
from flexget.webui import executor
executor.execute(options=options, output=bufferqueue)
return render_template('execute.html', **context)
@execute.route('/progress.json')
def progress():
'''
Gives takes messages from the queue and exports them to JSON.
'''
result = {'items': []}
try:
while 1:
item = bufferqueue.get_nowait()
if item != '\n':
result['items'].append(item)
bufferqueue.task_done()
except Empty:
pass
return jsonify(result)
register_plugin(execute, menu='Execute')
|
Fix an issue with repeated messages in json execution output provider.
|
Fix an issue with repeated messages in json execution output provider.
git-svn-id: 555d7295f8287ebc42f8316c6775e40d702c4756@1726 3942dd89-8c5d-46d7-aeed-044bccf3e60c
|
Python
|
mit
|
oxc/Flexget,tsnoam/Flexget,offbyone/Flexget,malkavi/Flexget,ibrahimkarahan/Flexget,ratoaq2/Flexget,asm0dey/Flexget,sean797/Flexget,OmgOhnoes/Flexget,ibrahimkarahan/Flexget,drwyrm/Flexget,jawilson/Flexget,thalamus/Flexget,tarzasai/Flexget,tvcsantos/Flexget,tarzasai/Flexget,Danfocus/Flexget,drwyrm/Flexget,xfouloux/Flexget,xfouloux/Flexget,tsnoam/Flexget,Pretagonist/Flexget,crawln45/Flexget,tobinjt/Flexget,tsnoam/Flexget,tarzasai/Flexget,antivirtel/Flexget,asm0dey/Flexget,patsissons/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,thalamus/Flexget,jacobmetrick/Flexget,jawilson/Flexget,lildadou/Flexget,X-dark/Flexget,Flexget/Flexget,xfouloux/Flexget,vfrc2/Flexget,v17al/Flexget,qvazzler/Flexget,oxc/Flexget,tobinjt/Flexget,Flexget/Flexget,oxc/Flexget,malkavi/Flexget,drwyrm/Flexget,tobinjt/Flexget,spencerjanssen/Flexget,patsissons/Flexget,grrr2/Flexget,antivirtel/Flexget,crawln45/Flexget,vfrc2/Flexget,voriux/Flexget,tvcsantos/Flexget,cvium/Flexget,spencerjanssen/Flexget,cvium/Flexget,Pretagonist/Flexget,sean797/Flexget,ZefQ/Flexget,dsemi/Flexget,thalamus/Flexget,poulpito/Flexget,spencerjanssen/Flexget,sean797/Flexget,camon/Flexget,patsissons/Flexget,jacobmetrick/Flexget,offbyone/Flexget,offbyone/Flexget,voriux/Flexget,LynxyssCZ/Flexget,Pretagonist/Flexget,ibrahimkarahan/Flexget,JorisDeRieck/Flexget,ratoaq2/Flexget,ianstalk/Flexget,antivirtel/Flexget,Danfocus/Flexget,grrr2/Flexget,ZefQ/Flexget,ianstalk/Flexget,v17al/Flexget,Flexget/Flexget,lildadou/Flexget,ianstalk/Flexget,malkavi/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,OmgOhnoes/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,qk4l/Flexget,Flexget/Flexget,qk4l/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,ZefQ/Flexget,crawln45/Flexget,qvazzler/Flexget,asm0dey/Flexget,ratoaq2/Flexget,dsemi/Flexget,camon/Flexget,crawln45/Flexget,dsemi/Flexget,X-dark/Flexget,gazpachoking/Flexget,tobinjt/Flexget,cvium/Flexget,gazpachoking/Flexget,vfrc2/Flexget,JorisDeRieck/Flexget,malkavi/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,qvazzler/Flexget,Danfocus/Flexget,poulpito/Flexget,jawilson/Flexget,X-dark/Flexget,LynxyssCZ/Flexget,poulpito/Flexget,grrr2/Flexget,v17al/Flexget
|
9346b34c68fc08dfba0002e907d73829000068cd
|
labmanager/shell.py
|
labmanager/shell.py
|
import cmd
class LMShell(cmd.Cmd):
def __init__(self, lmapi, completekey='tab', stdin=None, stdout=None):
cmd.Cmd.__init__(self, completekey, stdin, stdout)
self._lmapi = lmapi
def do_list(self, line):
configs = self._lmapi.list_library_configurations()
print configs
def do_EOF(self, line):
return True
def main():
from labmanager import api
from labmanager import config
import argparse
import getpass
parser = argparse.ArgumentParser()
parser.add_argument('--hostname')
parser.add_argument('--username')
parser.add_argument('--organization')
parser.add_argument('--workspace', default='Main')
parser.add_argument('--timeout', default=None)
parser.add_argument('--section', default='default')
args = parser.parse_args()
api_config = config.load_config(parser, args)
if api_config.password is None:
api_config.password = getpass.getpass('password: ')
client = api.create_soap_client(api_config)
labmanager_api = api.LabManager(client)
sh = LMShell(labmanager_api)
sh.cmdloop()
|
import cmd
class LMShell(cmd.Cmd):
def __init__(self, lmapi, completekey='tab', stdin=None, stdout=None):
cmd.Cmd.__init__(self, completekey, stdin, stdout)
self._lmapi = lmapi
def do_list(self, line):
configs = self._lmapi.list_library_configurations()
print configs
def do_EOF(self, line):
return True
def do_quit(self, line):
return True
def main():
from labmanager import api
from labmanager import config
import argparse
import getpass
parser = argparse.ArgumentParser()
parser.add_argument('--hostname')
parser.add_argument('--username')
parser.add_argument('--organization')
parser.add_argument('--workspace', default='Main')
parser.add_argument('--timeout', default=None)
parser.add_argument('--section', default='default')
args = parser.parse_args()
api_config = config.load_config(parser, args)
if api_config.password is None:
api_config.password = getpass.getpass('password: ')
client = api.create_soap_client(api_config)
labmanager_api = api.LabManager(client)
sh = LMShell(labmanager_api)
sh.cmdloop()
|
Add 'quit' command to lmsh
|
Add 'quit' command to lmsh
|
Python
|
bsd-3-clause
|
jamesls/labmanager-shell
|
713fcc3f86b4be4d35f0c5ba081a4f786648320a
|
vim/pythonx/elixir_helpers.py
|
vim/pythonx/elixir_helpers.py
|
"""
Elixir-related Ultisnips snippet helper functions.
NOTE: Changes to this file require restarting Vim!
"""
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
if tabstop.startswith("("):
return ")"
if tabstop.startswith("{"):
return "}"
if tabstop.startswith("["):
return "]"
if tabstop.startswith("\""):
return "\""
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
"""
Elixir-related Ultisnips snippet helper functions.
NOTE: Changes to this file require restarting Vim!
"""
import re
_DASHES_AND_UNDERSCORES = re.compile("[-_]")
_MODULE_FILEPATH = re.compile(r"lib\/([^\/]+)\/([\w+\/]+)*\/([^\/]+).ex")
_CLOSING_CHARACTERS = {
"(": ")",
"{": "}",
"[": "]",
"\"": "\""
}
def closing_character(tabstop):
"""
Return closing character for a tabstop containing an opening character.
"""
if tabstop:
return _CLOSING_CHARACTERS.get(tabstop[0], "")
return ""
def module_path_match(path, regex=_MODULE_FILEPATH):
"""
Return match data for an Elixir module from a file path.
"""
return re.search(regex, path)
def outer_module_name(path):
"""
Return name for an outer Elixir module from a file path.
"""
outer_module_path = module_path_match(path).group(1)
return to_module_name(outer_module_path)
def to_module_name(string):
"""
Convert string into an Elixir module name
"""
return (
re.sub(_DASHES_AND_UNDERSCORES, " ", string)
.title()
.replace(" ", "")
.replace(".ex", "")
)
|
Refactor python if statement into dictionary
|
Refactor python if statement into dictionary
|
Python
|
mit
|
paulfioravanti/dotfiles,paulfioravanti/dotfiles,paulfioravanti/dotfiles
|
4e3f10cc417f28badc34646cc89fcd9d0307b4be
|
utility/lambdas/s3-static-site-deploy/lambda_function.py
|
utility/lambdas/s3-static-site-deploy/lambda_function.py
|
# import boto3
def lambda_handler(event, context):
pass
|
# Invoked by: CloudFormation
# Returns: A `Data` object to a pre-signed URL
#
# Deploys the contents of a versioned zip file object from one bucket in S3
# to a another bucket
import sys
import boto3
from botocore.client import Config
import io
import zipfile
import os
import urllib.request
import json
import traceback
s3 = boto3.client('s3', config=Config(signature_version='s3v4'))
STATUS_SUCCESS = 'SUCCESS'
STATUS_FAILED = 'FAILED'
def send_response(event, context, res_status, res_reason='Done', res_data={}):
res_data = json.dumps({
'Status': res_status,
'Reason': res_reason,
'PhysicalResourceId': context.log_stream_name,
'StackId': event['StackId'],
'RequestId': event['RequestId'],
'LogicalResourceId': event['LogicalResourceId'],
'Data': res_data
}).encode()
headers = {'content-type': ''}
url = event['ResponseURL']
req = urllib.request.Request(url, data=res_data, method='PUT', headers=headers)
urllib.request.urlopen(req)
def lambda_handler(event, context):
try:
print(event)
if event['RequestType'] == 'Create' or event['RequestType'] == 'Update':
# The location of the built static site archive file in S3
bucket = event['ResourceProperties']['StaticSiteArchiveS3Bucket']
key = event['ResourceProperties']['StaticSiteArchiveS3Object']
version = event['ResourceProperties']['StaticSiteArchiveS3ObjectVersion']
# Get the archive object
s3_obj = s3.get_object(Bucket=bucket, Key=key, VersionId=version)
unzip_dir = f"/tmp/unzip-{event['RequestId']}"
# Unzip the archive, to disk
with zipfile.ZipFile(io.BytesIO(s3_obj['Body'].read()), 'r') as zip:
zip.extractall(unzip_dir)
# The bucket to deploy the static to
deploy_bucket = event['ResourceProperties']['StaticSiteS3DeployBucket']
# Upload everything from the unzipped archive
for root, dirs, files in os.walk(unzip_dir):
for filename in files:
local_path = os.path.join(root, filename)
s3_key = os.path.relpath(local_path, unzip_dir)
print(f"Uploading {s3_key} to {deploy_bucket}")
s3.upload_file(local_path, deploy_bucket, s3_key)
send_response(event, context, STATUS_SUCCESS)
else:
send_response(event, context, STATUS_SUCCESS)
except Exception as e:
print('Function failed due to exception.')
print(e)
traceback.print_exc()
send_response(event, context, STATUS_FAILED, res_reason=str(e))
|
Add S3 static deploy custom resource Lambda function
|
Add S3 static deploy custom resource Lambda function
|
Python
|
mit
|
PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure
|
aed82bc0995cf4175c0ab8c521dfc8e89d776a7e
|
Mac/scripts/zappycfiles.py
|
Mac/scripts/zappycfiles.py
|
# Zap .pyc files
import os
import sys
doit = 1
def main():
if os.name == 'mac':
import macfs
fss, ok = macfs.GetDirectory('Directory to zap pyc files in')
if not ok:
sys.exit(0)
dir = fss.as_pathname()
zappyc(dir)
else:
if not sys.argv[1:]:
print 'Usage: zappyc dir ...'
sys.exit(1)
for dir in sys.argv[1:]:
zappyc(dir)
def zappyc(dir):
os.path.walk(dir, walker, None)
def walker(dummy, top, names):
for name in names:
if name[-4:] == '.pyc':
path = os.path.join(top, name)
print 'Zapping', path
if doit:
os.unlink(path)
if __name__ == '__main__':
main()
|
#!/usr/local/bin/python
"""Recursively zap all .pyc files"""
import os
import sys
# set doit true to actually delete files
# set doit false to just print what would be deleted
doit = 1
def main():
if not sys.argv[1:]:
if os.name == 'mac':
import macfs
fss, ok = macfs.GetDirectory('Directory to zap pyc files in')
if not ok:
sys.exit(0)
dir = fss.as_pathname()
zappyc(dir)
else:
print 'Usage: zappyc dir ...'
sys.exit(1)
for dir in sys.argv[1:]:
zappyc(dir)
def zappyc(dir):
os.path.walk(dir, walker, None)
def walker(dummy, top, names):
for name in names:
if name[-4:] == '.pyc':
path = os.path.join(top, name)
print 'Zapping', path
if doit:
os.unlink(path)
if __name__ == '__main__':
main()
|
Patch by Russel Owen: if we have command line arguments zap pyc files in the directories given.
|
Patch by Russel Owen: if we have command line arguments zap pyc files
in the directories given.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
db04d6884c68b1f673a785866155427af86fad65
|
apps/predict/templatetags/jsonify.py
|
apps/predict/templatetags/jsonify.py
|
"""Add a template tag to turn python objects into JSON"""
import types
import json
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def jsonify(obj):
if isinstance(obj, types.GeneratorType):
obj = list(obj)
return mark_safe(json.dumps(obj))
|
"""Add a template tag to turn python objects into JSON"""
import types
import json
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def jsonify(obj):
"""Turn object into a json instance"""
if isinstance(obj, types.GeneratorType):
obj = list(obj)
return mark_safe(json.dumps(obj).replace("'", "\\'"))
|
Remove single quote marks from jsonif
|
Remove single quote marks from jsonif
|
Python
|
agpl-3.0
|
IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site
|
8ed2aa1a8108ae3a678ff18f4e8fda3539f4b603
|
avalonstar/components/games/admin.py
|
avalonstar/components/games/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Game, Platform
class GameAdmin(admin.ModelAdmin):
list_display = ['name', 'platform', 'gbid', 'is_abandoned', 'is_completed']
raw_id_fields = ['platform']
autocomplete_lookup_fields = { 'fk': ['platform'] }
admin.site.register(Game, GameAdmin)
class PlatformAdmin(admin.ModelAdmin):
pass
admin.site.register(Platform, PlatformAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Game, Platform
class GameAdmin(admin.ModelAdmin):
list_display = ['name', 'platform', 'gbid', 'is_abandoned', 'is_completed']
list_editable = ['is_abandoned', 'is_completed']
raw_id_fields = ['platform']
autocomplete_lookup_fields = { 'fk': ['platform'] }
admin.site.register(Game, GameAdmin)
class PlatformAdmin(admin.ModelAdmin):
pass
admin.site.register(Platform, PlatformAdmin)
|
Make the game booleans editable.
|
Make the game booleans editable.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
12683ea64a875b624230f2dd84609a77eaec1095
|
cd_wizard.py
|
cd_wizard.py
|
#!/usr/bin/env python
"""Wizard to guide user to:
- insert cd
- please rip with eac
- check for a good rip
- upload with metadata (freedb, musicmind)
"""
from PyQt4 import QtGui
def createIntroPage():
page = QtGui.QWizardPage()
page.setTitle("Introduction")
page.setSubTitle("This wizard will help you archive your CDs in your Personal Music Locker")
label = QtGui.QLabel("Please insert a CD")
label.setWordWrap(True)
layout = QtGui.QVBoxLayout()
layout.addWidget(label)
page.setLayout(layout)
return page
def createConclusionPage():
page = QtGui.QWizardPage()
page.setTitle("Conclusion")
label = QtGui.QLabel("You are now added this CD to your locker!")
label.setWordWrap(True)
layout = QtGui.QVBoxLayout()
layout.addWidget(label)
page.setLayout(layout)
return page
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
wizard = QtGui.QWizard()
wizard.addPage(createIntroPage())
wizard.addPage(createConclusionPage())
wizard.setWindowTitle("Music Locker Uploader")
wizard.show()
sys.exit(wizard.exec_())
|
#!/usr/bin/env python
"""Wizard to guide user to:
- insert cd
- please rip with eac
- check for a good rip
- upload with metadata (freedb, musicmind)
"""
from PyQt4 import QtGui
def createIntroPage():
page = QtGui.QWizardPage()
page.setTitle("Introduction")
page.setSubTitle("This wizard will help you archive your CDs in your Personal Music Locker")
label = QtGui.QLabel("Please insert a CD")
label.setWordWrap(True)
layout = QtGui.QVBoxLayout()
layout.addWidget(label)
page.setLayout(layout)
return page
def choose_cd():
page = QtGui.QWizardPage()
page.setTitle("Choose CD Drive")
file_dialog = QtGui.QFileDialog()
file_dialog.setFileMode(QtGui.QFileDialog.Directory)
file_dialog.setOptions(QtGui.QFileDialog.ShowDirsOnly)
file_dialog.setDirectory('/')
layout = QtGui.QVBoxLayout()
layout.addWidget(file_dialog)
page.setLayout(layout)
return page
def createConclusionPage():
page = QtGui.QWizardPage()
page.setTitle("Conclusion")
label = QtGui.QLabel("You are now added this CD to your locker!")
label.setWordWrap(True)
layout = QtGui.QVBoxLayout()
layout.addWidget(label)
page.setLayout(layout)
return page
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
wizard = QtGui.QWizard()
wizard.addPage(createIntroPage())
wizard.addPage(choose_cd())
wizard.addPage(createConclusionPage())
wizard.setWindowTitle("Music Locker Uploader")
wizard.show()
sys.exit(wizard.exec_())
|
Add file browser to choose a CD.
|
Add file browser to choose a CD.
|
Python
|
agpl-3.0
|
brewsterkahle/archivecd
|
e7cb5b0be49bc5e811809c56eb4ad3c0dc861cdf
|
examples/child_watcher.py
|
examples/child_watcher.py
|
import logging
import random
from tornado import gen
from zoonado import exc
log = logging.getLogger()
def arguments(parser):
parser.add_argument(
"--path", "-p", type=str, default="/examplewatcher",
help="ZNode path to use for the example."
)
def watcher_callback(children):
children.sort()
log.info("There are %d items now: %s", len(children), children)
@gen.coroutine
def run(client, args):
yield client.start()
try:
yield client.create(args.path)
except exc.NodeExists:
pass
watcher = client.recipes.ChildrenWatcher()
watcher.add_callback(args.path, watcher_callback)
to_make = ["cat", "dog", "mouse", "human"]
random.shuffle(to_make)
for item in to_make:
yield client.create(args.path + "/" + item, ephemeral=True)
yield gen.sleep(1)
for item in to_make:
yield client.delete(args.path + "/" + item)
|
import logging
import random
from tornado import gen
from zoonado import exc
log = logging.getLogger()
def arguments(parser):
parser.add_argument(
"--path", "-p", type=str, default="/examplewatcher",
help="ZNode path to use for the example."
)
def watcher_callback(children):
children.sort()
log.info("There are %d items now: %s", len(children), ", ".join(children))
@gen.coroutine
def run(client, args):
yield client.start()
try:
yield client.create(args.path)
except exc.NodeExists:
pass
watcher = client.recipes.ChildrenWatcher()
watcher.add_callback(args.path, watcher_callback)
to_make = ["cat", "dog", "mouse", "human"]
random.shuffle(to_make)
for item in to_make:
yield client.create(args.path + "/" + item, ephemeral=True)
yield gen.sleep(1)
for item in to_make:
yield client.delete(args.path + "/" + item)
yield gen.sleep(1)
|
Fix up to the child watcher example.
|
Fix up to the child watcher example.
Without yielding to the ioloop after each call to client.delete() the child
znodes would be deleted but that would never be reported.
|
Python
|
apache-2.0
|
wglass/zoonado
|
615e57fefa2b3b52ce351ef1d8039216927dc891
|
Parallel/Testing/Cxx/TestSockets.py
|
Parallel/Testing/Cxx/TestSockets.py
|
""" Driver script for testing sockets
Unix only
"""
import os, sys, time
# Fork, run server in child, client in parent
pid = os.fork()
if pid == 0:
# exec the parent
os.execv(sys.argv[1], ('-D', sys.argv[3]))
else:
# wait a little to make sure that the server is ready
time.sleep(10)
# run the client
retVal = os.system('%s -D %s -V %s' % ( sys.argv[2], sys.argv[3],
sys.argv[4] ))
# in case the client fails, we need to kill the server
# or it will stay around
time.sleep(20)
os.kill(pid, 15)
sys.exit(os.WEXITSTATUS(retVal))
|
""" Driver script for testing sockets
Unix only
"""
import os, sys, time
# Fork, run server in child, client in parent
pid = os.fork()
if pid == 0:
# exec the parent
os.execv(sys.argv[1], ('-D', sys.argv[3]))
else:
# wait a little to make sure that the server is ready
time.sleep(10)
# run the client
retVal = os.system('"%s" -D "%s" -V "%s"' % ( sys.argv[2], sys.argv[3],
sys.argv[4] ))
# in case the client fails, we need to kill the server
# or it will stay around
time.sleep(20)
try:
os.kill(pid, 15)
except:
pass
sys.exit(os.WEXITSTATUS(retVal))
|
Fix space problem and put try around os.kill
|
ERR: Fix space problem and put try around os.kill
|
Python
|
bsd-3-clause
|
SimVascular/VTK,johnkit/vtk-dev,gram526/VTK,daviddoria/PointGraphsPhase1,sankhesh/VTK,hendradarwin/VTK,jeffbaumes/jeffbaumes-vtk,ashray/VTK-EVM,msmolens/VTK,SimVascular/VTK,arnaudgelas/VTK,gram526/VTK,berendkleinhaneveld/VTK,johnkit/vtk-dev,Wuteyan/VTK,aashish24/VTK-old,mspark93/VTK,hendradarwin/VTK,collects/VTK,collects/VTK,sumedhasingla/VTK,ashray/VTK-EVM,SimVascular/VTK,biddisco/VTK,cjh1/VTK,keithroe/vtkoptix,spthaolt/VTK,candy7393/VTK,candy7393/VTK,naucoin/VTKSlicerWidgets,sumedhasingla/VTK,spthaolt/VTK,berendkleinhaneveld/VTK,demarle/VTK,naucoin/VTKSlicerWidgets,sgh/vtk,gram526/VTK,msmolens/VTK,jmerkow/VTK,aashish24/VTK-old,biddisco/VTK,berendkleinhaneveld/VTK,msmolens/VTK,candy7393/VTK,keithroe/vtkoptix,daviddoria/PointGraphsPhase1,mspark93/VTK,demarle/VTK,jmerkow/VTK,spthaolt/VTK,gram526/VTK,johnkit/vtk-dev,jeffbaumes/jeffbaumes-vtk,mspark93/VTK,sankhesh/VTK,johnkit/vtk-dev,demarle/VTK,SimVascular/VTK,jmerkow/VTK,biddisco/VTK,ashray/VTK-EVM,hendradarwin/VTK,jmerkow/VTK,keithroe/vtkoptix,gram526/VTK,candy7393/VTK,berendkleinhaneveld/VTK,arnaudgelas/VTK,sankhesh/VTK,aashish24/VTK-old,aashish24/VTK-old,arnaudgelas/VTK,ashray/VTK-EVM,sumedhasingla/VTK,hendradarwin/VTK,daviddoria/PointGraphsPhase1,sgh/vtk,candy7393/VTK,sankhesh/VTK,johnkit/vtk-dev,demarle/VTK,Wuteyan/VTK,johnkit/vtk-dev,aashish24/VTK-old,demarle/VTK,mspark93/VTK,sankhesh/VTK,msmolens/VTK,hendradarwin/VTK,keithroe/vtkoptix,collects/VTK,candy7393/VTK,SimVascular/VTK,gram526/VTK,mspark93/VTK,sumedhasingla/VTK,mspark93/VTK,sankhesh/VTK,naucoin/VTKSlicerWidgets,mspark93/VTK,cjh1/VTK,msmolens/VTK,jeffbaumes/jeffbaumes-vtk,collects/VTK,jmerkow/VTK,naucoin/VTKSlicerWidgets,daviddoria/PointGraphsPhase1,biddisco/VTK,berendkleinhaneveld/VTK,johnkit/vtk-dev,candy7393/VTK,sumedhasingla/VTK,Wuteyan/VTK,biddisco/VTK,Wuteyan/VTK,demarle/VTK,candy7393/VTK,keithroe/vtkoptix,keithroe/vtkoptix,sgh/vtk,sumedhasingla/VTK,sankhesh/VTK,sumedhasingla/VTK,collects/VTK,arnaudgelas/VTK,Wuteyan/VTK,naucoin/VTKSlicerWidgets,msmolens/VTK,daviddoria/PointGraphsPhase1,demarle/VTK,demarle/VTK,arnaudgelas/VTK,jmerkow/VTK,gram526/VTK,ashray/VTK-EVM,ashray/VTK-EVM,jeffbaumes/jeffbaumes-vtk,jeffbaumes/jeffbaumes-vtk,SimVascular/VTK,msmolens/VTK,Wuteyan/VTK,biddisco/VTK,keithroe/vtkoptix,sumedhasingla/VTK,berendkleinhaneveld/VTK,hendradarwin/VTK,collects/VTK,spthaolt/VTK,sankhesh/VTK,cjh1/VTK,cjh1/VTK,cjh1/VTK,SimVascular/VTK,jeffbaumes/jeffbaumes-vtk,ashray/VTK-EVM,sgh/vtk,ashray/VTK-EVM,Wuteyan/VTK,gram526/VTK,biddisco/VTK,hendradarwin/VTK,berendkleinhaneveld/VTK,msmolens/VTK,sgh/vtk,daviddoria/PointGraphsPhase1,mspark93/VTK,keithroe/vtkoptix,sgh/vtk,jmerkow/VTK,cjh1/VTK,arnaudgelas/VTK,spthaolt/VTK,spthaolt/VTK,jmerkow/VTK,naucoin/VTKSlicerWidgets,SimVascular/VTK,spthaolt/VTK,aashish24/VTK-old
|
e8d57ef08616b06e5f94da7e01ba96c13b9124d7
|
perfrunner/celeryremote.py
|
perfrunner/celeryremote.py
|
BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
CELERYD_HIJACK_ROOT_LOGGER = False
|
Disable hijacking of previously configured log handlers
|
Disable hijacking of previously configured log handlers
See also:
http://docs.celeryproject.org/en/3.1/configuration.html#celeryd-hijack-root-logger
Change-Id: Ibf4618e8bfeb28f877db4a40b4a911ff00442cc9
Reviewed-on: http://review.couchbase.org/82543
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
Python
|
apache-2.0
|
couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner
|
dc883b81a2c5714d9401fb113101639e13e396f5
|
integration_tests/tests/hello_world_sleep_and_time.py
|
integration_tests/tests/hello_world_sleep_and_time.py
|
integration_test = True
timeout = 2
SLEEP_INTERVAL = int(100e6)
def check_state(state):
import re
from functools import partial
from operator import is_not
r = re.compile('^(\d+) \[.*\] Hello World!')
lines = map(r.match, state.console.split('\n'))
lines = filter(partial(is_not, None), lines)
times = map(lambda m: int(m.group(1)), lines)
times = list(times)
min_times = (timeout - 1) * int(1e9) // SLEEP_INTERVAL
assert len(times) >= min_times, "Expected at least {0} hello worlds".format(min_times)
prev = 0
for t in times:
diff = t - prev
assert diff >= SLEEP_INTERVAL, "Sleep interval must be >= {0}".format(SLEEP_INTERVAL)
prev = diff
|
integration_test = True
timeout = 2
SLEEP_INTERVAL = int(100e6)
MIN_TIME = 1451606400000000000 # 2016-1-1 0:0:0.0 UTC
def check_state(state):
import re
from functools import partial
from operator import is_not
r = re.compile('^(\d+) \[.*\] Hello World!')
lines = map(r.match, state.console.split('\n'))
lines = filter(partial(is_not, None), lines)
times = map(lambda m: int(m.group(1)), lines)
times = list(times)
min_times = (timeout - 1) * int(1e9) // SLEEP_INTERVAL
assert len(times) >= min_times, "Expected at least {0} hello worlds".format(min_times)
prev = 0
for t in times:
diff = t - prev
assert diff >= SLEEP_INTERVAL, "Sleep interval must be >= {0}".format(SLEEP_INTERVAL)
assert t >= MIN_TIME, "Time must be after {0}".format(MIN_TIME)
prev = diff
|
Make sure current date is late enough
|
Make sure current date is late enough
|
Python
|
bsd-2-clause
|
unigornel/unigornel,unigornel/unigornel
|
f3fb5bd0dbb3e19e58558af015aaee5ec120af71
|
portal/template_helpers.py
|
portal/template_helpers.py
|
""" Module for helper functions used inside jinja2 templates """
# NB, each blueprint must individually load any functions defined below
# for them to appear in the namespace when invoked from respective blueprint
# See @<blueprint>.context_processor decorator for more info.
def split_string(s, delimiter=','):
return s.split(delimiter)
|
""" Module for helper functions used inside jinja2 templates """
# NB, each blueprint must individually load any functions defined below
# for them to appear in the namespace when invoked from respective blueprint
# See @<blueprint>.context_processor decorator for more info.
def split_string(s, delimiter=','):
"""Given string (or tuple) return the delimited values"""
# If given a tuple, split already happened
if isinstance(s, (list, tuple)):
return s
return s.split(delimiter)
|
Allow for list/tuples in config files when looking for comma delimited strings.
|
Allow for list/tuples in config files when looking for comma delimited
strings.
|
Python
|
bsd-3-clause
|
uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal
|
674f6e0b9fbb76684a9b05d16a5da0d4cc732b1d
|
scripts/analysis/plot_tracking_vector_estimator_stats.py
|
scripts/analysis/plot_tracking_vector_estimator_stats.py
|
#!/usr/bin/env python2
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
state_labels = ['Marker_x', 'Marker_y', 'Marker_z', 'Velocity_x', 'Velocity_y', 'Velocity_z']
noise_labels = ['Noise_x', 'Noise_y', 'Noise_z', 'Noise_vx', 'Noise_vy', 'Noise_vz']
meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y', 'Measured_Velocity_x', 'Measured_Velocity_y', 'Measured_Velocity_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
for i in range(6):
plt.subplot(2, 3, i+1)
plt.plot(ts, data[meas_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
#!/usr/bin/env python2
import numpy as np
import matplotlib.pyplot as plt
import argparse
import sys
import os
parser = argparse.ArgumentParser(
prog='plot_tracking_vector_estimator')
parser.add_argument('directory', type=str, help='Data directory')
args = parser.parse_args()
data = np.genfromtxt(
os.path.join(
args.directory,
'tracking_vector_estimator'),
delimiter=',', names=True)
state_labels = ['Marker_x', 'Marker_y', 'Marker_z']
noise_labels = ['Noise_x', 'Noise_y', 'Noise_z']
meas_labels = ['Measured_Marker_x', 'Measured_Marker_y', 'Measured_Marker_y']
meas_noise_labels = ['Meas_noise_x', 'Meas_noise_y', 'Meas_noise_z']
ts = (data['Time'] - data['Time'][0]) / 1e9
plt.figure(1)
for i in range(3):
plt.subplot(2, 3, i+1)
plt.errorbar(ts, data[meas_labels[i]], yerr=data[meas_noise_labels[i]])
plt.errorbar(ts, data[state_labels[i]], yerr=data[noise_labels[i]])
plt.ylabel(state_labels[i])
plt.xlabel('Time (seconds)')
plt.legend([meas_labels[i], state_labels[i]])
plt.show()
|
Change estimator script based on modifications to estimator
|
Change estimator script based on modifications to estimator
|
Python
|
mpl-2.0
|
jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy,jhu-asco/aerial_autonomy
|
a0aa74d9e6295e34f02b4eefd76e7eb9a1e6425f
|
node/floor_divide.py
|
node/floor_divide.py
|
#!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
|
#!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
def chunk(self, inp:Node.indexable, num:Node.number):
"""Return inp seperated into num groups"""
rtn = []
last = 0
size = len(inp)//num
for i in range(size, len(inp), size):
rtn.append(inp[last:i])
last = i
if len(rtn) != num:
rtn.append(inp[last:])
else:
rtn[-1] += inp[last:]
if len(rtn):
if isinstance(inp, str):
rtn[-1] = "".join(rtn[-1])
else:
rtn[-1] = type(inp)(rtn[-1])
return [rtn]
|
Add a group chunk, chunks a list into N groups
|
Add a group chunk, chunks a list into N groups
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
9361af556cfa7f4fb6bb3c53b4e74e2c115cd7d7
|
annict/client.py
|
annict/client.py
|
# -*- coding: utf-8 -*-
from operator import methodcaller
import requests
from furl import furl
class Client(object):
def __init__(self, access_token, base_url='https://api.annict.com', api_version='v1'):
self.access_token = access_token
self.base_url = base_url
self.api_version = api_version
def _request(self, http_method, path, kwargs=None):
kwargs['access_token'] = self.access_token
d = {}
if http_method == 'post' or http_method == 'patch':
d['data'] = kwargs
elif http_method == 'get':
d['params'] = kwargs
url = furl(self.base_url)
url.path.add(self.api_version).add(path)
m = methodcaller(http_method, url.url, **d)
response = m(requests)
if not response.content:
return None
return response.json()
def get(self, path, kwargs):
return self._request('get', path, kwargs)
def post(self, path, kwargs):
return self._request('post', path, kwargs)
def patch(self, path, kwargs):
return self._request('patch', path, kwargs)
def delete(self, path):
return self._request('delete', path)
|
# -*- coding: utf-8 -*-
from operator import methodcaller
import requests
from furl import furl
class Client(object):
def __init__(self, access_token, base_url='https://api.annict.com', api_version='v1'):
self.access_token = access_token
self.base_url = base_url
self.api_version = api_version
def _request(self, http_method, path, kwargs=None):
kwargs['access_token'] = self.access_token
d = {}
if http_method == 'post' or http_method == 'patch':
d['data'] = kwargs
elif http_method == 'get':
d['params'] = kwargs
url = furl(self.base_url)
url.path.add(self.api_version).add(path)
m = methodcaller(http_method, url.url, **d)
return m(requests)
def get(self, path, kwargs):
return self._request('get', path, kwargs)
def post(self, path, kwargs):
return self._request('post', path, kwargs)
def patch(self, path, kwargs):
return self._request('patch', path, kwargs)
def delete(self, path):
return self._request('delete', path)
|
Fix Client returns requests's response.
|
Fix Client returns requests's response.
|
Python
|
mit
|
kk6/python-annict
|
dffbc7d79c67c3629f718c7a0330f9922499640d
|
examples/translations/portuguese_test_1.py
|
examples/translations/portuguese_test_1.py
|
# Portuguese Language Test - Python 3 Only!
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir_url("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Visitar a página principal"]')
self.atualizar_texto("#searchInput", "Rio de Janeiro")
self.clique("#searchButton")
self.verificar_texto("Rio de Janeiro", "#firstHeading")
self.verificar_elemento('img[alt*="edifícios"]')
self.atualizar_texto("#searchInput", "São Paulo")
self.clique("#searchButton")
self.verificar_texto("São Paulo", "#firstHeading")
self.verificar_elemento('img[src*="Monumento"]')
self.voltar()
self.verificar_verdade("Janeiro" in self.obter_url_atual())
self.avançar() # noqa
self.verificar_verdade("Paulo" in self.obter_url_atual())
|
# Portuguese Language Test - Python 3 Only!
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir_url("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.atualizar_texto("#searchInput", "Rio de Janeiro")
self.clique("#searchButton")
self.verificar_texto("Rio de Janeiro", "#firstHeading")
self.verificar_elemento('img[alt*="edifícios"]')
self.atualizar_texto("#searchInput", "São Paulo")
self.clique("#searchButton")
self.verificar_texto("São Paulo", "h1#firstHeading")
self.verificar_elemento('img[src*="Monumento"]')
self.voltar()
self.verificar_verdade("Rio" in self.obter_url_atual())
self.atualizar_texto("#searchInput", "Florianópolis\n")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('img[alt*="Avenida Beira Mar"]')
|
Update the Portuguese example test
|
Update the Portuguese example test
|
Python
|
mit
|
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
135c84189720aa2b7c07e516c782f7fab7b4d8fe
|
astropy/units/format/base.py
|
astropy/units/format/base.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
class _FormatterMeta(type):
registry = {}
def __new__(mcls, name, bases, members):
if 'name' in members:
formatter_name = members['name'].lower()
else:
formatter_name = members['name'] = name.lower()
cls = super().__new__(mcls, name, bases, members)
mcls.registry[formatter_name] = cls
return cls
class Base(metaclass=_FormatterMeta):
"""
The abstract base class of all unit formats.
"""
def __new__(cls, *args, **kwargs):
# This __new__ is to make it clear that there is no reason to
# instantiate a Formatter--if you try to you'll just get back the
# class
return cls
@classmethod
def parse(cls, s):
"""
Convert a string to a unit object.
"""
raise NotImplementedError(
f"Can not parse with {cls.__name__} format")
@classmethod
def to_string(cls, u):
"""
Convert a unit object to a string.
"""
raise NotImplementedError(
f"Can not output in {cls.__name__} format")
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
class Base:
"""
The abstract base class of all unit formats.
"""
registry = {}
def __new__(cls, *args, **kwargs):
# This __new__ is to make it clear that there is no reason to
# instantiate a Formatter--if you try to you'll just get back the
# class
return cls
def __init_subclass__(cls, **kwargs):
# Keep a registry of all formats. Key by the class name unless a name
# is explicitly set (i.e., one *not* inherited from a superclass).
if 'name' not in cls.__dict__:
cls.name = cls.__name__.lower()
Base.registry[cls.name] = cls
super().__init_subclass__(**kwargs)
@classmethod
def parse(cls, s):
"""
Convert a string to a unit object.
"""
raise NotImplementedError(
f"Can not parse with {cls.__name__} format")
@classmethod
def to_string(cls, u):
"""
Convert a unit object to a string.
"""
raise NotImplementedError(
f"Can not output in {cls.__name__} format")
|
Remove use of metaclass for unit formats
|
Remove use of metaclass for unit formats
|
Python
|
bsd-3-clause
|
astropy/astropy,saimn/astropy,mhvk/astropy,lpsinger/astropy,saimn/astropy,lpsinger/astropy,mhvk/astropy,pllim/astropy,astropy/astropy,lpsinger/astropy,saimn/astropy,aleksandr-bakanov/astropy,pllim/astropy,astropy/astropy,pllim/astropy,lpsinger/astropy,larrybradley/astropy,mhvk/astropy,pllim/astropy,StuartLittlefair/astropy,larrybradley/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,saimn/astropy,aleksandr-bakanov/astropy,mhvk/astropy,StuartLittlefair/astropy,saimn/astropy,pllim/astropy,larrybradley/astropy,astropy/astropy,StuartLittlefair/astropy,astropy/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,StuartLittlefair/astropy,lpsinger/astropy,mhvk/astropy
|
52ef9217f954617283be54c889a317b2432651d7
|
licensing/models.py
|
licensing/models.py
|
from django.db import models
class License(models.Model):
name = models.CharField(max_length=80, unique=True)
symbols = models.CharField(max_length=5)
url = models.URLField(unique=True)
def __unicode__(self):
return self.name
def get_absolute_url(self):
return self.url
class Licensed(models.Model):
license = models.ForeignKey(License)
class Meta:
abstract = True
|
from django.db import models
class License(models.Model):
name = models.CharField(max_length=80, unique=True)
symbols = models.CharField(max_length=5)
url = models.URLField(unique=True)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
def get_absolute_url(self):
return self.url
class Licensed(models.Model):
license = models.ForeignKey(License)
class Meta:
abstract = True
|
Add __str__() method to license model
|
Add __str__() method to license model
__unicode__() is not used in python3
|
Python
|
unlicense
|
editorsnotes/django-licensing,editorsnotes/django-licensing
|
49d831a61c5770d02609ff2df8fed3effc3869c2
|
avalonstar/components/games/admin.py
|
avalonstar/components/games/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Game, Platform
class GameAdmin(admin.ModelAdmin):
list_display = ['name', 'platform', 'gbid']
raw_id_fields = ['platform']
autocomplete_lookup_fields = { 'fk': ['platform'] }
admin.site.register(Game, GameAdmin)
class PlatformAdmin(admin.ModelAdmin):
pass
admin.site.register(Platform, PlatformAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Game, Platform
class GameAdmin(admin.ModelAdmin):
list_display = ['name', 'platform', 'gbid', 'is_abandoned', 'is_completed']
raw_id_fields = ['platform']
autocomplete_lookup_fields = { 'fk': ['platform'] }
admin.site.register(Game, GameAdmin)
class PlatformAdmin(admin.ModelAdmin):
pass
admin.site.register(Platform, PlatformAdmin)
|
Add booleans in for games.
|
Add booleans in for games.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
0e766eb66eba099071b6cfae49bf79492e29e648
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
print dbs
dbs = dbs.split('(')[0]
print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
# print dbs
dblist = []
for db in dbs:
dblist.append(db)
print dblist
# dbs = dbs.split('(')[0]
# print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
Create documentation of DataSource Settings
|
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
aecff9764ef8d18b7016a6acba41e74a43e66085
|
clio/utils.py
|
clio/utils.py
|
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
Add support to getBoolean function for None objects.
|
Add support to getBoolean function for None objects.
|
Python
|
apache-2.0
|
geodelic/clio,geodelic/clio
|
58d73429952a942d03b232242424946895ec3e8c
|
multi_schema/middleware.py
|
multi_schema/middleware.py
|
"""
Middleware to automatically set the schema (namespace).
if request.user.is_superuser, then look for a ?schema=XXX and set the schema to that.
Otherwise, set the schema to the one associated with the logged in user.
"""
from models import Schema
class SchemaMiddleware:
def process_request(self, request):
if request.user.is_anonymous():
return None
if request.user.is_superuser and '__schema' in request.GET:
request.session['schema'] = request.GET['__schema']
if request.user.is_superuser and 'schema' in request.session:
Schema.objects.get(pk=request.session['schema']).activate()
else:
request.user.schema.schema.activate()
def process_response(self, request):
pass
|
"""
Middleware to automatically set the schema (namespace).
if request.user.is_superuser, then look for a ?schema=XXX and set the schema to that.
Otherwise, set the schema to the one associated with the logged in user.
"""
from django.core.exceptions import ObjectDoesNotExist
from models import Schema
class SchemaMiddleware:
def process_request(self, request):
if request.user.is_anonymous():
return None
if request.user.is_superuser:
if '__schema' in request.GET:
request.session['schema'] = request.GET['__schema']
if 'schema' in request.session:
Schema.objects.get(pk=request.session['schema']).activate()
else:
try:
request.user.schema.schema.activate()
except ObjectDoesNotExist:
pass
def process_template_response(self, request, response):
if request.user.is_superuser:
response.context_data['schemata'] = Schema.objects.all()
response.context_data['selected_schema'] = request.session['schema']
return response
|
Add some data into the request context. Better handling of missing Schema objects when logging in (should we raise an error?).
|
Add some data into the request context.
Better handling of missing Schema objects when logging in (should we raise an error?).
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
b98e86ad9b3120dce9f163236b5e28f564547c27
|
TWLight/resources/factories.py
|
TWLight/resources/factories.py
|
# -*- coding: utf-8 -*-
import factory
import random
from django.conf import settings
from TWLight.resources.models import Partner, Stream, Video, Suggestion
class PartnerFactory(factory.django.DjangoModelFactory):
class Meta:
model = Partner
strategy = factory.CREATE_STRATEGY
company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
terms_of_use = factory.Faker("uri", locale=random.choice(settings.FAKER_LOCALES))
status = Partner.AVAILABLE # not the default, but usually wanted in tests
class StreamFactory(factory.django.DjangoModelFactory):
class Meta:
model = Stream
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
name = factory.Faker("bs", locale=random.choice(settings.FAKER_LOCALES))
class SuggestionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
class VideoFactory(factory.django.DjangoModelFactory):
class Meta:
model = Video
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
|
# -*- coding: utf-8 -*-
import factory
import random
from django.conf import settings
from TWLight.resources.models import Partner, Stream, Video, Suggestion
class PartnerFactory(factory.django.DjangoModelFactory):
class Meta:
model = Partner
strategy = factory.CREATE_STRATEGY
company_name = factory.Faker(
"company", locale=random.choice(settings.FAKER_LOCALES)
)
terms_of_use = factory.Faker("uri", locale=random.choice(settings.FAKER_LOCALES))
status = Partner.AVAILABLE # not the default, but usually wanted in tests
class StreamFactory(factory.django.DjangoModelFactory):
class Meta:
model = Stream
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
name = factory.Faker("bs", locale=random.choice(settings.FAKER_LOCALES))
class SuggestionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Suggestion
strategy = factory.CREATE_STRATEGY
suggested_company_name = factory.Faker("pystr", max_chars=40)
company_url = factory.Faker("url", locale=random.choice(settings.FAKER_LOCALES))
class VideoFactory(factory.django.DjangoModelFactory):
class Meta:
model = Video
strategy = factory.CREATE_STRATEGY
partner = factory.SubFactory(PartnerFactory)
|
Change suggested_company_name factory var to pystr
|
Change suggested_company_name factory var to pystr
|
Python
|
mit
|
WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight
|
bf5307afe52415960d0ffc794f687b0ecebb48da
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
from app import views, models
|
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
##################
# Logging system #
##################
# Open a file rotated every 100MB
file_handler = RotatingFileHandler('tmp/cineapp.log', 'a', 100 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Cineapp startup')
from app import views, models
|
Enable file logging for the application.
|
Enable file logging for the application.
|
Python
|
mit
|
ptitoliv/cineapp,ptitoliv/cineapp,ptitoliv/cineapp
|
04328bb0ed84180aa9e5ce7f749eafb1ab96d4fc
|
app/api/auth.py
|
app/api/auth.py
|
from urllib import urlencode
from datetime import datetime
from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from django.utils.timezone import now
from api.models import AuthAPIKey, AuthAPILog
class APIKeyAuthentication(object):
""" Validats a request by API key passed as a GET parameter """
def is_authenticated(self, request):
try:
keyobj = AuthAPIKey.objects.get(key=request.GET.get('apikey', None))
except AuthAPIKey.DoesNotExist:
pass
else:
if keyobj and keyobj.active:
params = request.GET.copy()
if params.get('apikey', None): del params['apikey']
if len(params):
url = "%s?%s" % (request.path, urlencode(params))
else:
url = request.path
AuthAPILog(key=keyobj, access_datetime=now(), url=url).save()
request.user = AnonymousUser()
request.api_key = keyobj
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
|
from urllib import urlencode
from datetime import datetime
from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from django.utils.timezone import now
from api.models import AuthAPIKey, AuthAPILog
class APIKeyAuthentication(object):
""" Validats a request by API key passed as a GET parameter """
def is_authenticated(self, request):
try:
keyobj = AuthAPIKey.objects.get(key=request.GET.get('apikey', None))
except AuthAPIKey.DoesNotExist:
pass
else:
if keyobj and keyobj.active:
params = request.GET.copy()
if params.get('apikey', None): del params['apikey']
if len(params):
url = "%s?%s" % (request.path, urlencode(params))
else:
url = request.path
AuthAPILog.objects.create(key=keyobj, access_datetime=now(), url=url)
request.user = AnonymousUser()
request.api_key = keyobj
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
|
Use create instead of instance and save
|
Use create instead of instance and save
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
6091fccc90bb6b90c47a2e4fb7ee6821876eb1a1
|
synthnotes/generators/lengthgenerator.py
|
synthnotes/generators/lengthgenerator.py
|
from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
length_file=resource_filename(__name__,
'resources/note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
|
from pkg_resources import resource_filename
import pandas as pd
import numpy as np
class LengthGenerator(object):
def __init__(self,
length_file=resource_filename('synthnotes.resources',
'note_lengths.csv')):
# print(length_file)
df = pd.read_csv(length_file)
notes_count = df['count'].sum()
df['probability'] = df['count'] / notes_count
self.note_lengths = df['note_length'].as_matrix()
self.p = df['probability'].as_matrix()
def generate(self, size=1):
return np.random.choice(self.note_lengths,
size=size,
p=self.p)
|
Change LengthGenerator to get appropriate file path
|
Change LengthGenerator to get appropriate file path
|
Python
|
mit
|
ebegoli/SynthNotes
|
fc7cadecb95fa798a8e8aaeb544ad5464f13a533
|
nanomon/registry.py
|
nanomon/registry.py
|
from weakref import WeakValueDictionary
class DuplicateEntryError(Exception):
def __init__(self, name, obj, registry):
self.name = name
self.obj = obj
self.registry = registry
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._registry_name, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
#super(Registry, self).__init__(*args, **kwargs)
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
if not isinstance(value, self._object_type):
raise TypeError("This registry only accepts objects of type %s." %
(self._object_type.__name__))
if self.has_key(name):
raise DuplicateEntryError(name, value, self)
WeakValueDictionary.__setitem__(self, name, value)
|
from weakref import WeakValueDictionary
class DuplicateEntryError(Exception):
def __init__(self, name, obj, registry):
self.name = name
self.obj = obj
self.registry = registry
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._object_type.__name__, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
if not isinstance(value, self._object_type):
raise TypeError("This registry only accepts objects of type %s." %
(self._object_type.__name__))
if self.has_key(name):
raise DuplicateEntryError(name, value, self)
WeakValueDictionary.__setitem__(self, name, value)
|
Clean up some commented out code
|
Clean up some commented out code
|
Python
|
bsd-2-clause
|
cloudtools/nymms
|
d9f03ad1c73cc18276666f28e9a9360c71139a0d
|
nib/plugins/time.py
|
nib/plugins/time.py
|
import datetime
import time
from nib import jinja
@jinja('time')
def timeformat(t=None, f='%Y-%m-%d %I:%M %p'):
if t is None:
t = time.gmtime()
elif isinstance(t, datetime.date) or isinstance(t, datetime.datetime):
t = t.timetuple()
elif isinstance(t, float):
t = time.gmtime(t)
s = time.strftime(f, t)
return s
@jinja('atomtime')
def atomtimeformat(t=None, f='%Y-%m-%dT%I:%M:%SZ'):
return timeformat(t,f)
@jinja('rsstime')
def rsstimeformat(t=None, f='%a, %d %b %Y %I:%M:%S GMT'):
return timeformat(t,f)
@jinja('date')
def dateformat(t=None, f='%Y-%m-%d'):
return timeformat(t,f)
|
import datetime
import time
from nib import jinja
@jinja('time')
def timeformat(t=None, f='%Y-%m-%d %I:%M %p'):
if t is None:
t = time.gmtime()
elif isinstance(t, datetime.date) or isinstance(t, datetime.datetime):
t = t.timetuple()
elif isinstance(t, float):
t = time.gmtime(t)
s = time.strftime(f, t)
return s
@jinja('atomtime')
def atomtimeformat(t=None, f='%Y-%m-%dT%I:%M:%SZ'):
return timeformat(t,f)
@jinja('rsstime')
def rsstimeformat(t=None, f='%a, %d %b %Y %I:%M:%S GMT'):
return timeformat(t,f)
@jinja('date')
def dateformat(t=None, f='%Y-%m-%d'):
return timeformat(t,f)
@jinja('longdate')
def longdateformat(t=None, f='%B %d, %Y'):
return timeformat(t, f)
|
Add 'longdate' filter for readable dates in templates
|
Add 'longdate' filter for readable dates in templates
|
Python
|
mit
|
jreese/nib
|
43a515ddfbe38686672fe00d4765d3f2e1bc5346
|
scarlet/assets/settings.py
|
scarlet/assets/settings.py
|
from django.conf import settings
# Main Assets Directory. This will be a subdirectory within MEDIA_ROOT.
# Set to None to use MEDIA_ROOT directly
DIRECTORY = getattr(settings, "ASSETS_DIR", 'assets')
# Which size should be used as CMS thumbnail for images.
CMS_THUMBNAIL_SIZE = getattr(settings, 'ASSETS_CMS_THUMBNAIL_SIZE', '80x80')
# EXTRA SETTINGS
# Convert Filename (UUID)
HASH_FILENAME = getattr(settings, "ASSETS_HASH_FILENAME", True)
# Append a qs to assets urls for cache busting
USE_CACHE_BUST = getattr(settings, "ASSETS_USE_CACHE_BUST", True)
ASSET_MODEL = getattr(settings, "ASSET_MODEL", "assets.Asset")
ASSET_TYPES = getattr(settings, "ASSET_TYPES", None)
DEFAULT_IMAGE_SIZES = {
'admin' : { 'width' : 100, 'height' : 100, 'editable': False }
}
IMAGE_SIZES = getattr(settings, "IMAGE_SIZES", DEFAULT_IMAGE_SIZES)
IMAGE_CROPPER = '.crops.cropper'
CELERY = getattr(settings, "ASSET_CELERY", None)
USE_CELERY_DECORATOR = getattr(settings, "ASSET_USE_CELERY_DECORATOR", False)
|
from django.conf import settings
# Main Assets Directory. This will be a subdirectory within MEDIA_ROOT.
# Set to None to use MEDIA_ROOT directly
DIRECTORY = getattr(settings, "ASSETS_DIR", 'assets')
# Which size should be used as CMS thumbnail for images.
CMS_THUMBNAIL_SIZE = getattr(settings, 'ASSETS_CMS_THUMBNAIL_SIZE', '80x80')
# EXTRA SETTINGS
# Convert Filename (UUID)
HASH_FILENAME = getattr(settings, "ASSETS_HASH_FILENAME", True)
# Append a qs to assets urls for cache busting
USE_CACHE_BUST = getattr(settings, "ASSETS_USE_CACHE_BUST", True)
ASSET_MODEL = getattr(settings, "ASSET_MODEL", "assets.Asset")
ASSET_TYPES = getattr(settings, "ASSET_TYPES", None)
DEFAULT_IMAGE_SIZES = {
'admin' : {
'width' : 100, 'height' : 100,
'editable': False, 'upscale': True,
},
}
IMAGE_SIZES = getattr(settings, "IMAGE_SIZES", DEFAULT_IMAGE_SIZES)
IMAGE_CROPPER = '.crops.cropper'
CELERY = getattr(settings, "ASSET_CELERY", None)
USE_CELERY_DECORATOR = getattr(settings, "ASSET_USE_CELERY_DECORATOR", False)
|
Set upscale to True by default for admin asset
|
Set upscale to True by default for admin asset
|
Python
|
mit
|
ff0000/scarlet,ff0000/scarlet,ff0000/scarlet,ff0000/scarlet,ff0000/scarlet
|
b57d5ecf56640c9d0a69b565006e2240662d6b46
|
profile_collection/startup/11-temperature-controller.py
|
profile_collection/startup/11-temperature-controller.py
|
from ophyd import PVPositioner, EpicsSignal, EpicsSignalRO
from ophyd import Component as C
from ophyd.device import DeviceStatus
class CS700TemperatureController(PVPositioner):
setpoint = C(EpicsSignal, 'T-SP')
readback = C(EpicsSignalRO, 'T-I')
done = C(EpicsSignalRO, 'Cmd-Busy')
stop_signal = C(EpicsSignal, 'Cmd-Cmd')
def trigger(self):
# There is nothing to do. Just report that we are done.
# Note: This really should not necessary to do --
# future changes to PVPositioner may obviate this code.
status = DeviceStatus()
status._finished()
return status
cs700 = CS700TemperatureController('XF:28IDC-ES:1{Env:01}', name='cs700',
settle_time=10)
cs700.done_value = 0
cs700.read_attrs = ['setpoint', 'readback']
cs700.readback.name = 'temperautre'
cs700.setpoint.name = 'temperautre_setpoint'
|
from ophyd import PVPositioner, EpicsSignal, EpicsSignalRO
from ophyd import Component as C
from ophyd.device import DeviceStatus
class CS700TemperatureController(PVPositioner):
setpoint = C(EpicsSignal, 'T-SP')
readback = C(EpicsSignalRO, 'T-I')
done = C(EpicsSignalRO, 'Cmd-Busy')
stop_signal = C(EpicsSignal, 'Cmd-Cmd')
def trigger(self):
# There is nothing to do. Just report that we are done.
# Note: This really should not necessary to do --
# future changes to PVPositioner may obviate this code.
status = DeviceStatus()
status._finished()
return status
cs700 = CS700TemperatureController('XF:28IDC-ES:1{Env:01}', name='cs700')
# this functionality never worked, has now been removed, but will shortly be
# coming back
# settle_time=10)
cs700.done_value = 0
cs700.read_attrs = ['setpoint', 'readback']
cs700.readback.name = 'temperautre'
cs700.setpoint.name = 'temperautre_setpoint'
|
Remove settle_time kwarg from c700
|
Remove settle_time kwarg from c700
This kwarg has been removed from ophyd, but will be coming back (and be
functional) soon. Revert these changes when that happens: ophyd 0.2.1)
|
Python
|
bsd-2-clause
|
NSLS-II-XPD/ipython_ophyd,NSLS-II-XPD/ipython_ophyd
|
28627a41918be15037ba22e930a45d022e88388d
|
opps/articles/adminx.py
|
opps/articles/adminx.py
|
# -*- coding: utf-8 -*-
#from django.contrib import admin
from .models import Post, Album, Link
from opps.contrib import admin
admin.site.register(Post)
admin.site.register(Album)
admin.site.register(Link)
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from .models import Post, Album, Link
from opps.containers.models import ContainerSource, ContainerImage
from opps.contrib import admin
from opps.contrib.admin.layout import *
from xadmin.plugins.inline import Inline
class ImageInline(object):
model = ContainerImage
style = 'accordion'
class SourceInline(object):
model = ContainerSource
style = 'accordion'
class PostAdmin(object):
raw_id_fields = ['main_image', 'channel', 'albums']
inlines = [ImageInline, SourceInline]
style_fields = {'system': "radio-inline"}
form_layout = (
Main(
TabHolder(
Tab(_(u'Identification'),
Fieldset('site', 'title', 'slug',
'get_http_absolute_url', 'short_url'),
),
Tab(_(u'Content'),
Fieldset('hat', 'short_title', 'headline',
'content', 'main_image', 'main_image_caption',
'image_thumb' 'tags'),
Inline(ContainerImage),
Inline(ContainerSource),
),
Tab(_(u'Relationships'),
Fieldset('channel', 'albums'),
),
)),
Side(
Fieldset(_(u'Publication'), 'published', 'date_available',
'show_on_root_channel', 'in_containerboxes')
)
)
reversion_enable = True
admin.site.register(Post, PostAdmin)
admin.site.register(Album)
admin.site.register(Link)
|
Add Inline example on post model xadmin
|
Add Inline example on post model xadmin
|
Python
|
mit
|
jeanmask/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps
|
e72c7fb4249895f2d6f4c9f36153786b75d5e8fa
|
chainer/functions/reshape.py
|
chainer/functions/reshape.py
|
import numpy
from chainer import function
from chainer.utils import type_check
class Reshape(function.Function):
type_check_prod = type_check.Variable(numpy.prod, 'prod')
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
self.type_check_prod(in_types[0].shape) ==
self.type_check_prod(self.shape)
)
def check_type_backward(self, in_types, out_types):
type_check.expect(
out_types.size() == 1,
self.type_check_prod(in_types[0].shape) ==
self.type_check_prod(out_types[0].shape)
)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
import numpy
from chainer import function
from chainer.utils import type_check
_type_check_prod = type_check.Variable(numpy.prod, 'prod')
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
_type_check_prod(in_types[0].shape) ==
_type_check_prod(self.shape)
)
def check_type_backward(self, in_types, out_types):
type_check.expect(
out_types.size() == 1,
_type_check_prod(in_types[0].shape) ==
_type_check_prod(out_types[0].shape)
)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
Move type_chack_prod module level variable and change its name to _type_check_prod
|
Move type_chack_prod module level variable and change its name to _type_check_prod
|
Python
|
mit
|
kikusu/chainer,niboshi/chainer,sou81821/chainer,elviswf/chainer,anaruse/chainer,okuta/chainer,okuta/chainer,AlpacaDB/chainer,tkerola/chainer,ktnyt/chainer,wkentaro/chainer,chainer/chainer,woodshop/complex-chainer,jfsantos/chainer,keisuke-umezawa/chainer,yanweifu/chainer,truongdq/chainer,niboshi/chainer,aonotas/chainer,ronekko/chainer,cupy/cupy,cemoody/chainer,chainer/chainer,benob/chainer,tigerneil/chainer,ytoyama/yans_chainer_hackathon,wkentaro/chainer,niboshi/chainer,sinhrks/chainer,kuwa32/chainer,hvy/chainer,bayerj/chainer,keisuke-umezawa/chainer,sinhrks/chainer,pfnet/chainer,kiyukuta/chainer,hvy/chainer,AlpacaDB/chainer,jnishi/chainer,muupan/chainer,jnishi/chainer,laysakura/chainer,tscohen/chainer,t-abe/chainer,rezoo/chainer,okuta/chainer,wkentaro/chainer,benob/chainer,woodshop/chainer,keisuke-umezawa/chainer,hvy/chainer,truongdq/chainer,kashif/chainer,keisuke-umezawa/chainer,muupan/chainer,ktnyt/chainer,hidenori-t/chainer,umitanuki/chainer,Kaisuke5/chainer,ktnyt/chainer,okuta/chainer,ikasumi/chainer,chainer/chainer,cupy/cupy,jnishi/chainer,chainer/chainer,jnishi/chainer,1986ks/chainer,kikusu/chainer,niboshi/chainer,minhpqn/chainer,wkentaro/chainer,cupy/cupy,ysekky/chainer,delta2323/chainer,cupy/cupy,masia02/chainer,wavelets/chainer,hvy/chainer,ktnyt/chainer,t-abe/chainer
|
737bf244f36b73a54b5b4f89f0c7e604d3f34b72
|
tests/grammar_term-nonterm_test/NonterminalGetTest.py
|
tests/grammar_term-nonterm_test/NonterminalGetTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Grammar
from grammpy import Nonterminal
class TempClass(Nonterminal):
pass
class Second(Nonterminal):
pass
class Third(Nonterminal):
pass
class TerminalGetTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Grammar
from grammpy import Nonterminal
class TempClass(Nonterminal):
pass
class Second(Nonterminal):
pass
class Third(Nonterminal):
pass
class TerminalGetTest(TestCase):
def test_getNontermEmpty(self):
gr = Grammar()
self.assertIsNone(gr.get_nonterm(TempClass))
self.assertIsNone(gr.get_nonterm(Second))
self.assertIsNone(gr.get_nonterm(Third))
def test_getNontermClass(self):
gr = Grammar()
gr.add_nonterm(TempClass)
self.assertEqual(gr.get_nonterm(TempClass), TempClass)
def test_getNontermArray(self):
gr = Grammar()
gr.add_nonterm([TempClass, Second, Third])
g = gr.get_term([Second, TempClass])
for i in g:
self.assertTrue(i in [TempClass, Second, Third])
self.assertEqual(g[0], Second)
self.assertEqual(g[1], TempClass)
def test_dontGetNontermArray(self):
gr = Grammar()
gr.add_term([TempClass, Second])
g = gr.get_term([TempClass, Third])
self.assertEqual(g[0], TempClass)
self.assertIsNone(g[1])
def test_getNontermTuple(self):
gr = Grammar()
gr.add_term([TempClass, Second, Third])
g = gr.get_term((Third, TempClass))
for i in g:
self.assertTrue(i in [TempClass, Second, Third])
self.assertEqual(g[0], Third)
self.assertEqual(g[1], TempClass)
def test_dontGetNontermTuple(self):
gr = Grammar()
gr.add_term([TempClass, Second])
g = gr.get_term((TempClass, Third))
self.assertEqual(g[0], TempClass)
self.assertIsNone(g[1])
if __name__ == '__main__':
main()
|
Add tests of get nonterms
|
Add tests of get nonterms
|
Python
|
mit
|
PatrikValkovic/grammpy
|
0b1702314fca978db1d0475ff3bc14977e7675a2
|
hxl_proxy/__init__.py
|
hxl_proxy/__init__.py
|
"""
Top-level Flask application for HXL Proxy
David Megginson
January 2015
License: Public Domain
Documentation: http://hxlstandard.org
"""
import os
import requests_cache
from flask import Flask, g, request
from flask_cache import Cache
import werkzeug.datastructures
# Main application object
app = Flask(__name__)
app.config.from_object('hxl_proxy.default_config')
if os.environ.get('HXL_PROXY_CONFIG'):
app.config.from_envvar('HXL_PROXY_CONFIG')
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# Set up cache
cache = Cache(app,config={
'CACHE_TYPE': 'filesystem',
'CACHE_DIR': app.config.get('CACHE_DIR', '/tmp/'),
'CACHE_THRESHOLD': app.config.get('CACHE_MAX_ITEMS', 1000),
'CACHE_DEFAULT_TIMEOUT': app.config.get('CACHE_DEFAULT_TIMEOUT_SECONDS', 3600)
})
requests_cache.install_cache('/tmp/hxl_proxy_requests')
# Needed to register annotations
import hxl_proxy.controllers
# end
|
"""
Top-level Flask application for HXL Proxy
David Megginson
January 2015
License: Public Domain
Documentation: http://hxlstandard.org
"""
import os
import requests_cache
from flask import Flask, g, request
from flask_cache import Cache
import werkzeug.datastructures
# Main application object
app = Flask(__name__)
app.config.from_object('hxl_proxy.default_config')
if os.environ.get('HXL_PROXY_CONFIG'):
app.config.from_envvar('HXL_PROXY_CONFIG')
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# Set up cache
cache = Cache(app,config={
'CACHE_TYPE': 'filesystem',
'CACHE_DIR': app.config.get('CACHE_DIR', '/tmp/'),
'CACHE_THRESHOLD': app.config.get('CACHE_MAX_ITEMS', 1000),
'CACHE_DEFAULT_TIMEOUT': app.config.get('CACHE_DEFAULT_TIMEOUT_SECONDS', 3600)
})
requests_cache.install_cache('/tmp/hxl_proxy_requests', expire_after=3600)
# Needed to register annotations
import hxl_proxy.controllers
# end
|
Add 1-hour expiry to requests_cache (formerly 5 minutes).
|
Add 1-hour expiry to requests_cache (formerly 5 minutes).
|
Python
|
unlicense
|
HXLStandard/hxl-proxy,HXLStandard/hxl-proxy,HXLStandard/hxl-proxy,HXLStandard/hxl-proxy
|
093b08f6bd03bd938ae7b7a18297708faa353766
|
django_lightweight_queue/middleware/transaction.py
|
django_lightweight_queue/middleware/transaction.py
|
from django.db import transaction, connection
class TransactionMiddleware(object):
def process_job(self, job):
if not connection.in_atomic_block:
transaction.set_autocommit(False)
def process_result(self, job, result, duration):
if not connection.in_atomic_block:
transaction.commit()
def process_exception(self, job, time_taken, *exc_info):
if not connection.in_atomic_block:
transaction.rollback()
# Legacy
if not hasattr(connection, 'in_atomic_block'):
class TransactionMiddleware(object):
def process_job(self, job):
transaction.enter_transaction_management()
transaction.managed(True)
def process_result(self, job, result, duration):
if not transaction.is_managed():
return
if transaction.is_dirty():
transaction.commit()
transaction.leave_transaction_management()
def process_exception(self, job, time_taken, *exc_info):
if transaction.is_dirty():
transaction.rollback()
transaction.leave_transaction_management()
|
from django.db import transaction, connection
class TransactionMiddleware(object):
def process_job(self, job):
transaction.atomic(savepoint=False).__enter__()
def process_result(self, job, result, duration):
transaction.atomic(savepoint=False).__exit__(None, None, None)
def process_exception(self, job, time_taken, *exc_info):
transaction.atomic(savepoint=False).__exit__(*exc_info)
# Legacy
if not hasattr(connection, 'in_atomic_block'):
class TransactionMiddleware(object):
def process_job(self, job):
transaction.enter_transaction_management()
transaction.managed(True)
def process_result(self, job, result, duration):
if not transaction.is_managed():
return
if transaction.is_dirty():
transaction.commit()
transaction.leave_transaction_management()
def process_exception(self, job, time_taken, *exc_info):
if transaction.is_dirty():
transaction.rollback()
transaction.leave_transaction_management()
|
Use Django's Atomic decorator logic
|
Use Django's Atomic decorator logic
We now keep Autocommit on it’s new default of True, as we only need the
ability to rollback the contents of a queue job. By setting
savepoint=False, the whole job will roll back if anything fails, rather
than just up to the containing savepoint.
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue
|
9eec7f7f39dc7e1af6e78e4be8d01b50626a4eb5
|
tests/acceptance/test_scoring.py
|
tests/acceptance/test_scoring.py
|
import shelve
def test_shows_player_rating(browser, test_server, database_url):
with shelve.open(database_url) as db:
db.clear()
db['p1'] = 1000
app = ScoringApp(browser, test_server)
app.visit('/')
app.shows('P1 1000')
def test_user_adding(browser, test_server):
app = ScoringApp(browser, test_server)
app.visit('/players')
app.add_player('test')
app.is_in_page('/players/test')
app.shows('TEST 1000')
class ScoringApp(object):
def __init__(self, browser, get_url):
self._browser = browser
self._get_url = get_url
def visit(self, url):
self._browser.visit(self._get_url(url))
def shows(self, text):
assert self._browser.is_text_present(text)
def add_player(self, name):
self._browser.fill('player-name', name)
self._browser.find_by_id('submit').click()
def is_in_page(self, url):
assert self._browser.url == self._get_url(url)
|
import shelve
from whatsmyrank.players import START_RANK
from whatsmyrank.players import PlayerRepository
def test_shows_player_rating(browser, test_server, database_url):
player_repo = PlayerRepository(database_url, START_RANK)
player_repo.create('p1')
app = ScoringApp(browser, test_server)
app.visit('/')
app.shows('P1 1000')
def test_user_adding(browser, test_server):
app = ScoringApp(browser, test_server)
app.visit('/players')
app.add_player('test')
app.is_in_page('/players/test')
app.shows('TEST 1000')
class ScoringApp(object):
def __init__(self, browser, get_url):
self._browser = browser
self._get_url = get_url
def visit(self, url):
self._browser.visit(self._get_url(url))
def shows(self, text):
assert self._browser.is_text_present(text)
def add_player(self, name):
self._browser.fill('player-name', name)
self._browser.find_by_id('submit').click()
def is_in_page(self, url):
assert self._browser.url == self._get_url(url)
|
Remove database details from acceptance test
|
Remove database details from acceptance test
|
Python
|
bsd-2-clause
|
abele/whatsmyrank,abele/whatsmyrank
|
d436bcc20be8eb81960a53d442f699e42e2f9ea7
|
src/tkjoincsv.py
|
src/tkjoincsv.py
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if not os.path.isfile(output_filename):
exit(0)
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if output_filename:
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
|
Allow saving to a file that does not already exist again.
|
Allow saving to a file that does not already exist again.
|
Python
|
apache-2.0
|
peterSW/corow
|
342d62a42bb4e1993bbe9d755e6daabcaffe4122
|
chdb.py
|
chdb.py
|
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE categories
''')
db.execute('''
DROP TABLE articles
''')
db.execute('''
DROP TABLE snippets
''')
db.execute('''
DROP TABLE articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE IF EXISTS categories
''')
db.execute('''
DROP TABLE IF EXISTS articles
''')
db.execute('''
DROP TABLE IF EXISTS snippets
''')
db.execute('''
DROP TABLE IF EXISTS articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
Revert "Remove IF EXISTS from DROP TABLE when resetting the db."
|
Revert "Remove IF EXISTS from DROP TABLE when resetting the db."
This reverts commit 271668a20a2262fe6211b9f61146ad90d8096486 [formerly a7dce25964cd740b0d0db86b255ede60c913e73d].
Former-commit-id: 08199327c411663a199ebf36379e88a514935399
|
Python
|
mit
|
eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt
|
8b3ca76b980f126912de1bc8ffa067c199693eb3
|
cinder/db/sqlalchemy/migrate_repo/versions/061_add_snapshot_id_timestamp_to_backups.py
|
cinder/db/sqlalchemy/migrate_repo/versions/061_add_snapshot_id_timestamp_to_backups.py
|
# Copyright (c) 2015 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, DateTime, MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
backups = Table('backups', meta, autoload=True)
snapshot_id = Column('snapshot_id', String(length=36))
data_timestamp = Column('data_timestamp', DateTime)
backups.create_column(snapshot_id)
backups.update().values(snapshot_id=None).execute()
backups.create_column(data_timestamp)
backups.update().values(data_timestamp=None).execute()
# Copy existing created_at timestamp to data_timestamp
# in the backups table.
backups_list = list(backups.select().execute())
for backup in backups_list:
backup_id = backup.id
backups.update().\
where(backups.c.id == backup_id).\
values(data_timestamp=backup.created_at).execute()
|
# Copyright (c) 2015 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, DateTime, MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
backups = Table('backups', meta, autoload=True)
snapshot_id = Column('snapshot_id', String(length=36))
data_timestamp = Column('data_timestamp', DateTime)
backups.create_column(snapshot_id)
backups.create_column(data_timestamp)
backups.update().values(data_timestamp=backups.c.created_at).execute()
|
Fix race conditions in migration 061
|
Fix race conditions in migration 061
Migration 061 is supposed to add new `data_timestamp` field and populate
it with value of `created_at` column. This was done by selecting all the
backups and doing updates one-by-one. As it wasn't done in transaction
solution was prone to race condition when a new backup is added while
running the migration. This means that this migration could cause
problems when running in live environment. With blueprint
online-schema-upgrades we want to make Cinder able to perform migrations
live.
A solution is to change this statement to a single DB query which
updates all the rows. This commit also removes unnecessary update to
snapshot_id added there. As this column is nullable it will by default
be NULL, so there's no need to set it manually to that value.
As before and after this commit the migration does logically the same,
this should be safe even if someone is doing inter-release deployments.
An alternative would be to simply add transaction to the update step in
the migration, but that would effectively lock the table for longer
period of time than atomic one-query update.
Closes-Bug: 1530358
Change-Id: Ib8733c096a3dbe2bad00beaf5734936ffcddda33
|
Python
|
apache-2.0
|
phenoxim/cinder,cloudbase/cinder,j-griffith/cinder,phenoxim/cinder,Nexenta/cinder,Datera/cinder,mahak/cinder,mahak/cinder,j-griffith/cinder,ge0rgi/cinder,Nexenta/cinder,openstack/cinder,cloudbase/cinder,eharney/cinder,eharney/cinder,Hybrid-Cloud/cinder,bswartz/cinder,NetApp/cinder,Hybrid-Cloud/cinder,Datera/cinder,openstack/cinder,bswartz/cinder,NetApp/cinder,dims/cinder,dims/cinder
|
960ce03fc6d861c8df8d7aef5042f71c101794ca
|
pavement.py
|
pavement.py
|
# -*- coding: utf-8 -*-
from paver.easy import *
@task
def test(options):
info("Running tests for Python 2")
sh('python2 tests.py')
info("Running tests for Python 3")
sh('python3 tests.py')
@task
def coverage(options):
info("Running coverage for Python 2")
sh('coverage2 run --source ldapom ./tests.py')
sh('coverage2 report')
info("Running coverage for Python 3")
sh('coverage3 run --source ldapom ./tests.py')
sh('coverage3 report')
|
# -*- coding: utf-8 -*-
from paver.easy import *
@task
def test(options):
info("Running tests for Python 2")
sh('python2 -m unittest -v tests')
info("Running tests for Python 3")
sh('python3 -m unittest -v tests')
@task
def coverage(options):
info("Running coverage for Python 2")
sh('coverage2 run --source ldapom ./tests.py')
sh('coverage2 report')
info("Running coverage for Python 3")
sh('coverage3 run --source ldapom ./tests.py')
sh('coverage3 report')
|
Make paver unittest run more verbose
|
Make paver unittest run more verbose
|
Python
|
mit
|
HaDiNet/ldapom
|
f69ea0232881c923e71bd2716fb6faa5d0d99491
|
yithlibraryserver/tests/test_views.py
|
yithlibraryserver/tests/test_views.py
|
# Yith Library Server is a password storage server.
# Copyright (C) 2012 Yaco Sistemas
# Copyright (C) 2012 Alejandro Blanco Escudero <alejandro.b.e@gmail.com>
# Copyright (C) 2012 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
from yithlibraryserver import testing
class ViewTests(testing.TestCase):
def test_home(self):
res = self.testapp.get('/')
self.assertEqual(res.status, '200 OK')
|
# Yith Library Server is a password storage server.
# Copyright (C) 2012 Yaco Sistemas
# Copyright (C) 2012 Alejandro Blanco Escudero <alejandro.b.e@gmail.com>
# Copyright (C) 2012 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
from yithlibraryserver import testing
class ViewTests(testing.TestCase):
def test_home(self):
res = self.testapp.get('/')
self.assertEqual(res.status, '200 OK')
def test_tos(self):
res = self.testapp.get('/tos')
self.assertEqual(res.status, '200 OK')
|
Test the new tos view
|
Test the new tos view
|
Python
|
agpl-3.0
|
lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server
|
a3f1bd9b27bb605fe363a69a34a92862a1899da1
|
notifications/alliance_selections.py
|
notifications/alliance_selections.py
|
from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class AllianceSelectionNotification(BaseNotification):
def __init__(self, event):
self.event = event
self._event_feed = event.key_name
self._district_feed = event.event_district_abbrev
@property
def _type(self):
return NotificationType.ALLIANCE_SELECTION
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event'] = ModelToDict.eventConverter(self.event)
return data
|
from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class AllianceSelectionNotification(BaseNotification):
def __init__(self, event):
self.event = event
self._event_feed = event.key_name
self._district_feed = event.event_district_abbrev
@property
def _type(self):
return NotificationType.ALLIANCE_SELECTION
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['event_key'] = self.event.key_name
data['message_data']['event'] = ModelToDict.eventConverter(self.event)
return data
|
Add event name and key to alliance selection notifications
|
Add event name and key to alliance selection notifications
This info is already included in 'event', but adding for consistency
|
Python
|
mit
|
jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance
|
30a2a16aff030235941eac3786cc49b42e0ed868
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://localhost:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
except ValueError as e:
if str(e) != "Table 'pcawg_samples' already exists.":
print (e)
exit(1)
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://localhost:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
except ValueError as e:
if str(e) != "Table 'pcawg_samples' already exists.":
print str(e)
exit(1)
else:
print str(e)
|
Print an error message when table already exists without failing the script.
|
Print an error message when table already exists without failing the script.
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
6113b60187da1da42b26bee81556aad3efef57c4
|
nipype/interfaces/tests/test_afni.py
|
nipype/interfaces/tests/test_afni.py
|
from nipype.interfaces import afni
from nose.tools import assert_equal
def test_To3d():
cmd = afni.To3d()
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d'
cmd = afni.To3d(anat=True)
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -anat'
cmd = afni.To3d()
cmd.inputs.datum = 'float'
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -datum float'
|
from nipype.interfaces import afni
from nose.tools import assert_equal
def test_To3d():
cmd = afni.To3d()
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d'
cmd = afni.To3d(anat=True)
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -anat'
cmd = afni.To3d()
cmd.inputs.datum = 'float'
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -datum float'
cmd = afni.To3d()
cmd.inputs.session = '/home/bobama'
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -session /home/bobama'
cmd = afni.To3d(prefix='foo.nii.gz')
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -prefix foo.nii.gz'
cmd = afni.To3d(infiles='/data/*.dcm')
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d /data/*.dcm'
|
Add tests to afni To3d.
|
Add tests to afni To3d.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@165 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
rameshvs/nipype,grlee77/nipype,dmordom/nipype,glatard/nipype,fprados/nipype,rameshvs/nipype,wanderine/nipype,mick-d/nipype_source,dgellis90/nipype,sgiavasis/nipype,arokem/nipype,Leoniela/nipype,pearsonlab/nipype,pearsonlab/nipype,FCP-INDI/nipype,mick-d/nipype,satra/NiPypeold,gerddie/nipype,FredLoney/nipype,dgellis90/nipype,JohnGriffiths/nipype,dmordom/nipype,pearsonlab/nipype,mick-d/nipype_source,iglpdc/nipype,glatard/nipype,carolFrohlich/nipype,fprados/nipype,grlee77/nipype,gerddie/nipype,gerddie/nipype,JohnGriffiths/nipype,iglpdc/nipype,blakedewey/nipype,christianbrodbeck/nipype,mick-d/nipype_source,blakedewey/nipype,rameshvs/nipype,dmordom/nipype,dgellis90/nipype,rameshvs/nipype,fprados/nipype,grlee77/nipype,blakedewey/nipype,glatard/nipype,arokem/nipype,wanderine/nipype,sgiavasis/nipype,FCP-INDI/nipype,gerddie/nipype,FredLoney/nipype,carolFrohlich/nipype,JohnGriffiths/nipype,Leoniela/nipype,sgiavasis/nipype,mick-d/nipype,carlohamalainen/nipype,sgiavasis/nipype,pearsonlab/nipype,carolFrohlich/nipype,FredLoney/nipype,FCP-INDI/nipype,blakedewey/nipype,mick-d/nipype,grlee77/nipype,arokem/nipype,christianbrodbeck/nipype,JohnGriffiths/nipype,wanderine/nipype,dgellis90/nipype,FCP-INDI/nipype,wanderine/nipype,mick-d/nipype,carlohamalainen/nipype,iglpdc/nipype,glatard/nipype,iglpdc/nipype,Leoniela/nipype,satra/NiPypeold,carlohamalainen/nipype,arokem/nipype,carolFrohlich/nipype
|
e2bac19e08197dc33756d7b7cf1f88e4ba808ae1
|
PyFVCOM/__init__.py
|
PyFVCOM/__init__.py
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.4.1'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.4.1'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
|
Add missing module to the project.
|
Add missing module to the project.
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
bf1bafbbebeab86a213e4c4bed0be6f1b18404c6
|
python/grizzly/grizzly/lazy_op.py
|
python/grizzly/grizzly/lazy_op.py
|
"""Summary
"""
from weld.weldobject import *
def to_weld_type(weld_type, dim):
"""Summary
Args:
weld_type (TYPE): Description
dim (TYPE): Description
Returns:
TYPE: Description
"""
for i in xrange(dim):
weld_type = WeldVec(weld_type)
return weld_type
class LazyOpResult:
"""Wrapper class around as yet un-evaluated Weld computation results
Attributes:
dim (int): Dimensionality of the output
expr (WeldObject / Numpy.ndarray): The expression that needs to be
evaluated
weld_type (WeldType): Type of the output object
"""
def __init__(self, expr, weld_type, dim):
"""Summary
Args:
expr (TYPE): Description
weld_type (TYPE): Description
dim (TYPE): Description
"""
self.expr = expr
self.weld_type = weld_type
self.dim = dim
def evaluate(self, verbose=True, decode=True):
"""Summary
Args:
verbose (bool, optional): Description
decode (bool, optional): Description
Returns:
TYPE: Description
"""
if isinstance(self.expr, WeldObject):
return self.expr.evaluate(
to_weld_type(
self.weld_type,
self.dim),
verbose,
decode)
return self.expr
|
"""Summary
"""
from weld.weldobject import *
def to_weld_type(weld_type, dim):
"""Summary
Args:
weld_type (TYPE): Description
dim (TYPE): Description
Returns:
TYPE: Description
"""
for i in xrange(dim):
weld_type = WeldVec(weld_type)
return weld_type
class LazyOpResult:
"""Wrapper class around as yet un-evaluated Weld computation results
Attributes:
dim (int): Dimensionality of the output
expr (WeldObject / Numpy.ndarray): The expression that needs to be
evaluated
weld_type (WeldType): Type of the output object
"""
def __init__(self, expr, weld_type, dim):
"""Summary
Args:
expr (TYPE): Description
weld_type (TYPE): Description
dim (TYPE): Description
"""
self.expr = expr
self.weld_type = weld_type
self.dim = dim
def evaluate(self, verbose=True, decode=True, passes=None):
"""Summary
Args:
verbose (bool, optional): Description
decode (bool, optional): Description
Returns:
TYPE: Description
"""
if isinstance(self.expr, WeldObject):
return self.expr.evaluate(
to_weld_type(
self.weld_type,
self.dim),
verbose,
decode,
passes=passes)
return self.expr
|
Add passes to Grizzly's lazyOp
|
Add passes to Grizzly's lazyOp
|
Python
|
bsd-3-clause
|
rahulpalamuttam/weld,rahulpalamuttam/weld,weld-project/weld,sppalkia/weld,weld-project/weld,weld-project/weld,rahulpalamuttam/weld,weld-project/weld,sppalkia/weld,sppalkia/weld,sppalkia/weld,sppalkia/weld,weld-project/weld,rahulpalamuttam/weld,rahulpalamuttam/weld
|
d12907dd681c1d16c623b9dcceed9ff5e85c2ac6
|
views.py
|
views.py
|
from django.shortcuts import render
def intro(request, template='intro.html'):
response = render(request, template)
response['X-Frame-Options'] = 'SAMEORIGIN'
return response
|
from django.shortcuts import render
from django.views.decorators.clickjacking import xframe_options_sameorigin
@xframe_options_sameorigin
def intro(request, template='intro.html'):
response = render(request, template)
return response
|
Use X-Frame-Options decorator to override middleware.
|
Use X-Frame-Options decorator to override middleware.
|
Python
|
bsd-3-clause
|
m8ttyB/pontoon-intro,mathjazz/pontoon-intro,mathjazz/pontoon-intro,Osmose/pontoon-intro,jotes/pontoon-intro,Osmose/pontoon-intro,jotes/pontoon-intro,m8ttyB/pontoon-intro,jotes/pontoon-intro,mathjazz/pontoon-intro,m8ttyB/pontoon-intro,Osmose/pontoon-intro
|
32a831d575b5354468a8f9c2a815f9f1aa03f2fb
|
api/caching/listeners.py
|
api/caching/listeners.py
|
from api.caching.tasks import ban_url
from framework.tasks.handlers import enqueue_task
from modularodm import signals
@signals.save.connect
def log_object_saved(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_task(ban_url.s(abs_url))
|
from functools import partial
from api.caching.tasks import ban_url
from framework.tasks.postcommit_handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def log_object_saved(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
|
Switch cache ban request to new postcommit synchronous method
|
Switch cache ban request to new postcommit synchronous method
|
Python
|
apache-2.0
|
kwierman/osf.io,amyshi188/osf.io,baylee-d/osf.io,felliott/osf.io,chrisseto/osf.io,amyshi188/osf.io,felliott/osf.io,samchrisinger/osf.io,cslzchen/osf.io,icereval/osf.io,cwisecarver/osf.io,sloria/osf.io,kwierman/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,mluo613/osf.io,monikagrabowska/osf.io,felliott/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,mluo613/osf.io,kwierman/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,mluke93/osf.io,aaxelb/osf.io,TomBaxter/osf.io,acshi/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,wearpants/osf.io,RomanZWang/osf.io,mluo613/osf.io,samchrisinger/osf.io,abought/osf.io,mfraezz/osf.io,chennan47/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,pattisdr/osf.io,caneruguz/osf.io,Nesiehr/osf.io,abought/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,doublebits/osf.io,binoculars/osf.io,zamattiac/osf.io,adlius/osf.io,amyshi188/osf.io,mattclark/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,abought/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,RomanZWang/osf.io,alexschiller/osf.io,wearpants/osf.io,acshi/osf.io,HalcyonChimera/osf.io,acshi/osf.io,Nesiehr/osf.io,hmoco/osf.io,kch8qx/osf.io,hmoco/osf.io,billyhunt/osf.io,mluke93/osf.io,zachjanicki/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,jnayak1/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,caseyrollins/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,doublebits/osf.io,zachjanicki/osf.io,amyshi188/osf.io,acshi/osf.io,doublebits/osf.io,billyhunt/osf.io,mfraezz/osf.io,mfraezz/osf.io,chennan47/osf.io,crcresearch/osf.io,adlius/osf.io,caneruguz/osf.io,chennan47/osf.io,chrisseto/osf.io,leb2dg/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,mluo613/osf.io,pattisdr/osf.io,emetsger/osf.io,doublebits/osf.io,samchrisinger/osf.io,erinspace/osf.io,jnayak1/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,saradbowman/osf.io,cwisecarver/osf.io,mattclark/osf.io,caneruguz/osf.io,baylee-d/osf.io,caneruguz/osf.io,crcresearch/osf.io,emetsger/osf.io,zamattiac/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,kch8qx/osf.io,rdhyee/osf.io,icereval/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,samchrisinger/osf.io,icereval/osf.io,TomBaxter/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,sloria/osf.io,wearpants/osf.io,aaxelb/osf.io,billyhunt/osf.io,caseyrollins/osf.io,hmoco/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,mluo613/osf.io,wearpants/osf.io,emetsger/osf.io,mluke93/osf.io,sloria/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,binoculars/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,RomanZWang/osf.io,erinspace/osf.io,alexschiller/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,adlius/osf.io,hmoco/osf.io,mluke93/osf.io,monikagrabowska/osf.io,binoculars/osf.io,RomanZWang/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,adlius/osf.io,acshi/osf.io,monikagrabowska/osf.io,doublebits/osf.io,monikagrabowska/osf.io,abought/osf.io,zamattiac/osf.io,RomanZWang/osf.io,alexschiller/osf.io,felliott/osf.io,emetsger/osf.io,pattisdr/osf.io,zamattiac/osf.io,erinspace/osf.io,jnayak1/osf.io,rdhyee/osf.io,leb2dg/osf.io,mattclark/osf.io,rdhyee/osf.io
|
90963666f22bea81d433724d232deaa0f3e2fec1
|
st2common/st2common/exceptions/db.py
|
st2common/st2common/exceptions/db.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.exceptions import StackStormBaseException
class StackStormDBObjectNotFoundError(StackStormBaseException):
pass
class StackStormDBObjectMalformedError(StackStormBaseException):
pass
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.exceptions import StackStormBaseException
class StackStormDBObjectNotFoundError(StackStormBaseException):
pass
class StackStormDBObjectMalformedError(StackStormBaseException):
pass
class StackStormDBObjectConflictError(StackStormBaseException):
"""
Exception that captures a DB object conflict error.
"""
def __init__(self, message, conflict_id):
super(StackStormDBObjectConflictError, self).__init__(message)
self.conflict_id = conflict_id
|
Add a special exception for capturing object conflicts.
|
Add a special exception for capturing object conflicts.
|
Python
|
apache-2.0
|
jtopjian/st2,StackStorm/st2,StackStorm/st2,emedvedev/st2,dennybaa/st2,StackStorm/st2,alfasin/st2,pixelrebel/st2,nzlosh/st2,Itxaka/st2,StackStorm/st2,dennybaa/st2,punalpatel/st2,Plexxi/st2,lakshmi-kannan/st2,lakshmi-kannan/st2,grengojbo/st2,Itxaka/st2,jtopjian/st2,alfasin/st2,punalpatel/st2,peak6/st2,tonybaloney/st2,pinterb/st2,lakshmi-kannan/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,emedvedev/st2,tonybaloney/st2,emedvedev/st2,alfasin/st2,Plexxi/st2,peak6/st2,Plexxi/st2,armab/st2,pinterb/st2,dennybaa/st2,nzlosh/st2,nzlosh/st2,Itxaka/st2,jtopjian/st2,pixelrebel/st2,armab/st2,peak6/st2,pixelrebel/st2,pinterb/st2,nzlosh/st2,grengojbo/st2,grengojbo/st2,armab/st2
|
39b57462b69d78825fd217822d9be2f1eea5a06d
|
src/ansible/models.py
|
src/ansible/models.py
|
from django.db import models
from django.conf import settings
class Playbook(models.Model):
name = models.CharField(max_length=200)
inventory = models.CharField(max_length=200, default="hosts")
user = models.CharField(max_length=200, default="ubuntu")
directory = models.CharField(max_length=200, editable=False, default="dir")
def __str__(self):
return "%s" % self.name
def format_directory(self):
directory = self.name.lower()
directory = directory.replace(" ","-")
return directory
def save(self, *args, **kwargs):
self.directory = self.format_directory()
super(Playbook, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = "playbooks"
class Registry(models.Model):
playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE)
name = models.CharField(max_length=200)
item = models.FilePathField(path=settings.PLAYBOOK_DIR, recursive=True)
def __str__(self):
return "%s" % self.name
class Meta:
verbose_name_plural = "registries"
|
from django.db import models
from django.conf import settings
class Playbook(models.Model):
name = models.CharField(max_length=200)
inventory = models.CharField(max_length=200, default="hosts")
user = models.CharField(max_length=200, default="ubuntu")
directory = models.CharField(max_length=200, editable=False, default="dir")
def __str__(self):
return "%s" % self.name
def format_directory(self):
directory = self.name.lower()
directory = directory.replace(" ","-")
return directory
def save(self, *args, **kwargs):
self.directory = self.format_directory()
super(Playbook, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = "playbooks"
class Registry(models.Model):
playbook = models.ForeignKey("Playbook", default=1, on_delete=models.CASCADE)
name = models.CharField(max_length=200)
item = models.FilePathField(path=settings.PLAYBOOK_DIR, recursive=True)
def __str__(self):
return "%s" % self.name
class Meta:
verbose_name_plural = "registries"
|
Set default value for Registry.playbook
|
Set default value for Registry.playbook
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
c7512104dce2e9ca83e8400b399b4f77113f9368
|
packs/travisci/actions/lib/action.py
|
packs/travisci/actions/lib/action.py
|
import requests
from st2actions.runners.pythonrunner import Action
API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
CONTENT_TYPE = 'application/json'
class TravisCI(Action):
def _get_auth_headers(self):
headers = {}
headers['Authorization'] = self.config['Authorization']
headers['Content-Type'] = self.config['Content-Type']
return headers
def _perform_request(self, path, method, data=None, requires_auth=False):
url = API_URL + path
if method == "GET":
if requires_auth:
headers = self._get_auth_headers()
else:
headers = {}
response = requests.get(url, headers=headers)
elif method == 'POST':
headers = self._get_auth_headers()
response = requests.post(url, headers=headers)
elif method == 'PUT':
headers = self._get_auth_headers()
response = requests.put(url, data=data, headers=headers)
return response
|
import httplib
import requests
from st2actions.runners.pythonrunner import Action
API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
CONTENT_TYPE = 'application/json'
class TravisCI(Action):
def _get_auth_headers(self):
headers = {}
headers['Authorization'] = self.config['Authorization']
headers['Content-Type'] = self.config['Content-Type']
return headers
def _perform_request(self, path, method, data=None, requires_auth=False):
url = API_URL + path
if method == "GET":
if requires_auth:
headers = self._get_auth_headers()
else:
headers = {}
response = requests.get(url, headers=headers)
elif method == 'POST':
headers = self._get_auth_headers()
response = requests.post(url, headers=headers)
elif method == 'PUT':
headers = self._get_auth_headers()
response = requests.put(url, data=data, headers=headers)
if response.status_code in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]:
msg = ('Invalid or missing Travis CI auth token. Make sure you have'
'specified valid token in the config file')
raise Exception(msg)
return response
|
Throw on invalid / missing credentials.
|
Throw on invalid / missing credentials.
|
Python
|
apache-2.0
|
pidah/st2contrib,lmEshoo/st2contrib,psychopenguin/st2contrib,psychopenguin/st2contrib,armab/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,armab/st2contrib,armab/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,pearsontechnology/st2contrib,digideskio/st2contrib,digideskio/st2contrib
|
fa3ec9a764ca0d646588e908395367ce553981e1
|
tca/chat/views.py
|
tca/chat/views.py
|
from django.shortcuts import render
from rest_framework import viewsets
from chat.models import Member
from chat.models import ChatRoom
from chat.serializers import MemberSerializer
from chat.serializers import ChatRoomSerializer
class MemberViewSet(viewsets.ModelViewSet):
model = Member
serializer_class = MemberSerializer
class ChatRoomViewSet(viewsets.ModelViewSet):
model = ChatRoom
serializer_class = ChatRoomSerializer
|
from django.shortcuts import render
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from chat.models import Member
from chat.models import ChatRoom
from chat.serializers import MemberSerializer
from chat.serializers import ChatRoomSerializer
class MemberViewSet(viewsets.ModelViewSet):
model = Member
serializer_class = MemberSerializer
class ChatRoomViewSet(viewsets.ModelViewSet):
model = ChatRoom
serializer_class = ChatRoomSerializer
@action()
def add_member(self, request, pk=None):
chat_room = self.get_object()
if 'lrz_id' not in request.DATA:
# Invalid request
return Response(status=status.HTTP_400_BAD_REQUEST)
member = get_object_or_404(Member, lrz_id=request.DATA['lrz_id'])
chat_room.members.add(member)
return Response({
'status': 'success',
})
|
Add an action for adding members to a chat room
|
Add an action for adding members to a chat room
Even though django-rest-framework supports a Ruby-on-Rails style
of updating existing resources by issuing a PATCH or PUT request,
such updates are unsafe and can cause race-conditions to lose
some state. The implementation of this action isn't fully RESTful,
but neither is the PATCH-style used here, so it is still a good
choice for solving a possible race condition situation.
|
Python
|
bsd-3-clause
|
mlalic/TumCampusAppBackend,mlalic/TumCampusAppBackend
|
01e911926d37fa981fd7703f751ff91f052313e2
|
tkLibs/__init__.py
|
tkLibs/__init__.py
|
__all__ = ['autoScrollbar', 'button', 'combobox', 'listbox', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .listbox import listbox
from .window import window
|
__all__ = ['autoScrollbar', 'button', 'combobox', 'entry', 'frame', 'label', 'listbox', 'toplevel', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .entry import entry
from .frame import frame
from .label import label
from .listbox import listbox
from .toplevel import toplevel
from .window import window
|
Add import of new widgets.
|
Add import of new widgets.
|
Python
|
mit
|
Kyle-Fagan/tkLibs
|
71cb7a3d83cbb352a358ba8ac260584a6666b5ad
|
seleniumbase/config/proxy_list.py
|
seleniumbase/config/proxy_list.py
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
* https://github.com/mertguvencli/http-proxy-list
"""
PROXY_LIST = {
"example1": "170.39.193.236:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
* https://github.com/mertguvencli/http-proxy-list
"""
PROXY_LIST = {
"example1": "151.181.91.10:80", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
Update the example proxy list
|
Update the example proxy list
|
Python
|
mit
|
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase
|
1d88ea54d1f4ce63893b906a5b79faa4dd25243f
|
grow/commands/convert.py
|
grow/commands/convert.py
|
from grow.pods import pods
from grow.pods import storage
from grow.conversion import *
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
from grow.pods import pods
from grow.pods import storage
from grow.conversion import content_locale_split
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
Adjust import to fix build with PyInstaller.
|
Adjust import to fix build with PyInstaller.
|
Python
|
mit
|
grow/pygrow,grow/pygrow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/grow
|
688bec4dc00dd1040901ca446c6b6cc7fa6fbbcb
|
downstream-farmer/utils.py
|
downstream-farmer/utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
def urlencode(string):
return urlencode(string)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from urllib import quote_plus
except ImportError:
from urllib.parse import quote_plus
def urlify(string):
""" You might be wondering: why is this here at all, since it's basically
doing exactly what the quote_plus function in urllib does. Well, to keep
the 2 & 3 stuff all in one place, meaning rather than try to import the
urllib stuff twice in each file where url-safe strings are needed, we keep
it all in one file: here.
Supporting multiple Pythons is hard.
:param string: String to URLify
:return: URLified string
"""
return quote_plus(string)
|
Add documentation and py3k compat
|
Add documentation and py3k compat
|
Python
|
mit
|
Storj/downstream-farmer
|
30e567adb809810930616493fd92ef1c40c9207b
|
dthm4kaiako/users/forms.py
|
dthm4kaiako/users/forms.py
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
from captcha.fields import ReCaptchaField
from captcha.widgets import ReCaptchaV3
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
captcha = ReCaptchaField(widget=ReCaptchaV3, label='')
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
Add recaptcha to signup page
|
Add recaptcha to signup page
Signup page is currently not used, but
doing it now in case it is forgotten later.
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
c0eb0f902b0fcbea29c8a3bf70f80ca9384cce9f
|
scripts/remove_after_use/send_mendeley_reauth_email.py
|
scripts/remove_after_use/send_mendeley_reauth_email.py
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def main(dry=True):
user = OSFUser.load('qrgl2')
qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner')
pbar = progressbar.ProgressBar(maxval=qs.count()).start()
for i, each in enumerate(qs):
user = each.owner
logger.info('Sending email to OSFUser {}'.format(user._id))
if not dry:
mails.send_mail(
mail=mails.MENDELEY_REAUTH,
to_addr=user.username,
can_change_preferences=False,
user=user
)
pbar.update(i + 1)
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def main(dry=True):
qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner').order_by('pk')
count = qs.count()
pbar = progressbar.ProgressBar(maxval=count).start()
logger.info('Sending email to {} users'.format(count))
for i, each in enumerate(qs):
user = each.owner
logger.info('Sending email to OSFUser {}'.format(user._id))
if not dry:
mails.send_mail(
mail=mails.MENDELEY_REAUTH,
to_addr=user.username,
can_change_preferences=False,
user=user
)
pbar.update(i + 1)
logger.info('Sent email to {} users'.format(count))
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
Remove junk and add more logging
|
Remove junk and add more logging
|
Python
|
apache-2.0
|
cslzchen/osf.io,icereval/osf.io,brianjgeiger/osf.io,mattclark/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,adlius/osf.io,cslzchen/osf.io,mattclark/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,icereval/osf.io,erinspace/osf.io,felliott/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,saradbowman/osf.io,felliott/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,cslzchen/osf.io,baylee-d/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,sloria/osf.io,felliott/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,sloria/osf.io,icereval/osf.io,sloria/osf.io,cslzchen/osf.io,caseyrollins/osf.io,erinspace/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,erinspace/osf.io,adlius/osf.io,adlius/osf.io,HalcyonChimera/osf.io
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.