commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
27a0226ec444523034d739a00a999b089ce116ba
|
enthought/chaco/tools/api.py
|
enthought/chaco/tools/api.py
|
from better_zoom import BetterZoom
from better_selecting_zoom import BetterSelectingZoom
from broadcaster import BroadcasterTool
from dataprinter import DataPrinter
from data_label_tool import DataLabelTool
from drag_zoom import DragZoom
from enthought.enable.tools.drag_tool import DragTool
from draw_points_tool import DrawPointsTool
from highlight_tool import HighlightTool
from image_inspector_tool import ImageInspectorTool, ImageInspectorOverlay
from lasso_selection import LassoSelection
from legend_tool import LegendTool
from legend_highlighter import LegendHighlighter
from line_inspector import LineInspector
from line_segment_tool import LineSegmentTool
from move_tool import MoveTool
from pan_tool import PanTool
from point_marker import PointMarker
from range_selection import RangeSelection
from range_selection_2d import RangeSelection2D
from range_selection_overlay import RangeSelectionOverlay
from regression_lasso import RegressionLasso, RegressionOverlay
from save_tool import SaveTool
from scatter_inspector import ScatterInspector
from select_tool import SelectTool
from simple_inspector import SimpleInspectorTool
from tracking_pan_tool import TrackingPanTool
from tracking_zoom import TrackingZoom
from traits_tool import TraitsTool
from zoom_tool import ZoomTool
# EOF
|
from better_zoom import BetterZoom
from better_selecting_zoom import BetterSelectingZoom
from broadcaster import BroadcasterTool
from dataprinter import DataPrinter
from data_label_tool import DataLabelTool
from enthought.enable.tools.drag_tool import DragTool
from draw_points_tool import DrawPointsTool
from highlight_tool import HighlightTool
from image_inspector_tool import ImageInspectorTool, ImageInspectorOverlay
from lasso_selection import LassoSelection
from legend_tool import LegendTool
from legend_highlighter import LegendHighlighter
from line_inspector import LineInspector
from line_segment_tool import LineSegmentTool
from move_tool import MoveTool
from pan_tool import PanTool
from point_marker import PointMarker
from range_selection import RangeSelection
from range_selection_2d import RangeSelection2D
from range_selection_overlay import RangeSelectionOverlay
from regression_lasso import RegressionLasso, RegressionOverlay
from save_tool import SaveTool
from scatter_inspector import ScatterInspector
from select_tool import SelectTool
from simple_inspector import SimpleInspectorTool
from tracking_pan_tool import TrackingPanTool
from tracking_zoom import TrackingZoom
from traits_tool import TraitsTool
from zoom_tool import ZoomTool
# EOF
|
Remove deprecated DragZoom from Chaco tools API to eliminate irrelevant BaseZoomTool deprecation warning. DragZoom is still used in 4 Chaco examples
|
[Chaco] Remove deprecated DragZoom from Chaco tools API to eliminate irrelevant BaseZoomTool deprecation warning. DragZoom is still used in 4 Chaco examples
|
Python
|
bsd-3-clause
|
ContinuumIO/chaco,tommy-u/chaco,tommy-u/chaco,ContinuumIO/chaco,tommy-u/chaco,ContinuumIO/chaco,burnpanck/chaco,burnpanck/chaco,ContinuumIO/chaco,burnpanck/chaco
|
0df76d66fb6a2425c6ccc8a3a75d41599b2545c6
|
auth0/v2/authentication/delegated.py
|
auth0/v2/authentication/delegated.py
|
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
from .base import AuthenticationBase
class Delegated(AuthenticationBase):
"""Delegated authentication endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def get_token(self, client_id, target, api_type, grant_type,
id_token=None, refresh_token=None):
"""Obtain a delegation token.
"""
if id_token and refresh_token:
raise ValueError('Only one of id_token or refresh_token '
'can be None')
data = {
'client_id': client_id,
'grant_type': grant_type,
'target': target,
'scope': 'openid',
'api_type': api_type,
}
if id_token:
data.update({'id_token': id_token})
elif refresh_token:
data.update({'refresh_token': refresh_token})
else:
raise ValueError('Either id_token or refresh_token must '
'have a value')
return self.post(
'https://%s/delegation' % self.domain,
headers={'Content-Type': 'application/json'},
data=data
)
|
Add docstrings in Delegated class
|
Add docstrings in Delegated class
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
1766a5acc5c948288b4cd81c62d0c1507c55f727
|
cinder/brick/initiator/host_driver.py
|
cinder/brick/initiator/host_driver.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
dir = "/dev/disk/by-path/"
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
files = []
dir = "/dev/disk/by-path/"
if os.path.isdir(dir):
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
|
Check if dir exists before calling listdir
|
Check if dir exists before calling listdir
Changes along the way to how we clean up and detach after
copying an image to a volume exposed a problem in the cleanup
of the brick/initiator routines.
The clean up in the initiator detach was doing a blind listdir
of /dev/disk/by-path, however due to detach and cleanup being
called upon completion of the image download to the volume if
there are no other devices mapped in this directory the directory
is removed.
The result was that even though the create and copy of the image
was succesful, the HostDriver code called os.lisdir on a directory
that doesn't exist any longer and raises an unhandled exception that
cause the taskflow mechanism to mark the volume as failed.
Change-Id: I488755c1a49a77f42efbb58a7a4eb6f4f084df07
Closes-bug: #1243980
|
Python
|
apache-2.0
|
petrutlucian94/cinder,Paul-Ezell/cinder-1,CloudServer/cinder,Datera/cinder,nikesh-mahalka/cinder,alex8866/cinder,j-griffith/cinder,cloudbase/cinder,openstack/cinder,julianwang/cinder,redhat-openstack/cinder,tlakshman26/cinder-bug-fix-volume-conversion-full,Akrog/cinder,Hybrid-Cloud/cinder,CloudServer/cinder,tlakshman26/cinder-https-changes,takeshineshiro/cinder,Datera/cinder,Akrog/cinder,Accelerite/cinder,apporc/cinder,eharney/cinder,phenoxim/cinder,julianwang/cinder,tlakshman26/cinder-bug-fix-volume-conversion-full,nexusriot/cinder,apporc/cinder,Paul-Ezell/cinder-1,phenoxim/cinder,winndows/cinder,mahak/cinder,NetApp/cinder,nikesh-mahalka/cinder,potsmaster/cinder,cloudbase/cinder,leilihh/cinder,spring-week-topos/cinder-week,scality/cinder,NeCTAR-RC/cinder,Thingee/cinder,dims/cinder,spring-week-topos/cinder-week,duhzecca/cinder,openstack/cinder,ge0rgi/cinder,Thingee/cinder,tlakshman26/cinder-new-branch,winndows/cinder,potsmaster/cinder,JioCloud/cinder,bswartz/cinder,blueboxgroup/cinder,manojhirway/ExistingImagesOnNFS,dims/cinder,takeshineshiro/cinder,Nexenta/cinder,j-griffith/cinder,scality/cinder,alex8866/cinder,Hybrid-Cloud/cinder,NetApp/cinder,saeki-masaki/cinder,scottdangelo/RemoveVolumeMangerLocks,redhat-openstack/cinder,Accelerite/cinder,eharney/cinder,scottdangelo/RemoveVolumeMangerLocks,rakeshmi/cinder,sasukeh/cinder,sasukeh/cinder,hguemar/cinder,github-borat/cinder,JioCloud/cinder,leilihh/cinder,tlakshman26/cinder-new-branch,saeki-masaki/cinder,nexusriot/cinder,petrutlucian94/cinder,duhzecca/cinder,abusse/cinder,blueboxgroup/cinder,rakeshmi/cinder,tlakshman26/cinder-https-changes,github-borat/cinder,Nexenta/cinder,Thingee/cinder,bswartz/cinder,abusse/cinder,manojhirway/ExistingImagesOnNFS,mahak/cinder,hguemar/cinder,NeCTAR-RC/cinder
|
305969cedb966d1e5cd340d531727bb984ac35a8
|
whitenoise/generators/sqlalchemy.py
|
whitenoise/generators/sqlalchemy.py
|
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
|
import random
from whitenoise.generators import BaseGenerator
class SelectGenerator(BaseGenerator):
'''
Creates a value by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().choice(_query)
else:
return _query[0]
class LinkGenerator(BaseGenerator):
'''
Creates a list for secondary relationships using link tables by selecting from another SQLAlchemy table
Depends on SQLAlchemy, and receiving a session object from the Fixture runner
the SQLAlchemy fixture runner handles this for us
Receives the name of another class to lookup. If the
query returns more than one option, either random or the 1st is selected
(default is random)
'''
def __init__(self, model, max_map, random=True, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session = None
self.model = model
self.random = random
self.max_map = max_map
def generate(self):
if(self.session is None):
raise ValueError('You must set the session property before using this generator')
_query = self.session.query(self.model).all()
if self.random:
return random.SystemRandom().sample(_query,random.randint(1, max_map))
else:
return [_query[0]]
|
Add a generator for association tables
|
Add a generator for association tables
|
Python
|
mit
|
James1345/white-noise
|
c44db12bbf960c8883c4fc31f47e31f4409fe685
|
feincms/views/decorators.py
|
feincms/views/decorators.py
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.module.page.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
Fix page import path in view decorator module
|
Fix page import path in view decorator module
|
Python
|
bsd-3-clause
|
mjl/feincms,nickburlett/feincms,michaelkuty/feincms,michaelkuty/feincms,nickburlett/feincms,hgrimelid/feincms,matthiask/feincms2-content,pjdelport/feincms,matthiask/django-content-editor,joshuajonah/feincms,joshuajonah/feincms,matthiask/django-content-editor,matthiask/django-content-editor,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor,matthiask/feincms2-content,michaelkuty/feincms,nickburlett/feincms,feincms/feincms,hgrimelid/feincms,joshuajonah/feincms,michaelkuty/feincms,feincms/feincms,pjdelport/feincms,feincms/feincms,matthiask/feincms2-content,hgrimelid/feincms,mjl/feincms,nickburlett/feincms
|
ee5ab61090cef682f37631a8c3f5764bdda63772
|
xpserver_web/tests/unit/test_web.py
|
xpserver_web/tests/unit/test_web.py
|
from django.core.urlresolvers import resolve
from xpserver_web.views import main
def test_root_resolves_to_hello_world():
found = resolve('/')
assert found.func == main
|
from django.core.urlresolvers import resolve
from xpserver_web.views import main, register
def test_root_resolves_to_main():
found = resolve('/')
assert found.func == main
def test_register_resolves_to_main():
found = resolve('/register/')
assert found.func == register
|
Add unit test for register
|
Add unit test for register
|
Python
|
mit
|
xp2017-hackergarden/server,xp2017-hackergarden/server,xp2017-hackergarden/server,xp2017-hackergarden/server
|
4c2e76d62460828aa0f30e903f57e7f515cc43f7
|
alg_kruskal_minimum_spanning_tree.py
|
alg_kruskal_minimum_spanning_tree.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def make_set():
pass
def link():
pass
def find():
pass
def union():
pass
def kruskal():
"""Kruskal's algorithm for minimum spanning tree
in weighted graph.
Time complexity for graph G(V, E):
O(|E|+|V|+|E|log(|V|))
= O(|E|log(|V|^2))
= O(|E|log(|V|)).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def make_set():
pass
def link():
pass
def find():
pass
def union():
pass
def kruskal():
"""Kruskal's algorithm for minimum spanning tree
in weighted undirected graph.
Time complexity for graph G(V, E):
O(|E|+|V|+|E|log(|V|))
= O(|E|log(|V|^2))
= O(|E|log(|V|)).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
|
Revise doc string by adding "undirected"
|
Revise doc string by adding "undirected"
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
e120858d5cb123e9f3422ddb15ce79bde8d05d64
|
statsd/__init__.py
|
statsd/__init__.py
|
import socket
try:
from django.conf import settings
except ImportError:
settings = None
from client import StatsClient
__all__ = ['StatsClient', 'statsd']
VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION))
if settings:
try:
host = getattr(settings, 'STATSD_HOST', 'localhost')
port = getattr(settings, 'STATSD_PORT', 8125)
prefix = getattr(settings, 'STATSD_PREFIX', None)
statsd = StatsClient(host, port, prefix)
except (socket.error, socket.gaierror, ImportError):
statsd = None
|
import socket
import os
try:
from django.conf import settings
except ImportError:
settings = None
from client import StatsClient
__all__ = ['StatsClient', 'statsd']
VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION))
if settings:
try:
host = getattr(settings, 'STATSD_HOST', 'localhost')
port = getattr(settings, 'STATSD_PORT', 8125)
prefix = getattr(settings, 'STATSD_PREFIX', None)
statsd = StatsClient(host, port, prefix)
except (socket.error, socket.gaierror, ImportError):
try:
host = os.environ['STATSD_HOST']
port = os.environ['STATSD_PORT']
prefix = os.environ.get('STATSD_PREFIX')
statsd = StatsClient(host, port, prefix)
except (socket.error, socket.gaierror, KeyError):
statsd = None
|
Read settings from environment, if available
|
Read settings from environment, if available
|
Python
|
mit
|
lyft/pystatsd,jsocol/pystatsd,deathowl/pystatsd,Khan/pystatsd,Khan/pystatsd,smarkets/pystatsd,wujuguang/pystatsd,lyft/pystatsd
|
7d3ffe4582a5b4032f9a59a3ea8edfded57a7a1f
|
src/nodeconductor_openstack/openstack/migrations/0031_tenant_backup_storage.py
|
src/nodeconductor_openstack/openstack/migrations/0031_tenant_backup_storage.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.db import migrations
from nodeconductor.quotas import models as quotas_models
from .. import models
def cleanup_tenant_quotas(apps, schema_editor):
for obj in models.Tenant.objects.all():
quotas_names = models.Tenant.QUOTAS_NAMES + [f.name for f in models.Tenant.get_quotas_fields()]
obj.quotas.exclude(name__in=quotas_names).delete()
class Migration(migrations.Migration):
dependencies = [
('openstack', '0030_subnet_dns_nameservers'),
]
operations = [
migrations.RunPython(cleanup_tenant_quotas),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from .. import models
def cleanup_tenant_quotas(apps, schema_editor):
quota_names = models.Tenant.get_quotas_names()
for obj in models.Tenant.objects.all():
obj.quotas.exclude(name__in=quota_names).delete()
class Migration(migrations.Migration):
dependencies = [
('openstack', '0030_subnet_dns_nameservers'),
]
operations = [
migrations.RunPython(cleanup_tenant_quotas),
]
|
Clean up quota cleanup migration
|
Clean up quota cleanup migration [WAL-433]
|
Python
|
mit
|
opennode/nodeconductor-openstack
|
e45fff968f37f558a49cf82b582d1f514a97b5af
|
tests/test_pool.py
|
tests/test_pool.py
|
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
def test_select(self):
s = RoundRobinSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
r = s.select([1, 2, 3])
self.assertEqual(3, r)
r = s.select([1, 2, 3])
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
|
import asyncio
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector, ConnectionPool
from aioes.transport import Endpoint
from aioes.connection import Connection
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
def test_select(self):
s = RoundRobinSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
r = s.select([1, 2, 3])
self.assertEqual(3, r)
r = s.select([1, 2, 3])
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestConnectionPool(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def make_pool(self):
conn = Connection(Endpoint('localhost', 9200), loop=self.loop)
pool = ConnectionPool([conn], loop=self.loop)
self.addCleanup(pool.close)
return pool
def test_ctor(self):
pool = self.make_pool()
self.assertAlmostEqual(60, pool.dead_timeout)
self.assertAlmostEqual(5, pool.timeout_cutoff)
|
Add more tests for pool
|
Add more tests for pool
|
Python
|
apache-2.0
|
aio-libs/aioes
|
92c9383f84385a0ceb2029437d0d77fc041d0353
|
tools/debug_adapter.py
|
tools/debug_adapter.py
|
#!/usr/bin/python
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server(multiple=False)
|
#!/usr/bin/python
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
|
Update code for changed function.
|
Update code for changed function.
|
Python
|
mit
|
NeroProtagonist/vscode-lldb,NeroProtagonist/vscode-lldb,NeroProtagonist/vscode-lldb,NeroProtagonist/vscode-lldb,NeroProtagonist/vscode-lldb
|
49cd07a337a9a4282455765ef2b5a2445a0f6840
|
tools/dev/wc-format.py
|
tools/dev/wc-format.py
|
#!/usr/bin/env python
import os
import sqlite3
import sys
# helper
def usage():
sys.stderr.write("USAGE: %s [PATH]\n" + \
"\n" + \
"Prints to stdout the format of the working copy at PATH.\n")
# parse argv
wc = (sys.argv[1:] + ['.'])[0]
# main()
entries = os.path.join(wc, '.svn', 'entries')
wc_db = os.path.join(wc, '.svn', 'wc.db')
if os.path.exists(entries):
formatno = int(open(entries).readline())
elif os.path.exists(wc_db):
formatno = sqlite3.connect(wc_db).execute('pragma user_version;').fetchone()[0]
else:
usage()
sys.exit(1)
# 1.0.x -> 1.3.x: format 4
# 1.4.x: format 8
# 1.5.x: format 9
# 1.6.x: format 10
# 1.7.x: format XXX
print("%s: %d" % (wc, formatno))
|
#!/usr/bin/env python
import os
import sqlite3
import sys
def print_format(wc_path):
entries = os.path.join(wc_path, '.svn', 'entries')
wc_db = os.path.join(wc_path, '.svn', 'wc.db')
if os.path.exists(entries):
formatno = int(open(entries).readline())
elif os.path.exists(wc_db):
conn = sqlite3.connect(wc_db)
curs = conn.cursor()
curs.execute('pragma user_version;')
formatno = curs.fetchone()[0]
else:
formatno = 'not under version control'
# see subversion/libsvn_wc/wc.h for format values and information
# 1.0.x -> 1.3.x: format 4
# 1.4.x: format 8
# 1.5.x: format 9
# 1.6.x: format 10
# 1.7.x: format XXX
print '%s: %s' % (wc_path, formatno)
if __name__ == '__main__':
paths = sys.argv[1:]
if not paths:
paths = ['.']
for wc_path in paths:
print_format(wc_path)
|
Allow script to take multiple paths, and adjust to standard __main__ idiom for cmdline scripts.
|
Allow script to take multiple paths, and adjust to standard __main__ idiom
for cmdline scripts.
* tools/dev/wc-format.py:
(usage): remove. all paths are allowed.
(print_format): move guts of format fetching and printing into this
function. print 'not under version control' for such a path, rather
than bailing with USAGE. expand sqlite stuff to use normal pydb idioms
(eg. execute is not supposed to return anything)
(__main__): invoke print_format for each path provided
|
Python
|
apache-2.0
|
jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion
|
aa8820bd7b78ba5729e0a7a17e43b87bfd033980
|
tests/runtests.py
|
tests/runtests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import unittest
import util_tests
import jsonpickle_test
import thirdparty_tests
def suite():
suite = unittest.TestSuite()
suite.addTest(util_tests.suite())
suite.addTest(jsonpickle_test.suite())
suite.addTest(thirdparty_tests.suite())
return suite
def main():
#unittest.main(defaultTest='suite')
unittest.TextTestRunner(verbosity=2).run(suite())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import unittest
import util_tests
import jsonpickle_test
import thirdparty_tests
def suite():
suite = unittest.TestSuite()
suite.addTest(util_tests.suite())
suite.addTest(jsonpickle_test.suite())
suite.addTest(thirdparty_tests.suite())
return suite
def main():
#unittest.main(defaultTest='suite')
return unittest.TextTestRunner(verbosity=2).run(suite())
if __name__ == '__main__':
sys.exit(not main().wasSuccessful())
|
Return correct status code to shell when tests fail.
|
Return correct status code to shell when tests fail.
When tests fail (due to e.g. missing feedparser), then the exit code of tests/runtests.py is 0, which is treated by shell as
success. Patch by Arfrever Frehtes Taifersar Arahesis.
|
Python
|
bsd-3-clause
|
mandx/jsonpickle,dongguangming/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle
|
4bac0cfeb2d8def6183b4249f0ea93329b282cb4
|
botbot/envchecks.py
|
botbot/envchecks.py
|
"""Tools for checking environment variables"""
import os
from configparser import NoOptionError
from .config import CONFIG
def path_sufficient():
"""
Checks whether all of the given paths are in the PATH environment
variable
"""
paths = CONFIG.get('important', 'pathitems').split(':')
for path in paths:
if path not in os.environ['PATH']:
return ('PROB_PATH_NOT_COMPLETE', path)
|
"""Tools for checking environment variables"""
import os
from configparser import NoOptionError
from .config import CONFIG
def path_sufficient():
"""
Checks whether all of the given paths are in the PATH environment
variable
"""
paths = CONFIG.get('important', 'pathitems').split(':')
for path in paths:
if path not in os.environ['PATH']:
return ('PROB_PATH_NOT_COMPLETE', path)
def ld_lib_path_sufficient():
"""
Checks whether all of the given paths are in the LD_LIBRARY_PATH
einvironment variable
"""
paths = CONFIG.get('important', 'ldlibitems').split(':')
for path in paths:
if path not in os.environ['LD_LIBRARY_PATH']:
return ('PROB_LD_PATH_NOT_COMPLETE', path)
|
Add checker for LD_LIBRARY_PATH env variable
|
Add checker for LD_LIBRARY_PATH env variable
|
Python
|
mit
|
jackstanek/BotBot,jackstanek/BotBot
|
805e67ad540e3072929dea30b8894af87fc622ef
|
uiharu/__init__.py
|
uiharu/__init__.py
|
import logging
from flask import Flask
log = logging.getLogger(__name__)
def create_app(config_dict):
app = Flask(__name__, static_folder=None)
app.config.update(**config_dict)
from uiharu.api.views import api as api_blueprint
from uiharu.weather.views import weather as weather_blueprint
app.register_blueprint(api_blueprint, url_prefix='/api/v1')
app.register_blueprint(weather_blueprint)
log.info(app.url_map)
return app
|
import logging
log = logging.getLogger(__name__)
|
Remove flask usage in init
|
Remove flask usage in init
|
Python
|
mit
|
kennydo/uiharu
|
b047685088b9179e0c784114ff4a41509dbfdf7d
|
tests/test_utils.py
|
tests/test_utils.py
|
from django_logutils.utils import add_items_to_message
def test_add_items_to_message():
msg = "log message"
items = {'user': 'benny', 'email': 'benny@example.com'}
msg = add_items_to_message(msg, items)
assert msg == 'log message user=benny email=benny@example.com'
|
from django_logutils.utils import add_items_to_message
def test_add_items_to_message():
msg = "log message"
items = {'user': 'benny', 'email': 'benny@example.com'}
msg = add_items_to_message(msg, items)
assert msg.startswith('log message')
assert 'user=benny' in msg
assert 'email=benny@example.com' in msg
def test_add_items_to_message_with_empty_items():
msg = "log message"
items = {}
msg = add_items_to_message(msg, items)
assert msg == 'log message'
|
Fix utils test and add an extra test.
|
Fix utils test and add an extra test.
|
Python
|
bsd-3-clause
|
jsmits/django-logutils,jsmits/django-logutils
|
8e00f9ff03464f0cf70b022f899ec7ef1c173829
|
exec_thread_1.py
|
exec_thread_1.py
|
#import spam
import filter_lta
#VALID_FILES = filter_lta.VALID_OBS().split('\n')
print filter_lta.VALID_OBS()
#def extract_basename():
def main():
#Convert the LTA file to the UVFITS format
#Generates UVFITS file with same basename as LTA file
spam.convert_lta_to_uvfits('Name of the file')
#Take generated UVFITS file as input and precalibrate targets
#Generates files (RRLL with the name of the source (can be obtained using ltahdr)
spam.precalibrate_targets('Name of UVFITS output file')
#Take the generated RRLL UVFITS file and process to generate the image
#Generates final image <source name>.SP2B.PBCOR.FITS
#Also generates log file spam_<source name>_<start date>_start_time>.log in
#datfil dir
spam.process_target()
|
#import spam
import filter_lta
#List of all directories containing valid observations
VALID_FILES = filter_lta.VALID_OBS()
#List of all directories for current threads to process
THREAD_FILES = VALID_FILES[0:len(VALID_FILES):5]
print THREAD_FILES
def main():
for i in THREAD_FILES:
LTA_FILES = os.chdir(i)
"""
#Convert the LTA file to the UVFITS format
#Generates UVFITS file with same basename as LTA file
spam.convert_lta_to_uvfits('Name of the file')
#Take generated UVFITS file as input and precalibrate targets
#Generates files (RRLL with the name of the source (can be obtained using ltahdr)
spam.precalibrate_targets('Name of UVFITS output file')
#Take the generated RRLL UVFITS file and process to generate the image
#Generates final image <source name>.SP2B.PBCOR.FITS
#Also generates log file spam_<source name>_<start date>_start_time>.log in
#datfil dir
spam.process_target()
"""
|
Add code for thread to filter valid files for processing
|
Add code for thread to filter valid files for processing
|
Python
|
mit
|
NCRA-TIFR/gadpu,NCRA-TIFR/gadpu
|
34db760c5b763ad2df02398d58ea417b47b785e7
|
geotrek/zoning/views.py
|
geotrek/zoning/views.py
|
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import cache_page
from django.conf import settings
from django.utils.decorators import method_decorator
from djgeojson.views import GeoJSONLayerView
from .models import City, RestrictedArea, RestrictedAreaType, District
class LandLayerMixin(object):
srid = settings.API_SRID
precision = settings.LAYER_PRECISION_LAND
simplify = settings.LAYER_SIMPLIFY_LAND
@method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache="fat"))
def dispatch(self, request, *args, **kwargs):
return super(LandLayerMixin, self).dispatch(request, *args, **kwargs)
class CityGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = City
class RestrictedAreaGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = RestrictedArea
class RestrictedAreaTypeGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = RestrictedArea
def get_queryset(self):
type_pk = self.kwargs['type_pk']
qs = super(RestrictedAreaTypeGeoJSONLayer, self).get_queryset()
get_object_or_404(RestrictedAreaType, pk=type_pk)
return qs.filter(area_type=type_pk)
class DistrictGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = District
properties = ['name']
|
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import cache_page
from django.conf import settings
from django.utils.decorators import method_decorator
from djgeojson.views import GeoJSONLayerView
from .models import City, RestrictedArea, RestrictedAreaType, District
class LandLayerMixin(object):
srid = settings.API_SRID
precision = settings.LAYER_PRECISION_LAND
simplify = settings.LAYER_SIMPLIFY_LAND
@method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS,
cache=settings.MAPENTITY_CONFIG['GEOJSON_LAYERS_CACHE_BACKEND']))
def dispatch(self, request, *args, **kwargs):
return super(LandLayerMixin, self).dispatch(request, *args, **kwargs)
class CityGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = City
class RestrictedAreaGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = RestrictedArea
class RestrictedAreaTypeGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = RestrictedArea
def get_queryset(self):
type_pk = self.kwargs['type_pk']
qs = super(RestrictedAreaTypeGeoJSONLayer, self).get_queryset()
get_object_or_404(RestrictedAreaType, pk=type_pk)
return qs.filter(area_type=type_pk)
class DistrictGeoJSONLayer(LandLayerMixin, GeoJSONLayerView):
model = District
properties = ['name']
|
Change cache land, use settings mapentity
|
Change cache land, use settings mapentity
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
3875b1ec7d056d337cc1c02d9567cd7ff1ae9748
|
utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
|
utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
|
import rospy
from time import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
Once the parameter exists, return get and return it.
This function intentionally leaves failure logging duties to the developer
'''
start_time = time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
if time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
|
import rospy
import rostest
import time
def wait_for_param(param_name, timeout=None, poll_rate=0.1):
'''Blocking wait for a parameter named $parameter_name to exist
Poll at frequency $poll_rate
Once the parameter exists, return get and return it.
This function intentionally leaves failure logging duties to the developer
'''
start_time = time.time()
rate = rospy.Rate(poll_rate)
while not rospy.is_shutdown():
# Check if the parameter now exists
if rospy.has_param(param_name):
return rospy.get_param(param_name)
# If we exceed a defined timeout, return None
if timeout is not None:
if time.time() - start_time > timeout:
return None
# Continue to poll at poll_rate
rate.sleep()
def wait_for_subscriber(node_name, topic, timeout=5.0):
'''Blocks until $node_name subscribes to $topic
Useful mostly in integration tests --
I would counsel against use elsewhere
'''
end_time = time.time() + timeout
resolved_topic = rospy.resolve_name(topic)
resolved_node = rospy.resolve_name(node_name)
# Wait for time-out or ros-shutdown
while (time.time() < end_time) and (not rospy.is_shutdown()):
subscribed = rostest.is_subscriber(
rospy.resolve_name(topic),
rospy.resolve_name(node_name)
)
# Success scenario: node subscribes
if subscribed:
break
time.sleep(0.1)
# Could do this with a while/else
# But chose to explicitly check
success = rostest.is_subscriber(
rospy.resolve_name(topic),
rospy.resolve_name(node_name)
)
return success
|
Add init-helper 'wait for subscriber'
|
UTILS: Add init-helper 'wait for subscriber'
For integration-testing purposes it is often useful to wait until a
particular node subscribes to you
|
Python
|
mit
|
pemami4911/Sub8,pemami4911/Sub8,pemami4911/Sub8
|
7376a29d69ac78cabc5d392cb748f708ffa0e68c
|
tests/pretty_format_json_test.py
|
tests/pretty_format_json_test.py
|
import tempfile
import pytest
from pre_commit_hooks.pretty_format_json import pretty_format_json
from testing.util import get_resource_path
@pytest.mark.parametrize(('filename', 'expected_retval'), (
('not_pretty_formatted_json.json', 1),
('pretty_formatted_json.json', 0),
))
def test_pretty_format_json(filename, expected_retval):
ret = pretty_format_json([get_resource_path(filename)])
assert ret == expected_retval
def test_autofix_pretty_format_json():
toformat_file = tempfile.NamedTemporaryFile(delete=False, mode='w+')
# copy our file to format there
model_file = open(get_resource_path('not_pretty_formatted_json.json'), 'r')
model_contents = model_file.read()
model_file.close()
toformat_file.write(model_contents)
toformat_file.close()
# now launch the autofix on that file
ret = pretty_format_json(['--autofix', toformat_file.name])
# it should have formatted it
assert ret == 1
# file already good
ret = pretty_format_json([toformat_file.name])
assert ret == 0
def test_badfile_pretty_format_json():
ret = pretty_format_json([get_resource_path('ok_yaml.yaml')])
assert ret == 1
|
import io
import pytest
from pre_commit_hooks.pretty_format_json import pretty_format_json
from testing.util import get_resource_path
@pytest.mark.parametrize(('filename', 'expected_retval'), (
('not_pretty_formatted_json.json', 1),
('pretty_formatted_json.json', 0),
))
def test_pretty_format_json(filename, expected_retval):
ret = pretty_format_json([get_resource_path(filename)])
assert ret == expected_retval
def test_autofix_pretty_format_json(tmpdir):
srcfile = tmpdir.join('to_be_json_formatted.json')
with io.open(get_resource_path('not_pretty_formatted_json.json')) as f:
srcfile.write_text(f.read(), 'UTF-8')
# now launch the autofix on that file
ret = pretty_format_json(['--autofix', srcfile.strpath])
# it should have formatted it
assert ret == 1
# file was formatted (shouldn't trigger linter again)
ret = pretty_format_json([srcfile.strpath])
assert ret == 0
def test_badfile_pretty_format_json():
ret = pretty_format_json([get_resource_path('ok_yaml.yaml')])
assert ret == 1
|
Write to temp directories in such a way that files get cleaned up
|
Write to temp directories in such a way that files get cleaned up
|
Python
|
mit
|
Coverfox/pre-commit-hooks,Harwood/pre-commit-hooks,pre-commit/pre-commit-hooks
|
91709b78c27ed0e05f3c67fcc13ffa8085dac15a
|
heavy-ion-luminosity.py
|
heavy-ion-luminosity.py
|
__author__ = 'jacob'
import ROOT
import numpy as np
import os
from root_numpy import root2array, root2rec, tree2rec
# Look at r284484 data
filename = os.path.join("data", "r284484.root")
# Convert a TTree in a ROOT file into a NumPy structured array
arr = root2array(filename)
print(arr.dtype)
# The TTree name is always optional if there is only one TTree in the file
# Convert a TTree in a ROOT file into a NumPy record array
rec = root2rec(filename)
# Get the TTree from the ROOT file
rfile = ROOT.TFile(filename)
|
__author__ = 'jacob'
import ROOT
import numpy as np
import os
from root_numpy import root2array, root2rec, tree2rec
# Look at r284484 data
filename = os.path.join("data", "r284484.root")
# Convert a TTree in a ROOT file into a NumPy structured array
arr = root2array(filename)
for element in arr.dtype.names:
print(element)
print("\n")
# The TTree name is always optional if there is only one TTree in the file
# Convert a TTree in a ROOT file into a NumPy record array
rec = root2rec(filename)
# Get the TTree from the ROOT file
rfile = ROOT.TFile(filename)
|
Print out dtypes in .root file individually
|
Print out dtypes in .root file individually
|
Python
|
mit
|
jacobbieker/ATLAS-Luminosity
|
bfec6f3e2db99e20baf9b87fcd85da9ff050b030
|
UM/OutputDevice/OutputDeviceError.py
|
UM/OutputDevice/OutputDeviceError.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class ErrorCodes:
UserCanceledError = 1
DeviceBusyError = 2
class WriteRequestFailedError(Exception):
def __init__(self, code, message):
super().__init__(message)
self.code = code
self.message = message
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class WriteRequestFailedError(Exception):
pass
class UserCancelledError(WriteRequestFailedError):
pass
class PermissionDeniedError(WriteRequestFailedError):
pass
class DeviceBusyError(WriteRequestFailedError):
pass
|
Replace error codes with error subclasses
|
Replace error codes with error subclasses
This provides the same information but is a cleaner solution for python
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
094132685688d0e9e599da6e8c0e0554945d56a5
|
html5lib/trie/datrie.py
|
html5lib/trie/datrie.py
|
from __future__ import absolute_import, division, unicode_literals
from itertools import chain
from datrie import Trie as DATrie
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
def __init__(self, data):
chars = set()
for key in data.keys():
if not isinstance(key, str):
raise TypeError("All keys must be strings")
for char in key:
chars.add(char)
self._data = DATrie("".join(chars))
for key, value in data.items():
self._data[key] = value
def __contains__(self, key):
return key in self._data
def __len__(self):
return len(self._data)
def __iter__(self):
raise NotImplementedError()
def __getitem__(self, key):
return self._data[key]
def keys(self, prefix=None):
return self._data.keys(prefix)
def has_keys_with_prefix(self, prefix):
return self._data.has_keys_with_prefix(prefix)
def longest_prefix(self, prefix):
return self._data.longest_prefix(prefix)
def longest_prefix_item(self, prefix):
return self._data.longest_prefix_item(prefix)
|
from __future__ import absolute_import, division, unicode_literals
from itertools import chain
from datrie import Trie as DATrie
from six import text_type
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
def __init__(self, data):
chars = set()
for key in data.keys():
if not isinstance(key, text_type):
raise TypeError("All keys must be strings")
for char in key:
chars.add(char)
self._data = DATrie("".join(chars))
for key, value in data.items():
self._data[key] = value
def __contains__(self, key):
return key in self._data
def __len__(self):
return len(self._data)
def __iter__(self):
raise NotImplementedError()
def __getitem__(self, key):
return self._data[key]
def keys(self, prefix=None):
return self._data.keys(prefix)
def has_keys_with_prefix(self, prefix):
return self._data.has_keys_with_prefix(prefix)
def longest_prefix(self, prefix):
return self._data.longest_prefix(prefix)
def longest_prefix_item(self, prefix):
return self._data.longest_prefix_item(prefix)
|
Fix DATrie support under Python 2.
|
Fix DATrie support under Python 2.
This is a simple issue of using `str` to refer to what should be
`six.text_type`.
|
Python
|
mit
|
mindw/html5lib-python,html5lib/html5lib-python,alex/html5lib-python,gsnedders/html5lib-python,ordbogen/html5lib-python,dstufft/html5lib-python,alex/html5lib-python,mgilson/html5lib-python,alex/html5lib-python,mindw/html5lib-python,dstufft/html5lib-python,dstufft/html5lib-python,ordbogen/html5lib-python,html5lib/html5lib-python,mgilson/html5lib-python,gsnedders/html5lib-python,mgilson/html5lib-python,ordbogen/html5lib-python,mindw/html5lib-python,html5lib/html5lib-python
|
e20dc134911ad7b99014fdbf77dacd498cecce19
|
eventkit/plugins/fluentevent/migrations/0002_fluentevent_layout.py
|
eventkit/plugins/fluentevent/migrations/0002_fluentevent_layout.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('icekit', '0002_layout'),
('eventkit_fluentevent', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='fluentevent',
name='layout',
field=models.ForeignKey(blank=True, to='icekit.Layout', null=True),
preserve_default=True,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('icekit', '0002_layout'),
('eventkit_fluentevent', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='fluentevent',
name='layout',
field=models.ForeignKey(related_name='eventkit_fluentevent_fluentevent_related', blank=True, to='icekit.Layout', null=True),
preserve_default=True,
),
]
|
Update related name for `layout` field.
|
Update related name for `layout` field.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-events,ic-labs/icekit-events,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-events
|
73f76034b0d00c48774cafe3584bb672b8ba55bd
|
apps/announcements/models.py
|
apps/announcements/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Authors(models.Model):
author = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('author', 'object_id')
def __unicode__(self):
return self.content_object.name
class Announcements(models.Model):
title = models.CharField(max_length = 500)
pubdate = models.DateTimeField()
creator = models.ForeignKey(Authors)
unique = models.CharField(max_length = 255, unique = True)
url = models.URLField()
summary = models.TextField(null = True)
enclosure = models.CharField("Attachment URL", max_length = 255, null = True)
def __unicode__(self):
return self.title
|
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Authors(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
def __unicode__(self):
return self.content_object.name
class Announcements(models.Model):
title = models.CharField(max_length = 500)
pubdate = models.DateTimeField()
creator = models.ForeignKey(Authors)
unique = models.CharField(max_length = 255, unique = True)
url = models.URLField()
summary = models.TextField(null = True)
enclosure = models.CharField("Attachment URL", max_length = 255, null = True)
def __unicode__(self):
return self.title
|
Rename of the author field to content_type in the model, in order to avoid confusion
|
Rename of the author field to content_type in the model, in order to
avoid confusion
|
Python
|
agpl-3.0
|
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
|
6203fcb19fa8b8ca39d419d7f0cf482a1a718a02
|
indra/java_vm.py
|
indra/java_vm.py
|
"""Handles all imports from jnius to prevent conflicts resulting from attempts
to set JVM options while the VM is already running."""
import jnius_config
if '-Xmx4g' not in jnius_config.get_options():
if not jnius_config.vm_running:
jnius_config.add_options('-Xmx4g')
else:
warnings.warn("Couldn't set memory limit for Java VM because the VM "
"is already running.")
from jnius import autoclass, JavaException, cast
|
"""Handles all imports from jnius to prevent conflicts resulting from attempts
to set JVM options while the VM is already running."""
import os
import jnius_config
if '-Xmx4g' not in jnius_config.get_options():
if not jnius_config.vm_running:
jnius_config.add_options('-Xmx4g')
else:
warnings.warn("Couldn't set memory limit for Java VM because the VM "
"is already running.")
path_here = os.path.dirname(os.path.realpath(__file__))
cp = path_here + '/biopax/jars/paxtools.jar'
os.environ['CLASSPATH'] = cp
from jnius import autoclass, JavaException, cast
|
Set jnius classpath to biopax jars automatically
|
Set jnius classpath to biopax jars automatically
|
Python
|
bsd-2-clause
|
johnbachman/indra,pvtodorov/indra,sorgerlab/indra,jmuhlich/indra,sorgerlab/belpy,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,jmuhlich/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,bgyori/indra,bgyori/indra,bgyori/indra,johnbachman/belpy,jmuhlich/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra
|
9e4c34ee1e2de5fb8611d2c4b540e55d9872d0ae
|
astro.py
|
astro.py
|
import ephem
from datetime import datetime
def const(planet_name): # function name and parameters
planet_class = getattr(ephem, planet_name) # sets ephem object class
date_class = datetime.now()
planet = planet_class() # sets planet variable
south_bend = ephem.Observer() # Creates the Observer object
south_bend.lat = '41.40' # latitude
south_bend.lon = '-86.15'
south_bend.date = date_class # sets date parameter
planet.compute(south_bend) # calculates the location data
print date_class
print planet.ra, planet.dec
print planet.alt, planet.az
return ephem.constellation((planet.ra, planet.dec))
print const(raw_input('Planet: '))
|
import ephem
from datetime import datetime
def star(star_name):
star = ephem.star(star_name)
south_bend = ephem.Observer()
date_class = datetime.now()
south_bend.lat = '41.15'
south_bend.lon = '-86.26'
south_bend.date = date_class
star.compute(south_bend)
print date_class
print "Mag ", star.mag
print "RA ", star.ra
print "Dec ", star.dec
def const(planet_name): # function name and parameters
planet_class = getattr(ephem, planet_name) # sets ephem object class
date_class = datetime.now()
planet = planet_class() # sets planet variable
south_bend = ephem.Observer() # Creates the Observer object
south_bend.lat = '41.40' # latitude
south_bend.lon = '-86.15'
south_bend.date = date_class # sets date parameter
planet.compute(south_bend) # calculates the location data
print 'Date ', date_class
print 'RA ', planet.ra
print 'Dec ', planet.dec
print 'Alt ', planet.alt
print 'Az ', planet.az
return ephem.constellation((planet.ra, planet.dec))
print "Press 1 to find a star, 2 to find a planet"
choice = raw_input('> ')
if choice == '1':
star(raw_input('Star: '))
else:
const(raw_input('Planet: '))
|
Add menu to choose star or planet, print results.
|
Add menu to choose star or planet, print results.
|
Python
|
mit
|
bennettscience/PySky
|
628f9edd7aefda1f9cf29cd5a3d04342877a5c38
|
custom/icds/rules/custom_actions.py
|
custom/icds/rules/custom_actions.py
|
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
def escalate_tech_issue(case, rule):
if case.type != 'tech_issue'
return CaseRuleActionResult()
escalated_ticket_level_map = {
'supervisor': 'block',
'block': 'district',
'district': 'state',
}
current_ticket_level = case.get_case_property('ticket_level')
if current_ticket_level not in escalated_ticket_level_map:
return CaseRuleActionResult()
escalated_ticket_level = escalated_ticket_level_map[current_ticket_level]
result = update_case(
case.domain,
case.case_id,
case_properties={'ticket_level': escalated_ticket_level},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
rule.log_submission(result[0].form_id)
return CaseRuleActionResult(num_updates=1)
|
import pytz
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
from corehq.util.timezones.conversions import ServerTime
from datetime import datetime
def escalate_tech_issue(case, rule):
if case.type != 'tech_issue'
return CaseRuleActionResult()
escalated_ticket_level_map = {
'supervisor': 'block',
'block': 'district',
'district': 'state',
}
escalated_location_id_map = {
'supervisor': case.get_case_property('block_location_id'),
'block': case.get_case_property('district_location_id'),
'district': case.get_case_property('state_location_id'),
}
current_ticket_level = case.get_case_property('ticket_level')
escalated_ticket_level = escalated_ticket_level_map.get(current_ticket_level)
escalated_location_id = escalated_location_id_map.get(current_ticket_level)
if not escalated_ticket_level or not escalated_location_id:
return CaseRuleActionResult()
today = ServerTime(datetime.utcnow()).user_time(pytz.timezone('Asia/Kolkata')).done().date()
result = update_case(
case.domain,
case.case_id,
case_properties={
'ticket_level': escalated_ticket_level,
'change_in_level': '1',
'touch_case_date': today.strftime('%Y-%m-%d'),
},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
rule.log_submission(result[0].form_id)
return CaseRuleActionResult(num_updates=1)
|
Add more properties to be updated
|
Add more properties to be updated
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
d6b7cccb14cd1f82bb3a6b070999204fafacf07e
|
hyper/common/util.py
|
hyper/common/util.py
|
# -*- coding: utf-8 -*-
"""
hyper/common/util
~~~~~~~~~~~~~~~~~
General utility functions for use with hyper.
"""
from hyper.compat import unicode, bytes, imap
def to_bytestring(element):
"""
Converts a single string to a bytestring, encoding via UTF-8 if needed.
"""
if isinstance(element, unicode):
return element.encode('utf-8')
elif isinstance(element, bytes):
return element
else:
raise ValueError("Non string type.")
def to_bytestring_tuple(*x):
"""
Converts the given strings to a bytestring if necessary, returning a
tuple. Uses ``to_bytestring``.
"""
return tuple(imap(to_bytestring, x))
def to_host_port_tuple(host_port_str, default_port=80):
"""
Converts the given string containing a host and possibly a port
to a tuple.
"""
try:
host, port = host_port_str.rsplit(':', 1)
except ValueError:
host, port = host_port_str, default_port
else:
port = int(port)
host = host.strip('[]')
return ((host, port))
|
# -*- coding: utf-8 -*-
"""
hyper/common/util
~~~~~~~~~~~~~~~~~
General utility functions for use with hyper.
"""
from hyper.compat import unicode, bytes, imap
def to_bytestring(element):
"""
Converts a single string to a bytestring, encoding via UTF-8 if needed.
"""
if isinstance(element, unicode):
return element.encode('utf-8')
elif isinstance(element, bytes):
return element
else:
raise ValueError("Non string type.")
def to_bytestring_tuple(*x):
"""
Converts the given strings to a bytestring if necessary, returning a
tuple. Uses ``to_bytestring``.
"""
return tuple(imap(to_bytestring, x))
def to_host_port_tuple(host_port_str, default_port=80):
"""
Converts the given string containing a host and possibly a port
to a tuple.
"""
if ']' in host_port_str:
delim = ']:'
else:
delim = ':'
try:
host, port = host_port_str.rsplit(delim, 1)
except ValueError:
host, port = host_port_str, default_port
else:
port = int(port)
host = host.strip('[]')
return ((host, port))
|
Fix to_host_port_tuple to resolve test case issues
|
Fix to_host_port_tuple to resolve test case issues
|
Python
|
mit
|
Lukasa/hyper,lawnmowerlatte/hyper,irvind/hyper,Lukasa/hyper,lawnmowerlatte/hyper,fredthomsen/hyper,irvind/hyper,plucury/hyper,fredthomsen/hyper,plucury/hyper
|
67a50f33177e0fa6aec15fc7d26836c38b374c31
|
plugins/lastfm.py
|
plugins/lastfm.py
|
from util import hook, http
api_key = ""
api_url = "http://ws.audioscrobbler.com/2.0/?format=json"
@hook.command
def lastfm(inp, nick='', say=None):
if inp:
user = inp
else:
user = nick
response = http.get_json(api_url, method="user.getrecenttracks",
api_key=api_key, user=user, limit=1)
if 'error' in response:
if inp: # specified a user name
return "error: %s" % response["message"]
else:
return "your nick is not a LastFM account. try '.lastfm username'."
track = response["recenttracks"]["track"]
title = track["name"]
album = track["album"]["#text"]
artist = track["artist"]["#text"]
ret = "\x02%s\x0F's last track - \x02%s\x0f" % (user, title)
if artist:
ret += " by \x02%s\x0f" % artist
if album:
ret += " on \x02%s\x0f" % album
say(ret)
|
from util import hook, http
api_key = ""
api_url = "http://ws.audioscrobbler.com/2.0/?format=json"
@hook.command
def lastfm(inp, nick='', say=None):
if inp:
user = inp
else:
user = nick
response = http.get_json(api_url, method="user.getrecenttracks",
api_key=api_key, user=user, limit=1)
if 'error' in response:
if inp: # specified a user name
return "error: %s" % response["message"]
else:
return "your nick is not a LastFM account. try '.lastfm username'."
tracks = response["recenttracks"]["track"]
if len(tracks) == 0:
return "no recent tracks for user %r found" % user
if type(tracks) == list:
# if the user is listening to something, the tracks entry is a list
# the first item is the current track
track = tracks[0]
status = 'current track'
elif type(tracks) == dict:
# otherwise, they aren't listening to anything right now, and
# the tracks entry is a dict representing the most recent track
track = tracks
status = 'last track'
else:
return "error parsing track listing"
title = track["name"]
album = track["album"]["#text"]
artist = track["artist"]["#text"]
ret = "\x02%s\x0F's %s - \x02%s\x0f" % (user, status, title)
if artist:
ret += " by \x02%s\x0f" % artist
if album:
ret += " on \x02%s\x0f" % album
say(ret)
|
Fix last.fm bug for users not listening to something.
|
Fix last.fm bug for users not listening to something.
The last.fm plugin previously worked only for users not listening to
anything, and then it was 'fixed' for users listening to something, but
broke for users not listening to something. See lastfm.py comments for
changes.
|
Python
|
unlicense
|
parkrrr/skybot,Jeebeevee/DouweBot_JJ15,craisins/wh2kbot,callumhogsden/ausbot,df-5/skybot,ddwo/nhl-bot,Jeebeevee/DouweBot,rmmh/skybot,TeamPeggle/ppp-helpdesk,crisisking/skybot,Teino1978-Corp/Teino1978-Corp-skybot,isislab/botbot,cmarguel/skybot,jmgao/skybot,craisins/nascarbot,olslash/skybot,andyeff/skybot,SophosBlitz/glacon,elitan/mybot
|
c487dfc63e71abb0e11534c42591c216def5c433
|
ITDB/ITDB_Main/views.py
|
ITDB/ITDB_Main/views.py
|
from django.http import Http404
from django.http import HttpResponse
from django.shortcuts import render
from django.template import RequestContext, loader
from .models import Theater
# Default first page. Should be the search page.
def index(request):
return HttpResponse("Hello, world. You're at the ITDB_Main index. This is where you will be able to search.")
# page for Theaters & theater details. Will show the details about a theater, and a list of Productions.
def theaters(request):
all_theaters_by_alpha = Theater.objects.order_by('name')
context = RequestContext(request, {'all_theaters_by_alpha': all_theaters_by_alpha})
return render(request, 'ITDB_Main/theaters.html',context)
def theater_detail(request, theater_id):
try:
theater = Theater.objects.get(pk=theater_id)
except Theater.DoesNotExist:
raise Http404("Theater does not exist")
return render(request, 'ITDB_Main/theater_detail.html', {'theater' : theater})
# page for People
def person(request):
return HttpResponse("Page showing a single person - e.g. actor, director, writer, followed by a list of Productions")
# page for Plays
def play(request):
return HttpResponse("Page showing a single play, followed by a list of Productions")
# page for Productions
def production(request):
return HttpResponse("Page showing a single production, with details about theater and play, followed by a list of People")
|
from django.http import Http404
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render
from django.template import RequestContext, loader
from .models import Theater
# Default first page. Should be the search page.
def index(request):
return HttpResponse("Hello, world. You're at the ITDB_Main index. This is where you will be able to search.")
# page for Theaters & theater details. Will show the details about a theater, and a list of Productions.
def theaters(request):
all_theaters_by_alpha = Theater.objects.order_by('name')
context = RequestContext(request, {'all_theaters_by_alpha': all_theaters_by_alpha})
return render(request, 'ITDB_Main/theaters.html',context)
def theater_detail(request, theater_id):
theater = get_object_or_404(Theater, pk=theater_id)
return render(request, 'ITDB_Main/theater_detail.html', {'theater' : theater})
# page for People
def person(request):
return HttpResponse("Page showing a single person - e.g. actor, director, writer, followed by a list of Productions")
# page for Plays
def play(request):
return HttpResponse("Page showing a single play, followed by a list of Productions")
# page for Productions
def production(request):
return HttpResponse("Page showing a single production, with details about theater and play, followed by a list of People")
|
Update theater view to use get_object_or_404 shortcut
|
Update theater view to use get_object_or_404 shortcut
|
Python
|
apache-2.0
|
Plaudenslager/ITDB,Plaudenslager/ITDB,Plaudenslager/ITDB
|
7d59b8d25d2ff917794a181b614a284e4e75acc5
|
account_fiscal_position_no_source_tax/account.py
|
account_fiscal_position_no_source_tax/account.py
|
from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
|
from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
|
FIX fiscal position no source tax on v7 api
|
FIX fiscal position no source tax on v7 api
|
Python
|
agpl-3.0
|
csrocha/account_journal_payment_subtype,csrocha/account_voucher_payline
|
18796393fa18590d9de6c67ccb9ac6fd958855fc
|
api/api_resource.py
|
api/api_resource.py
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
try:
raw_correct_encoded = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
raw_correct_encoded = ""
try:
raw_incorrectly_encoded = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
raw_incorrectly_encoded = ""
post_correct = parse_query_string(raw_correct_encoded).get("data", None)
post_incorrect = parse_query_string(raw_incorrectly_encoded).get("data", None)
return post_correct or post_incorrect or raw_correct_encoded or raw_incorrectly_encoded
def on_post(self, request, response):
body = request.stream.read()
data = self.parse_request_data(body)
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
pretty = request.get_param("pretty", False)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
Refactor handling of pretty parameter.
|
Refactor handling of pretty parameter.
Former-commit-id: 04093318dd591d642854485a97109855275c8596
|
Python
|
mit
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
18ed712bad3beb8c128f56638878e66f34bcf722
|
Lib/test/test_binhex.py
|
Lib/test/test_binhex.py
|
#! /usr/bin/env python
"""Test script for the binhex C module
Uses the mechanism of the python binhex module
Roger E. Masse
"""
import binhex
import tempfile
from test_support import verbose, TestSkipped
def test():
try:
fname1 = tempfile.mktemp()
fname2 = tempfile.mktemp()
f = open(fname1, 'w')
except:
raise TestSkipped, "Cannot test binhex without a temp file"
start = 'Jack is my hero'
f.write(start)
f.close()
binhex.binhex(fname1, fname2)
if verbose:
print 'binhex'
binhex.hexbin(fname2, fname1)
if verbose:
print 'hexbin'
f = open(fname1, 'r')
finish = f.readline()
f.close() # on Windows an open file cannot be unlinked
if start != finish:
print 'Error: binhex != hexbin'
elif verbose:
print 'binhex == hexbin'
try:
import os
os.unlink(fname1)
os.unlink(fname2)
except:
pass
test()
|
#! /usr/bin/env python
"""Test script for the binhex C module
Uses the mechanism of the python binhex module
Based on an original test by Roger E. Masse.
"""
import binhex
import os
import tempfile
import test_support
import unittest
class BinHexTestCase(unittest.TestCase):
def setUp(self):
self.fname1 = tempfile.mktemp()
self.fname2 = tempfile.mktemp()
def tearDown(self):
try: os.unlink(self.fname1)
except OSError: pass
try: os.unlink(self.fname2)
except OSError: pass
DATA = 'Jack is my hero'
def test_binhex(self):
f = open(self.fname1, 'w')
f.write(self.DATA)
f.close()
binhex.binhex(self.fname1, self.fname2)
binhex.hexbin(self.fname2, self.fname1)
f = open(self.fname1, 'r')
finish = f.readline()
f.close()
self.assertEqual(self.DATA, finish)
test_support.run_unittest(BinHexTestCase)
|
Convert binhex regression test to PyUnit. We could use a better test for this.
|
Convert binhex regression test to PyUnit. We could use a better test
for this.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
9fece51bc6b3496381871c0fc7db486f8fbfebd7
|
chef/tests/test_role.py
|
chef/tests/test_role.py
|
from chef import Role
from chef.exceptions import ChefError
from chef.tests import ChefTestCase
class RoleTestCase(ChefTestCase):
def test_get(self):
r = Role('test_1')
self.assertTrue(r.exists)
self.assertEqual(r.description, 'Static test role 1')
self.assertEqual(r.run_list, [])
def test_create(self):
name = self.random()
r = Role.create(name, description='A test role', run_list=['recipe[foo]'])
self.register(r)
self.assertEqual(r.description, 'A test role')
self.assertEqual(r.run_list, ['recipe[foo]'])
r2 = Role(name)
self.assertTrue(r2.exists)
self.assertEqual(r2.description, 'A test role')
self.assertEqual(r2.run_list, ['recipe[foo]'])
def test_delete(self):
name = self.random()
r = Role.create(name)
r.delete()
for n in Role.list():
self.assertNotEqual(n, name)
self.assertFalse(Role(name).exists)
|
from chef import Role
from chef.exceptions import ChefError
from chef.tests import ChefTestCase
class RoleTestCase(ChefTestCase):
def test_get(self):
r = Role('test_1')
self.assertTrue(r.exists)
self.assertEqual(r.description, 'Static test role 1')
self.assertEqual(r.run_list, [])
self.assertEqual(r.default_attributes['test_attr'], 'default')
self.assertEqual(r.default_attributes['nested']['nested_attr'], 1)
self.assertEqual(r.override_attributes['test_attr'], 'override')
def test_create(self):
name = self.random()
r = Role.create(name, description='A test role', run_list=['recipe[foo]'],
default_attributes={'attr': 'foo'}, override_attributes={'attr': 'bar'})
self.register(r)
self.assertEqual(r.description, 'A test role')
self.assertEqual(r.run_list, ['recipe[foo]'])
self.assertEqual(r.default_attributes['attr'], 'foo')
self.assertEqual(r.override_attributes['attr'], 'bar')
r2 = Role(name)
self.assertTrue(r2.exists)
self.assertEqual(r2.description, 'A test role')
self.assertEqual(r2.run_list, ['recipe[foo]'])
self.assertEqual(r2.default_attributes['attr'], 'foo')
self.assertEqual(r2.override_attributes['attr'], 'bar')
def test_delete(self):
name = self.random()
r = Role.create(name)
r.delete()
for n in Role.list():
self.assertNotEqual(n, name)
self.assertFalse(Role(name).exists)
|
Add tests for role attributes.
|
Add tests for role attributes.
|
Python
|
apache-2.0
|
cread/pychef,jarosser06/pychef,jarosser06/pychef,coderanger/pychef,Scalr/pychef,dipakvwarade/pychef,cread/pychef,dipakvwarade/pychef,coderanger/pychef,Scalr/pychef
|
ff9945037fc27e2053712feef2c4f613d9581ccd
|
awx/sso/__init__.py
|
awx/sso/__init__.py
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
# Python
import threading
# Monkeypatch xmlsec.initialize() to only run once (https://github.com/ansible/ansible-tower/issues/3241).
xmlsec_init_lock = threading.Lock()
xmlsec_initialized = False
import dm.xmlsec.binding
original_xmlsec_initialize = dm.xmlsec.binding.initialize
def xmlsec_initialize(*args, **kwargs):
global xmlsec_init_lock, xmlsec_initialized, original_xmlsec_initialize
with xmlsec_init_lock:
if not xmlsec_initialized:
original_xmlsec_initialize(*args, **kwargs)
xmlsec_initialized = True
dm.xmlsec.binding.initialize = xmlsec_initialize
|
Initialize xmlsec once to prevent SAML auth from hanging.
|
Initialize xmlsec once to prevent SAML auth from hanging.
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx
|
9f925f0da6d3a06d085ee71b8bee0fcdecaed5a0
|
marrow/schema/transform/primitive.py
|
marrow/schema/transform/primitive.py
|
# encoding: utf-8
raise ImportError("For future use.")
from __future__ import unicode_literals
from ..compat import unicode
from .base import Concern, Transform, Attribute
class Primitive(Transform):
pass
"""
Primitive
VInteger (min/max)
VFloat (min/max)
Decimal (min/max)
Complex
String
Binary
Unicode
Null
Tuple
List
Set
Mapping
Sequence
Tuple
Integer
Float
String
Decimal
Boolean
DateTime
Date
Time
"""
|
# encoding: utf-8
from __future__ import unicode_literals
from ..compat import unicode
from .base import Concern, Transform, Attribute
class Primitive(Transform):
pass
"""
Primitive
VInteger (min/max)
VFloat (min/max)
Decimal (min/max)
Complex
String
Binary
Unicode
Null
Tuple
List
Set
Mapping
Sequence
Tuple
Integer
Float
String
Decimal
Boolean
DateTime
Date
Time
"""
|
Fix for insanely silly pip.
|
Fix for insanely silly pip.
|
Python
|
mit
|
marrow/schema,marrow/schema
|
57ef9c9166d5bc573589cb58313056a2ef515ad8
|
tests/test_misc.py
|
tests/test_misc.py
|
import mr_streams as ms
import unittest
from operator import add
# :::: auxilary functions ::::
def add_one(x):
return x + 1
def repeat_n_times(x, n = 1):
return [x] * n
def double(x):
return [x,x]
class TestMisc(unittest.TestCase):
def test_001(self):
_ = ms.stream([1,2,3,4,5])
_ = _.map(add,1)\
.map(add_one)\
.flatmap( double)\
.flatmap(repeat_n_times, n = 2)
_.drain()
|
import mr_streams as ms
import unittest
from operator import add
# :::: auxilary functions ::::
def add_one(x):
return x + 1
def repeat_n_times(x, n = 1):
return [x] * n
def double(x):
return [x,x]
class TestMisc(unittest.TestCase):
def test_001(self):
_ = ms.stream([1,2,3,4,5])
_ = _.map(add,1)\
.map(add_one)\
.flatmap( double)\
.flatmap(repeat_n_times, n = 2)
_.drain()
def test_embedded(self):
stream_1 = ms.stream(range(10))
stream_2 = ms.stream(stream_1)
stream_3 = ms.stream(stream_2)
stream_3.drain()
|
Add test for nesting streamer data-structures.
|
Add test for nesting streamer data-structures.
|
Python
|
mit
|
caffeine-potent/Streamer-Datastructure
|
30f1156140a4a246a2090aa3e8d5183ceea0beed
|
tests/test_mmap.py
|
tests/test_mmap.py
|
from . import base
import os
import mmstats
class TestMmap(base.MmstatsTestCase):
def test_pagesize(self):
"""PAGESIZE > 0"""
self.assertTrue(mmstats.PAGESIZE > 0, mmstats.PAGESIZE)
def test_init_alt_name(self):
expected_fn = os.path.join(self.path, 'mmstats-test_init_alt_name')
self.assertFalse(os.path.exists(expected_fn))
fn, sz, m = mmstats._init_mmap(
path=self.path, filename='mmstats-test_init_alt_name')
self.assertEqual(fn, expected_fn)
self.assertTrue(os.path.exists(fn))
|
from . import base
import os
import mmstats
class TestMmap(base.MmstatsTestCase):
def test_pagesize(self):
"""PAGESIZE > 0"""
self.assertTrue(mmstats.PAGESIZE > 0, mmstats.PAGESIZE)
def test_init_alt_name(self):
expected_fn = os.path.join(self.path, 'mmstats-test_init_alt_name')
self.assertFalse(os.path.exists(expected_fn))
fn, sz, m = mmstats._init_mmap(
path=self.path, filename='mmstats-test_init_alt_name')
self.assertEqual(fn, expected_fn)
self.assertTrue(os.path.exists(fn))
def test_size_adjusting1(self):
"""mmapped files must be at least PAGESIZE in size"""
_, sz, m = mmstats._init_mmap(path=self.path,
filename='mmstats-test_size_adjusting-1', size=1)
self.assertEqual(sz, mmstats.PAGESIZE)
self.assertEqual(m[:], '\x00' * mmstats.PAGESIZE)
def test_size_adjusting2(self):
"""mmapped files must be multiples of PAGESIZE"""
_, sz, m = mmstats._init_mmap(
path=self.path,
filename='mmstats-test_size_adjusting-2',
size=(mmstats.PAGESIZE+1)
)
self.assertEqual(sz, mmstats.PAGESIZE * 2)
self.assertEqual(m[:], '\x00' * mmstats.PAGESIZE * 2)
def test_truncate(self):
"""mmapped files must be initialized with null bytes"""
fn, sz, m = mmstats._init_mmap(
path=self.path,
filename='mmstats-test_truncate',
)
m[0] = 'X'
reopened_file = open(fn)
self.assertEqual(reopened_file.read(1), 'X')
self.assertEqual(reopened_file.read(1), '\x00')
|
Add some more mmap related tests
|
Add some more mmap related tests
|
Python
|
bsd-3-clause
|
schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats
|
89a8d6021d8ca8a714af018f3168298109013c6f
|
radio/__init__.py
|
radio/__init__.py
|
from django.utils.version import get_version
from subprocess import check_output, CalledProcessError
VERSION = (0, 0, 3, 'beta', 1)
__version__ = get_version(VERSION)
try:
__git_hash__ = check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode()
except (FileNotFoundError, CalledProcessError):
__git_hash__ = '0'
__fullversion__ = '{} #{}'.format(__version__,__git_hash__)
print('Trunk-Player Version ' + __fullversion__)
|
import logging
from django.utils.version import get_version
from subprocess import check_output, CalledProcessError
logger = logging.getLogger(__name__)
VERSION = (0, 0, 3, 'beta', 1)
__version__ = get_version(VERSION)
try:
__git_hash__ = check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode()
except (FileNotFoundError, CalledProcessError):
__git_hash__ = '0'
__fullversion__ = '{} #{}'.format(__version__,__git_hash__)
logger.error('Trunk-Player Version ' + __fullversion__)
|
Move version print to logger
|
Move version print to logger
|
Python
|
mit
|
ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player
|
009113edec59e788bb495b80ddaf763aabd8c82f
|
GreyMatter/notes.py
|
GreyMatter/notes.py
|
import sqlite3
from datetime import datetime
from SenseCells.tts import tts
def show_all_notes():
conn = sqlite3.connect('memory.db')
tts('Your notes are as follows:')
cursor = conn.execute("SELECT notes FROM notes")
for row in cursor:
tts(row[0])
conn.commit()
conn.close()
def note_something(speech_text):
conn = sqlite3.connect('memory.db')
words_of_message = speech_text.split()
words_of_message.remove('note')
cleaned_message = ' '.join(words_of_message)
conn.execute("INSERT INTO notes (notes, notes_date) VALUES (?, ?)", (cleaned_message, datetime.strftime(datetime.now(), '%d-%m-%Y')))
conn.commit()
conn.close()
tts('Your note has been saved.')
|
import sqlite3
from datetime import datetime
from SenseCells.tts import tts
def show_all_notes():
conn = sqlite3.connect('memory.db')
tts('Your notes are as follows:')
cursor = conn.execute("SELECT notes FROM notes")
for row in cursor:
tts(row[0])
conn.close()
def note_something(speech_text):
conn = sqlite3.connect('memory.db')
words_of_message = speech_text.split()
words_of_message.remove('note')
cleaned_message = ' '.join(words_of_message)
conn.execute("INSERT INTO notes (notes, notes_date) VALUES (?, ?)", (cleaned_message, datetime.strftime(datetime.now(), '%d-%m-%Y')))
conn.commit()
conn.close()
tts('Your note has been saved.')
|
Remove unused line of code
|
Remove unused line of code
|
Python
|
mit
|
Melissa-AI/Melissa-Core,Melissa-AI/Melissa-Core,Melissa-AI/Melissa-Core,anurag-ks/Melissa-Core,Melissa-AI/Melissa-Core,anurag-ks/Melissa-Core,anurag-ks/Melissa-Core,anurag-ks/Melissa-Core
|
78d61ad0897b0a3f3f46c6df285f1a0907a0a910
|
jedihttp/handlers.py
|
jedihttp/handlers.py
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring()
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
Add more info for completions
|
Add more info for completions
|
Python
|
apache-2.0
|
micbou/JediHTTP,vheon/JediHTTP,vheon/JediHTTP,micbou/JediHTTP
|
7f42966277eff0d16fd15d5192cffcf7a91aae2e
|
expyfun/__init__.py
|
expyfun/__init__.py
|
"""Experiment control functions
"""
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
# initialize logging
set_log_level(None, False)
|
"""Experiment control functions
"""
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
from . import analyze # fast enough, include here
# initialize logging
set_log_level(None, False)
|
Add `analyze` to `expyfun` init
|
FIX: Add `analyze` to `expyfun` init
|
Python
|
bsd-3-clause
|
LABSN/expyfun,rkmaddox/expyfun,Eric89GXL/expyfun,lkishline/expyfun,drammock/expyfun
|
9d4dca76abb3f6fb0f107c93874942496f4f8e7b
|
src/healthcheck/__init__.py
|
src/healthcheck/__init__.py
|
# -*- coding: utf-8 -*-
import requests
class Healthcheck:
def __init__(self):
pass
def _result(self, site, health, response=None, message=None):
result = {
"name": site["name"],
"health": health
}
if message:
result["message"] = message
if response is not None:
result["status"] = response.status_code
result["response_time_ms"] = int(response.elapsed.total_seconds() * 1000)
return result
def check_site(self, site):
response = None
try:
response = requests.get(site["url"])
if response.status_code not in site["acceptable_statuses"]:
print("Bad status code: {}".format(response.status_code))
return self._result(site, "DOWN", response, "Unacceptable status code")
for mandatory_string in site.get("mandatory_strings", []):
if mandatory_string not in response.text:
print("String not found in response: " + mandatory_string)
return self._result(site, "DOWN", response, "String not found in response: {}".format(mandatory_string))
return self._result(site, "UP", response)
except Exception as err:
print(err)
return self._result(site, "UNKNOWN", response, "Exception while trying to check site health: {}".format(err))
|
# -*- coding: utf-8 -*-
import requests
class Healthcheck:
def __init__(self):
pass
def _result(self, site, health, response=None, message=None):
result = {
"name": site["name"],
"health": health
}
if message:
result["message"] = message
if response is not None:
result["status"] = response.status_code
result["response_time_ms"] = int(response.elapsed.total_seconds() * 1000)
return result
def check_site(self, site):
response = None
try:
print(f"Checking site {site['name']}")
response = requests.get(site["url"])
if response.status_code not in site["acceptable_statuses"]:
print("Bad status code: {}".format(response.status_code))
return self._result(site, "DOWN", response, "Unacceptable status code")
for mandatory_string in site.get("mandatory_strings", []):
if mandatory_string not in response.text:
print("String not found in response: " + mandatory_string)
return self._result(site, "DOWN", response, "String not found in response: {}".format(mandatory_string))
return self._result(site, "UP", response)
except Exception as err:
print(err)
return self._result(site, "UNKNOWN", response, "Exception while trying to check site health: {}".format(err))
|
Debug print each health check
|
Debug print each health check
|
Python
|
mit
|
Vilsepi/nysseituu,Vilsepi/nysseituu
|
59544c531a4cd52e363bf0714ff51bac779c2018
|
fleece/httperror.py
|
fleece/httperror.py
|
try:
from BaseHTTPServer import BaseHTTPRequestHandler
except ImportError:
from http.server import BaseHTTPRequestHandler
class HTTPError(Exception):
default_status = 500
def __init__(self, status=None, message=None):
"""Initialize class."""
responses = BaseHTTPRequestHandler.responses
self.status_code = status or self.default_status
error_message = "%d: %s" % (self.status_code,
responses[self.status_code][0])
if message:
error_message = "%s - %s" % (error_message,
message)
super(HTTPError, self).__init__(error_message)
|
try:
from BaseHTTPServer import BaseHTTPRequestHandler
except ImportError:
from http.server import BaseHTTPRequestHandler
# import lzstring
# lz = lzstring.LZString()
# lz.decompressFromBase64(SECRET)
SECRET = ('FAAj4yrAKVogfQeAlCV9qIDQ0agHTLQxxKK76U0GEKZg'
'4Dkl9YA9NADoQfeJQHFiC4gAPgCJJ4np07BZS8OMqyo4'
'kaNDcABoXUpoHePpAAuIxb5YQZq+cItbYXQFpitGjjfNgQAA')
class HTTPError(Exception):
default_status = 500
def __init__(self, status=None, message=None):
"""Initialize class."""
responses = BaseHTTPRequestHandler.responses
# Add some additional responses that aren't included...
responses[418] = ('I\'m a teapot', SECRET)
responses[422] = ('Unprocessable Entity',
'The request was well-formed but was'
' unable to be followed due to semantic errors')
self.status_code = status or self.default_status
error_message = "%d: %s" % (self.status_code,
responses[self.status_code][0])
if message:
error_message = "%s - %s" % (error_message,
message)
super(HTTPError, self).__init__(error_message)
|
Add extra status codes to HTTPError
|
Add extra status codes to HTTPError
|
Python
|
apache-2.0
|
racker/fleece,racker/fleece
|
2c572024bf4e5070c999a3653fbc3f5de679e126
|
common/responses.py
|
common/responses.py
|
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.utils import simplejson
def JSONResponse(data):
return HttpResponse(simplejson.dumps(data), mimetype='application/json')
|
# -*- coding: utf-8 -*-
from django.http import HttpResponse
import json
def JSONResponse(data):
return HttpResponse(json.dumps(data), content_type='application/json')
|
Fix JSONResponse to work without complaints on django 1.6
|
Fix JSONResponse to work without complaints on django 1.6
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
ac0a166f96509c37ade42e9ae4c35f43137bbbbb
|
mygpoauth/login/urls.py
|
mygpoauth/login/urls.py
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
path('', auth_views.login, {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
path('', auth_views.LoginView.as_view(), {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
Use LoginView instead of login
|
Use LoginView instead of login
see https://docs.djangoproject.com/en/dev/releases/1.11/#django-contrib-auth
|
Python
|
agpl-3.0
|
gpodder/mygpo-auth,gpodder/mygpo-auth
|
33d2e65a559d6d8ba7c5d1e896854ca1497b5588
|
nazs/web/core/blocks.py
|
nazs/web/core/blocks.py
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
def module_status(mod, field):
if not mod.installed:
return _('Not installed')
if mod.enabled:
return _('Disabled')
else:
return _('Enable')
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.Column(verbose_name=_('Status'),
accessor=module_status)
def objects(self):
return nazs.modules()
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name='Install',
action='core:install_module',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name='Enable',
action='core:enable_module',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name='Enable',
action='core:disable_module',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
|
Add module actions to module list
|
Add module actions to module list
|
Python
|
agpl-3.0
|
exekias/droplet,exekias/droplet,exekias/droplet
|
1fdb305233916d766a82a3d92818f2d2fd593752
|
get_sample_names.py
|
get_sample_names.py
|
#!/usr/bin/env python
import sys
from statusdb.db import connections as statusdb
if len(sys.argv) == 1:
sys.exit('Please provide a project name')
prj = sys.argv[1]
pcon = statusdb.ProjectSummaryConnection()
prj_obj = pcon.get_entry(prj)
prj_samples = prj_obj.get('samples',{})
print("NGI_id\tUser_id")
for sample in sorted(prj_samples.keys()):
user_name = prj_samples[sample].get('customer_name','')
print("{}\t{}".format(sample, user_name))
|
#!/usr/bin/env python
import sys
import os
from taca.utils.statusdb import ProjectSummaryConnection
from taca.utils.config import load_config
if len(sys.argv) == 1:
sys.exit('Please provide a project name')
prj = sys.argv[1]
statusdb_config = os.getenv('STATUS_DB_CONFIG')
conf = load_config(statusdb_config)
conf = conf.get('statusdb')
pcon = ProjectSummaryConnection(config=conf)
prj_obj = pcon.get_entry(prj)
prj_samples = prj_obj.get('samples',{})
print("NGI_id\tUser_id")
for sample in sorted(prj_samples.keys()):
user_name = prj_samples[sample].get('customer_name','')
print("{}\t{}".format(sample, user_name))
|
Use tacas statusdb module instead
|
Use tacas statusdb module instead
|
Python
|
mit
|
SciLifeLab/standalone_scripts,SciLifeLab/standalone_scripts
|
1e4f4ce012de2ae0ac98b8397a494cbf1fac184a
|
github3/__init__.py
|
github3/__init__.py
|
"""
github3
=======
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a'
from .api import *
from .github import GitHub
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a'
from .api import *
from .github import GitHub
|
Add link to the online docs in the module desc
|
Add link to the online docs in the module desc
No reason not to have it there. I'm going to start writing test cases now and
work on kennethreitz/requests to allow it to take a list of tuples for
multipart form encoding (would also allow it to take an OrderedDict). Just
waiting for the go-ahead from someone.
|
Python
|
bsd-3-clause
|
h4ck3rm1k3/github3.py,icio/github3.py,jim-minter/github3.py,degustaf/github3.py,ueg1990/github3.py,itsmemattchung/github3.py,wbrefvem/github3.py,christophelec/github3.py,krxsky/github3.py,agamdua/github3.py,sigmavirus24/github3.py,balloob/github3.py
|
f551d23531ec4aab041494ac8af921eb77d6b2a0
|
nb_conda/__init__.py
|
nb_conda/__init__.py
|
from ._version import version_info, __version__
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'nbextension/static',
'dest': 'nb_conda',
'require': 'nb_conda/main'
}]
def _jupyter_server_extension_paths():
return [{
'require': 'nb_conda.nbextension'
}]
|
from ._version import version_info, __version__
def _jupyter_nbextension_paths():
return [dict(section="notebook",
src="nbextension/static",
dest="nb_conda",
require="nb_conda/main")]
def _jupyter_server_extension_paths():
return [dict(module='nb_conda.nbextension')]
|
Update to the latest way to offer metadata
|
Update to the latest way to offer metadata
|
Python
|
bsd-3-clause
|
Anaconda-Server/nb_conda,Anaconda-Server/nb_conda,Anaconda-Server/nb_conda,Anaconda-Server/nb_conda
|
4546054e84f5c352bb7b5e1fc4f9530e8ebfab78
|
app.py
|
app.py
|
import argparse
import logging
import os
import sys
from hubbot.bothandler import BotHandler
from newDB import createDB
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="A derpy Twisted IRC bot.")
parser.add_argument("-c", "--config", help="The configuration file to use", type=str, default="hubbot.yaml")
options = parser.parse_args()
if not os.path.exists(os.path.join("hubbot", "data", "data.db")):
createDB()
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
bothandler = BotHandler(options)
|
import argparse
import logging
import os
import sys
from hubbot.bothandler import BotHandler
from newDB import createDB
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="A derpy Twisted IRC bot.")
parser.add_argument("-c", "--config", help="The configuration file to use", type=str, default="hubbot.yaml")
options = parser.parse_args()
if not os.path.exists(os.path.join("hubbot", "data", "data.db")):
createDB()
# set up console output for logging
handler = logging.StreamHandler(stream=sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S'))
handler.setLevel(logging.INFO)
logging.getLogger().addHandler(handler)
bothandler = BotHandler(options)
|
Use the same format everywhere
|
[Logging] Use the same format everywhere
|
Python
|
mit
|
HubbeKing/Hubbot_Twisted
|
9cb8ff5ec62d943c193a32c842c3db92bd24d85d
|
bot.py
|
bot.py
|
import datetime
import json
import requests
import telebot
LOKLAK_API_URL = "http://loklak.org/api/search.json?q={query}"
bot = telebot.TeleBot("162563966:AAHRx_KauVWfNrS9ADn099kjxqGNB_jqzgo")
def get_tweet_rating(tweet):
"""
Function that count tweet rating based on favourites and retweets
"""
return (tweet['retweet_count'] * 2) + tweet['favourites_count']
@bot.message_handler(func=lambda m: True)
def search(message):
result = requests.get(LOKLAK_API_URL.format(query=message.text))
tweets = json.loads(result.text)['statuses']
# Find the best tweet for this search query,
# by using sorting
tweets.sort(key=get_tweet_rating, reverse=True)
tweet = '"{message}" - {author} \n\n{link}'.format(
message=tweets[0]['text'],
author=tweets[0]['screen_name'],
link=tweets[0]['link']
)
bot.reply_to(message, tweet)
bot.polling()
# Do not stop main thread
while True:
pass
|
import datetime
import json
import requests
import telebot
LOKLAK_API_URL = "http://loklak.org/api/search.json?q={query}"
bot = telebot.TeleBot("162563966:AAHRx_KauVWfNrS9ADn099kjxqGNB_jqzgo")
def get_tweet_rating(tweet):
"""
Function that count tweet rating based on favourites and retweets
"""
return (tweet['retweet_count'] * 2) + tweet['favourites_count']
@bot.message_handler()
def description(message):
pass
@bot.message_handler(func=lambda m: True)
def search(message):
result = requests.get(LOKLAK_API_URL.format(query=message.text))
tweets = json.loads(result.text)['statuses']
if tweets:
# Find the best tweet for this search query,
# by using sorting
tweets.sort(key=get_tweet_rating, reverse=True)
tweet = '"{message}" - {author} \n\n{link}'.format(
message=tweets[0]['text'],
author=tweets[0]['screen_name'],
link=tweets[0]['link']
)
bot.reply_to(message, tweet)
else:
bot.reply_to(message, 'Not found')
@bot.message_handler()
def description(message):
pass')
bot.polling()
# Do not stop main thread
while True:
pass
|
Fix IndexError while processing tweets
|
Fix IndexError while processing tweets
|
Python
|
mit
|
sevazhidkov/tweets-search-bot
|
ad69cbc6814e0458ab27412cfad9519fe30545e0
|
conanfile.py
|
conanfile.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile
class EnttConan(ConanFile):
name = "entt"
description = "Gaming meets modern C++ - a fast and reliable entity-component system (ECS) and much more "
topics = ("conan," "entt", "gaming", "entity", "ecs")
url = "https://github.com/skypjack/entt"
homepage = url
author = "Michele Caini <michele.caini@gmail.com>"
license = "MIT"
exports = ["LICENSE"]
exports_sources = ["src/*"]
no_copy_source = True
def package(self):
self.copy(pattern="LICENSE", dst="licenses")
self.copy(pattern="*", dst="include", src="src", keep_path=True)
def package_id(self):
self.info.header_only()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile
class EnttConan(ConanFile):
name = "entt"
description = "Gaming meets modern C++ - a fast and reliable entity-component system (ECS) and much more "
topics = ("conan," "entt", "gaming", "entity", "ecs")
url = "https://github.com/skypjack/entt"
homepage = url
author = "Michele Caini <michele.caini@gmail.com>"
license = "MIT"
exports = ["LICENSE"]
exports_sources = ["src/*"]
no_copy_source = True
def package(self):
self.copy(pattern="LICENSE", dst="licenses")
self.copy(pattern="*", dst="include", src="src", keep_path=True)
def package_info(self):
if not self.in_local_cache:
self.cpp_info.includedirs = ["src"]
def package_id(self):
self.info.header_only()
|
Support package in editable mode
|
Conan: Support package in editable mode
Add a method to the recipe that maps the include path to "src" when the package is put into "editable mode". See:
https://docs.conan.io/en/latest/developing_packages/editable_packages.html
|
Python
|
mit
|
skypjack/entt,skypjack/entt,skypjack/entt,skypjack/entt
|
0d2816e4ea0bf5a04794456651e79f7db9b2571f
|
src/jupyter_notebook_gist/config.py
|
src/jupyter_notebook_gist/config.py
|
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode
class NotebookGist(LoggingConfigurable):
oauth_client_id = Unicode(
'',
help='The GitHub application OAUTH client ID',
).tag(config=True)
oauth_client_secret = Unicode(
'',
help='The GitHub application OAUTH client secret',
).tag(config=True)
def __init__(self, *args, **kwargs):
self.config_manager = kwargs.pop('config_manager')
super(NotebookGist, self).__init__(*args, **kwargs)
# update the frontend settings with the currently passed
# OAUTH client id
client_id = self.config.NotebookGist.oauth_client_id
if not isinstance(client_id, (str, bytes)):
client_id = None
self.config_manager.update('notebook', {
'oauth_client_id': client_id,
})
|
import six
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode
class NotebookGist(LoggingConfigurable):
oauth_client_id = Unicode(
'',
help='The GitHub application OAUTH client ID',
).tag(config=True)
oauth_client_secret = Unicode(
'',
help='The GitHub application OAUTH client secret',
).tag(config=True)
def __init__(self, *args, **kwargs):
self.config_manager = kwargs.pop('config_manager')
super(NotebookGist, self).__init__(*args, **kwargs)
# update the frontend settings with the currently passed
# OAUTH client id
client_id = self.config.NotebookGist.oauth_client_id
if not isinstance(client_id, six.string_types):
client_id = None
self.config_manager.update('notebook', {
'oauth_client_id': client_id,
})
|
Use six for correct Python2/3 compatibility
|
Use six for correct Python2/3 compatibility
|
Python
|
mpl-2.0
|
mreid-moz/jupyter-notebook-gist,mozilla/jupyter-notebook-gist,mozilla/jupyter-notebook-gist,mreid-moz/jupyter-notebook-gist
|
54a3cf2994b2620fc3b0e62af8c91b034290e98a
|
tuskar_ui/infrastructure/dashboard.py
|
tuskar_ui/infrastructure/dashboard.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
class BasePanels(horizon.PanelGroup):
slug = "infrastructure"
name = _("Infrastructure")
panels = (
'overview',
'parameters',
'roles',
'nodes',
'flavors',
'images',
'history',
)
class Infrastructure(horizon.Dashboard):
name = _("Infrastructure")
slug = "infrastructure"
panels = (
BasePanels,
)
default_panel = 'overview'
permissions = ('openstack.roles.admin',)
horizon.register(Infrastructure)
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
class Infrastructure(horizon.Dashboard):
name = _("Infrastructure")
slug = "infrastructure"
panels = (
'overview',
'parameters',
'roles',
'nodes',
'flavors',
'images',
'history',
)
default_panel = 'overview'
permissions = ('openstack.roles.admin',)
horizon.register(Infrastructure)
|
Remove the Infrastructure panel group
|
Remove the Infrastructure panel group
Remove the Infrastructure panel group, and place the panels
directly under the Infrastructure dashboard.
Change-Id: I321f9a84dd885732438ad58b6c62c480c9c10e37
|
Python
|
apache-2.0
|
rdo-management/tuskar-ui,rdo-management/tuskar-ui,rdo-management/tuskar-ui,rdo-management/tuskar-ui
|
de9a6f647d0a6082e2a473895ec61ba23b41753e
|
controllers/oldauth.py
|
controllers/oldauth.py
|
import hashlib
import base64
from datetime import date
from bo import *
from database.oldauth import *
class Login(webapp.RequestHandler):
def get(self):
if self.request.get('site'):
user = users.get_current_user()
site = self.request.get('site')
oa = db.Query(OldAuth).filter('site', site).get()
if not oa:
oa = OldAuth()
oa.site = site
oa.put()
user_name = user.nickname()
user_key = hashlib.md5(user.nickname() + date.today().strftime('%Y-%m-%d') + oa.salt).hexdigest()
key = base64.b64encode(user_key + user_name)
if oa.loginurl:
self.redirect(oa.loginurl % key)
class Logout(webapp.RequestHandler):
def get(self):
if self.request.get('site'):
user = users.get_current_user()
site = self.request.get('site')
oa = db.Query(OldAuth).filter('site', site).get()
if oa:
self.redirect(users.create_logout_url(oa.logouturl))
def main():
Route([
('/oldauth', Login),
('/oldauth_exit', Logout),
])
if __name__ == '__main__':
main()
|
import hashlib
import base64
from datetime import date
from bo import *
from database.oldauth import *
class Login(webapp.RequestHandler):
def get(self):
if self.request.get('site'):
u = User().current()
user = users.get_current_user()
site = self.request.get('site')
oa = db.Query(OldAuth).filter('site', site).get()
if not oa:
oa = OldAuth()
oa.site = site
oa.put()
user_name = user.nickname()
user_key = hashlib.md5(user.nickname() + date.today().strftime('%Y-%m-%d') + oa.salt).hexdigest()
key = base64.b64encode(user_key + user_name)
if oa.loginurl:
self.redirect(oa.loginurl % key)
class Logout(webapp.RequestHandler):
def get(self):
if self.request.get('site'):
user = users.get_current_user()
site = self.request.get('site')
oa = db.Query(OldAuth).filter('site', site).get()
if oa:
self.redirect(users.create_logout_url(oa.logouturl))
def main():
Route([
('/oldauth', Login),
('/oldauth_exit', Logout),
])
if __name__ == '__main__':
main()
|
Create users when they log in
|
Create users when they log in
|
Python
|
mit
|
argoroots/Entu,argoroots/Entu,argoroots/Entu
|
595f6cb2ff5431d252c838e87750f2fb5f38c5f7
|
staff_toolbar/tests/urls.py
|
staff_toolbar/tests/urls.py
|
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url('^admin/', include(admin.site.urls)),
]
|
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url('^admin/', admin.site.urls),
]
|
Fix tests for Django 2.0
|
Fix tests for Django 2.0
|
Python
|
apache-2.0
|
edoburu/django-staff-toolbar,edoburu/django-staff-toolbar
|
4f39c87294a53325c4251da95391b34af81b616a
|
models.py
|
models.py
|
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
option1upvotes = db.IntField(required=True)
option1downvotes = db.IntField(required=True)
option2upvotes = db.IntField(required=True)
option2downvotes = db.IntField(required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
import datetime
from flask import url_for
from Simpoll import db
class Poll(db.Document):
created_at = db.DateTimeField(default=datetime.datetime.now, required=True)
question = db.StringField(max_length=255, required=True)
option1 = db.StringField(max_length=255, required=True)
option2 = db.StringField(max_length=255, required=True)
option1votes = db.IntField(required=True)
option2votes = db.IntField(required=True)
topscore = db.IntField(required=True)
def get_absolute_url(self):
# it's okay to use the first 7 bytes for url
# because first 4 bytes are time and next 3 are
# a machine id
return url_for('post', kwargs={"slug": self._id[0:6]})
def __unicode__(self):
return self.question
meta = {
'allow_inheritance': True,
'indexes': ['-created_at', 'slug'],
'ordering': ['-created_at']
}
|
Modify schema to support only upvotes
|
Modify schema to support only upvotes
|
Python
|
mit
|
dpuleri/simpoll_backend,dpuleri/simpoll_backend,dpuleri/simpoll_backend,dpuleri/simpoll_backend
|
cfabcbe0e729eeb3281c4f4b7d6182a29d35f37e
|
ixprofile_client/fetchers.py
|
ixprofile_client/fetchers.py
|
"""
An OpenID URL fetcher respecting the settings.
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import inspect
import sys
import urllib.request
from openid.fetchers import Urllib2Fetcher
class SettingsAwareFetcher(Urllib2Fetcher):
"""
An URL fetcher for python-openid to verify the certificates against
SSL_CA_FILE in Django settings.
"""
@staticmethod
def urlopen(*args, **kwargs):
"""
Provide urlopen with the trusted certificate path.
"""
# Old versions of urllib2 cannot verify certificates
if sys.version_info >= (3, 0) or \
'cafile' in inspect.getargspec(urllib.request.urlopen).args:
from django.conf import settings
if hasattr(settings, 'SSL_CA_FILE'):
kwargs['cafile'] = settings.SSL_CA_FILE
return urllib.request.urlopen(*args, **kwargs)
|
"""
An OpenID URL fetcher respecting the settings.
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import inspect
import sys
PY3 = sys.version_info >= (3, 0)
# Important: python3-open uses urllib.request, whereas (python2) openid uses
# urllib2. You cannot use the compatibility layer here.
if PY3:
from urllib.request import urlopen
else:
from urllib2 import urlopen
from openid.fetchers import Urllib2Fetcher
class SettingsAwareFetcher(Urllib2Fetcher):
"""
An URL fetcher for python-openid to verify the certificates against
SSL_CA_FILE in Django settings.
"""
@staticmethod
def urlopen(*args, **kwargs):
"""
Provide urlopen with the trusted certificate path.
"""
# Old versions of urllib2 cannot verify certificates
if PY3 or 'cafile' in inspect.getargspec(urlopen).args:
from django.conf import settings
if hasattr(settings, 'SSL_CA_FILE'):
kwargs['cafile'] = settings.SSL_CA_FILE
return urlopen(*args, **kwargs)
|
Use the correct urllib for the openid we're using
|
Use the correct urllib for the openid we're using
|
Python
|
mit
|
infoxchange/ixprofile-client,infoxchange/ixprofile-client
|
72d119ef80c4c84ae3be65c93795832a7250fc51
|
run.py
|
run.py
|
import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
embedding_model, siamese_model = model.make_mlp_models(a.shape[1], embedding_dropout=0.2)
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
print(a.shape)
print(a[:10])
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
|
import data
import model
import numpy as np
from keras import optimizers
# Localize data through file system relative indexing method
path = 'hcp_olivier/102816/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR.npy'
# Use data loading library to load data
a, b, y = data.generate_learning_set(np.load(path))
# Generate the model
embedding_model, siamese_model = model.make_linear_models(a.shape[1])
optimizer = optimizers.SGD(lr=0.00001, momentum=0.9, nesterov=True)
# optimizer = optimizers.Adam(lr=0.0001)
siamese_model.compile(optimizer=optimizer, loss='binary_crossentropy',
metrics=['accuracy'])
print("data shapes:")
print(a.shape)
print(b.shape)
print(y.shape)
trace = siamese_model.fit([a, b], y, validation_split=0.2, epochs=30,
batch_size=16, shuffle=True)
print(trace.history['acc'][-1])
print(trace.history['val_acc'][-1])
|
Use linear models by default
|
Use linear models by default
|
Python
|
mit
|
ogrisel/brain2vec
|
0fb5a8b5caa99b82845712703bf53f2348227f78
|
examples/string_expansion.py
|
examples/string_expansion.py
|
"""Example of expanding and unexpanding string variables in entry fields."""
from __future__ import print_function
import bibpy
import os
def get_path_for(path):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), path)
def print_entries(entries):
print(os.linesep.join(map(str, entries)))
print()
if __name__ == '__main__':
filename = get_path_for('../tests/data/string_variables.bib')
entries, strings = bibpy.read_file(filename, format='relaxed')[:2]
print("* String entries:")
print_entries(strings)
print("* Without string expansion:")
print_entries(entries)
# Expand string variables in-place
bibpy.expand_strings(entries, strings, ignore_duplicates=False)
print("* With string expansion:")
print_entries(entries)
# Unexpand string variables in-place
bibpy.unexpand_strings(entries, strings, ignore_duplicates=False)
print("* And without string expansion again:")
print_entries(entries)
|
"""Example of expanding and unexpanding string variables in entry fields."""
from __future__ import print_function
import bibpy
import os
def get_path_for(path):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), path)
def print_entries(entries):
print(os.linesep.join(map(str, entries)))
print()
if __name__ == '__main__':
filename = get_path_for('../tests/data/string_variables.bib')
result = bibpy.read_file(filename, format='relaxed')
entries, strings = result.entries, result.strings
print("* String entries:")
print_entries(strings)
print("* Without string expansion:")
print_entries(entries)
# Expand string variables in-place
bibpy.expand_strings(entries, strings, ignore_duplicates=False)
print("* With string expansion:")
print_entries(entries)
# Unexpand string variables in-place
bibpy.unexpand_strings(entries, strings, ignore_duplicates=False)
print("* And without string expansion again:")
print_entries(entries)
|
Fix ordering in string expansion example
|
Fix ordering in string expansion example
|
Python
|
mit
|
MisanthropicBit/bibpy,MisanthropicBit/bibpy
|
3494282003315b32e8fe139714be041ed4dc2511
|
accloudtant/__main__.py
|
accloudtant/__main__.py
|
import csv
if __name__ == "__main__":
usage = []
with open("tests/fixtures/2021/03/S3.csv") as f:
reader = csv.DictReader(f)
for row in reader:
usage.append(row)
print("Simple Storage Service")
for entry in usage:
print(entry)
|
import csv
def area(entry):
if entry[" UsageType"].startswith("EUC1-"):
return "EU (Frankfurt)"
if __name__ == "__main__":
usage = []
with open("tests/fixtures/2021/03/S3.csv") as f:
reader = csv.DictReader(f)
for row in reader:
usage.append(row)
print("Simple Storage Service")
for area_name in set([area(entry) for entry in usage]):
print("\t", area_name)
|
Print list of areas in usage report
|
Print list of areas in usage report
This list is not completely ok, as some usage record types, like
`StorageObjectCount` can't get area calculated without inducing it
from other records.
|
Python
|
apache-2.0
|
ifosch/accloudtant
|
484f3537d634e31f79c2281cff869724707ee2c3
|
day03/solution.py
|
day03/solution.py
|
santaPosition = [0, 0]
roboSantaPosition = [0, 0]
uniquePositions = set()
input = open("data", "r").read()
for index, char in enumerate(input):
position = []
if index % 2 == 0:
position = santaPosition
else:
position = roboSantaPosition
if char is '^':
position[0] += 1
elif char is 'v':
position[0] -= 1
elif char is '>':
position[1] += 1
elif char is '<':
position[1] -= 1
uniquePositions.add((position[0], position[1]))
print "Happy Houses:", len(uniquePositions)
|
santaPosition = [0, 0]
roboSantaPosition = [0, 0]
uniquePositions = set()
input = open("data", "r").read()
for index, char in enumerate(input):
position = []
if index % 2 == 0:
position = santaPosition
else:
position = roboSantaPosition
if char is '^':
position[0] += 1
elif char is 'v':
position[0] -= 1
elif char is '>':
position[1] += 1
elif char is '<':
position[1] -= 1
uniquePositions.add(tuple(position))
print "Happy Houses:", len(uniquePositions)
|
Make tuple creation from position cleaner.
|
Make tuple creation from position cleaner.
|
Python
|
mit
|
Mark-Simulacrum/advent-of-code-2015,Mark-Simulacrum/advent-of-code-2015,Mark-Simulacrum/advent-of-code-2015,Mark-Simulacrum/advent-of-code-2015
|
844aff45eb1804b461460368f97af4f73a6b62f0
|
data_structures/union_find/weighted_quick_union.py
|
data_structures/union_find/weighted_quick_union.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class WeightedQuickUnion(object):
def __init__(self):
self.id = []
self.weight = []
def find(self, val):
p = val
while self.id[p] != p:
p = self.id[p]
return self.id[p]
def union(self, p, q):
p_root = self.find(p)
q_root = self.find(q)
if p_root == q_root:
return
self.id[q_root] = p_root
def is_connected(self, p, q):
return self.find(p) == self.find(q)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class WeightedQuickUnion(object):
def __init__(self, data=None):
self.id = data
self.weight = [1] * len(data)
self.count = len(data)
def count(self):
return self.count
def find(self, val):
p = val
while self.id[p] != p:
p = self.id[p]
return p
def union(self, p, q):
p_root = self.find(p)
q_root = self.find(q)
if p_root == q_root:
return
self.id[q_root] = p_root
self.count -= 1
def is_connected(self, p, q):
return self.find(p) == self.find(q)
|
Fix quick union functions issue
|
Fix quick union functions issue
Missing counter
Find function should return position of element
Decrement counter in union
|
Python
|
mit
|
hongta/practice-python,hongta/practice-python
|
bddab649c6684f09870983dca97c39eb30b62c06
|
djangobotcfg/status.py
|
djangobotcfg/status.py
|
from buildbot.status import html, words
from buildbot.status.web.authz import Authz
from buildbot.status.web.auth import BasicAuth
# authz = Authz(
# forceBuild=True,
# forceAllBuilds=True,
# pingBuilder=True,
# gracefulShutdown=True,
# stopBuild=True,
# stopAllBuilds=True,
# cancelPendingBuild=True,
# cleanShutdown=True,
# )
def get_status():
return [
html.WebStatus(
http_port = '8010',
# authz = authz,
order_console_by_time = True,
revlink = 'http://code.djangoproject.com/changeset/%s',
changecommentlink = (
r'\b#(\d+)\b',
r'http://code.djangoproject.com/ticket/\1',
r'Ticket \g<0>'
)
),
words.IRC(
host = 'irc.freenode.net',
channels = ['#revsys'],
nick = 'djangobuilds',
notify_events = {
'successToFailure': True,
'failureToSuccess': True,
}
)
]
|
from buildbot.status import html, words
from buildbot.status.web.authz import Authz
from buildbot.status.web.auth import BasicAuth
def get_status():
return [
html.WebStatus(
http_port = '8010',
# authz = authz,
order_console_by_time = True,
revlink = 'http://code.djangoproject.com/changeset/%s',
changecommentlink = (
r'\b#(\d+)\b',
r'http://code.djangoproject.com/ticket/\1',
r'Ticket \g<0>'
)
),
]
|
Remove the IRC bot for now, and also the commented-out code.
|
Remove the IRC bot for now, and also the commented-out code.
|
Python
|
bsd-3-clause
|
hochanh/django-buildmaster,jacobian-archive/django-buildmaster
|
b9792ce368e3422b28c04c328b22350b2ad991b3
|
appengine-experimental/src/models.py
|
appengine-experimental/src/models.py
|
from google.appengine.ext import db
class CHPIncident(db.Model):
CenterID = db.StringProperty(required=True)
DispatchID = db.StringProperty(required=True)
LogID = db.StringProperty(required=True)
LogTime = db.DateTimeProperty()
LogType = db.StringProperty()
LogTypeID = db.StringProperty()
Location = db.StringProperty()
Area = db.StringProperty()
ThomasBrothers = db.StringProperty()
TBXY = db.StringProperty()
LogDetails = db.BlobProperty()
geolocation = db.GeoPtProperty()
added = db.DateTimeProperty(auto_now_add=True)
last_update = db.DateTimeProperty(auto_now=True)
|
from google.appengine.ext import db
from datetime import datetime, timedelta
class CHPIncident(db.Model):
CenterID = db.StringProperty(required=True)
DispatchID = db.StringProperty(required=True)
LogID = db.StringProperty(required=True)
LogTime = db.DateTimeProperty()
LogType = db.StringProperty()
LogTypeID = db.StringProperty()
Location = db.StringProperty()
Area = db.StringProperty()
ThomasBrothers = db.StringProperty()
TBXY = db.StringProperty()
LogDetails = db.BlobProperty()
geolocation = db.GeoPtProperty()
added = db.DateTimeProperty(auto_now_add=True)
last_update = db.DateTimeProperty(auto_now=True)
def getStatus(self):
if self.added > datetime.utcnow() - timedelta(minutes=5):
# less than 5 min old == new
return 'new'
elif self.last_update < datetime.utcnow() - timedelta(minutes=5):
# not updated in 5 min == inactive
return 'inactive'
else:
return 'active'
|
Put a getStatus() method into the CHPIncident model. That's the right way to do it.
|
Put a getStatus() method into the CHPIncident model. That's the right way to do it.
|
Python
|
isc
|
lectroidmarc/SacTraffic,lectroidmarc/SacTraffic
|
6bb63c6133db2155c1985d6bb2827f65d5ae3555
|
ntm/__init__.py
|
ntm/__init__.py
|
from . import controllers
from . import heads
from . import init
from . import memory
from . import nonlinearities
from . import ntm
from . import similarities
from . import updates
|
from . import controllers
from . import heads
from . import init
from . import layers
from . import memory
from . import nonlinearities
from . import similarities
from . import updates
|
Fix import name from ntm to layers
|
Fix import name from ntm to layers
|
Python
|
mit
|
snipsco/ntm-lasagne
|
d7d6819e728edff997c07c6191f882a61d30f219
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="taggert",
version="1.0",
author="Martijn Grendelman",
author_email="m@rtijn.net",
maintainer="Martijn Grendelman",
maintainer_email="m@rtijn.net",
description="GTK+ 3 geotagging application",
long_description="Taggert is an easy-to-use program to geo-tag your photos, using GPS tracks or manually from a map",
url="http://www.grendelman.net/wp/tag/taggert",
license="Apache License version 2.0",
# package_dir={'taggert': 'taggert'},
packages=['taggert'],
scripts=['taggert_run'],
package_data={'taggert': ['data/taggert.glade', 'data/taggert.svg']},
data_files=[
('glib-2.0/schemas', ['com.tinuzz.taggert.gschema.xml']),
('applications', ['taggert.desktop']),
('pixmaps', ['taggert/data/taggert.svg']),
],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="taggert",
version="1.0",
author="Martijn Grendelman",
author_email="m@rtijn.net",
maintainer="Martijn Grendelman",
maintainer_email="m@rtijn.net",
description="GTK+ 3 geotagging application",
long_description="Taggert is an easy-to-use program to geo-tag your photos, using GPS tracks or manually from a map",
url="http://www.grendelman.net/wp/tag/taggert",
license="Apache License version 2.0",
# package_dir={'taggert': 'taggert'},
packages=['taggert'],
scripts=['taggert_run'],
package_data={'taggert': ['data/taggert.glade', 'data/taggert.svg', 'data/gpx.xsd']},
data_files=[
('glib-2.0/schemas', ['com.tinuzz.taggert.gschema.xml']),
('applications', ['taggert.desktop']),
('pixmaps', ['taggert/data/taggert.svg']),
],
)
|
Make sure to install gpx.xsd in data directory
|
Make sure to install gpx.xsd in data directory
|
Python
|
apache-2.0
|
tinuzz/taggert
|
cf30c07be85cf6408c636ffa34f984ed652cd212
|
setup.py
|
setup.py
|
from distutils.core import setup
import subprocess
setup(
name='colorguard',
version='0.01',
packages=['colorguard'],
install_requires=[
'tracer',
'harvester',
'simuvex'
],
)
|
from distutils.core import setup
import subprocess
setup(
name='colorguard',
version='0.01',
packages=['colorguard'],
install_requires=[
'rex',
'tracer',
'harvester',
'simuvex'
],
)
|
Add rex as a dependency
|
Add rex as a dependency
|
Python
|
bsd-2-clause
|
mechaphish/colorguard
|
c25297735f38d1e2a6ddb6878f919d192f9faedd
|
GcodeParser.py
|
GcodeParser.py
|
#!/usr/bin/env python
# coding=UTF-8
"""Module containing Gcode parsing functions"""
__author__ = "Dylan Armitage"
__email__ = "d.armitage89@gmail.com"
####---- Imports ----####
from pygcode import Line, GCodeLinearMove
def bounding_box(gcode_file):
"""Take in file of gcode, return dict of max and min bounding values"""
raise NotImplemented
def box_gcode(min_xy, max_xy):
"""Take in min/max coordinate tuples, return G0 commands to bound it"""
raise NotImplemented
def mid_gcode(min_xy, max_xy):
"""Take in min/max coord tuples, return G0 to go to midpoint"""
raise NotImplemented
|
#!/usr/bin/env python
# coding=UTF-8
"""Module containing Gcode parsing functions"""
__author__ = "Dylan Armitage"
__email__ = "d.armitage89@gmail.com"
####---- Imports ----####
from pygcode import Line, GCodeLinearMove
def bounding_box(gcode_file):
"""Take in file of gcode, return dict of max and min bounding values"""
raise NotImplemented
def box_gcode(min_xy, max_xy):
"""Take in min/max coordinate tuples, return G0 commands to bound it"""
gcode = []
gcode.append(GCodeLinearMove(X=min_xy[0], Y=min_xy[1]))
gcode.append(GCodeLinearMove(X=max_xy[0], Y=min_xy[1]))
gcode.append(GCodeLinearMove(X=max_xy[0], Y=max_xy[1]))
gcode.append(GCodeLinearMove(X=min_xy[0], Y=max_xy[1]))
gcode.append(GCodeLinearMove(X=min_xy[0], Y=min_xy[1]))
# Convert from GCodeLinearMove class to string
gcode = [str(line) for line in gcode]
return gcode
def mid_gcode(min_xy, max_xy):
"""Take in min/max coord tuples, return G0 to go to midpoint"""
raise NotImplemented
|
ADD function to return box gcode
|
ADD function to return box gcode
|
Python
|
mit
|
RootAccessHackerspace/k40-laser-scripts,RootAccessHackerspace/k40-laser-scripts
|
b3362c05032b66592b8592ccb94a3ec3f10f815f
|
project/urls.py
|
project/urls.py
|
# Django
from django.conf.urls import (
include,
url,
)
from django.contrib import admin
from django.http import HttpResponse
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/', include('apps.api.urls')),
url(r'^api-auth/', include('rest_framework.urls')),
url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")),
]
|
# Django
from django.conf.urls import (
include,
url,
)
from django.contrib import admin
from django.http import HttpResponse
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/', include('apps.api.urls')),
url(r'^api-auth/', include('rest_framework.urls')),
url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Add media server to dev server
|
Add media server to dev server
|
Python
|
bsd-2-clause
|
dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore,dbinetti/barberscore,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api
|
d730eb0c0df2fb6784f7adcce479c4c9588764b9
|
spacy/ja/__init__.py
|
spacy/ja/__init__.py
|
# encoding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from ..tokens import Doc
from .language_data import *
class Japanese(Language):
lang = 'ja'
def make_doc(self, text):
from janome.tokenizer import Tokenizer
words = [x.surface for x in Tokenizer().tokenize(text)]
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
# encoding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from ..tokens import Doc
from .language_data import *
class Japanese(Language):
lang = 'ja'
def make_doc(self, text):
try:
from janome.tokenizer import Tokenizer
except ImportError:
raise ImportError("The Japanese tokenizer requires the Janome library: https://github.com/mocobeta/janome")
words = [x.surface for x in Tokenizer().tokenize(text)]
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
Raise custom ImportError if importing janome fails
|
Raise custom ImportError if importing janome fails
|
Python
|
mit
|
raphael0202/spaCy,recognai/spaCy,explosion/spaCy,Gregory-Howard/spaCy,recognai/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,recognai/spaCy,raphael0202/spaCy,explosion/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,aikramer2/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,raphael0202/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy
|
223872a6f894b429b3784365fe50e139e649d233
|
chempy/electrochemistry/nernst.py
|
chempy/electrochemistry/nernst.py
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T, constants=None, units=None):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * math.log(ion_conc_out / ion_conc_in)
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
|
Add keyword arg for backend for log
|
Add keyword arg for backend for log
Can be used to switch out math module with other modules, ex. sympy for
symbolic answers
|
Python
|
bsd-2-clause
|
bjodah/aqchem,bjodah/aqchem,bjodah/chempy,bjodah/chempy,bjodah/aqchem
|
5ea19da9fdd797963a7b7f1f2fd8f7163200b4bc
|
easy_maps/conf.py
|
easy_maps/conf.py
|
# -*- coding: utf-8 -*-
import warnings
from django.conf import settings # pylint: disable=W0611
from appconf import AppConf
class EasyMapsSettings(AppConf):
CENTER = (-41.3, 32)
GEOCODE = 'easy_maps.geocode.google_v3'
ZOOM = 16 # See https://developers.google.com/maps/documentation/javascript/tutorial#MapOptions for more information.
LANGUAGE = 'en' # See https://developers.google.com/maps/faq#languagesupport for supported languages.
GOOGLE_MAPS_API_KEY = None
GOOGLE_KEY = None
CACHE_LIFETIME = 600 # 10 minutes in seconds
class Meta:
prefix = 'easy_maps'
holder = 'easy_maps.conf.settings'
if hasattr(settings, 'EASY_MAPS_GOOGLE_MAPS_API_KEY'):
warnings.warn("EASY_MAPS_GOOGLE_MAPS_API_KEY is deprecated, use EASY_MAPS_GOOGLE_KEY", DeprecationWarning)
|
# -*- coding: utf-8 -*-
import warnings
from django.conf import settings # pylint: disable=W0611
from appconf import AppConf
class EasyMapsSettings(AppConf):
CENTER = (-41.3, 32)
GEOCODE = 'easy_maps.geocode.google_v3'
ZOOM = 16 # See https://developers.google.com/maps/documentation/javascript/tutorial#MapOptions for more information.
LANGUAGE = 'en' # See https://developers.google.com/maps/faq#languagesupport for supported languages.
GOOGLE_MAPS_API_KEY = None
GOOGLE_KEY = None
CACHE_LIFETIME = 600 # 10 minutes in seconds
class Meta:
prefix = 'easy_maps'
holder = 'easy_maps.conf.settings'
if settings.EASY_MAPS_GOOGLE_MAPS_API_KEY is not None:
warnings.warn("EASY_MAPS_GOOGLE_MAPS_API_KEY is deprecated, use EASY_MAPS_GOOGLE_KEY", DeprecationWarning)
|
Check is EASY_MAPS_GOOGLE_MAPS_API_KEY is not None before raising warning.
|
Check is EASY_MAPS_GOOGLE_MAPS_API_KEY is not None before raising warning.
|
Python
|
mit
|
kmike/django-easy-maps,kmike/django-easy-maps,bashu/django-easy-maps,bashu/django-easy-maps
|
2d74b55a0c110a836190af819b55673bce2300a0
|
gaphor/ui/macosshim.py
|
gaphor/ui/macosshim.py
|
try:
import gi
gi.require_version("GtkosxApplication", "1.0")
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
|
try:
import gi
from gi.repository import Gtk
if Gtk.get_major_version() == 3:
gi.require_version("GtkosxApplication", "1.0")
else:
raise ValueError()
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
|
Fix macos shim for gtk 4
|
Fix macos shim for gtk 4
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
4a84fe0c774638b7a00d37864b6d634200512f99
|
tests.py
|
tests.py
|
import unittest
from stacklogger import srcfile
class TestUtils(unittest.TestCase):
def test_srcfile(self):
self.assertTrue(srcfile("foo.py").endswith("foo.py"))
self.assertTrue(srcfile("foo.pyc").endswith("foo.py"))
self.assertTrue(srcfile("foo.pyo").endswith("foo.py"))
self.assertTrue(srcfile("foo").endswith("foo"))
|
import inspect
import unittest
from stacklogger import srcfile
currentframe = inspect.currentframe
class FakeFrames(object):
def fake_method(self):
return currentframe()
@property
def fake_property(self):
return currentframe()
@classmethod
def fake_classmethod(cls):
return currentframe()
@staticmethod
def fake_staticmethod():
return currentframe()
def fake_function():
return currentframe()
class TestUtils(unittest.TestCase):
def test_srcfile(self):
self.assertTrue(srcfile("foo.py").endswith("foo.py"))
self.assertTrue(srcfile("foo.pyc").endswith("foo.py"))
self.assertTrue(srcfile("foo.pyo").endswith("foo.py"))
self.assertTrue(srcfile("foo").endswith("foo"))
|
Build fake frames for later testing.
|
Build fake frames for later testing.
|
Python
|
isc
|
whilp/stacklogger
|
4b485e601b4410561ee4b4a681e392f4d141a339
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
AUTHENTICATION_BACKENDS=(
'django_authgroupex.auth.AuthGroupeXBackend',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django_authgroupex',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner',
AUTHGROUPEX_LOGIN_REDIRECT_URL='/login-success/'
)
from django.test.utils import get_runner
def runtests():
if hasattr(django, 'setup'):
django.setup()
apps = ['django_authgroupex', ]
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(apps)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import sys
import django
from django.conf import settings
if not settings.configured:
settings.configure(
AUTHENTICATION_BACKENDS=(
'django_authgroupex.auth.AuthGroupeXBackend',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django_authgroupex',
),
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner',
AUTHGROUPEX_LOGIN_REDIRECT_URL='/login-success/',
MIDDLEWARE_CLASSES=()
)
from django.test.utils import get_runner
def runtests():
if hasattr(django, 'setup'):
django.setup()
apps = ['django_authgroupex', ]
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(apps)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Configure MIDDLEWARE_CLASSES in test settings
|
Configure MIDDLEWARE_CLASSES in test settings
Django 1.7 complained:
(1_7.W001) MIDDLEWARE_CLASSES is not set.
HINT: Django 1.7 changed the global defaults for the
MIDDLEWARE_CLASSES.
django.contrib.sessions.middleware.SessionMiddleware,
django.contrib.auth.middleware.AuthenticationMiddleware, and
django.contrib.messages.middleware.MessageMiddleware were removed
from the defaults. If your project needs these middleware then you
should configure this setting.
Set this setting to nothing to make Django happy.
|
Python
|
bsd-2-clause
|
Polytechnique-org/django-authgroupex
|
98be6419eed3dd6d0f056acbb31850291827ed46
|
doc/make_version.py
|
doc/make_version.py
|
print """
Version Info
============
To obtain version info::
from scan.version import __version__, version_history
print __version__
print version_history
"""
from scan.version import __version__, version_history
print "Version history::"
for line in version_history.splitlines():
print (" " + line)
|
print """
Version Info
============
To obtain version info::
from scan.version import __version__, version_history
print __version__
print version_history
"""
import sys
sys.path.append("..")
from scan.version import __version__, version_history
print "Version history::"
for line in version_history.splitlines():
print (" " + line)
|
Include version detail in doc
|
Include version detail in doc
|
Python
|
epl-1.0
|
PythonScanClient/PyScanClient,PythonScanClient/PyScanClient
|
48b1cfaadd7642706a576d8ba9bf38c297a2d873
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django_nose',
'permissions',
'permissions.tests',
],
ROOT_URLCONF='permissions.tests.urls',
TEST_RUNNER='django_nose.NoseTestSuiteRunner'
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django_nose',
'permissions',
'permissions.tests',
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='permissions.tests.urls',
TEST_RUNNER='django_nose.NoseTestSuiteRunner'
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
Add MIDDLEWARE_CLASSES to test settings
|
Add MIDDLEWARE_CLASSES to test settings
Squelches a warning when using Django 1.7.
|
Python
|
mit
|
PSU-OIT-ARC/django-perms,wylee/django-perms
|
eba6e117c0a13b49219bb60e773f896b274b6601
|
tests/_support/configs/collection.py
|
tests/_support/configs/collection.py
|
from spec import eq_
from invoke import ctask, Collection
@ctask
def collection(c):
c.run('false') # Ensures a kaboom if mocking fails
ns = Collection(collection)
ns.configure({'run': {'echo': True}})
|
from spec import eq_
from invoke import ctask, Collection
@ctask
def go(c):
c.run('false') # Ensures a kaboom if mocking fails
ns = Collection(go)
ns.configure({'run': {'echo': True}})
|
Fix test fixture to match earlier test change
|
Fix test fixture to match earlier test change
|
Python
|
bsd-2-clause
|
singingwolfboy/invoke,kejbaly2/invoke,tyewang/invoke,frol/invoke,mattrobenolt/invoke,mkusz/invoke,pfmoore/invoke,mkusz/invoke,pyinvoke/invoke,kejbaly2/invoke,pfmoore/invoke,sophacles/invoke,frol/invoke,pyinvoke/invoke,mattrobenolt/invoke
|
c31c54624d7a46dfd9df96e32d2e07246868aecc
|
tomviz/python/DefaultITKTransform.py
|
tomviz/python/DefaultITKTransform.py
|
def transform_scalars(dataset):
"""Define this method for Python operators that
transform the input array."""
from tomviz import utils
import numpy as np
import itk
# Get the current volume as a numpy array.
array = utils.get_array(dataset)
# Set up some ITK variables
itk_image_type = itk.Image.F3
itk_converter = itk.PyBuffer[itk_image_type]
# Read the image into ITK
itk_image = itk_converter.GetImageFromArray(array)
# ITK filter (I have no idea if this is right)
filter = \
itk.ConfidenceConnectedImageFilter[itk_image_type,itk.Image.SS3].New()
filter.SetInitialNeighborhoodRadius(3)
filter.SetMultiplier(3)
filter.SetNumberOfIterations(25)
filter.SetReplaceValue(255)
filter.SetSeed((24,65,37))
filter.SetInput(itk_image)
filter.Update()
# Get the image back from ITK (result is a numpy image)
result = itk.PyBuffer[itk.Image.SS3].GetArrayFromImage(filter.GetOutput())
# This is where the transformed data is set, it will display in tomviz.
utils.set_array(dataset, result)
|
import tomviz.operators
class DefaultITKTransform(tomviz.operators.CancelableOperator):
def transform_scalars(self, dataset):
"""Define this method for Python operators that transform the input
array. This example uses an ITK filter to add 10 to each voxel value."""
# Try imports to make sure we have everything that is needed
try:
from tomviz import itkutils
import itk
except Exception as exc:
print("Could not import necessary module(s)")
raise exc
self.progress.value = 0
self.progress.maximum = 100
# Add a try/except around the ITK portion. ITK exceptions are
# passed up to the Python layer, so we can at least report what
# went wrong with the script, e.g., unsupported image type.
try:
self.progress.value = 0
self.progress.message = "Converting data to ITK image"
# Get the ITK image
itk_image = itkutils.convert_vtk_to_itk_image(dataset)
itk_input_image_type = type(itk_image)
self.progress.value = 30
self.progress.message = "Running filter"
# ITK filter
filter = itk.AddImageFilter[itk_input_image_type, # Input 1
itk_input_image_type, # Input 2
itk_input_image_type].New() # Output
filter.SetInput1(itk_image)
filter.SetConstant2(10)
itkutils.observe_filter_progress(self, filter, 30, 70)
try:
filter.Update()
except RuntimeError: # Exception thrown when ITK filter is aborted
return
self.progress.message = "Saving results"
itkutils.set_array_from_itk_image(dataset, filter.GetOutput())
self.progress.value = 100
except Exception as exc:
print("Problem encountered while running %s" %
self.__class__.__name__)
raise exc
|
Change the ITK example to use a simpler ITK filter
|
Change the ITK example to use a simpler ITK filter
|
Python
|
bsd-3-clause
|
cjh1/tomviz,cryos/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,cjh1/tomviz,thewtex/tomviz,thewtex/tomviz,cryos/tomviz,mathturtle/tomviz,thewtex/tomviz,cjh1/tomviz,cryos/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,mathturtle/tomviz
|
8d8b122ecbb306bb53de4ee350104e7627e8b362
|
app/app/__init__.py
|
app/app/__init__.py
|
import os
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession, Base
def main(global_config, **settings):
'''This function returns a Pyramid WSGI application.'''
settings['sqlalchemy.url'] = os.environ.get('DATABASE_URL')
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('pyramid_jinja2')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('index', '/')
config.add_route('request_scene', '/request/{scene_id}')
config.add_route('done', '/done')
config.add_route('scene_status', '/scene/{scene_id}')
config.add_route('ajax', '/ajax')
config.scan()
return config.make_wsgi_app()
|
import os
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession, Base
def main(global_config, **settings):
'''This function returns a Pyramid WSGI application.'''
settings['sqlalchemy.url'] = os.environ.get('DATABASE_URL')
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('pyramid_jinja2')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('index', '/')
config.add_route('request_scene', '/request/{scene_id}')
config.add_route('request_preview', '/request_p/{scene_id}')
config.add_route('done', '/done')
config.add_route('scene_status', '/scene/{scene_id}')
config.add_route('ajax', '/ajax')
config.scan()
return config.make_wsgi_app()
|
Add route for preview request
|
Add route for preview request
|
Python
|
mit
|
recombinators/snapsat,recombinators/snapsat,recombinators/snapsat
|
0654d962918327e5143fb9250ad344de26e284eb
|
electrumx_server.py
|
electrumx_server.py
|
#!/usr/bin/env python3
#
# Copyright (c) 2016, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
'''Script to kick off the server.'''
import logging
import traceback
from server.env import Env
from server.controller import Controller
def main():
'''Set up logging and run the server.'''
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)-9s %(message)-100s '
'%(name)s [%(filename)s:%(lineno)d]')
logging.info('ElectrumX server starting')
try:
controller = Controller(Env())
controller.run()
except Exception:
traceback.print_exc()
logging.critical('ElectrumX server terminated abnormally')
else:
logging.info('ElectrumX server terminated normally')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
#
# Copyright (c) 2016, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
'''Script to kick off the server.'''
import logging
import traceback
from server.env import Env
from server.controller import Controller
def main():
'''Set up logging and run the server.'''
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)-7s %(message)-100s '
'[%(filename)s:%(lineno)d]')
logging.info('ElectrumX server starting')
try:
controller = Controller(Env())
controller.run()
except Exception:
traceback.print_exc()
logging.critical('ElectrumX server terminated abnormally')
else:
logging.info('ElectrumX server terminated normally')
if __name__ == '__main__':
main()
|
Remove logger name from logs
|
Remove logger name from logs
|
Python
|
mit
|
thelazier/electrumx,shsmith/electrumx,shsmith/electrumx,erasmospunk/electrumx,erasmospunk/electrumx,thelazier/electrumx
|
94264880688f5e2f8dbf098108bb05c2c244048d
|
froide_campaign/listeners.py
|
froide_campaign/listeners.py
|
from .models import Campaign, InformationObject
def connect_info_object(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if not reference.startswith('campaign:'):
return
namespace, campaign_value = reference.split(':', 1)
try:
campaign, slug = campaign_value.split('@', 1)
except (ValueError, IndexError):
return
try:
campaign_pk = int(campaign)
except ValueError:
return
try:
campaign = Campaign.objects.get(pk=campaign_pk)
except Campaign.DoesNotExist:
return
try:
kwargs = {
'pk': int(slug)
}
except ValueError:
kwargs = {'slug': slug}
try:
iobj = InformationObject.objects.get(campaign=campaign, **kwargs)
except InformationObject.DoesNotExist:
return
if iobj.foirequest is not None:
return
if iobj.publicbody != sender.public_body:
return
if not sender.public:
return
iobj.foirequest = sender
iobj.save()
|
from .models import Campaign, InformationObject
def connect_info_object(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if not reference.startswith('campaign:'):
return
namespace, campaign_value = reference.split(':', 1)
try:
campaign, slug = campaign_value.split('@', 1)
except (ValueError, IndexError):
return
try:
campaign_pk = int(campaign)
except ValueError:
return
try:
campaign = Campaign.objects.get(pk=campaign_pk)
except Campaign.DoesNotExist:
return
try:
kwargs = {
'pk': int(slug)
}
except ValueError:
kwargs = {'slug': slug}
try:
iobj = InformationObject.objects.get(campaign=campaign, **kwargs)
except InformationObject.DoesNotExist:
return
if iobj.publicbody != sender.public_body:
return
if not sender.public:
return
if iobj.foirequest is None:
iobj.foirequest = sender
iobj.foirequests.add(sender)
iobj.save()
|
Save request in new m2m filed
|
Save request in new m2m filed
|
Python
|
mit
|
okfde/froide-campaign,okfde/froide-campaign,okfde/froide-campaign
|
a910cd19890ef02a08aeb05c8ba450b2c59f0352
|
monitoring/nagios/plugin/__init__.py
|
monitoring/nagios/plugin/__init__.py
|
from monitoring.nagios.plugin.base import NagiosPlugin
from monitoring.nagios.plugin.snmp import NagiosPluginSNMP
from monitoring.nagios.plugin.secureshell import NagiosPluginSSH
from monitoring.nagios.plugin.database import NagiosPluginMSSQL
from monitoring.nagios.plugin.wmi import NagiosPluginWMI
|
from monitoring.nagios.plugin.base import NagiosPlugin
from monitoring.nagios.plugin.snmp import NagiosPluginSNMP
from monitoring.nagios.plugin.secureshell import NagiosPluginSSH
from monitoring.nagios.plugin.database import NagiosPluginMSSQL
from monitoring.nagios.plugin.wmi import NagiosPluginWMI
from monitoring.nagios.plugin.http import NagiosPluginHTTP
|
Make NagiosPluginHTTP available from monitoring.nagios.plugin package.
|
Make NagiosPluginHTTP available from monitoring.nagios.plugin package.
|
Python
|
mit
|
bigbrozer/monitoring.nagios,bigbrozer/monitoring.nagios
|
f794c6ed1f6be231d79ac35759ad76270c3e14e0
|
brains/mapping/admin.py
|
brains/mapping/admin.py
|
from django.contrib import admin
from mapping.models import Location, Report
class LocationAdmin(admin.ModelAdmin):
fieldsets = ((None,
{'fields': (
('name', 'suburb'),
('x', 'y'),
'building_type'
)}
),)
list_display = ['name', 'x', 'y', 'suburb']
list_filter = ['suburb']
search_fields = ['name']
readonly_fields = ['x', 'y', 'name', 'building_type', 'suburb']
actions = None
def has_add_permission(self, request):
return False
class ReportAdmin(admin.ModelAdmin):
fieldsets = ((None,
{'fields': ('location',
('zombies_only', 'inside'),
('is_ruined', 'is_illuminated', 'has_tree'),
('zombies_present', 'barricade_level'),
'players',
('reported_by', 'origin', 'reported_date')
)}
),)
readonly_fields = ['players', 'reported_date']
admin.site.register(Location, LocationAdmin)
admin.site.register(Report, ReportAdmin)
|
from django.contrib import admin
from mapping.models import Location, Report
class LocationAdmin(admin.ModelAdmin):
fieldsets = ((None,
{'fields': (
('name', 'suburb'),
('x', 'y'),
'building_type'
)}
),)
list_display = ['name', 'x', 'y', 'suburb']
list_filter = ['suburb']
search_fields = ['name']
readonly_fields = ['x', 'y', 'name', 'building_type', 'suburb']
actions = None
def has_add_permission(self, request):
return False
class ReportAdmin(admin.ModelAdmin):
fieldsets = ((None,
{'fields': ('location',
('zombies_only', 'inside'),
('is_ruined', 'is_illuminated', 'has_tree'),
('zombies_present', 'barricade_level'),
'players',
('reported_by', 'origin'),
'reported_date',
)}
),)
readonly_fields = ['location', 'zombies_only', 'inside', 'is_ruined',
'is_illuminated', 'has_tree', 'zombies_present', 'barricade_level',
'players', 'reported_by', 'origin', 'reported_date']
admin.site.register(Location, LocationAdmin)
admin.site.register(Report, ReportAdmin)
|
Set everything on the report read only.
|
Set everything on the report read only.
|
Python
|
bsd-3-clause
|
crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains
|
e76bd7de6a0eb7f46e9e5ce3cdaec44943b848d2
|
pagseguro/configs.py
|
pagseguro/configs.py
|
# coding: utf-8
class Config(object):
BASE_URL = "https://ws.pagseguro.uol.com.br"
VERSION = "/v2/"
CHECKOUT_SUFFIX = VERSION + "checkout"
NOTIFICATION_SUFFIX = VERSION + "transactions/notifications/%s"
TRANSACTION_SUFFIX = VERSION + "transactions/"
CHECKOUT_URL = BASE_URL + CHECKOUT_SUFFIX
NOTIFICATION_URL = BASE_URL + NOTIFICATION_SUFFIX
TRANSACTION_URL = BASE_URL + TRANSACTION_SUFFIX
CURRENCY = "BRL"
HEADERS = {
"Content-Type": "application/x-www-form-urlencoded; charset=ISO-8859-1"
}
REFERENCE_PREFIX = "REF%s"
PAYMENT_HOST = "https://pagseguro.uol.com.br"
PAYMENT_URL = PAYMENT_HOST + CHECKOUT_SUFFIX + "/payment.html?code=%s"
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
|
# coding: utf-8
class Config(object):
BASE_URL = "https://ws.pagseguro.uol.com.br"
VERSION = "/v2/"
CHECKOUT_SUFFIX = VERSION + "checkout"
CHARSET = "UTF-8" # ISO-8859-1
NOTIFICATION_SUFFIX = VERSION + "transactions/notifications/%s"
TRANSACTION_SUFFIX = VERSION + "transactions/"
CHECKOUT_URL = BASE_URL + CHECKOUT_SUFFIX
NOTIFICATION_URL = BASE_URL + NOTIFICATION_SUFFIX
TRANSACTION_URL = BASE_URL + TRANSACTION_SUFFIX
CURRENCY = "BRL"
HEADERS = {
"Content-Type": "application/x-www-form-urlencoded; charset={}".format(CHARSET)
}
REFERENCE_PREFIX = "REF%s"
PAYMENT_HOST = "https://pagseguro.uol.com.br"
PAYMENT_URL = PAYMENT_HOST + CHECKOUT_SUFFIX + "/payment.html?code=%s"
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
|
Fix charset default to UTF-8
|
Fix charset default to UTF-8
|
Python
|
mit
|
vintasoftware/python-pagseguro,rochacbruno/python-pagseguro,rgcarrasqueira/python-pagseguro
|
08461a2f61b5a5981a6da9f6ef91a362eed92bfd
|
pycroft/__init__.py
|
pycroft/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
"""
pycroft
~~~~~~~~~~~~~~
This package contains everything.
:copyright: (c) 2011 by AG DSN.
"""
import json, collections, pkgutil
class Config(object):
def __init__(self):
self._config_data = None
self._package = "pycroft"
self._resource = "config.json"
def load(self):
data = (pkgutil.get_data(self._package, self._resource) or
pkgutil.get_data(self._package, self._resource+".default"))
if data is None:
raise Exception(
"Could not load config file {1} "
"from package {0}".format(self._package, self._resource)
)
self._config_data = json.loads(data)
if not isinstance(self._config_data, collections.Mapping):
raise Exception("Config must be a JSON object!")
def __getitem__(self, key):
if self._config_data is None:
self.load()
return self._config_data[key]
def __setitem__(self, key, value):
raise Exception("It is not possible to set configuration entries!")
config = Config()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
"""
pycroft
~~~~~~~~~~~~~~
This package contains everything.
:copyright: (c) 2011 by AG DSN.
"""
import json, collections, pkgutil
class Config(object):
def __init__(self):
self._config_data = None
self._package = "pycroft"
self._resource = "config.json"
def load(self):
data = None
try:
data = pkgutil.get_data(self._package, self._resource)
except IOError:
data = pkgutil.get_data(self._package, self._resource+".default")
if data is None:
raise Exception(
"Could not load config file {1} "
"from package {0}".format(self._package, self._resource)
)
self._config_data = json.loads(data)
if not isinstance(self._config_data, collections.Mapping):
raise Exception("Config must be a JSON object!")
def __getitem__(self, key):
if self._config_data is None:
self.load()
return self._config_data[key]
def __setitem__(self, key, value):
raise Exception("It is not possible to set configuration entries!")
config = Config()
|
Fix config loader (bug in commit:5bdf6e47 / commit:eefe7561)
|
Fix config loader (bug in commit:5bdf6e47 / commit:eefe7561)
|
Python
|
apache-2.0
|
lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft
|
92595871f908aa22d353a2490f851da23f3d1f64
|
gitcd/Config/FilePersonal.py
|
gitcd/Config/FilePersonal.py
|
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
# add .gitcd-personal to .gitignore
gitignore = ".gitignore"
if not os.path.isfile(gitignore):
gitignoreContent = self.filename
else:
with open(gitignore, "r") as gitignoreFile:
gitignoreContent = gitignoreFile.read()
# if not yet in gitignore
if "\n%s\n" % (self.filename) not in gitignoreContent:
# add it
gitignoreContent = "%s\n%s\n" % (gitignoreContent, self.filename)
with open(gitignore, "w") as gitignoreFile:
gitignoreFile.write(gitignoreContent)
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
Add .gitcd-personal to .gitignore automaticly
|
Add .gitcd-personal to .gitignore automaticly
|
Python
|
apache-2.0
|
claudio-walser/gitcd,claudio-walser/gitcd
|
4dd86439d4c8393ac9c3bb6b958a1c8cb45b243a
|
gitfs/views/history_index.py
|
gitfs/views/history_index.py
|
from .view import View
class HistoryIndexView(View):
pass
|
from .view import View
from errno import ENOENT
from stat import S_IFDIR
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def readdir(self, path, fh):
return ['.', '..', 'commits everywhere']
|
Add mandatory methods to HistoryIndexView (refactor when working)
|
Add mandatory methods to HistoryIndexView (refactor when working)
|
Python
|
apache-2.0
|
PressLabs/gitfs,ksmaheshkumar/gitfs,rowhit/gitfs,PressLabs/gitfs,bussiere/gitfs
|
36da7bdc8402494b5ef3588289739e1696ad6002
|
docs/_ext/djangodummy/settings.py
|
docs/_ext/djangodummy/settings.py
|
# Settings file to allow parsing API documentation of Django modules,
# and provide defaults to use in the documentation.
#
# This file is placed in a subdirectory,
# so the docs root won't be detected by find_packages()
# Display sane URLs in the docs:
STATIC_URL = '/static/'
|
# Settings file to allow parsing API documentation of Django modules,
# and provide defaults to use in the documentation.
#
# This file is placed in a subdirectory,
# so the docs root won't be detected by find_packages()
# Display sane URLs in the docs:
STATIC_URL = '/static/'
# Avoid error for missing the secret key
SECRET_KEY = 'docs'
|
Fix autodoc support with Django 1.5
|
Fix autodoc support with Django 1.5
|
Python
|
apache-2.0
|
django-fluent/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents
|
9cefd4d933590dcea28cf221e6c5706c81cac882
|
invocations/testing.py
|
invocations/testing.py
|
from invoke import ctask as task
@task
def test(ctx, module=None, runner='spec'):
"""
Run a Spec or Nose-powered internal test suite.
Say ``--module=foo``/``-m foo`` to just run ``tests/foo.py``.
Defaults to running the ``spec`` tool; may override by saying e.g.
``runner='nosetests'``.
"""
# Allow selecting specific submodule
specific_module = " --tests=tests/%s.py" % module
args = (specific_module if module else "")
# Use pty so the spec/nose/Python process buffers "correctly"
ctx.run(runner + args, pty=True)
|
from invoke import ctask as task
@task(help={
'module': "Just runs tests/STRING.py.",
'runner': "Use STRING to run tests instead of 'spec'."
})
def test(ctx, module=None, runner='spec'):
"""
Run a Spec or Nose-powered internal test suite.
"""
# Allow selecting specific submodule
specific_module = " --tests=tests/%s.py" % module
args = (specific_module if module else "")
# Use pty so the spec/nose/Python process buffers "correctly"
ctx.run(runner + args, pty=True)
|
Move docstring arg bits into per-arg help
|
Move docstring arg bits into per-arg help
|
Python
|
bsd-2-clause
|
mrjmad/invocations,singingwolfboy/invocations,pyinvoke/invocations,alex/invocations
|
00b5599e574740680e6c08884510ad605294fad2
|
tests/conftest.py
|
tests/conftest.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Shared fixtures for :mod:`pytest`."""
from __future__ import print_function, absolute_import
import os
import pytest # noqa
import gryaml
from py2neo_compat import py2neo_ver
@pytest.fixture
def graphdb():
"""Fixture connecting to graphdb."""
if 'NEO4J_URI' not in os.environ:
pytest.skip('Need NEO4J_URI environment variable set')
graphdb = gryaml.connect(uri=os.environ['NEO4J_URI'])
graphdb.cypher.execute('MATCH (n) DETACH DELETE n')
return graphdb
@pytest.yield_fixture
def graphdb_offline():
"""Ensure the database is not connected."""
if py2neo_ver < 2:
pytest.skip('Offline not supported in py2neo < 2')
neo4j_uri_env = os.environ.get('NEO4J_URI', None)
if neo4j_uri_env:
del os.environ['NEO4J_URI']
old_graphdb = gryaml._py2neo.graphdb
gryaml._py2neo.graphdb = None
yield
gryaml._py2neo.graphdb = old_graphdb
if neo4j_uri_env:
os.environ['NEO4J_URI'] = neo4j_uri_env
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Shared fixtures for :mod:`pytest`."""
from __future__ import print_function, absolute_import
import os
import pytest # noqa
import gryaml
from py2neo_compat import py2neo_ver
@pytest.fixture
def graphdb():
"""Fixture connecting to graphdb."""
if 'NEO4J_URI' not in os.environ:
pytest.skip('Need NEO4J_URI environment variable set')
graphdb = gryaml.connect(uri=os.environ['NEO4J_URI'])
graphdb.delete_all()
return graphdb
@pytest.yield_fixture
def graphdb_offline():
"""Ensure the database is not connected."""
if py2neo_ver < 2:
pytest.skip('Offline not supported in py2neo < 2')
neo4j_uri_env = os.environ.get('NEO4J_URI', None)
if neo4j_uri_env:
del os.environ['NEO4J_URI']
old_graphdb = gryaml._py2neo.graphdb
gryaml._py2neo.graphdb = None
yield
gryaml._py2neo.graphdb = old_graphdb
if neo4j_uri_env:
os.environ['NEO4J_URI'] = neo4j_uri_env
|
Use `delete_all` instead of running cypher query
|
Use `delete_all` instead of running cypher query
|
Python
|
mit
|
wcooley/python-gryaml
|
0d1ab7106e438b418b575c4e0f6c22797158358a
|
tests/settings.py
|
tests/settings.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'tests',
)
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'tests',
)
SECRET_KEY = 'abc123'
|
Add required SECRET_KEY setting for Django 1.5+
|
Add required SECRET_KEY setting for Django 1.5+
|
Python
|
bsd-2-clause
|
bruth/django-preserialize,scottp-dpaw/django-preserialize
|
99f8fc1ec43aa7a1b96b2d8446a77111dbc61195
|
tests/test_cli.py
|
tests/test_cli.py
|
from mdformat._cli import run
def test_no_files_passed():
assert run(()) == 0
|
from mdformat._cli import run
def test_no_files_passed():
assert run(()) == 0
def test_format(tmp_path):
unformatted_markdown = "\n\n# A header\n\n"
formatted_markdown = "# A header\n"
file_path = tmp_path / "test_markdown.md"
file_path.write_text(unformatted_markdown)
assert run((str(file_path),)) == 0
assert file_path.read_text() == formatted_markdown
|
Add a test for formatting with cli
|
Add a test for formatting with cli
|
Python
|
mit
|
executablebooks/mdformat
|
e4fde66624f74c4b0bbfae7c7c11a50884a0a73c
|
pyfr/readers/base.py
|
pyfr/readers/base.py
|
# -*- coding: utf-8 -*-
from abc import ABCMeta, abstractmethod
import uuid
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
|
# -*- coding: utf-8 -*-
from abc import ABCMeta, abstractmethod
import uuid
import numpy as np
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = np.array(str(uuid.uuid4()), dtype='S')
return mesh
|
Fix the HDF5 type of mesh_uuid for imported meshes.
|
Fix the HDF5 type of mesh_uuid for imported meshes.
|
Python
|
bsd-3-clause
|
BrianVermeire/PyFR,Aerojspark/PyFR
|
0af3f7ddd1912d18d502ca1795c596397d9cd495
|
python/triple-sum.py
|
python/triple-sum.py
|
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
num_special_triplets = 0
for b in list_b:
len_a_candidates = len([a for a in list_a if a <= b])
len_c_candidates = len([c for c in list_c if c <= b])
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
if __name__ == '__main__':
_ = input().split()
list_a = list(set(map(int, input().rstrip().split())))
list_b = list(set(map(int, input().rstrip().split())))
list_c = list(set(map(int, input().rstrip().split())))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
|
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
# remove duplicates and sort lists
list_a = sorted(set(list_a))
list_b = sorted(set(list_b))
list_c = sorted(set(list_c))
num_special_triplets = 0
for b in list_b:
len_a_candidates = num_elements_less_than(b, list_a)
len_c_candidates = num_elements_less_than(b, list_c)
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
def num_elements_less_than(target, sorted_list):
for index, candidate in enumerate(sorted_list):
if candidate > target:
return index
return len(sorted_list)
if __name__ == '__main__':
_ = input().split()
list_a = list(map(int, input().rstrip().split()))
list_b = list(map(int, input().rstrip().split()))
list_c = list(map(int, input().rstrip().split()))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
|
Sort lists prior to computing len of candidates
|
Sort lists prior to computing len of candidates
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
90cbe055e9f0722de7619fbd777a10e2b95d63b3
|
tests/constants_test.py
|
tests/constants_test.py
|
import unittest
from mtglib.constants import base_url, card_flags
class DescribeConstants(unittest.TestCase):
def should_have_base_url(self):
url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
'?output=standard&action=advanced&')
assert base_url == url
def should_have_card_flags(self):
assert card_flags == ['text', 'color', 'subtype', 'type', 'set', 'cmc',
'power', 'tough', 'rarity', 'name', 'block']
|
import unittest
from nose.tools import eq_
from mtglib.constants import base_url, card_flags
class DescribeConstants(unittest.TestCase):
def should_have_base_url(self):
url = ('http://gatherer.wizards.com/Pages/Search/Default.aspx'
'?output=standard&action=advanced&')
eq_(base_url, url)
def should_have_card_flags(self):
eq_(card_flags, ['text', 'color', 'subtype', 'type', 'set', 'cmc',
'power', 'tough', 'rarity', 'name', 'block'])
|
Use helper function for testing equality.
|
Use helper function for testing equality.
|
Python
|
mit
|
chigby/mtg,chigby/mtg
|
2daee974533d1510a17280cddb5a4dfc147338fa
|
tests/level/test_map.py
|
tests/level/test_map.py
|
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
level_map = LevelMap()
level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
level_map = LevelMap()
level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
|
import unittest
from hunting.level.map import LevelTile, LevelMap
class TestPathfinding(unittest.TestCase):
def test_basic_diagonal(self):
level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)])
self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4))
def test_paths_around_wall(self):
level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)])
for x in range(1, 5):
level_map[x][1].blocks = True
self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)],
level_map.a_star_path(4, 0, 4, 2))
def tests_force_pathable_endpoint_parameter(self):
level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]])
self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True))
self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
|
Add test for force_pathable_endpoint pathfind param
|
Add test for force_pathable_endpoint pathfind param
This parameter is intended to allow pathing to adjacent squares
of an unpassable square. This is necessary because if you want to
pathfind to a monster which blocks a square, you don't want to
actually go *onto* the square, you just want to go next to it,
presumably so you can hit it.
|
Python
|
mit
|
MoyTW/RL_Arena_Experiment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.