commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
7783146e9bae3c63f5b8292df509f862c33881be | Update admin.py | ulule/django-courriers,ulule/django-courriers | courriers/admin.py | courriers/admin.py | from django.contrib import admin
from django.conf.urls.defaults import patterns, url
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from .models import Newsletter, NewsletterItem, NewsletterSubscriber, NewsletterList
class NewsletterAdmin(admin.ModelAdmin):
change_form_template = 'admin/courriers/newsletter/change_form.html'
list_display = ('name', 'headline', 'languages', 'published_at', 'status', 'newsletter_list',)
list_filter = ('published_at', 'status',)
def get_urls(self):
urls = super(NewsletterAdmin, self).get_urls()
my_urls = patterns(
'',
url(r'^send/(?P<newsletter_id>(\d+))/$',
self.send_newsletter,
name="send_newsletter")
)
return my_urls + urls
def send_newsletter(self, request, newsletter_id):
from courriers.backends import get_backend
backend_klass = get_backend()
backend = backend_klass()
newsletter = get_object_or_404(Newsletter, pk=newsletter_id)
backend.send_mails(newsletter)
self.message_user(request, _('The newsletter %s has been sent.') % newsletter)
return HttpResponseRedirect(reverse('admin:courriers_newsletter_change', args=(newsletter.id,)))
class NewsletterItemAdmin(admin.ModelAdmin):
list_display = ('description', 'content_type', 'newsletter',)
class NewsletterSubscriberAdmin(admin.ModelAdmin):
list_display = ('email', 'user', 'lang', 'is_unsubscribed',)
list_filter = ('is_unsubscribed',)
class NewsletterListAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'languages', 'created_at',)
admin.site.register(Newsletter, Newsletter)
admin.site.register(NewsletterItem, NewsletterItemAdmin)
admin.site.register(NewsletterSubscriber, NewsletterSubscriberAdmin)
admin.site.register(NewsletterList, NewsletterListAdmin)
| from django.contrib import admin
from django.conf.urls.defaults import patterns, url
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from .models import Newsletter, NewsletterItem, NewsletterSubscriber, NewsletterList
class NewsletterAdmin(admin.ModelAdmin):
change_form_template = 'admin/courriers/newsletter/change_form.html'
list_display = ('name', 'headline', 'languages', 'published_at', 'status', 'newsletter_list',)
list_filter = ('published_at', 'status',)
def get_urls(self):
urls = super(NewsletterAdmin, self).get_urls()
my_urls = patterns(
'',
url(r'^send/(?P<newsletter_id>(\d+))/$',
self.send_newsletter,
name="send_newsletter")
)
return my_urls + urls
def send_newsletter(self, request, newsletter_id):
from courriers.backends import get_backend
backend_klass = get_backend()
backend = backend_klass()
newsletter = get_object_or_404(Newsletter, pk=newsletter_id)
backend.send_mails(newsletter)
self.message_user(request, _('The newsletter %s has been sent.') % newsletter)
return HttpResponseRedirect(reverse('admin:courriers_newsletter_change', args=(newsletter.id,)))
class NewsletterItemAdmin(admin.ModelAdmin):
list_display = ('description', 'content_type', 'newsletter',)
class NewsletterSubscriberAdmin(admin.ModelAdmin):
list_display = ('email', 'user', 'lang', 'is_unsubscribed',)
list_filter = ('is_unsubscribed',)
class NewsletterListAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'languages', 'created_at',)
admin.site.register(NewsletterList, NewsletterListAdmin)
admin.site.register(NewsletterItem, NewsletterItemAdmin)
admin.site.register(NewsletterSubscriber, NewsletterSubscriberAdmin)
admin.site.register(NewsletterList, NewsletterListAdmin)
| mit | Python |
c3b9d523301e483c54ede4f519cdd4456ed54fe8 | Add message_annotations to Interceptor object | ooici/pyon,scionrep/scioncc,mkl-/scioncc,ooici/pyon,crchemist/scioncc,scionrep/scioncc,mkl-/scioncc,scionrep/scioncc,mkl-/scioncc,crchemist/scioncc,crchemist/scioncc | pyon/core/interceptor/interceptor.py | pyon/core/interceptor/interceptor.py |
class Invocation(object):
"""
Container object for parameters of events/messages passed to internal
capability container processes
"""
# Event outbound processing path
PATH_OUT = 'outgoing'
# Event inbound processing path
PATH_IN = 'incoming'
def __init__(self, **kwargs):
self.args = kwargs
self.path = kwargs.get('path')
self.message = kwargs.get('message')
self.headers = kwargs.get('headers') or {} # ensure dict
self.message_annotations = {}
class Interceptor(object):
"""
Basic interceptor model.
"""
def configure(self, config):
pass
def outgoing(self, invocation):
pass
def incoming(self, invocation):
pass
def process_interceptors(interceptors, invocation):
for interceptor in interceptors:
func = getattr(interceptor, invocation.path)
invocation = func(invocation)
return invocation
def walk(o, cb):
"""
Utility method to do recursive walking of a possible iterable (inc dicts) and do inline transformations.
You supply a callback which receives an object. That object may be an iterable (which will then be walked
after you return it, as long as it remains an iterable), or it may be another object inside of that.
If a dict is discovered, your callback will receive the dict as a whole and the contents of values only. Keys are left untouched.
@TODO move to a general utils area?
"""
if hasattr(o, '__iter__'):
newo = cb(o)
# still an iterable? iterate it.
if hasattr(newo, '__iter__'):
if isinstance(newo, dict):
return dict(((k, walk(v, cb)) for k,v in newo.iteritems()))
else:
return [walk(x, cb) for x in newo]
else:
return newo
else:
return cb(o)
|
class Invocation(object):
"""
Container object for parameters of events/messages passed to internal
capability container processes
"""
# Event outbound processing path
PATH_OUT = 'outgoing'
# Event inbound processing path
PATH_IN = 'incoming'
def __init__(self, **kwargs):
self.args = kwargs
self.path = kwargs.get('path')
self.message = kwargs.get('message')
self.headers = kwargs.get('headers') or {} # ensure dict
class Interceptor(object):
"""
Basic interceptor model.
"""
def configure(self, config):
pass
def outgoing(self, invocation):
pass
def incoming(self, invocation):
pass
def process_interceptors(interceptors, invocation):
for interceptor in interceptors:
func = getattr(interceptor, invocation.path)
invocation = func(invocation)
return invocation
def walk(o, cb):
"""
Utility method to do recursive walking of a possible iterable (inc dicts) and do inline transformations.
You supply a callback which receives an object. That object may be an iterable (which will then be walked
after you return it, as long as it remains an iterable), or it may be another object inside of that.
If a dict is discovered, your callback will receive the dict as a whole and the contents of values only. Keys are left untouched.
@TODO move to a general utils area?
"""
if hasattr(o, '__iter__'):
newo = cb(o)
# still an iterable? iterate it.
if hasattr(newo, '__iter__'):
if isinstance(newo, dict):
return dict(((k, walk(v, cb)) for k,v in newo.iteritems()))
else:
return [walk(x, cb) for x in newo]
else:
return newo
else:
return cb(o)
| bsd-2-clause | Python |
8795815dd9e1ad1faf36359dc24659db9eebad1f | Update test_mixins.py,test_performance.py, test_dns.py to reflect changes in mixins.py. Added dns functional test, and trust classes | F5Networks/f5-common-python,F5Networks/f5-common-python,wojtek0806/f5-common-python | f5/bigip/sys/test/test_dns.py | f5/bigip/sys/test/test_dns.py | # Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import pytest
from f5.bigip.mixins import UnsupportedMethod
from f5.bigip.sys.dns import Dns
@pytest.fixture
def FakeDns():
fake_sys = mock.MagicMock()
return Dns(fake_sys)
def test_create_raises(FakeDns):
with pytest.raises(UnsupportedMethod) as EIO:
FakeDns.create()
assert EIO.value.message == "Dns does not support the create method"
def test_delete_raises(FakeDns):
with pytest.raises(UnsupportedMethod) as EIO:
FakeDns.delete()
assert EIO.value.message == "Dns does not support the delete method"
| # Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import pytest
from f5.bigip.mixins import UnnamedResourceMixin
from f5.bigip.sys.dns import Dns
@pytest.fixture
def FakeDns():
fake_sys = mock.MagicMock()
return Dns(fake_sys)
def test_create_raises(FakeDns):
with pytest.raises(UnnamedResourceMixin.UnsupportedMethod) as EIO:
FakeDns.create()
assert EIO.value.message == "Dns does not support the create method"
def test_delete_raises(FakeDns):
with pytest.raises(UnnamedResourceMixin.UnsupportedMethod) as EIO:
FakeDns.delete()
assert EIO.value.message == "Dns does not support the delete method"
| apache-2.0 | Python |
14b1648b96064363a833c496da38e62ffc9dbbcb | Revert splitString to former value | mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData | external_tools/src/main/python/images/common.py | external_tools/src/main/python/images/common.py | #!/usr/bin/python
splitString='images/clean/impc/'
| #!/usr/bin/python
#splitString='images/clean/impc/'
splitString='images/holding_area/impc/'
| apache-2.0 | Python |
d6c9e40341b700360164b4f8673dc991e5fc61b5 | Update extension | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | docs/doc_extensions.py | docs/doc_extensions.py | """
Read the Docs documentation extensions for Sphinx
Adds the following roles:
djangosetting
Output an inline literal of the corresponding setting value. Useful for
keeping documentation up to date without editing on settings changes.
buildpyversions
Output a comma separated list of the supported python versions for a
Read the Docs build image.
"""
from __future__ import division, print_function, unicode_literals
from django.conf import settings
from docutils import nodes, utils
def django_setting_role(typ, rawtext, text, lineno, inliner, options=None,
content=None):
"""Always up to date Django settings from the application"""
dj_setting = getattr(settings, utils.unescape(text), 'None')
node = nodes.literal(dj_setting, dj_setting)
return [node], []
def python_supported_versions_role(typ, rawtext, text, lineno, inliner,
options=None, content=None):
"""Up to date supported python versions for each build image."""
image = '{}:{}'.format(settings.DOCKER_DEFAULT_IMAGE, text)
image_settings = settings.DOCKER_IMAGE_SETTINGS[image]
python_versions = image_settings['python']['supported_versions']
node_list = []
separator = ', '
for i, version in enumerate(python_versions):
node_list.append(nodes.literal(version, version))
if i < len(python_versions) - 1:
node_list.append(nodes.Text(separator))
return (node_list, [])
def setup(_):
from docutils.parsers.rst import roles
roles.register_local_role(
'djangosetting',
django_setting_role
)
roles.register_local_role(
'buildpyversions',
python_supported_versions_role
)
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| """
Read the Docs documentation extensions for Sphinx
Adds the following roles:
djangosetting
Output an inline literal of the corresponding setting value. Useful for
keeping documentation up to date without editing on settings changes.
buildpyversions
Output a comma separated list of the supported python versions for a
Read the Docs build image.
"""
from __future__ import division, print_function, unicode_literals
from django.conf import settings
from docutils import nodes, utils
from readthedocs.config.config import (
DOCKER_DEFAULT_IMAGE, DOCKER_IMAGE_SETTINGS)
def django_setting_role(typ, rawtext, text, lineno, inliner, options=None,
content=None):
"""Always up to date Django settings from the application"""
dj_setting = getattr(settings, utils.unescape(text), 'None')
node = nodes.literal(dj_setting, dj_setting)
return [node], []
def python_supported_versions_role(typ, rawtext, text, lineno, inliner,
options=None, content=None):
"""Up to date supported python versions for each build image."""
image = '{}:{}'.format(DOCKER_DEFAULT_IMAGE, text)
image_settings = DOCKER_IMAGE_SETTINGS[image]
python_versions = image_settings['python']['supported_versions']
node_list = []
separator = ', '
for i, version in enumerate(python_versions):
node_list.append(nodes.literal(version, version))
if i < len(python_versions) - 1:
node_list.append(nodes.Text(separator))
return (node_list, [])
def setup(_):
from docutils.parsers.rst import roles
roles.register_local_role(
'djangosetting',
django_setting_role
)
roles.register_local_role(
'buildpyversions',
python_supported_versions_role
)
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| mit | Python |
c81f5f85c30d8ca57b42d82829c36915e7aca605 | Allow anyone to be winners | steakholders-tm/bingo-server | src/bingo_server/api/views.py | src/bingo_server/api/views.py | from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from rest_framework.permissions import AllowAny
from ..models import Game, GameType, Place, PrimaryCategory, SecondaryCategory, Tile, Winner
from .serializers import GameSerializer, GameTypeSerializer, PlaceSerializer, PrimaryCategorySerializer, \
SecondaryCategorySerializer, TileSerializer, WinnerSerializer
class GameViewSet(GenericViewSet, CreateModelMixin, ListModelMixin, RetrieveModelMixin):
serializer_class = GameSerializer
queryset = Game.objects.all()
permission_classes = (AllowAny,)
class PrimaryCategoryViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = PrimaryCategorySerializer
queryset = PrimaryCategory.objects.all()
permission_classes = (AllowAny,)
class SecondaryCategoryViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = SecondaryCategorySerializer
queryset = SecondaryCategory.objects.all()
permission_classes = (AllowAny,)
class PlaceViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = PlaceSerializer
queryset = Place.objects.all()
permission_classes = (AllowAny,)
class GameTypeViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = GameTypeSerializer
queryset = GameType.objects.all()
permission_classes = (AllowAny,)
class TileViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = TileSerializer
queryset = Tile.objects.all()
permission_classes = (AllowAny,)
filter_fields = ('games', 'primary_categories', 'secondary_categories')
class WinnerViewSet(ModelViewSet):
serializer_class = WinnerSerializer
queryset = Winner.objects.all()
permission_classes = (AllowAny,)
filter_fields = ('game',)
| from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin
from rest_framework.viewsets import GenericViewSet
from rest_framework.permissions import AllowAny
from ..models import Game, GameType, Place, PrimaryCategory, SecondaryCategory, Tile, Winner
from .serializers import GameSerializer, GameTypeSerializer, PlaceSerializer, PrimaryCategorySerializer, \
SecondaryCategorySerializer, TileSerializer, WinnerSerializer
class GameViewSet(GenericViewSet, CreateModelMixin, ListModelMixin, RetrieveModelMixin):
serializer_class = GameSerializer
queryset = Game.objects.all()
permission_classes = (AllowAny,)
class PrimaryCategoryViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = PrimaryCategorySerializer
queryset = PrimaryCategory.objects.all()
permission_classes = (AllowAny,)
class SecondaryCategoryViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = SecondaryCategorySerializer
queryset = SecondaryCategory.objects.all()
permission_classes = (AllowAny,)
class PlaceViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = PlaceSerializer
queryset = Place.objects.all()
permission_classes = (AllowAny,)
class GameTypeViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = GameTypeSerializer
queryset = GameType.objects.all()
permission_classes = (AllowAny,)
class TileViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = TileSerializer
queryset = Tile.objects.all()
permission_classes = (AllowAny,)
filter_fields = ('games', 'primary_categories', 'secondary_categories')
class WinnerViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin):
serializer_class = WinnerSerializer
queryset = Winner.objects.all()
permission_classes = (AllowAny,)
filter_fields = ('game',)
| mit | Python |
d492f17edc1e7d464242942f2c786337d8687304 | Update binary-tree-maximum-path-sum.py | jaredkoontz/leetcode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,githubutilities/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode | Python/binary-tree-maximum-path-sum.py | Python/binary-tree-maximum-path-sum.py | # Time: O(n)
# Space: O(h), h is height of binary tree
#
# Given a binary tree, find the maximum path sum.
#
# The path may start and end at any node in the tree.
#
# For example:
# Given the below binary tree,
#
# 1
# / \
# 2 3
# Return 6.
#
# Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
maxSum = float("-inf")
# @param root, a tree node
# @return an integer
def maxPathSum(self, root):
self.maxPathSumRecu(root)
return self.maxSum
def maxPathSumRecu(self, root):
if root is None:
return 0
left = max(0, self.maxPathSumRecu(root.left))
right = max(0, self.maxPathSumRecu(root.right))
self.maxSum = max(self.maxSum, root.val + left + right)
return root.val + max(left, right)
if __name__ == "__main__":
root = TreeNode(1)
root.left = TreeNode(2)
root.right = TreeNode(3)
result = Solution().maxPathSum(root)
print result
| # Time: O(n)
# Space: O(logn)
#
# Given a binary tree, find the maximum path sum.
#
# The path may start and end at any node in the tree.
#
# For example:
# Given the below binary tree,
#
# 1
# / \
# 2 3
# Return 6.
#
# Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
maxSum = float("-inf")
# @param root, a tree node
# @return an integer
def maxPathSum(self, root):
self.maxPathSumRecu(root)
return self.maxSum
def maxPathSumRecu(self, root):
if root is None:
return 0
left = max(0, self.maxPathSumRecu(root.left))
right = max(0, self.maxPathSumRecu(root.right))
self.maxSum = max(self.maxSum, root.val + left + right)
return root.val + max(left, right)
if __name__ == "__main__":
root = TreeNode(1)
root.left = TreeNode(2)
root.right = TreeNode(3)
result = Solution().maxPathSum(root)
print result
| mit | Python |
8ca26efbb9224c98c0d4f4e5300491f395c2f51b | use jcommon.*_langdb instead of self | Mstrodl/jose,lnmds/jose,Mstrodl/jose | ext/joselang.py | ext/joselang.py | #!/usr/bin/env python3
import discord
import asyncio
import sys
sys.path.append("..")
import jauxiliar as jaux
import joseerror as je
import josecommon as jcommon
class JoseLanguage(jaux.Auxiliar):
def __init__(self, cl):
jaux.Auxiliar.__init__(self, cl)
self.LANGLIST = [
'pt', 'en'
]
async def savedb(self):
await jcommon.save_langdb()
async def ext_load(self):
status = await jcommon.load_langdb()
return status
async def ext_unload(self):
status = await jcommon.save_langdb()
return status
async def c_reloadlangdb(self, message, args, cxt):
await self.savedb()
await jcommon.load_langdb()
await cxt.say(":speech_left: langdb reloaded")
async def c_language(self, message, args, cxt):
'''`!language lang` - sets language for a server'''
if message.server is None:
await cxt.say("Language support is not available for DMs")
if len(args) < 2:
await cxt.say(self.c_language.__doc__)
return
language = args[1]
if language not in self.LANGLIST:
await cxt.say("%s: Language not found" % language)
return
await jcommon.langdb_set(message.server.id, language)
await cxt.say(":speech_left: Set language to %s" % language)
await self.savedb()
async def c_listlang(self, message, args, cxt):
'''`!listlang` - lists all available languages'''
await cxt.say(self.codeblock("", " ".join(self.LANGLIST)))
| #!/usr/bin/env python3
import discord
import asyncio
import sys
sys.path.append("..")
import jauxiliar as jaux
import joseerror as je
import josecommon as jcommon
class JoseLanguage(jaux.Auxiliar):
def __init__(self, cl):
jaux.Auxiliar.__init__(self, cl)
self.LANGLIST = [
'pt', 'en'
]
async def savedb(self):
await jcommon.save_langdb()
async def ext_load(self):
status = await self.load_langdb()
return status
async def ext_unload(self):
status = await self.save_langdb()
return status
async def c_reloadlangdb(self, message, args, cxt):
await self.savedb()
await jcommon.load_langdb()
await cxt.say(":speech_left: langdb reloaded")
async def c_language(self, message, args, cxt):
'''`!language lang` - sets language for a server'''
if message.server is None:
await cxt.say("Language support is not available for DMs")
if len(args) < 2:
await cxt.say(self.c_language.__doc__)
return
language = args[1]
if language not in self.LANGLIST:
await cxt.say("%s: Language not found" % language)
return
await jcommon.langdb_set(message.server.id, language)
await cxt.say(":speech_left: Set language to %s" % language)
await self.savedb()
async def c_listlang(self, message, args, cxt):
'''`!listlang` - lists all available languages'''
await cxt.say(self.codeblock("", " ".join(self.LANGLIST)))
| mit | Python |
011579ec11b97a1d1752bf59c60694019838c309 | fix existance call to xml_config | davisd50/sparc.db | sparc/db/zodb/database.py | sparc/db/zodb/database.py | from ZODB import config
from zope.component import createObject
from zope.component import getUtility
from zope.component.factory import Factory
from zope.interface import alsoProvides
from zope.interface import implements
from interfaces import IZODBDatabase
from zope.component._api import createObject
from sparc.configuration.xml import IAppElementTreeConfig
class zodbDatabaseFactoryHelper(object):
implements(IZODBDatabase)
def __new__(self, *args, **kwargs):
db = None
if 'string' in kwargs:
db = config.databaseFromString(kwargs['string'])
if 'file' in kwargs:
db = config.databaseFromFile(kwargs['file'])
if 'url' in kwargs:
db = config.databaseFromURL(kwargs['url'])
if not db:
raise ValueError('unable to obtain ZODB object from arguments')
alsoProvides(db, IZODBDatabase)
return db
zodbDatabaseFactory = Factory(zodbDatabaseFactoryHelper)
class zodbDatabaseFromConfigHelper(object):
config_map = {} # {url: IZODBDatabase}
def __new__(self, xml_config = None):
url = None
if not xml_config or not len(xml_config):
xml_config = getUtility(IAppElementTreeConfig)
for sparc in xml_config.findall('sparc'):
for db in sparc.findall('db'):
for zodb in db.findall('zodb'):
url = zodb.attrib['url']
if not url:
raise LookupError('unable to find configuration for sparc::db::zodb::url')
if url not in zodbDatabaseFromConfigHelper.config_map:
db = createObject(u'sparc.db.zodb.database', url=url)
zodbDatabaseFromConfigHelper.config_map[url] = db
return zodbDatabaseFromConfigHelper.config_map[url]
zodbFromConfigFactory = Factory(zodbDatabaseFromConfigHelper) | from ZODB import config
from zope.component import createObject
from zope.component import getUtility
from zope.component.factory import Factory
from zope.interface import alsoProvides
from zope.interface import implements
from interfaces import IZODBDatabase
from zope.component._api import createObject
from sparc.configuration.xml import IAppElementTreeConfig
class zodbDatabaseFactoryHelper(object):
implements(IZODBDatabase)
def __new__(self, *args, **kwargs):
db = None
if 'string' in kwargs:
db = config.databaseFromString(kwargs['string'])
if 'file' in kwargs:
db = config.databaseFromFile(kwargs['file'])
if 'url' in kwargs:
db = config.databaseFromURL(kwargs['url'])
if not db:
raise ValueError('unable to obtain ZODB object from arguments')
alsoProvides(db, IZODBDatabase)
return db
zodbDatabaseFactory = Factory(zodbDatabaseFactoryHelper)
class zodbDatabaseFromConfigHelper(object):
config_map = {} # {url: IZODBDatabase}
def __new__(self):
url = None
xml_config = getUtility(IAppElementTreeConfig)
for sparc in xml_config.findall('sparc'):
for db in sparc.findall('db'):
for zodb in db.findall('zodb'):
url = zodb.attrib['url']
if not url:
raise LookupError('unable to find configuration for sparc::db::zodb::url')
if url not in zodbDatabaseFromConfigHelper.config_map:
db = createObject(u'sparc.db.zodb.database', url=url)
zodbDatabaseFromConfigHelper.config_map[url] = db
return zodbDatabaseFromConfigHelper.config_map[url]
zodbFromConfigFactory = Factory(zodbDatabaseFromConfigHelper) | mit | Python |
30c627c97b14eaecbf4f61a2d3ed01dd5ea1282d | Set version 2.5.0 | atztogo/phonopy,atztogo/phonopy,atztogo/phonopy,atztogo/phonopy | phonopy/version.py | phonopy/version.py | # Copyright (C) 2013 Atsushi Togo
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__version__ = "2.5.0"
| # Copyright (C) 2013 Atsushi Togo
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__version__ = "2.4.3"
| bsd-3-clause | Python |
29f10624bd398442ab3530215e0a73b362e0559d | Change url+checksums for libpng to official sourceforge archives (#23767) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/libpng/package.py | var/spack/repos/builtin/packages/libpng/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libpng(AutotoolsPackage):
"""libpng is the official PNG reference library."""
homepage = "http://www.libpng.org/pub/png/libpng.html"
url = "https://prdownloads.sourceforge.net/libpng/libpng-1.6.37.tar.xz"
git = "https://github.com/glennrp/libpng.git"
version('1.6.37', sha256='505e70834d35383537b6491e7ae8641f1a4bed1876dbfe361201fc80868d88ca')
# From http://www.libpng.org/pub/png/libpng.html (2019-04-15)
# libpng versions 1.6.36 and earlier have a use-after-free bug in the
# simplified libpng API png_image_free(). It has been assigned ID
# CVE-2019-7317. The vulnerability is fixed in version 1.6.37,
# released on 15 April 2019.
# Required for qt@3
version('1.2.57', sha256='0f4620e11fa283fedafb474427c8e96bf149511a1804bdc47350963ae5cf54d8')
depends_on('zlib@1.0.4:') # 1.2.5 or later recommended
def configure_args(self):
args = [
# not honored, see
# https://sourceforge.net/p/libpng/bugs/210/#33f1
# '--with-zlib=' + self.spec['zlib'].prefix,
'CPPFLAGS={0}'.format(self.spec['zlib'].headers.include_flags),
'LDFLAGS={0}'.format(self.spec['zlib'].libs.search_flags)
]
return args
def check(self):
# Libpng has both 'check' and 'test' targets that are aliases.
# Only need to run the tests once.
make('check')
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libpng(AutotoolsPackage):
"""libpng is the official PNG reference library."""
homepage = "http://www.libpng.org/pub/png/libpng.html"
url = "https://github.com/glennrp/libpng/archive/v1.6.37.tar.gz"
git = "https://github.com/glennrp/libpng.git"
version('1.6.37', sha256='ca74a0dace179a8422187671aee97dd3892b53e168627145271cad5b5ac81307')
# From http://www.libpng.org/pub/png/libpng.html (2019-04-15)
# libpng versions 1.6.36 and earlier have a use-after-free bug in the
# simplified libpng API png_image_free(). It has been assigned ID
# CVE-2019-7317. The vulnerability is fixed in version 1.6.37,
# released on 15 April 2019.
# Required for qt@3
version('1.2.57', sha256='7f415186d38ca71c23058386d7cf5135c8beda821ee1beecdc2a7a26c0356615')
depends_on('zlib@1.0.4:') # 1.2.5 or later recommended
def configure_args(self):
args = [
# not honored, see
# https://sourceforge.net/p/libpng/bugs/210/#33f1
# '--with-zlib=' + self.spec['zlib'].prefix,
'CPPFLAGS={0}'.format(self.spec['zlib'].headers.include_flags),
'LDFLAGS={0}'.format(self.spec['zlib'].libs.search_flags)
]
return args
def check(self):
# Libpng has both 'check' and 'test' targets that are aliases.
# Only need to run the tests once.
make('check')
| lgpl-2.1 | Python |
218e76eab9ffdb3ce1cd0d58e6e8a02bab592251 | Add moving wall to materials | mkondratyev85/pgm | gui/materials.py | gui/materials.py | import numpy as np
materials = {
"default": {"mu": 1, "rho": 1, "eta": 1, "C": 1, "sinphi": 1},
"magma": {"mu": 8 * 10**10, "rho": 2800, "eta": 10**16, "C": 10**7, "sinphi": 45},
"light magma": {"mu": 8 * 10**10, "rho": 2600, "eta": 10**13, "C": 10**7, "sinphi": 45},
"heavy magma": {"mu": 8 * 10**10, "rho": 3200, "eta": 10**16, "C": 10**7, "sinphi": 45},
"sand": {"mu": 10**6, "rho": 1560, "eta": 10**9, "C": 10, "sinphi": np.sin(np.radians(36))},
"viso-elastic slab": {"mu": 10**10, "rho": 4000, "eta": 10**27, "C": 10, "sinphi": np.sin(np.radians(36))},
"viso-elastic medium": {"mu": 10**20, "rho": 1, "eta": 10**24, "C": 10, "sinphi": np.sin(np.radians(36))},
"sticky air": {"mu": 10**6, "rho": 1, "eta": 10**2, "C": 10, "sinphi": 0},
"mobile wall": {"mu": 10**16, "rho": 2520, "eta": 10**12, "C": 10, "sinphi": 0},
}
| import numpy as np
materials = {
"default": {"mu": 1, "rho": 1, "eta": 1, "C": 1, "sinphi": 1},
"magma": {"mu": 8 * 10**10, "rho": 2800, "eta": 10**16, "C": 10**7, "sinphi": 45},
"light magma": {"mu": 8 * 10**10, "rho": 2600, "eta": 10**13, "C": 10**7, "sinphi": 45},
"heavy magma": {"mu": 8 * 10**10, "rho": 3200, "eta": 10**16, "C": 10**7, "sinphi": 45},
"sand": {"mu": 10**6, "rho": 1560, "eta": 10**9, "C": 10, "sinphi": np.sin(np.radians(36))},
"viso-elastic slab": {"mu": 10**10, "rho": 4000, "eta": 10**27, "C": 10, "sinphi": np.sin(np.radians(36))},
"viso-elastic medium": {"mu": 10**20, "rho": 1, "eta": 10**24, "C": 10, "sinphi": np.sin(np.radians(36))},
"sticky air": {"mu": 10**6, "rho": 1, "eta": 10**2, "C": 10, "sinphi": 0},
}
| mit | Python |
fb6a2b5cabe284ecac5088c94fb2dba36f4a7e2e | edit prefix in Makefile for macOS (#10606) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/xxhash/package.py | var/spack/repos/builtin/packages/xxhash/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Xxhash(MakefilePackage):
"""xxHash is an Extremely fast Hash algorithm, running at RAM speed
limits. It successfully completes the SMHasher test suite which
evaluates collision, dispersion and randomness qualities of hash
functions. Code is highly portable, and hashes are identical on all
platforms (little / big endian).
"""
homepage = "https://github.com/Cyan4973/xxHash"
url = "https://github.com/Cyan4973/xxHash/archive/v0.6.5.tar.gz"
version('0.6.5', '6af3a964f3c2accebce66e54b44b6446')
version('0.6.4', '3c071c95e31bd601cca149cc354e6f19')
version('0.6.3', 'f2ec1497317c0eb89addd7f333c83228')
version('0.6.2', 'b2d12d99094b824e0a5f3ab63abc6c58')
version('0.6.1', 'f4ced3767aad8384b1ecb73bd5f992ca')
version('0.6.0', 'e0fd163b07ab0038f389a180dc263cf2')
version('0.5.1', '9417fd8a4d88204b680e21a60f0ccada')
version('0.5.0', '42e9a31a2cfc2f626fde17e84a0b6bb7')
def edit(self, spec, prefix):
makefile = FileFilter("Makefile")
makefile.filter('/usr/local', prefix)
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Xxhash(MakefilePackage):
"""xxHash is an Extremely fast Hash algorithm, running at RAM speed
limits. It successfully completes the SMHasher test suite which
evaluates collision, dispersion and randomness qualities of hash
functions. Code is highly portable, and hashes are identical on all
platforms (little / big endian).
"""
homepage = "https://github.com/Cyan4973/xxHash"
url = "https://github.com/Cyan4973/xxHash/archive/v0.6.5.tar.gz"
version('0.6.5', '6af3a964f3c2accebce66e54b44b6446')
version('0.6.4', '3c071c95e31bd601cca149cc354e6f19')
version('0.6.3', 'f2ec1497317c0eb89addd7f333c83228')
version('0.6.2', 'b2d12d99094b824e0a5f3ab63abc6c58')
version('0.6.1', 'f4ced3767aad8384b1ecb73bd5f992ca')
version('0.6.0', 'e0fd163b07ab0038f389a180dc263cf2')
version('0.5.1', '9417fd8a4d88204b680e21a60f0ccada')
version('0.5.0', '42e9a31a2cfc2f626fde17e84a0b6bb7')
def install(self, spec, prefix):
make('prefix={0}'.format(prefix), 'install')
| lgpl-2.1 | Python |
abc22134a59318e2d93e67af09e4282a407fea54 | Create code skeleton | samuel-phan/mssh-copy-id,samuel-phan/mssh-copy-id | msshcopyid/__init__.py | msshcopyid/__init__.py | from __future__ import print_function
import argparse
import getpass
import os
import sys
DEFAULT_SSH_RSA = '~/.ssh/id_rsa'
DEFAULT_SSH_DSA = '~/.ssh/id_dsa'
def main():
mc = Main()
mc.main()
class Main(object):
def __init__(self):
self.args = None
def main(self):
# Parse input arguments
self.args = self.parse_args(sys.argv)
if not self.args.identity:
self.args.identity = os.path.expanduser(DEFAULT_SSH_RSA)
if not os.path.exists(self.args.identity):
self.args.identity = os.path.expanduser(DEFAULT_SSH_DSA)
if not os.path.exists(self.args.identity):
print('Error: Cannot find any SSH keys in {0} and {1}.'.format(DEFAULT_SSH_RSA, DEFAULT_SSH_DSA),
file=sys.stderr)
sys.exit(1)
if not self.args.password:
self.args.password = getpass.getpass('Enter the common password: ')
# Copy the SSH keys to the hosts
for host in self.args.hosts:
self.copy_ssh_keys(host)
def parse_args(self, argv):
parser = argparse.ArgumentParser(description='Massively copy SSH keys.')
parser.add_argument('hosts', metavar='host', nargs='+',
help='the remote hosts to copy the keys to. Syntax: [user@]hostname')
parser.add_argument('-i', '--identity', help='the SSH identity file. Default: {0} or {1}'
.format(DEFAULT_SSH_RSA, DEFAULT_SSH_DSA))
parser.add_argument('-P', '--password',
help='the password to log into the remote hosts. It is NOT SECURED to set the password '
'that way, since it stays in the bash history. Password can also be sent on the '
'STDIN.')
return parser.parse_args(argv[1:])
def copy_ssh_keys(self, host):
# TODO: implement it
print('I copy the SSH keys [{0}] to the host [{1}]...'.format(self.args.identity, host))
| # encoding: utf-8
import os
import re
import sys
import paramiko
#def main():
# print('mssh-copy-id entry point')
# PS : y a pas des cons qui l'ont deja fait ?
# https://pypi.python.org/pypi/ssh-deploy-key/0.1.1
# Constants
hostFile = '/etc/hosts'
# todo: test file exists/readable ?
keyfile = os.getenv('HOME')+'/.ssh/id_rsa.pub'
# todo: notre clé publique est toujours la? ... il y a d'autre types de clés possible, faire mieux....
# todo: write def usage()
# todo: parse args
# test values
hostExp = 'localh*'
username = 'toto'
port = 22
# Retrieve matching hosts/ip from hosts file
info = 'matching hosts : '
hosts = []
for line in open(hostFile).readlines():
lineWithoutEOF = line.splitlines()[0]
lineWithoutComments = lineWithoutEOF.split('#')[0]
for host in lineWithoutComments.split():
try:
if re.match(hostExp, host):
hosts.append(host)
info += host
except re.error as e:
print('ERROR: Host shall be a regular expression.')
sys.exit(1)
print(info)
# retrieve user key
if not os.path.isfile(keyfile):
print('ERROR: No public key file '+keyfile)
sys.exit(1)
# read all lines, keep first, remove EOF,
userkey = open(keyfile).readlines()[0].splitlines()[0]
print('key:'+userkey)
# ask for password
password = raw_input('password for remote user '+username+':')
# manual ssh-copy-id requests - search/find a best/clean way to do this... !!??
for host in hosts:
client = paramiko.Transport((host, port))
client.connect(username=username, password=password)
command = 'mkdir -p $HOME/.ssh; echo "' + userkey + '" >> $HOME/.ssh/authorized_keys'
session = client.open_channel(kind='session')
session.exec_command(command)
while True:
if session.exit_status_ready():
break
print 'exit status: ', session.recv_exit_status()
# todo: compute return code...
| mit | Python |
877b7de300e7290f555578fab9032456a135a51f | Tag new release: 3.1.17 | Floobits/floobits-sublime,Floobits/floobits-sublime | floo/version.py | floo/version.py | PLUGIN_VERSION = '3.1.17'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| PLUGIN_VERSION = '3.1.16'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| apache-2.0 | Python |
5f9cdd3df91a6547d5b776c7f4ae8f23c53dfc48 | remove dead line | data-refinery/data_refinery,data-refinery/data_refinery,data-refinery/data_refinery | foreman/data_refinery_foreman/surveyor/utils.py | foreman/data_refinery_foreman/surveyor/utils.py | import collections
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
def requests_retry_session(
retries=3,
backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None,
):
"""
Exponential back off for requests.
via https://www.peterbe.com/plog/best-practice-with-retries-with-requests
"""
session = session or requests.Session()
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
def flatten(d, parent_key='', sep='_'):
"""
Flattens a dictionary using a seperator.
via https://stackoverflow.com/a/6027615
"""
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, collections.MutableMapping):
items.extend(flatten(v, new_key, sep=sep).items())
else:
items.append((new_key, v))
return dict(items)
def get_title_and_authors_for_pubmed_id(pmid):
""" Given a PMID, return that PMID's (title, [authors]). """
try:
j_url = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id=" + str(pmid) + "&retmode=json&tool=refinebio&email=hello@refine.bio"
resp = requests.get(j_url, timeout=20)
title = resp.json()['result'][str(pmid)]['title']
author_names = []
for author in resp.json()['result'][str(pmid)]['authors']:
author_names.append(author['name'])
return (title, author_names)
except:
# This is fine for a timeout
return ("", [])
| import collections
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
def requests_retry_session(
retries=3,
backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None,
):
"""
Exponential back off for requests.
via https://www.peterbe.com/plog/best-practice-with-retries-with-requests
"""
session = session or requests.Session()
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
def flatten(d, parent_key='', sep='_'):
"""
Flattens a dictionary using a seperator.
via https://stackoverflow.com/a/6027615
"""
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, collections.MutableMapping):
items.extend(flatten(v, new_key, sep=sep).items())
else:
items.append((new_key, v))
return dict(items)
def get_title_and_authors_for_pubmed_id(pmid):
""" Given a PMID, return that PMID's (title, [authors]). """
try:
# resp = requests.get("http://www.ncbi.nlm.nih.gov/pubmed/" + str(pmid), timeout=20)
j_url = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id=" + str(pmid) + "&retmode=json&tool=refinebio&email=hello@refine.bio"
resp = requests.get(j_url, timeout=20)
title = resp.json()['result'][str(pmid)]['title']
author_names = []
for author in resp.json()['result'][str(pmid)]['authors']:
author_names.append(author['name'])
return (title, author_names)
except:
# This is fine for a timeout
return ("", [])
| bsd-3-clause | Python |
0573a58f859133959b769a0081eca12121ca409f | Sort by priority in the admin changelist and make it editable. | mlavin/django-ad-code,mlavin/django-ad-code | adcode/admin.py | adcode/admin.py | "Admin customization for adcode models."
from django.contrib import admin
from adcode.models import Section, Size, Placement
class PlacementInline(admin.StackedInline):
model = Placement.sections.through
class SectionAdmin(admin.ModelAdmin):
list_display = ('name', 'pattern', 'priority', )
list_editable = ('priority', )
inlines = (PlacementInline, )
prepopulated_fields = {'slug': ('name', )}
ordering = ('-priority', 'name', )
class SizeAdmin(admin.ModelAdmin):
list_display = ('name', 'width', 'height', )
list_filter = ('width', 'height', )
class PlacementAdmin(admin.ModelAdmin):
list_display = ('name', 'size', )
list_filter = ('size', 'sections', )
filter_horizontal = ('sections', )
prepopulated_fields = {'slug': ('name', )}
admin.site.register(Section, SectionAdmin)
admin.site.register(Size, SizeAdmin)
admin.site.register(Placement, PlacementAdmin)
| "Admin customization for adcode models."
from django.contrib import admin
from adcode.models import Section, Size, Placement
class PlacementInline(admin.StackedInline):
model = Placement.sections.through
class SectionAdmin(admin.ModelAdmin):
list_display = ('name', 'pattern', )
inlines = (PlacementInline, )
prepopulated_fields = {'slug': ('name', )}
class SizeAdmin(admin.ModelAdmin):
list_display = ('name', 'width', 'height', )
list_filter = ('width', 'height', )
class PlacementAdmin(admin.ModelAdmin):
list_display = ('name', 'size', )
list_filter = ('size', 'sections', )
filter_horizontal = ('sections', )
prepopulated_fields = {'slug': ('name', )}
admin.site.register(Section, SectionAdmin)
admin.site.register(Size, SizeAdmin)
admin.site.register(Placement, PlacementAdmin)
| bsd-2-clause | Python |
d65a8e09068d66c8e7debeb0469aedc7bf03af13 | Bump django.VERSION for RC 1. | kholidfu/django,ericfc/django,monetate/django,Endika/django,nemesisdesign/django,savoirfairelinux/django,Endika/django,dfunckt/django,AndrewGrossman/django,yigitguler/django,TimBuckley/effective_django,willhardy/django,irwinlove/django,vitan/django,runekaagaard/django-contrib-locking,erikr/django,hackerbot/DjangoDev,ericfc/django,vitaly4uk/django,ajoaoff/django,googleinterns/django,1013553207/django,ghickman/django,aspidites/django,fenginx/django,SoftwareMaven/django,darkryder/django,rtindru/django,lisael/pg-django,alrifqi/django,weiawe/django,syaiful6/django,craynot/django,marckuz/django,gannetson/django,1013553207/django,fafaman/django,takis/django,imtapps/django-imt-fork,varunnaganathan/django,mathspace/django,aisipos/django,mdj2/django,ryanahall/django,nju520/django,yamila-moreno/django,rrrene/django,MoritzS/django,nielsvanoch/django,riklaunim/django-custom-multisite,leeon/annotated-django,nealtodd/django,adelton/django,h4r5h1t/django-hauthy,MikeAmy/django,PetrDlouhy/django,rajsadho/django,hynekcer/django,elky/django,stevenewey/django,vmarkovtsev/django,liavkoren/djangoDev,Anonymous-X6/django,ojengwa/django-1,hackerbot/DjangoDev,intgr/django,deployed/django,MikeAmy/django,akaihola/django,elkingtonmcb/django,MarkusH/django,aleida/django,RevelSystems/django,rsvip/Django,RaoUmer/django,Vixionar/django,tuhangdi/django,avanov/django,apocquet/django,koniiiik/django,b-me/django,schinckel/django,blaze33/django,sopier/django,bitcity/django,fenginx/django,Yong-Lee/django,dfunckt/django,marcelocure/django,ccn-2m/django,ar45/django,joakim-hove/django,petecummings/django,daniponi/django,jaywreddy/django,stewartpark/django,benspaulding/django,tayfun/django,duqiao/django,sopier/django,blindroot/django,jvkops/django,dudepare/django,beni55/django,quamilek/django,intgr/django,vincepandolfo/django,jn7163/django,HonzaKral/django,ccn-2m/django,koniiiik/django,atul-bhouraskar/django,aroche/django,neiudemo1/dja
ngo,charettes/django,stevenewey/django,kswiat/django,Proggie02/TestRepo,hottwaj/django,edmorley/django,charettes/django,loic/django,AlexHill/django,techdragon/django,yakky/django,dracos/django,ckirby/django,andreif/django,SujaySKumar/django,Y3K/django,frankvdp/django,ojake/django,liuliwork/django,drjeep/django,bikong2/django,andela-ooladayo/django,PetrDlouhy/django,jdelight/django,tanmaythakur/django,jdelight/django,aidanlister/django,simone/django-gb,BrotherPhil/django,theo-l/django,KokareIITP/django,wsmith323/django,tbeadle/django,crazy-canux/django,ecederstrand/django,sbellem/django,kutenai/django,xwolf12/django,syphar/django,erikr/django,davidharrigan/django,guettli/django,Balachan27/django,wkschwartz/django,jyotsna1820/django,guettli/django,pauloxnet/django,roselleebarle04/django,alilotfi/django,mlavin/django,rizumu/django,yograterol/django,BrotherPhil/django,divio/django,iambibhas/django,aerophile/django,carljm/django,timgraham/django,djbaldey/django,filias/django,rockneurotiko/django,stewartpark/django,redhat-openstack/django,xwolf12/django,hobarrera/django,vincepandolfo/django,sdcooke/django,AltSchool/django,yask123/django,Adnn/django,takis/django,spisneha25/django,jejimenez/django,YYWen0o0/python-frame-django,mojeto/django,ebar0n/django,ptoraskar/django,dpetzold/django,petecummings/django,feroda/django,BMJHayward/django,rogerhu/django,MounirMesselmeni/django,jrrembert/django,leereilly/django-1,elky/django,nielsvanoch/django,daniponi/django,nhippenmeyer/django,reinout/django,salamer/django,kamyu104/django,SebasSBM/django,waytai/django,shacker/django,felixxm/django,cainmatt/django,errx/django,dudepare/django,mewtaylor/django,follow99/django,ebar0n/django,monetate/django,mshafiq9/django,hynekcer/django,akaariai/django,manhhomienbienthuy/django,gdub/django,jn7163/django,jgoclawski/django,darjeeling/django,doismellburning/django,hunter007/django,duqiao/django,pquentin/django,lunafeng/django,lwiecek/django,rapilabs/django,kaedroho/django,ojake/django,alexallah/dj
ango,Yong-Lee/django,monetate/django,mrfuxi/django,alexallah/django,gchp/django,delhivery/django,tcwicklund/django,seanwestfall/django,ifduyue/django,takis/django,denis-pitul/django,edevil/django,chyeh727/django,georgemarshall/django,andela-ooladayo/django,darjeeling/django,kaedroho/django,oberlin/django,baylee/django,asser/django,BlindHunter/django,BlindHunter/django,Beauhurst/django,filias/django,codepantry/django,vitan/django,sdcooke/django,aleida/django,pjdelport/django,koordinates/django,ar45/django,MounirMesselmeni/django,GaussDing/django,jgoclawski/django,programadorjc/django,SebasSBM/django,pjdelport/django,pipermerriam/django,caotianwei/django,jejimenez/django,sjlehtin/django,pipermerriam/django,DONIKAN/django,mlavin/django,jmcarp/django,leekchan/django_test,gohin/django,ABaldwinHunter/django-clone-classic,mattseymour/django,yamila-moreno/django,jasonbot/django,mlavin/django,uranusjr/django,Beeblio/django,MoritzS/django,tragiclifestories/django,dgladkov/django,syphar/django,sbellem/django,kennethlove/django,shacker/django,jallohm/django,BMJHayward/django,adamchainz/django,Nepherhotep/django,jpic/django,neiudemo1/django,gitaarik/django,ziima/django,jmcarp/django,jgeskens/django,HonzaKral/django,gunchleoc/django,chyeh727/django,b-me/django,ArnossArnossi/django,eugena/django,gitaarik/django,RevelSystems/django,tcwicklund/django,jrrembert/django,cainmatt/django,robhudson/django,jhg/django,vitaly4uk/django,yask123/django,MikeAmy/django,frishberg/django,asser/django,aroche/django,alx-eu/django,Beauhurst/django,supriyantomaftuh/django,PolicyStat/django,JavML/django,joequery/django,szopu/django,ebar0n/django,AltSchool/django,adambrenecki/django,sjlehtin/django,shownomercy/django,dpetzold/django,blueyed/django,googleinterns/django,fpy171/django,bobcyw/django,ckirby/django,nju520/django,jylaxp/django,leereilly/django-1,arun6582/django,yakky/django,hybrideagle/django,vmarkovtsev/django,aerophile/django,jvkops/django,beni55/django,anant-dev/django,andela-ifageyinbo/dja
ngo,savoirfairelinux/django,alrifqi/django,errx/django,kosz85/django,EmadMokhtar/Django,areski/django,kholidfu/django,ojake/django,gdi2290/django,atul-bhouraskar/django,mmardini/django,dfdx2/django,riteshshrv/django,pauloxnet/django,elkingtonmcb/django,apocquet/django,mitchelljkotler/django,liuliwork/django,baylee/django,MarcJoan/django,alimony/django,tanmaythakur/django,andrewsmedina/django,duqiao/django,pelme/django,kevintaw/django,jgoclawski/django,frePPLe/django,TimYi/django,marqueedev/django,peterlauri/django,ArnossArnossi/django,vsajip/django,taaviteska/django,mjtamlyn/django,mattseymour/django,poiati/django,jpic/django,unaizalakain/django,elijah513/django,yewang15215/django,jsoref/django,kaedroho/django,mrfuxi/django,ABaldwinHunter/django-clone,liu602348184/django,Beeblio/django,huang4fstudio/django,denis-pitul/django,avneesh91/django,django-nonrel/django-nonrel,dudepare/django,rockneurotiko/django,henryfjordan/django,frankvdp/django,pquentin/django,labcodes/django,zulip/django,poiati/django,bobcyw/django,uranusjr/django,eyohansa/django,mcrowson/django,labcodes/django,auready/django,gchp/django,jallohm/django,dwightgunning/django,mbox/django,rsalmaso/django,petecummings/django,Mixser/django,vincepandolfo/django,rizumu/django,roselleebarle04/django,ziima/django,sdcooke/django,camilonova/django,mmardini/django,adelton/django,koordinates/django,runekaagaard/django-contrib-locking,akintoey/django,synasius/django,alimony/django,jdelight/django,liavkoren/djangoDev,tanmaythakur/django,rogerhu/django,mrbox/django,epandurski/django,erikr/django,fpy171/django,dex4er/django,feroda/django,jasonwzhy/django,fpy171/django,ericholscher/django,fenginx/django,z0by/django,unaizalakain/django,evansd/django,double-y/django,solarissmoke/django,katrid/django,mttr/django,gannetson/django,mlavin/django,koordinates/django,epandurski/django,django-nonrel/django,dfdx2/django,adrianholovaty/django,WillGuan105/django,hellhovnd/django,sergei-maertens/django,akshatharaj/django,nemesisdesign
/django,marqueedev/django,DrMeers/django,darkryder/django,mewtaylor/django,ryangallen/django,vitan/django,bitcity/django,Korkki/django,barbuza/django,weiawe/django,x111ong/django,1013553207/django,mitchelljkotler/django,koniiiik/django,savoirfairelinux/django,rhertzog/django,liu602348184/django,Y3K/django,yceruto/django,lunafeng/django,quamilek/django,tuhangdi/django,apollo13/django,ivandevp/django,bak1an/django,pjdelport/django,blaze33/django,z0by/django,MounirMesselmeni/django,nealtodd/django,ivandevp/django,oberlin/django,postrational/django,alilotfi/django,rwillmer/django,erikr/django,andela-ifageyinbo/django,seocam/django,krishna-pandey-git/django,takeshineshiro/django,hobarrera/django,marctc/django,mattseymour/django,denys-duchier/django,felixxm/django,whs/django,mrfuxi/django,megaumi/django,dydek/django,helenst/django,tbeadle/django,benjaminjkraft/django,hcsturix74/django,druuu/django,knifenomad/django,mshafiq9/django,ifduyue/django,GhostThrone/django,mjtamlyn/django,ccn-2m/django,baylee/django,sarthakmeh03/django,oberlin/django,hkchenhongyi/django,lisael/pg-django,AndrewGrossman/django,mttr/django,pauloxnet/django,hellhovnd/django,Nepherhotep/django,helenst/django,chrisfranzen/django,elijah513/django,poiati/django,marctc/django,codepantry/django,delinhabit/django,Korkki/django,gohin/django,adamchainz/django,ivandevp/django,rajsadho/django,yakky/django,lisael/pg-django,tbeadle/django,kisna72/django,rmboggs/django,b-me/django,django-nonrel/django-nonrel,ccn-2m/django,andreif/django,tanmaythakur/django,rlugojr/django,myang321/django,knifenomad/django,ar45/django,saydulk/django,mammique/django,barbuza/django,dhruvagarwal/django,whs/django,Yong-Lee/django,SebasSBM/django,auvipy/django,Argon-Zhou/django,extremewaysback/django,akshatharaj/django,aspidites/django,hobarrera/django,nealtodd/django,xadahiya/django,adelton/django,jhoos/django,blindroot/django,wsmith323/django,TimBuckley/effective_django,willharris/django,rajsadho/django,runekaagaard/django-contrib-locki
ng,himleyb85/django,lsqtongxin/django,theo-l/django,atul-bhouraskar/django,vsajip/django,twz915/django,WillGuan105/django,kisna72/django,haxoza/django,myang321/django,andela-ifageyinbo/django,jgoclawski/django,waytai/django,webgeodatavore/django,andresgz/django,tragiclifestories/django,saydulk/django,Leila20/django,aspidites/django,JavML/django,kutenai/django,phalt/django,z0by/django,dex4er/django,krishna-pandey-git/django,andela-ooladayo/django,hynekcer/django,fpy171/django,avanov/django,TridevGuha/django,zhoulingjun/django,ytjiang/django,treyhunner/django,shaistaansari/django,harisibrahimkv/django,twz915/django,georgemarshall/django,lwiecek/django,loic/django,RaoUmer/django,beck/django,ajoaoff/django,duqiao/django,arun6582/django,charettes/django,sam-tsai/django,mattrobenolt/django,dpetzold/django,haxoza/django,areski/django,supriyantomaftuh/django,rlugojr/django,caotianwei/django,alimony/django,mitar/django,mitya57/django,shaib/django,nemesisdesign/django,KokareIITP/django,joequery/django,labcodes/django,HonzaKral/django,ajaali/django,ziima/django,sgzsh269/django,x111ong/django,camilonova/django,BrotherPhil/django,double-y/django,blighj/django,rwillmer/django,piquadrat/django,t0in4/django,mttr/django,extremewaysback/django,rizumu/django,eugena/django,scorphus/django,rsalmaso/django,dwightgunning/django,yask123/django,davidharrigan/django,Vixionar/django,gengue/django,twz915/django,NullSoldier/django,mdj2/django,sbellem/django,gohin/django,whs/django,blueyed/django,takis/django,alexallah/django,oscaro/django,supriyantomaftuh/django,rockneurotiko/django,nealtodd/django,hellhovnd/django,rmboggs/django,theo-l/django,mrbox/django,makinacorpus/django,willharris/django,craynot/django,bikong2/django,gchp/django,piquadrat/django,hybrideagle/django,dursk/django,jn7163/django,supriyantomaftuh/django,zerc/django,tayfun/django,sephii/django,alexallah/django,unaizalakain/django,mcardillo55/django,etos/django,DONIKAN/django,makinacorpus/django,gannetson/django,nielsvanoch/djang
o,ptoraskar/django,druuu/django,kisna72/django,github-account-because-they-want-it/django,vmarkovtsev/django,maxsocl/django,ironbox360/django,ojake/django,jaywreddy/django,szopu/django,tragiclifestories/django,django-nonrel/django-nonrel,digimarc/django,jhg/django,varunnaganathan/django,elijah513/django,zedr/django,beckastar/django,dracos/django,scorphus/django,yewang15215/django,caotianwei/django,shaistaansari/django,RaoUmer/django,dsanders11/django,katrid/django,kholidfu/django,django-nonrel/django,ironbox360/django,eyohansa/django,curtisstpierre/django,lsqtongxin/django,kholidfu/django,bak1an/django,freakboy3742/django,tysonclugg/django,syphar/django,gcd0318/django,zerc/django,avneesh91/django,benspaulding/django,simone/django-gb,frankvdp/django,beck/django,apollo13/django,felixxm/django,sgzsh269/django,sergei-maertens/django,Vixionar/django,drjeep/django,avanov/django,rapilabs/django,jhoos/django,hottwaj/django,beni55/django,digimarc/django,litchfield/django,crazy-canux/django,hellhovnd/django,taaviteska/django,rapilabs/django,MatthewWilkes/django,etos/django,JavML/django,ASCrookes/django,akshatharaj/django,felixjimenez/django,programadorjc/django,andresgz/django,auvipy/django,taaviteska/django,schinckel/django,rtindru/django,edevil/django,neiudemo1/django,Y3K/django,dhruvagarwal/django,poiati/django,DasIch/django,joequery/django,seanwestfall/django,marcelocure/django,BMJHayward/django,ytjiang/django,ojengwa/django-1,jasonwzhy/django,jenalgit/django,syaiful6/django,edevil/django,feroda/django,litchfield/django,ABaldwinHunter/django-clone,archen/django,whs/django,leeon/annotated-django,gunchleoc/django,programadorjc/django,t0in4/django,pasqualguerrero/django,kevintaw/django,hassanabidpk/django,jnovinger/django,tomchristie/django,felixxm/django,auready/django,nhippenmeyer/django,Korkki/django,chrisfranzen/django,dwightgunning/django,kosz85/django,zhoulingjun/django,andela-ooladayo/django,elky/django,weiawe/django,jscn/django,hynekcer/django,errx/django,beck/django
,yigitguler/django,petecummings/django,risicle/django,abomyi/django,delinhabit/django,b-me/django,aidanlister/django,zsiciarz/django,tcwicklund/django,HousekeepLtd/django,DrMeers/django,andreif/django,NullSoldier/django,wweiradio/django,karyon/django,mitya57/django,auvipy/django,jnovinger/django,ajoaoff/django,carljm/django,IRI-Research/django,rlugojr/django,jaywreddy/django,wetneb/django,alx-eu/django,zhoulingjun/django,sephii/django,curtisstpierre/django,beckastar/django,anant-dev/django,hcsturix74/django,zulip/django,reinout/django,sarthakmeh03/django,ABaldwinHunter/django-clone,bliti/django-nonrel-1.5,dhruvagarwal/django,ifduyue/django,hcsturix74/django,extremewaysback/django,denys-duchier/django,MarkusH/django,marissazhou/django,ebar0n/django,felixjimenez/django,beckastar/django,claudep/django,jasonwzhy/django,sadaf2605/django,benspaulding/django,payeldillip/django,lmorchard/django,henryfjordan/django,bspink/django,zsiciarz/django,himleyb85/django,marissazhou/django,YYWen0o0/python-frame-django,wsmith323/django,kcpawan/django,gdi2290/django,nhippenmeyer/django,ojengwa/django-1,matiasb/django,BlindHunter/django,hybrideagle/django,kennethlove/django,lwiecek/django,jasonbot/django,RossBrunton/django,divio/django,ajaali/django,krisys/django,cobalys/django,seocam/django,irwinlove/django,frdb194/django,Argon-Zhou/django,oinopion/django,risicle/django,rsalmaso/django,lunafeng/django,yamila-moreno/django,hasadna/django,alimony/django,kosz85/django,tcwicklund/django,django-nonrel/django,yask123/django,MatthewWilkes/django,denis-pitul/django,EliotBerriot/django,solarissmoke/django,zhaodelong/django,moreati/django,jmcarp/django,Endika/django,etos/django,Nepherhotep/django,matiasb/django,hcsturix74/django,github-account-because-they-want-it/django,jhg/django,tuhangdi/django,imtapps/django-imt-fork,hassanabidpk/django,hobarrera/django,andrewsmedina/django,taaviteska/django,jn7163/django,andresgz/django,ckirby/django,spisneha25/django,seanwestfall/django,SoftwareMaven/django
,SujaySKumar/django,jmcarp/django,schinckel/django,reinout/django,ecederstrand/django,aleida/django,doismellburning/django,joakim-hove/django,MarkusH/django,coldmind/django,aisipos/django,Beauhurst/django,Anonymous-X6/django,mjtamlyn/django,oscaro/django,ivandevp/django,NullSoldier/django,tayfun/django,tomchristie/django,techdragon/django,sadaf2605/django,adambrenecki/django,mcardillo55/django,rapilabs/django,HonzaKral/django,Proggie02/TestRepo,Mixser/django,uranusjr/django,kutenai/django,denisenkom/django,kswiat/django,GaussDing/django,rhertzog/django,vitan/django,seanwestfall/django,tomchristie/django,dbaxa/django,EliotBerriot/django,HousekeepLtd/django,huang4fstudio/django,stevenewey/django,redhat-openstack/django,syphar/django,haxoza/django,jpic/django,Proggie02/TestRepo,RossBrunton/django,avneesh91/django,hottwaj/django,deployed/django,mcella/django,mcella/django,coldmind/django,frePPLe/django,oinopion/django,aroche/django,vsajip/django,double-y/django,yakky/django,shownomercy/django,lsqtongxin/django,GaussDing/django,takeshineshiro/django,darjeeling/django,BlindHunter/django,felixjimenez/django,piquadrat/django,mitya57/django,daniponi/django,chrishas35/django-travis-ci,roselleebarle04/django,jejimenez/django,wsmith323/django,indevgr/django,ryangallen/django,programadorjc/django,payeldillip/django,quxiaolong1504/django,blighj/django,takeshineshiro/django,bobcyw/django,hasadna/django,caotianwei/django,archen/django,mammique/django,mitchelljkotler/django,cobalys/django,JorgeCoock/django,marissazhou/django,marqueedev/django,avanov/django,huang4fstudio/django,blindroot/django,alilotfi/django,rrrene/django,sam-tsai/django,spisneha25/django,pelme/django,frePPLe/django,EmadMokhtar/Django,pauloxnet/django,unaizalakain/django,hnakamur/django,webgeodatavore/django,freakboy3742/django,spisneha25/django,akaariai/django,delhivery/django,labcodes/django,RaoUmer/django,PolicyStat/django,WSDC-NITWarangal/django,Anonymous-X6/django,olasitarska/django,akintoey/django,epandurski/
django,dursk/django,ASCrookes/django,github-account-because-they-want-it/django,barbuza/django,solarissmoke/django,koniiiik/django,dydek/django,rwillmer/django,moreati/django,sam-tsai/django,elena/django,epandurski/django,DrMeers/django,dfunckt/django,sjlehtin/django,NullSoldier/django,treyhunner/django,ziima/django,indevgr/django,yamila-moreno/django,druuu/django,adambrenecki/django,alexmorozov/django,frishberg/django,ArnossArnossi/django,mcardillo55/django,elijah513/django,maxsocl/django,ASCrookes/django,yograterol/django,irwinlove/django,mttr/django,AndrewGrossman/django,synasius/django,bliti/django-nonrel-1.5,zhaodelong/django,kamyu104/django,auvipy/django,jarshwah/django,scorphus/django,googleinterns/django,areski/django,Balachan27/django,alexmorozov/django,jsoref/django,Adnn/django,riteshshrv/django,xwolf12/django,henryfjordan/django,evansd/django,katrid/django,mattrobenolt/django,YangSongzhou/django,memtoko/django,devops2014/djangosite,ryanahall/django,piquadrat/django,xrmx/django,Argon-Zhou/django,peterlauri/django,Yong-Lee/django,ulope/django,bitcity/django,karyon/django,dfunckt/django,peterlauri/django,krisys/django,jrrembert/django,django/django,kangfend/django,mshafiq9/django,donkirkby/django,Matt-Deacalion/django,oinopion/django,elkingtonmcb/django,ericholscher/django,mbox/django,dracos/django,hunter007/django,apollo13/django,reinout/django,akaariai/django,harisibrahimkv/django,loic/django,stevenewey/django,zulip/django,krishna-pandey-git/django,YangSongzhou/django,fafaman/django,synasius/django,TridevGuha/django,shaib/django,yewang15215/django,frishberg/django,davgibbs/django,matiasb/django,pipermerriam/django,jyotsna1820/django,pasqualguerrero/django,lzw120/django,edmorley/django,camilonova/django,dgladkov/django,asser/django,sdcooke/django,megaumi/django,quxiaolong1504/django,ticosax/django,YYWen0o0/python-frame-django,quxiaolong1504/django,kswiat/django,ifduyue/django,adelton/django,auready/django,aisipos/django,crazy-canux/django,mathspace/django,d
udepare/django,asser/django,aspidites/django,gitaarik/django,shtouff/django,dsanders11/django,GhostThrone/django,kennethlove/django,ataylor32/django,beckastar/django,benjaminjkraft/django,harisibrahimkv/django,akaihola/django,aisipos/django,feroda/django,JavML/django,tuhangdi/django,gcd0318/django,takeshineshiro/django,MounirMesselmeni/django,apollo13/django,mrfuxi/django,dbaxa/django,simonw/django,elky/django,sbellem/django,seocam/django,knifenomad/django,gdub/django,robhudson/django,AndrewGrossman/django,uranusjr/django,rmboggs/django,ckirby/django,frdb194/django,treyhunner/django,shaistaansari/django,yceruto/django,wkschwartz/django,ptoraskar/django,marctc/django,marctc/django,andreif/django,twz915/django,delhivery/django,rynomster/django,crazy-canux/django,kisna72/django,jyotsna1820/django,waytai/django,alx-eu/django,ABaldwinHunter/django-clone,dsanders11/django,Korkki/django,anant-dev/django,SujaySKumar/django,xwolf12/django,rtindru/django,GitAngel/django,ojengwa/django-1,jasonbot/django,darkryder/django,hkchenhongyi/django,frishberg/django,mewtaylor/django,denys-duchier/django,coldmind/django,raphaelmerx/django,sgzsh269/django,wkschwartz/django,EliotBerriot/django,jarshwah/django,Nepherhotep/django,codepantry/django,gitaarik/django,rsvip/Django,KokareIITP/django,frdb194/django,TridevGuha/django,zedr/django,shtouff/django,jeezybrick/django,Matt-Deacalion/django,double-y/django,t0in4/django,donkirkby/django,raphaelmerx/django,kosz85/django,GhostThrone/django,roselleebarle04/django,frePPLe/django,davidharrigan/django,SebasSBM/django,mitchelljkotler/django,Y3K/django,drjeep/django,RossBrunton/django,elena/django,rsvip/Django,gengue/django,yceruto/django,wweiradio/django,lmorchard/django,mjtamlyn/django,oscaro/django,blaze33/django,digimarc/django,RevelSystems/django,lmorchard/django,dpetzold/django,mshafiq9/django,davgibbs/django,jallohm/django,ajaali/django,jarshwah/django,jhg/django,zhoulingjun/django,blueyed/django,seocam/django,ASCrookes/django,mattseymour/dja
ngo,Endika/django,github-account-because-they-want-it/django,phalt/django,rwillmer/django,risicle/django,jenalgit/django,sarvex/django,ytjiang/django,liu602348184/django,vincepandolfo/django,varunnaganathan/django,guettli/django,x111ong/django,jgeskens/django,mcrowson/django,rhertzog/django,krisys/django,WSDC-NITWarangal/django,marcelocure/django,delinhabit/django,jeezybrick/django,salamer/django,GaussDing/django,marcelocure/django,dfdx2/django,gunchleoc/django,gannetson/django,ataylor32/django,yograterol/django,Adnn/django,mrbox/django,jscn/django,areski/django,iambibhas/django,benjaminjkraft/django,scorphus/django,DONIKAN/django,ulope/django,peterlauri/django,coldmind/django,olasitarska/django,mmardini/django,auready/django,kcpawan/django,evansd/django,beck/django,joequery/django,eyohansa/django,karyon/django,akaihola/django,sadaf2605/django,kcpawan/django,archen/django,doismellburning/django,litchfield/django,AlexHill/django,adrianholovaty/django,waytai/django,ar45/django,bak1an/django,sgzsh269/django,alexmorozov/django,cainmatt/django,sarvex/django,syaiful6/django,blighj/django,bspink/django,denisenkom/django,zedr/django,monetate/django,megaumi/django,Anonymous-X6/django,gcd0318/django,nemesisdesign/django,krisys/django,mathspace/django,mojeto/django,carljm/django,rajsadho/django,vitaly4uk/django,dracos/django,Sonicbids/django,hnakamur/django,payeldillip/django,hackerbot/DjangoDev,riteshshrv/django,TimYi/django,druuu/django,JorgeCoock/django,x111ong/django,jylaxp/django,jylaxp/django,WSDC-NITWarangal/django,oscaro/django,eugena/django,curtisstpierre/django,litchfield/django,Leila20/django,jejimenez/django,sarvex/django,wweiradio/django,simonw/django,divio/django,h4r5h1t/django-hauthy,Balachan27/django,marqueedev/django,wkschwartz/django,salamer/django,djbaldey/django,willhardy/django,Mixser/django,leeon/annotated-django,TimBuckley/effective_django,simone/django-gb,cobalys/django,EmadMokhtar/Django,aroche/django,tysonclugg/django,maxsocl/django,hnakamur/django,so
pier/django,mcardillo55/django,bobcyw/django,sam-tsai/django,akintoey/django,pipermerriam/django,gunchleoc/django,IRI-Research/django,andela-ifageyinbo/django,PetrDlouhy/django,YangSongzhou/django,rsvip/Django,zhaodelong/django,PetrDlouhy/django,WillGuan105/django,jscn/django,codepantry/django,kangfend/django,charettes/django,django-nonrel/django,neiudemo1/django,rynomster/django,dursk/django,lmorchard/django,camilonova/django,barbuza/django,ticosax/django,daniponi/django,SoftwareMaven/django,chrishas35/django-travis-ci,quxiaolong1504/django,Beeblio/django,vitaly4uk/django,ABaldwinHunter/django-clone-classic,Sonicbids/django,zanderle/django,indevgr/django,himleyb85/django,mrbox/django,jhoos/django,karyon/django,Matt-Deacalion/django,hybrideagle/django,follow99/django,ghedsouza/django,ryangallen/django,postrational/django,curtisstpierre/django,devops2014/djangosite,PolicyStat/django,akshatharaj/django,synasius/django,quamilek/django,zanderle/django,KokareIITP/django,wetneb/django,cainmatt/django,jaywreddy/django,googleinterns/django,donkirkby/django,jhoos/django,kutenai/django,solarissmoke/django,DasIch/django,mbox/django,shaib/django,hackerbot/DjangoDev,dwightgunning/django,jvkops/django,AltSchool/django,ghickman/django,craynot/django,moreati/django,rizumu/django,adrianholovaty/django,davgibbs/django,jenalgit/django,kangfend/django,aerophile/django,raphaelmerx/django,MarkusH/django,MarcJoan/django,postrational/django,mathspace/django,dfdx2/django,Beauhurst/django,zulip/django,edmorley/django,nju520/django,dursk/django,alrifqi/django,zerc/django,timgraham/django,salamer/django,shownomercy/django,BrotherPhil/django,yewang15215/django,indevgr/django,georgemarshall/django,craynot/django,rrrene/django,blindroot/django,iambibhas/django,memtoko/django,mcella/django,szopu/django,rsalmaso/django,stewartpark/django,ulope/django,joakim-hove/django,willharris/django,freakboy3742/django,jarshwah/django,Sonicbids/django,mcella/django,davgibbs/django,ryangallen/django,zerc/django,
mcrowson/django,ecederstrand/django,abomyi/django,willharris/django,sadaf2605/django,rrrene/django,xadahiya/django,mojeto/django,MoritzS/django,jnovinger/django,jasonbot/django,pasqualguerrero/django,Argon-Zhou/django,djbaldey/django,jsoref/django,blueyed/django,willhardy/django,jyotsna1820/django,Leila20/django,TimYi/django,phalt/django,denisenkom/django,himleyb85/django,bliti/django-nonrel-1.5,syaiful6/django,liuliwork/django,mewtaylor/django,henryfjordan/django,rhertzog/django,Adnn/django,krishna-pandey-git/django,darjeeling/django,pelme/django,WSDC-NITWarangal/django,makinacorpus/django,xrmx/django,carljm/django,h4r5h1t/django-hauthy,ghedsouza/django,hunter007/django,abomyi/django,mcrowson/django,techdragon/django,georgemarshall/django,wweiradio/django,gdi2290/django,frankvdp/django,claudep/django,huang4fstudio/django,ataylor32/django,gengue/django,rynomster/django,chrisfranzen/django,eyohansa/django,yograterol/django,alilotfi/django,manhhomienbienthuy/django,gdub/django,bak1an/django,AlexHill/django,jgeskens/django,fenginx/django,akaariai/django,Matt-Deacalion/django,shacker/django,dsanders11/django,jallohm/django,intgr/django,kangfend/django,GitAngel/django,xadahiya/django,TimYi/django,MatthewWilkes/django,shownomercy/django,sjlehtin/django,denys-duchier/django,bitcity/django,myang321/django,hasadna/django,delinhabit/django,ataylor32/django,megaumi/django,MikeAmy/django,z0by/django,saydulk/django,akintoey/django,dex4er/django,MarcJoan/django,Proggie02/TestRepo,ecederstrand/django,moreati/django,adamchainz/django,saydulk/django,alx-eu/django,zsiciarz/django,ticosax/django,darkryder/django,dgladkov/django,bspink/django,joakim-hove/django,leekchan/django_test,mdj2/django,apocquet/django,robhudson/django,shaistaansari/django,risicle/django,ghedsouza/django,xrmx/django,maxsocl/django,RossBrunton/django,adamchainz/django,h4r5h1t/django-hauthy,zhaodelong/django,mattrobenolt/django,simonw/django,varunnaganathan/django,koordinates/django,leekchan/django_test,intgr/djan
go,jeezybrick/django,raphaelmerx/django,tbeadle/django,JorgeCoock/django,timgraham/django,techdragon/django,GitAngel/django,ABaldwinHunter/django-clone-classic,pasqualguerrero/django,ytjiang/django,aidanlister/django,dbaxa/django,sarthakmeh03/django,treyhunner/django,MoritzS/django,harisibrahimkv/django,alexmorozov/django,lzw120/django,mitya57/django,andrewsmedina/django,jscn/django,delhivery/django,chrishas35/django-travis-ci,avneesh91/django,davidharrigan/django,xadahiya/django,claudep/django,extremewaysback/django,shacker/django,fafaman/django,YangSongzhou/django,kevintaw/django,lzw120/django,filias/django,mmardini/django,vmarkovtsev/django,devops2014/djangosite,Beeblio/django,tayfun/django,theo-l/django,gcd0318/django,drjeep/django,weiawe/django,imtapps/django-imt-fork,Leila20/django,liavkoren/djangoDev,zanderle/django,jrrembert/django,beni55/django,andresgz/django,nju520/django,anant-dev/django,jvkops/django,katrid/django,jsoref/django,elkingtonmcb/django,tysonclugg/django,hkchenhongyi/django,schinckel/django,follow99/django,denis-pitul/django,dgladkov/django,divio/django,marckuz/django,mitar/django,sopier/django,hnakamur/django,Mixser/django,MatthewWilkes/django,baylee/django,DONIKAN/django,chrisfranzen/django,redhat-openstack/django,mattrobenolt/django,benjaminjkraft/django,tysonclugg/django,MarcJoan/django,ericholscher/django,django/django,kamyu104/django,RevelSystems/django,ghickman/django,Balachan27/django,jpic/django,quamilek/django,shtouff/django,hunter007/django,elena/django,leereilly/django-1,rtindru/django,lsqtongxin/django,ironbox360/django,tomchristie/django,digimarc/django,andyzsf/django,hassanabidpk/django,arun6582/django,bikong2/django,DasIch/django,oberlin/django,sergei-maertens/django,rlugojr/django,dydek/django,abomyi/django,ericfc/django,SoftwareMaven/django,haxoza/django,gengue/django,liu602348184/django,kcpawan/django,djbaldey/django,apocquet/django,EliotBerriot/django,sergei-maertens/django,frdb194/django,rmboggs/django,kamyu104/django,goh
in/django,webgeodatavore/django,jylaxp/django,mammique/django,1013553207/django,ryanahall/django,nhippenmeyer/django,sarvex/django,HousekeepLtd/django,bikong2/django,chyeh727/django,phalt/django,helenst/django,manhhomienbienthuy/django,GitAngel/django,ryanahall/django,oinopion/django,marckuz/django,xrmx/django,mojeto/django,matiasb/django,BMJHayward/django,WillGuan105/django,jnovinger/django,filias/django,ghedsouza/django,edmorley/django,dhruvagarwal/django,guettli/django,TridevGuha/django,ghickman/django,django/django,knifenomad/django,redhat-openstack/django,loic/django,webgeodatavore/django,SujaySKumar/django,etos/django,ericfc/django,gdub/django,evansd/django,shtouff/django,sarthakmeh03/django,shaib/django,aerophile/django,django/django,hassanabidpk/django,andyzsf/django,blighj/django,JorgeCoock/django,liuliwork/django,rogerhu/django,gchp/django,donkirkby/django,lwiecek/django,ticosax/django,riklaunim/django-custom-multisite,savoirfairelinux/django,ArnossArnossi/django,lunafeng/django,aidanlister/django,marissazhou/django,olasitarska/django,marckuz/django,bspink/django,DasIch/django,deployed/django,stewartpark/django,felixjimenez/django,hkchenhongyi/django,tragiclifestories/django,elena/django,ironbox360/django,willhardy/django,yigitguler/django,hottwaj/django,riteshshrv/django,GhostThrone/django,ajaali/django,AltSchool/django,kevintaw/django,rockneurotiko/django,arun6582/django,myang321/django,wetneb/django,jasonwzhy/django,dbaxa/django,rynomster/django,simonw/django,jdelight/django,mitar/django,pquentin/django,t0in4/django,wetneb/django,robhudson/django,ABaldwinHunter/django-clone-classic,alrifqi/django,atul-bhouraskar/django,Vixionar/django,ptoraskar/django,dydek/django,irwinlove/django,claudep/django,follow99/django,HousekeepLtd/django,riklaunim/django-custom-multisite,zanderle/django,jenalgit/django,ajoaoff/django,andyzsf/django,sephii/django,IRI-Research/django,chyeh727/django,payeldillip/django,memtoko/django,manhhomienbienthuy/django,timgraham/django,zsi
ciarz/django,jeezybrick/django,fafaman/django,eugena/django | django/__init__.py | django/__init__.py | VERSION = (1, 1, 0, 'rc', 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
| VERSION = (1, 1, 0, 'beta', 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
| bsd-3-clause | Python |
9c1dfaefa9f670abbb04daae5f9e6680c3ba14c6 | read ser | cdutsov/meteo,cdutsov/meteo | add_to_files.py | add_to_files.py | from Adafruit_BME280 import *
import paho.mqtt.client as paho
import veml6070
import time
broker = "127.0.0.1"
port = 1883
def on_publish(client, userdata, result): # create function for callback
print("data published \n")
pass
def main():
sensor = BME280(p_mode=BME280_OSAMPLE_8, t_mode=BME280_OSAMPLE_2, h_mode=BME280_OSAMPLE_1, filter=BME280_FILTER_16)
tstart = time.time()
client1 = paho.Client("control1") # create client object
client1.on_publish = on_publish # assign function to callback
client1.connect(broker, port) # establish connection
veml = veml6070.Veml6070()
veml.set_integration_time(veml6070.INTEGRATIONTIME_1T)
# file_t = open(“temp.txt”, ”w”)
file_p = open(“pressure.txt”, ”w”)
file_h = open(“humidity.txt”, ”w”)
file_uv = open(“uv.txt”, ”w”)
while True:
time = time.strftime("%H:%M:%S")
degrees = sensor.read_temperature()
pascals = sensor.read_pressure()
hectopascals = pascals / 100
humidity = sensor.read_humidity()
dew_point = sensor.read_dewpoint()
uv_raw = veml.get_uva_light_intensity_raw()
uv = veml.get_uva_light_intensity()
ret = client1.publish("sensors/temperature", "%0.3f" % (degrees))
ret = client1.publish("sensors/pressure", "%0.3f" % (hectopascals))
ret = client1.publish("sensors/humidity", "%0.3f" % (humidity))
ret = client1.publish("sensors/dewpoint", "%0.3f" % (dew_point))
ret = client1.publish("sensors/uv", "%0.3f" % (uv))
ret = client1.publish("sensors/uv_raw", "%0.3f" % (uv_raw))
file_t.write(time.strftime("%H:%M:%S"), degrees)
file_p.write(time.strftime("%H:%M:%S"), hectopascals)
file_h.write(time.strftime("%H:%M:%S"), humidity)
file_uv.write(time.strftime("%H:%M:%S"), uv)
file_t.close()
file_p.close()
file_h.close()
file_uv.close()
time.sleep(3)
main()
| from Adafruit_BME280 import *
import paho.mqtt.client as paho
import veml6070
import time
import os, sys
broker = "127.0.0.1"
port = 1883
def on_publish(client, userdata, result): # create function for callback
print("data published \n")
pass
def main():
sensor = BME280(p_mode=BME280_OSAMPLE_8, t_mode=BME280_OSAMPLE_2, h_mode=BME280_OSAMPLE_1, filter=BME280_FILTER_16)
tstart = time.time()
client1 = paho.Client("control1") # create client object
client1.on_publish = on_publish # assign function to callback
client1.connect(broker, port) # establish connection
veml = veml6070.Veml6070()
veml.set_integration_time(veml6070.INTEGRATIONTIME_1T)
file_t = open(“temperature.txt”, ”w”)
file_p = open(“pressure.txt”, ”w”)
file_h = open(“humidity.txt”, ”w”)
file_uv = open(“uv.txt”, ”w”)
while True:
time = time.strftime("%H:%M:%S")
degrees = sensor.read_temperature()
pascals = sensor.read_pressure()
hectopascals = pascals / 100
humidity = sensor.read_humidity()
dew_point = sensor.read_dewpoint()
uv_raw = veml.get_uva_light_intensity_raw()
uv = veml.get_uva_light_intensity()
ret = client1.publish("sensors/temperature", "%0.3f" % (degrees))
ret = client1.publish("sensors/pressure", "%0.3f" % (hectopascals))
ret = client1.publish("sensors/humidity", "%0.3f" % (humidity))
ret = client1.publish("sensors/dewpoint", "%0.3f" % (dew_point))
ret = client1.publish("sensors/uv", "%0.3f" % (uv))
ret = client1.publish("sensors/uv_raw", "%0.3f" % (uv_raw))
file_t.write(time.strftime("%H:%M:%S"), degrees)
file_p.write(time.strftime("%H:%M:%S"), hectopascals)
file_h.write(time.strftime("%H:%M:%S"), humidity)
file_uv.write(time.strftime("%H:%M:%S"), uv)
file_t.close()
file_p.close()
file_h.close()
file_uv.close()
time.sleep(3)
main()
| mit | Python |
3800ce7132945424016c930a4e90e81dbb0afb37 | bump version | aetros/aetros-cli | aetros/const.py | aetros/const.py | __version__ = '0.13.0'
__prog__ = "aetros"
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
class JOB_STATUS:
PROGRESS_STATUS_CREATED = 0
PROGRESS_STATUS_QUEUED = 1
PROGRESS_STATUS_STARTED = 2
PROGRESS_STATUS_DONE = 3
PROGRESS_STATUS_ABORTED = 4
PROGRESS_STATUS_FAILED = 5 | __version__ = '0.12.0'
__prog__ = "aetros"
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
class JOB_STATUS:
PROGRESS_STATUS_CREATED = 0
PROGRESS_STATUS_QUEUED = 1
PROGRESS_STATUS_STARTED = 2
PROGRESS_STATUS_DONE = 3
PROGRESS_STATUS_ABORTED = 4
PROGRESS_STATUS_FAILED = 5 | mit | Python |
c28ca58537102a5caf549ac94bd3a1e49d0b0351 | Add missing attributes to Poll and PollOption `__init__` | carpedm20/fbchat | fbchat/_poll.py | fbchat/_poll.py | # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import attr
@attr.s(cmp=False)
class Poll(object):
"""Represents a poll"""
#: Title of the poll
title = attr.ib()
#: List of :class:`PollOption`, can be fetched with :func:`fbchat.Client.fetchPollOptions`
options = attr.ib()
#: Options count
options_count = attr.ib(None)
#: ID of the poll
uid = attr.ib(None)
@classmethod
def _from_graphql(cls, data):
return cls(
uid=int(data["id"]),
title=data.get("title") if data.get("title") else data.get("text"),
options=[PollOption._from_graphql(m) for m in data.get("options")],
options_count=data.get("total_count"),
)
@attr.s(cmp=False)
class PollOption(object):
"""Represents a poll option"""
#: Text of the poll option
text = attr.ib()
#: Whether vote when creating or client voted
vote = attr.ib(False)
#: ID of the users who voted for this poll option
voters = attr.ib(None)
#: Votes count
votes_count = attr.ib(None)
#: ID of the poll option
uid = attr.ib(None)
@classmethod
def _from_graphql(cls, data):
if data.get("viewer_has_voted") is None:
vote = None
elif isinstance(data["viewer_has_voted"], bool):
vote = data["viewer_has_voted"]
else:
vote = data["viewer_has_voted"] == "true"
return cls(
uid=int(data["id"]),
text=data.get("text"),
vote=vote,
voters=(
[m.get("node").get("id") for m in data.get("voters").get("edges")]
if isinstance(data.get("voters"), dict)
else data.get("voters")
),
votes_count=(
data.get("voters").get("count")
if isinstance(data.get("voters"), dict)
else data.get("total_count")
),
)
| # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import attr
@attr.s(cmp=False)
class Poll(object):
"""Represents a poll"""
#: ID of the poll
uid = attr.ib(None, init=False)
#: Title of the poll
title = attr.ib()
#: List of :class:`PollOption`, can be fetched with :func:`fbchat.Client.fetchPollOptions`
options = attr.ib()
#: Options count
options_count = attr.ib(None, init=False)
@classmethod
def _from_graphql(cls, data):
return cls(
uid=int(data["id"]),
title=data.get("title") if data.get("title") else data.get("text"),
options=[PollOption._from_graphql(m) for m in data.get("options")],
options_count=data.get("total_count"),
)
@attr.s(cmp=False)
class PollOption(object):
"""Represents a poll option"""
#: ID of the poll option
uid = attr.ib(None, init=False)
#: Text of the poll option
text = attr.ib()
#: Whether vote when creating or client voted
vote = attr.ib(False)
#: ID of the users who voted for this poll option
voters = attr.ib(None, init=False)
#: Votes count
votes_count = attr.ib(None, init=False)
@classmethod
def _from_graphql(cls, data):
if data.get("viewer_has_voted") is None:
vote = None
elif isinstance(data["viewer_has_voted"], bool):
vote = data["viewer_has_voted"]
else:
vote = data["viewer_has_voted"] == "true"
return cls(
uid=int(data["id"]),
text=data.get("text"),
vote=vote,
voters=(
[m.get("node").get("id") for m in data.get("voters").get("edges")]
if isinstance(data.get("voters"), dict)
else data.get("voters")
),
votes_count=(
data.get("voters").get("count")
if isinstance(data.get("voters"), dict)
else data.get("total_count")
),
)
| bsd-3-clause | Python |
ace4c19a9e97ff74a1ee1f221e417fa95601c2b1 | fix bug | wannaphongcom/fbchat,Bankde/fbchat,carpedm20/fbchat,JohnathonNow/fbchat,madsmtm/fbchat | fbchat/utils.py | fbchat/utils.py | import re
import json
from time import time
from random import random, choice
USER_AGENTS = [
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/601.1.10 (KHTML, like Gecko) Version/8.0.5 Safari/601.1.10",
"Mozilla/5.0 (Windows NT 6.3; WOW64; ; NCT50_AAP285C84A1328) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6"
]
def now():
return int(time()*1000)
def get_json(text):
return json.loads(re.sub(r"for.*(.*;.*;.*).*;", '', text.encode('utf-8').decode("unicode-escape"), 1))
def digit_to_char(digit):
if digit < 10:
return str(digit)
return chr(ord('a') + digit - 10)
def str_base(number,base):
if number < 0:
return '-' + str_base(-number, base)
(d, m) = divmod(number, base)
if d > 0:
return str_base(d, base) + digit_to_char(m)
return digit_to_char(m)
def generateMessageID(client_id=None):
k = now()
l = int(random() * 4294967295)
return ("<%s:%s-%s@mail.projektitan.com>" % (k, l, client_id));
| import re
import json
from time import time
from random import random, choice
USER_AGENTS = [
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/601.1.10 (KHTML, like Gecko) Version/8.0.5 Safari/601.1.10",
"Mozilla/5.0 (Windows NT 6.3; WOW64; ; NCT50_AAP285C84A1328) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6"
]
def now():
return int(time()*1000)
def get_json(text):
return json.loads(re.sub(r"for.*(;;).*;", '', text.decode("unicode-escape"), 1)) # have bug !!! AttributeError: 'str' object has no attribute 'decode'
def digit_to_char(digit):
if digit < 10:
return str(digit)
return chr(ord('a') + digit - 10)
def str_base(number,base):
if number < 0:
return '-' + str_base(-number, base)
(d, m) = divmod(number, base)
if d > 0:
return str_base(d, base) + digit_to_char(m)
return digit_to_char(m)
def generateMessageID(client_id=None):
k = now()
l = int(random() * 4294967295)
return ("<%s:%s-%s@mail.projektitan.com>" % (k, l, client_id));
| bsd-3-clause | Python |
bb104ac04e27e3354c4aebee7a0ca7e539232490 | Use click.echo() for python 2.7 compatibility | eregs/regulations-parser,tadhg-ohiggins/regulations-parser,eregs/regulations-parser,tadhg-ohiggins/regulations-parser | regparser/commands/outline_depths.py | regparser/commands/outline_depths.py | import logging
from regparser.tree.depth import optional_rules
from regparser.tree.depth.derive import derive_depths
import click
logger = logging.getLogger(__name__)
@click.command()
@click.argument('markers', type=click.STRING, required=True)
def outline_depths(markers) -> None:
"""
Infer an outline's structure.
Return a list of outline depths for a given list of space-separated markers.
"""
# Input is space-separated.
marker_list = markers.split(' ')
all_solutions = derive_depths(
marker_list,
[optional_rules.limit_sequence_gap(1)]
)
depths = {tuple(str(a.depth) for a in s) for s in all_solutions}.pop()
# Expected output is space-separated.
formatted_output = ' '.join(depths)
click.echo(formatted_output)
if __name__ == '__main__':
"""Enable running this command directly. E.g.,
`$ python regparser/commands/outline_depths.py`. This can save 1.5 seconds
or more of startup time.
"""
outline_depths()
| import logging
from regparser.tree.depth import optional_rules
from regparser.tree.depth.derive import derive_depths
import click
logger = logging.getLogger(__name__)
@click.command()
@click.argument('markers', type=click.STRING, required=True)
def outline_depths(markers) -> None:
"""
Infer an outline's structure.
Return a list of outline depths for a given list of space-separated markers.
"""
# Input is space-separated.
marker_list = markers.split(' ')
all_solutions = derive_depths(
marker_list,
[optional_rules.limit_sequence_gap(1)]
)
depths = {tuple(str(a.depth) for a in s) for s in all_solutions}.pop()
# Expected output is space-separated.
formatted_output = ' '.join(depths)
print(formatted_output)
if __name__ == '__main__':
"""Enable running this command directly. E.g.,
`$ python regparser/commands/outline_depths.py`. This can save 1.5 seconds
or more of startup time.
"""
outline_depths()
| cc0-1.0 | Python |
2bddfb82dde0ca4c9edd0e303a43e95618faf826 | Add __hash__ so objects may be used in hashtable based datastructures | xenserver/python-libs,xenserver/python-libs | net/ifrename/macpci.py | net/ifrename/macpci.py | #!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
"""
MACPCI object.
Used extensivly for interface rename logic.
"""
__version__ = "1.0.1"
__author__ = "Andrew Cooper"
from xcp.pci import PCI
from xcp.net.mac import MAC
class MACPCI(object):
def __init__(self, mac, pci, kname=None, tname=None, order=0,
ppn=None, label=None):
if isinstance(mac, MAC):
self.mac = mac
else:
self.mac = MAC(mac)
if isinstance(pci, PCI):
self.pci = pci
else:
self.pci = PCI(pci)
self.kname = kname
self.tname = tname
self.order = order
self.ppn = ppn
self.label = label
def __str__(self):
res = ""
if self.kname:
res += "%s->" % (self.kname,)
res += "(%s,%s)" % (self.mac, self.pci)
if self.tname:
res += "->%s" % (self.tname,)
return res
def __repr__(self):
return str(self)
def __hash__(self):
return hash("%s-%s" % (self.mac, self.pci))
def __eq__(self, other):
return ( self.mac == other.mac and
self.pci == other.pci )
def __ne__(self, other):
return ( self.mac != other.mac or
self.pci != other.pci )
def __lt__(self, other):
return self.order < other.order
| #!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
"""
MACPCI object.
Used extensivly for interface rename logic.
"""
__version__ = "1.0.0"
__author__ = "Andrew Cooper"
from xcp.pci import PCI
from xcp.net.mac import MAC
class MACPCI(object):
def __init__(self, mac, pci, kname=None, tname=None, order=0,
ppn=None, label=None):
if isinstance(mac, MAC):
self.mac = mac
else:
self.mac = MAC(mac)
if isinstance(pci, PCI):
self.pci = pci
else:
self.pci = PCI(pci)
self.kname = kname
self.tname = tname
self.order = order
self.ppn = ppn
self.label = label
def __str__(self):
res = ""
if self.kname:
res += "%s->" % (self.kname,)
res += "(%s,%s)" % (self.mac, self.pci)
if self.tname:
res += "->%s" % (self.tname,)
return res
def __repr__(self):
return str(self)
def __eq__(self, other):
return ( self.mac == other.mac and
self.pci == other.pci )
def __ne__(self, other):
return ( self.mac != other.mac or
self.pci != other.pci )
def __lt__(self, other):
return self.order < other.order
| bsd-2-clause | Python |
4d18e524f4ad3275492bfa6dfcf2d0a113d6dede | fix validate function not allowing exp, log, sin, ... | Neurosim-lab/netpyne,Neurosim-lab/netpyne,thekerrlab/netpyne | netpyne/specs/utils.py | netpyne/specs/utils.py | """
specs/utils.py
Helper functions for high-level specifications
Contributors: salvador dura@gmail.com
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from numbers import Number
from neuron import h
def validateFunction(strFunc, netParamsVars):
''' returns True if "strFunc" can be evaluated'''
from math import exp, log, sqrt, sin, cos, tan, asin, acos, atan, sinh, cosh, tanh, pi, e
rand = h.Random()
stringFuncRandMethods = ['binomial', 'discunif', 'erlang', 'geometric', 'hypergeo',
'lognormal', 'negexp', 'normal', 'poisson', 'uniform', 'weibull']
for randmeth in stringFuncRandMethods: strFunc = strFunc.replace(randmeth, 'rand.'+randmeth)
variables = {
"pre_x" : 1, "pre_y" : 1, "pre_z" : 1,
"post_x" : 1, "post_y" : 1, "post_z" : 1,
"dist_x" : 1, "dist_y" : 1, "dist_z" : 1,
"pre_xnorm" : 1, "pre_ynorm" : 1, "pre_znorm" : 1,
"post_xnorm" : 1, "post_ynorm" : 1, "post_znorm" : 1,
"dist_xnorm" : 1, "dist_ynorm" : 1, "dist_znorm" : 1,
"dist_3D" : 1, "dist_3D_border" : 1, "dist_2D" : 1,
"dist_norm3D": 1, "dist_norm2D" : 1, "rand": rand,
"exp": exp, "log":log, "sqrt": sqrt,
"sin":sin, "cos":cos, "tan":tan, "asin":asin,
"acos":acos, "atan":atan, "sinh":sinh, "cosh":cosh,
"tanh":tanh, "pi":pi,"e": e
}
# add netParams variables
for k, v in netParamsVars.items():
if isinstance(v, Number):
variables[k] = v
try:
eval(strFunc, variables)
return True
except:
return False
| """
specs/utils.py
Helper functions for high-level specifications
Contributors: salvador dura@gmail.com
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from numbers import Number
from neuron import h
def validateFunction(strFunc, netParamsVars):
''' returns True if "strFunc" can be evaluated'''
from math import exp, log, sqrt, sin, cos, tan, asin, acos, atan, sinh, cosh, tanh, pi, e
rand = h.Random()
stringFuncRandMethods = ['binomial', 'discunif', 'erlang', 'geometric', 'hypergeo',
'lognormal', 'negexp', 'normal', 'poisson', 'uniform', 'weibull']
for randmeth in stringFuncRandMethods: strFunc = strFunc.replace(randmeth, 'rand.'+randmeth)
variables = {
"pre_x" : 1, "pre_y" : 1, "pre_z" : 1,
"post_x" : 1, "post_y" : 1, "post_z" : 1,
"dist_x" : 1, "dist_y" : 1, "dist_z" : 1,
"pre_xnorm" : 1, "pre_ynorm" : 1, "pre_znorm" : 1,
"post_xnorm" : 1, "post_ynorm" : 1, "post_znorm" : 1,
"dist_xnorm" : 1, "dist_ynorm" : 1, "dist_znorm" : 1,
"dist_3D" : 1, "dist_3D_border" : 1, "dist_2D" : 1,
"dist_norm3D": 1, "dist_norm2D" : 1, "rand": rand
}
# add netParams variables
for k, v in netParamsVars.items():
if isinstance(v, Number):
variables[k] = v
try:
eval(strFunc, variables)
return True
except:
return False
| mit | Python |
12bacecd27ecc49bf02567ff4d8404adb1ecd36c | remove API, find HLS via simple regex | bastimeyer/streamlink,streamlink/streamlink,melmorabity/streamlink,chhe/streamlink,gravyboat/streamlink,streamlink/streamlink,melmorabity/streamlink,chhe/streamlink,bastimeyer/streamlink,gravyboat/streamlink | src/streamlink/plugins/tv8.py | src/streamlink/plugins/tv8.py | import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r'https?://www\.tv8\.com\.tr/canli-yayin'
))
class TV8(Plugin):
_re_hls = re.compile(r"""file\s*:\s*(["'])(?P<hls_url>https?://.*?\.m3u8.*?)\1""")
def get_title(self):
return 'TV8'
def _get_streams(self):
hls_url = self.session.http.get(self.url, schema=validate.Schema(
validate.transform(self._re_hls.search),
validate.any(None, validate.get("hls_url"))
))
if hls_url is not None:
return HLSStream.parse_variant_playlist(self.session, hls_url)
__plugin__ = TV8
| import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r'https?://www\.tv8\.com\.tr/canli-yayin'
))
class TV8(Plugin):
_player_schema = validate.Schema(validate.all({
'servers': {
validate.optional('manifest'): validate.url(),
'hlsmanifest': validate.url(),
}},
validate.get('servers')))
API_URL = 'https://static.personamedia.tv/player/config/tv8.json'
def get_title(self):
return 'TV8'
def _get_streams(self):
res = self.session.http.get(self.API_URL)
data = self.session.http.json(res, schema=self._player_schema)
log.debug('{0!r}'.format(data))
return HLSStream.parse_variant_playlist(self.session, data['hlsmanifest'])
__plugin__ = TV8
| bsd-2-clause | Python |
18c24c724603f443e0d42846696571d9a27c18e7 | fix bad help text | dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | corehq/apps/userreports/management/commands/rebuild_indicator_table.py | corehq/apps/userreports/management/commands/rebuild_indicator_table.py | from django.core.management.base import LabelCommand, CommandError
from corehq.apps.userreports import tasks
class Command(LabelCommand):
help = "Rebuild a user configurable reporting table"
args = '<indicator_config_id>'
label = ""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is rebuild_indicator_table %s' % self.args)
config_id = args[0]
tasks.rebuild_indicators(config_id)
| from django.core.management.base import LabelCommand, CommandError
from corehq.apps.userreports import tasks
class Command(LabelCommand):
help = "Rebuild a user configurable reporting table"
args = '<indicator_config_id>'
label = ""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is copy_doc %s' % self.args)
config_id = args[0]
tasks.rebuild_indicators(config_id)
| bsd-3-clause | Python |
1af2d7dbb26b4836d2dad2419468c2e1a8ba7c97 | Call reload settings on handler init. This prevents an exception if we update status msg too quickly. | Floobits/plugin-common-python | handlers/base.py | handlers/base.py | try:
from ... import editor
except ValueError:
from floo import editor
from .. import msg, event_emitter, shared as G, utils
BASE_FLOORC = '''# Floobits config
# Logs messages to Sublime Text console instead of a special view
#log_to_console 1
# Enables debug mode
#debug 1
'''
class BaseHandler(event_emitter.EventEmitter):
BASE_FLOORC = BASE_FLOORC
PROTOCOL = None
def __init__(self):
super(BaseHandler, self).__init__()
G.AGENT = self
self.reload_settings()
def build_protocol(self, *args):
self.proto = self.PROTOCOL(*args)
self.proto.on("data", self.on_data)
self.proto.on("connect", self.on_connect)
return self.proto
def send(self, *args, **kwargs):
self.proto.put(*args, **kwargs)
def on_data(self, name, data):
handler = getattr(self, "_on_%s" % name, None)
if handler:
return handler(data)
msg.debug('unknown name!', name, 'data:', data)
@property
def client(self):
return editor.name()
@property
def codename(self):
return editor.codename()
def _on_error(self, data):
message = 'Error from server! Message: %s' % str(data.get('msg'))
msg.error(message)
if data.get('flash'):
editor.error_message('Error from Floobits server: %s' % str(data.get('msg')))
def _on_disconnect(self, data):
message = 'Disconnected from server! Reason: %s' % str(data.get('reason'))
msg.error(message)
editor.error_message(message)
self.proto.stop()
def is_ready(self):
return G.JOINED_WORKSPACE
def reload_settings(self):
utils.reload_settings()
self.username = G.USERNAME
self.secret = G.SECRET
self.api_key = G.API_KEY
def tick(self):
pass
| try:
from ... import editor
except ValueError:
from floo import editor
from .. import msg, event_emitter, shared as G, utils
BASE_FLOORC = '''# Floobits config
# Logs messages to Sublime Text console instead of a special view
#log_to_console 1
# Enables debug mode
#debug 1
'''
class BaseHandler(event_emitter.EventEmitter):
BASE_FLOORC = BASE_FLOORC
PROTOCOL = None
def __init__(self):
super(BaseHandler, self).__init__()
G.AGENT = self
def build_protocol(self, *args):
self.proto = self.PROTOCOL(*args)
self.proto.on("data", self.on_data)
self.proto.on("connect", self.on_connect)
return self.proto
def send(self, *args, **kwargs):
self.proto.put(*args, **kwargs)
def on_data(self, name, data):
handler = getattr(self, "_on_%s" % name, None)
if handler:
return handler(data)
msg.debug('unknown name!', name, 'data:', data)
@property
def client(self):
return editor.name()
@property
def codename(self):
return editor.codename()
def _on_error(self, data):
message = 'Error from server! Message: %s' % str(data.get('msg'))
msg.error(message)
if data.get('flash'):
editor.error_message('Error from Floobits server: %s' % str(data.get('msg')))
def _on_disconnect(self, data):
message = 'Disconnected from server! Reason: %s' % str(data.get('reason'))
msg.error(message)
editor.error_message(message)
self.proto.stop()
def is_ready(self):
return G.JOINED_WORKSPACE
def reload_settings(self):
utils.reload_settings()
self.username = G.USERNAME
self.secret = G.SECRET
self.api_key = G.API_KEY
def tick(self):
pass
| apache-2.0 | Python |
b1b504ca45a1d685f6c8650f15fc2e907ccdf9f9 | Update question_5.py | Nauqcaj/quiz-itp-w1 | quiz-questions/question_5.py | quiz-questions/question_5.py | """Intro to Python - Week 1 - Quiz."""
# Question 5
def calculate_tax(income):
"""Implement the code required to make this function work.
Write a function `calculate_tax` that receives a number (`income`) and
calculates how much of Federal taxes is due,
according to the following table:
| Income | Tax Percentage |
| ------------- | ------------- |
| <= $50,000 | 15% |
| <= $75,000 | 25% |
| <= $100,000 | 30% |
| > $100,000 | 35% |
Example:
income = 30000 # $30,000 is less than $50,000
calculate_tax(income) # $30,000 * 0.15 = 4500 = $4,500
income = 80000 # $80,000 is more than $75,000 but less than $80,000
calculate_tax(income) # $80,000 * 0.25 = 20000 = $20,000
income = 210000 # $210,000 is more than $100,000
calculate_tax(income) # $210,000 * 0.35 = 73500 = $73,500
"""
# Write your code here
income = int(input()
while income <= 50000:
return income * 0.15
elif income <= 75000:
return income * 0.25
elif income <= 100000:
return income * .30
else:
return income * .35
| """Intro to Python - Week 1 - Quiz."""
# Question 5
def calculate_tax(income):
"""Implement the code required to make this function work.
Write a function `calculate_tax` that receives a number (`income`) and
calculates how much of Federal taxes is due,
according to the following table:
| Income | Tax Percentage |
| ------------- | ------------- |
| <= $50,000 | 15% |
| <= $75,000 | 25% |
| <= $100,000 | 30% |
| > $100,000 | 35% |
Example:
income = 30000 # $30,000 is less than $50,000
calculate_tax(income) # $30,000 * 0.15 = 4500 = $4,500
income = 80000 # $80,000 is more than $75,000 but less than $80,000
calculate_tax(income) # $80,000 * 0.25 = 20000 = $20,000
income = 210000 # $210,000 is more than $100,000
calculate_tax(income) # $210,000 * 0.35 = 73500 = $73,500
"""
# Write your code here
pass
| mit | Python |
a50cbf369a1eb17ce5dd23882af96148ea262172 | fix imcompatibility between json and model | AllanNozomu/tecsaladeaula,AllanNozomu/tecsaladeaula,mupi/tecsaladeaula,GustavoVS/timtec,AllanNozomu/tecsaladeaula,AllanNozomu/tecsaladeaula,virgilio/timtec,virgilio/timtec,mupi/tecsaladeaula,virgilio/timtec,hacklabr/timtec,mupi/timtec,GustavoVS/timtec,hacklabr/timtec,mupi/tecsaladeaula,GustavoVS/timtec,mupi/escolamupi,hacklabr/timtec,GustavoVS/timtec,mupi/timtec,mupi/tecsaladeaula,hacklabr/timtec,mupi/timtec,mupi/timtec,virgilio/timtec,mupi/escolamupi | forum/models.py | forum/models.py | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.template.defaultfilters import slugify
from autoslug import AutoSlugField
from core.models import Course, Lesson
class Question(models.Model):
title = models.CharField(_('Title'), max_length=255)
text = models.TextField(_('Question'))
slug = AutoSlugField(_('Slug'), populate_from='title', max_length=255, editable=False, unique=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'))
correct_answer = models.OneToOneField('Answer', verbose_name=_('Correct answer'), related_name='+', null=True, blank=True)
timestamp = models.DateTimeField(auto_now_add=True, editable=False)
course = models.ForeignKey(Course, verbose_name=_('Course'))
lesson = models.ForeignKey(Lesson, verbose_name=_('Lesson'), null=True, blank=True)
def save(self, **kwargs):
if not self.id and self.title:
self.slug = slugify(self.title)
super(Question, self).save(**kwargs)
def __unicode__(self):
return self.title
@property
def count_votes(self):
return self.votes.aggregate(models.Sum('value'))['value__sum'] or 0
class Answer(models.Model):
question = models.ForeignKey(Question, related_name='answers', verbose_name=_('Question'))
text = models.TextField(_('Answer'))
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'), related_name='forum_answers')
timestamp = models.DateTimeField(auto_now_add=True, editable=False)
def __unicode__(self):
return self.text
@property
def count_votes(self):
return self.votes.aggregate(models.Sum('value'))['value__sum'] or 0
class Vote(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'))
timestamp = models.DateTimeField(auto_now_add=True, editable=False)
# Defines vote up or vote down. Vote up:1; Vote down: -1.
value = models.IntegerField(null=False, blank=False)
class QuestionVote(Vote):
question = models.ForeignKey(Question, related_name='votes', verbose_name=_('Question'))
# class Meta:
# unique_together = ('question', 'user')
class AnswerVote(Vote):
answer = models.ForeignKey(Answer, related_name='votes', verbose_name=_('Answer'))
# class Meta:
# unique_together = ('answer', 'user')
| # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.template.defaultfilters import slugify
from autoslug import AutoSlugField
from core.models import Course, Lesson
class Question(models.Model):
title = models.CharField(_('Title'), max_length=255)
text = models.TextField(_('Question'))
slug = AutoSlugField(_('Slug'), populate_from='title', max_length=100, editable=False, unique=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'))
correct_answer = models.OneToOneField('Answer', verbose_name=_('Correct answer'), related_name='+', null=True, blank=True)
timestamp = models.DateTimeField(auto_now_add=True, editable=False)
course = models.ForeignKey(Course, verbose_name=_('Course'))
lesson = models.ForeignKey(Lesson, verbose_name=_('Lesson'), null=True, blank=True)
def save(self, **kwargs):
if not self.id and self.title:
self.slug = slugify(self.title)
super(Question, self).save(**kwargs)
def __unicode__(self):
return self.title
@property
def count_votes(self):
return self.votes.aggregate(models.Sum('value'))['value__sum'] or 0
class Answer(models.Model):
question = models.ForeignKey(Question, related_name='answers', verbose_name=_('Question'))
text = models.TextField(_('Answer'))
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'), related_name='forum_answers')
timestamp = models.DateTimeField(auto_now_add=True, editable=False)
def __unicode__(self):
return self.text
@property
def count_votes(self):
return self.votes.aggregate(models.Sum('value'))['value__sum'] or 0
class Vote(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'))
timestamp = models.DateTimeField(auto_now_add=True, editable=False)
# Defines vote up or vote down. Vote up:1; Vote down: -1.
value = models.IntegerField(null=False, blank=False)
class QuestionVote(Vote):
question = models.ForeignKey(Question, related_name='votes', verbose_name=_('Question'))
# class Meta:
# unique_together = ('question', 'user')
class AnswerVote(Vote):
answer = models.ForeignKey(Answer, related_name='votes', verbose_name=_('Answer'))
# class Meta:
# unique_together = ('answer', 'user')
| agpl-3.0 | Python |
a39bb013179b0b4f3749c1f9fc99f8f90706b36e | Add a couple more donation options | LukasBoersma/readthedocs.org,royalwang/readthedocs.org,nikolas/readthedocs.org,clarkperkins/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,singingwolfboy/readthedocs.org,gjtorikian/readthedocs.org,emawind84/readthedocs.org,techtonik/readthedocs.org,titiushko/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,safwanrahman/readthedocs.org,titiushko/readthedocs.org,kenwang76/readthedocs.org,atsuyim/readthedocs.org,safwanrahman/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,kdkeyser/readthedocs.org,wanghaven/readthedocs.org,Tazer/readthedocs.org,Tazer/readthedocs.org,fujita-shintaro/readthedocs.org,michaelmcandrew/readthedocs.org,michaelmcandrew/readthedocs.org,sunnyzwh/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,kenshinthebattosai/readthedocs.org,stevepiercy/readthedocs.org,soulshake/readthedocs.org,singingwolfboy/readthedocs.org,hach-que/readthedocs.org,takluyver/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,LukasBoersma/readthedocs.org,stevepiercy/readthedocs.org,tddv/readthedocs.org,kenshinthebattosai/readthedocs.org,laplaceliu/readthedocs.org,VishvajitP/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,emawind84/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,takluyver/readthedocs.org,asampat3090/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,nikolas/readthedocs.org,soulshake/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,clarkperkins/readthedocs.org,atsuyim/readthedocs.org,sunnyzwh/readthedocs.org,stevepiercy/readthedocs.org,atsuyim/readthedocs.org,soulshake/readthedocs.org,kenwang76/readthedocs.org,royalwang/readthedocs.or
g,techtonik/readthedocs.org,fujita-shintaro/readthedocs.org,tddv/readthedocs.org,cgourlay/readthedocs.org,istresearch/readthedocs.org,VishvajitP/readthedocs.org,sils1297/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,VishvajitP/readthedocs.org,agjohnson/readthedocs.org,atsuyim/readthedocs.org,attakei/readthedocs-oauth,gjtorikian/readthedocs.org,cgourlay/readthedocs.org,espdev/readthedocs.org,sils1297/readthedocs.org,d0ugal/readthedocs.org,agjohnson/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,nikolas/readthedocs.org,davidfischer/readthedocs.org,nikolas/readthedocs.org,cgourlay/readthedocs.org,CedarLogic/readthedocs.org,CedarLogic/readthedocs.org,dirn/readthedocs.org,davidfischer/readthedocs.org,hach-que/readthedocs.org,istresearch/readthedocs.org,hach-que/readthedocs.org,sid-kap/readthedocs.org,kdkeyser/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,sunnyzwh/readthedocs.org,gjtorikian/readthedocs.org,dirn/readthedocs.org,agjohnson/readthedocs.org,GovReady/readthedocs.org,raven47git/readthedocs.org,emawind84/readthedocs.org,safwanrahman/readthedocs.org,wijerasa/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,d0ugal/readthedocs.org,tddv/readthedocs.org,wijerasa/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,royalwang/readthedocs.org,sid-kap/readthedocs.org,laplaceliu/readthedocs.org,techtonik/readthedocs.org,Tazer/readthedocs.org,istresearch/readthedocs.org,titiushko/readthedocs.org,mhils/readthedocs.org,kenwang76/readthedocs.org,wijerasa/readthedocs.org,espdev/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,CedarLogic/readthedocs.org,safwanrahman/readthedocs.org,takluyver/readthedocs.org,royalwang/readthedocs.org,wanghaven/readthedocs.org,davidfischer/readthedocs.org,takluyver/readthedocs.org,CedarLogic/readthedocs.org,kenshinthebattosai/readthedocs.org,sid-kap/readthedocs.org,attakei/readthedocs-oauth,singingwolfboy/readthedocs.org,michaelmcandrew/
readthedocs.org,mhils/readthedocs.org,raven47git/readthedocs.org,fujita-shintaro/readthedocs.org,rtfd/readthedocs.org,asampat3090/readthedocs.org,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,singingwolfboy/readthedocs.org,dirn/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,rtfd/readthedocs.org,sils1297/readthedocs.org,VishvajitP/readthedocs.org,SteveViss/readthedocs.org,stevepiercy/readthedocs.org,kenshinthebattosai/readthedocs.org,SteveViss/readthedocs.org,pombredanne/readthedocs.org,jerel/readthedocs.org,d0ugal/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,GovReady/readthedocs.org,soulshake/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,attakei/readthedocs-oauth,LukasBoersma/readthedocs.org,espdev/readthedocs.org,SteveViss/readthedocs.org,rtfd/readthedocs.org | readthedocs/donate/models.py | readthedocs/donate/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
AMOUNT_CHOICES = (
(5, '$5'),
(10, '$10'),
(25, '$25'),
(50, '1 Hour ($50)'),
(100, '2 Hours ($100)'),
(200, '4 Hours ($200)'),
(400, '1 Day ($400)'),
(800, '2 Days ($800)'),
(1200, '3 Days ($1200)'),
(1600, '4 Days ($1600)'),
(2000, '5 Days ($2000)'),
(4000, '2 Weeks ($4000)'),
(6000, '3 Weeks ($6000)'),
(8000, '4 Weeks ($8000)'),
)
class Supporter(models.Model):
pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
public = models.BooleanField(_('Public'), default=True)
name = models.CharField(_('name'), max_length=200, blank=True)
email = models.EmailField(_('Email'), max_length=200, blank=True)
user = models.ForeignKey('auth.User', verbose_name=_('User'),
related_name='goldonce', blank=True, null=True)
dollars = models.IntegerField(_('Amount'), max_length=30, default=50)
logo_url = models.URLField(_('Logo URL'), max_length=255, blank=True,
null=True)
site_url = models.URLField(_('Site URL'), max_length=255, blank=True,
null=True)
last_4_digits = models.CharField(max_length=4)
stripe_id = models.CharField(max_length=255)
subscribed = models.BooleanField(default=False)
def __str__(self):
return self.name
| from django.db import models
from django.utils.translation import ugettext_lazy as _
AMOUNT_CHOICES = (
(5, '$5'),
(10, '$10'),
(25, '$25'),
(50, '1 Hour ($50)'),
(100, '2 Hours ($100)'),
(200, '4 Hours ($200)'),
(400, '1 Day ($400)'),
(800, '2 Days ($800)'),
(1200, '3 Days ($1200)'),
(1600, '4 Days ($1600)'),
(2000, '5 Days ($2000)'),
)
class Supporter(models.Model):
pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
public = models.BooleanField(_('Public'), default=True)
name = models.CharField(_('name'), max_length=200, blank=True)
email = models.EmailField(_('Email'), max_length=200, blank=True)
user = models.ForeignKey('auth.User', verbose_name=_('User'),
related_name='goldonce', blank=True, null=True)
dollars = models.IntegerField(_('Amount'), max_length=30,
choices=AMOUNT_CHOICES, default=50)
logo_url = models.URLField(_('Logo URL'), max_length=255, blank=True,
null=True)
site_url = models.URLField(_('Site URL'), max_length=255, blank=True,
null=True)
last_4_digits = models.CharField(max_length=4)
stripe_id = models.CharField(max_length=255)
subscribed = models.BooleanField(default=False)
def __str__(self):
return self.name
| mit | Python |
9e8009b501ca7001f5346b278cd59ad596e3ebe0 | Bump version number to 0.2.1 | Dalloriam/engel,Dalloriam/engel,Dalloriam/engel | popeui/__init__.py | popeui/__init__.py | __version__ = "0.2.1"
from .application import Application, View
| __version__ = "0.2.0"
from .application import Application, View
| mit | Python |
5724ea0aa9d09b1af42f52e86a87dad4abda58e0 | Remove search autosync from tests | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | readthedocs/settings/test.py | readthedocs/settings/test.py | from __future__ import absolute_import
import os
from .dev import CommunityDevSettings
class CommunityTestSettings(CommunityDevSettings):
SLUMBER_USERNAME = 'test'
SLUMBER_PASSWORD = 'test'
SLUMBER_API_HOST = 'http://localhost:8000'
# A bunch of our tests check this value in a returned URL/Domain
PRODUCTION_DOMAIN = 'readthedocs.org'
GROK_API_HOST = 'http://localhost:8888'
DEBUG = False
TEMPLATE_DEBUG = False
ES_PAGE_IGNORE_SIGNALS = False
ELASTICSEARCH_DSL_AUTOSYNC = False
@property
def ES_INDEXES(self): # noqa - avoid pep8 N802
es_indexes = super(CommunityTestSettings, self).ES_INDEXES
for index_conf in es_indexes.values():
index_conf['name'] = "test_{}".format(index_conf['name'])
return es_indexes
@property
def LOGGING(self): # noqa - avoid pep8 N802
logging = super(CommunityDevSettings, self).LOGGING
return logging
CommunityTestSettings.load_settings(__name__)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'PREFIX': 'docs',
}
}
if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):
try:
from .local_settings import * # noqa
except ImportError:
pass
| from __future__ import absolute_import
import os
from .dev import CommunityDevSettings
class CommunityTestSettings(CommunityDevSettings):
SLUMBER_USERNAME = 'test'
SLUMBER_PASSWORD = 'test'
SLUMBER_API_HOST = 'http://localhost:8000'
# A bunch of our tests check this value in a returned URL/Domain
PRODUCTION_DOMAIN = 'readthedocs.org'
GROK_API_HOST = 'http://localhost:8888'
DEBUG = False
TEMPLATE_DEBUG = False
ES_PAGE_IGNORE_SIGNALS = False
ELASTICSEARCH_DSL_AUTOSYNC = True
@property
def ES_INDEXES(self): # noqa - avoid pep8 N802
es_indexes = super(CommunityTestSettings, self).ES_INDEXES
for index_conf in es_indexes.values():
index_conf['name'] = "test_{}".format(index_conf['name'])
return es_indexes
@property
def LOGGING(self): # noqa - avoid pep8 N802
logging = super(CommunityDevSettings, self).LOGGING
return logging
CommunityTestSettings.load_settings(__name__)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'PREFIX': 'docs',
}
}
if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):
try:
from .local_settings import * # noqa
except ImportError:
pass
| mit | Python |
19fff3f8c5e7cda590ad578638aec09349d09f11 | make username unique | tschaume/global_gitfeed_api,tschaume/global_gitfeed_api | api/__init__.py | api/__init__.py | import os, bcrypt
from eve import Eve
from flask.ext.bootstrap import Bootstrap
from eve_docs import eve_docs
from eve.auth import BasicAuth
class BCryptAuth(BasicAuth):
def check_auth(self, username, password, allowed_roles, resource, method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
return (
account and
bcrypt.hashpw(password, account['password']) == account['password']
)
accounts = {
'schema': {
'username': {
'type': 'string',
'minlength': 5,
'required': True
'unique': True
},
'password': {
'type': 'string',
'required': True
}
}
}
gitcommits = {
'datasource': {
'default_sort': [('datetime',1)],
},
'schema': {
'project': {
'type': 'objectid',
'required': True,
'data_relation': {
'resource': 'projects',
'field': '_id',
'embeddable': True
}
},
'message': {
'type': 'string',
'minlength': 5,
'required': True,
},
'datetime': {
'type': 'datetime',
'required': True,
},
}
}
gitprojects = {
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name'
},
'schema': {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
'unique': True,
},
}
}
settings = {
#'SERVER_NAME': '127.0.0.1:5000', # dev
'SERVER_NAME': 'api.the-huck.com', # prod
'MONGO_HOST': 'localhost',
'MONGO_PORT': '27017',
#'MONGO_USERNAME': 'user',
#'MONGO_PASSWORD': 'user',
'MONGO_DBNAME': 'test',
'RESOURCE_METHODS': ['GET', 'POST', 'DELETE'],
'ITEM_METHODS': ['GET', 'PATCH', 'PUT', 'DELETE'],
'DATE_FORMAT': '%c',
'PUBLIC_METHODS': ['GET'],
'PUBLIC_ITEM_METHODS': ['GET'],
'DOMAIN': {
'accounts': accounts,
'gitcommits': gitcommits,
'gitprojects': gitprojects
}
}
app = Eve(auth=BCryptAuth, settings=settings)
Bootstrap(app)
app.register_blueprint(eve_docs, url_prefix='/docs')
| import os, bcrypt
from eve import Eve
from flask.ext.bootstrap import Bootstrap
from eve_docs import eve_docs
from eve.auth import BasicAuth
class BCryptAuth(BasicAuth):
def check_auth(self, username, password, allowed_roles, resource, method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
return (
account and
bcrypt.hashpw(password, account['password']) == account['password']
)
accounts = {
'schema': {
'username': {
'type': 'string',
'minlength': 5,
'required': True
},
'password': {
'type': 'string',
'required': True
}
}
}
gitcommits = {
'datasource': {
'default_sort': [('datetime',1)],
},
'schema': {
'project': {
'type': 'objectid',
'required': True,
'data_relation': {
'resource': 'projects',
'field': '_id',
'embeddable': True
}
},
'message': {
'type': 'string',
'minlength': 5,
'required': True,
},
'datetime': {
'type': 'datetime',
'required': True,
},
}
}
gitprojects = {
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name'
},
'schema': {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
'unique': True,
},
}
}
settings = {
#'SERVER_NAME': '127.0.0.1:5000', # dev
'SERVER_NAME': 'api.the-huck.com', # prod
'MONGO_HOST': 'localhost',
'MONGO_PORT': '27017',
#'MONGO_USERNAME': 'user',
#'MONGO_PASSWORD': 'user',
'MONGO_DBNAME': 'test',
'RESOURCE_METHODS': ['GET', 'POST', 'DELETE'],
'ITEM_METHODS': ['GET', 'PATCH', 'PUT', 'DELETE'],
'DATE_FORMAT': '%c',
'PUBLIC_METHODS': ['GET'],
'PUBLIC_ITEM_METHODS': ['GET'],
'DOMAIN': {
'accounts': accounts,
'gitcommits': gitcommits,
'gitprojects': gitprojects
}
}
app = Eve(auth=BCryptAuth, settings=settings)
Bootstrap(app)
app.register_blueprint(eve_docs, url_prefix='/docs')
| mit | Python |
4fe3840d3297df2a158a3fd15d3de7a2e4da86aa | reorganize url patterns | desec-io/desec-stack,desec-io/desec-stack,desec-io/desec-stack,desec-io/desec-stack | api/api/urls.py | api/api/urls.py | from django.conf.urls import include, url
from desecapi import views
from rest_framework.routers import SimpleRouter
tokens_router = SimpleRouter()
tokens_router.register(r'', views.TokenViewSet, base_name='token')
auth_urls = [
url(r'^users/create/$', views.UserCreateView.as_view(), name='user-create'), # deprecated
url(r'^token/create/$', views.TokenCreateView.as_view(), name='token-create'), # deprecated
url(r'^token/destroy/$', views.TokenDestroyView.as_view(), name='token-destroy'), # deprecated
url(r'^users/$', views.UserCreateView.as_view(), name='register'),
url(r'^token/login/$', views.TokenCreateView.as_view(), name='login'),
url(r'^token/logout/$', views.TokenDestroyView.as_view(), name='logout'),
url(r'^tokens/', include(tokens_router.urls)),
url(r'^', include('djoser.urls')),
url(r'^', include('djoser.urls.authtoken')),
]
api_urls = [
url(r'^$', views.Root.as_view(), name='root'),
url(r'^domains/$', views.DomainList.as_view(), name='domain-list'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/$', views.DomainDetail.as_view(), name='domain-detail'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/rrsets/$', views.RRsetList.as_view(), name='rrsets'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/rrsets/(?P<subname>(\*)?[a-zA-Z\.\-_0-9=]*)\.\.\./(?P<type>[A-Z][A-Z0-9]*)/$', views.RRsetDetail.as_view(), name='rrset'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/rrsets/(?P<subname>[*@]|[a-zA-Z\.\-_0-9=]+)/(?P<type>[A-Z][A-Z0-9]*)/$', views.RRsetDetail.as_view(), name='rrset@'),
url(r'^dns$', views.DnsQuery.as_view(), name='dns-query'),
url(r'^dyndns/update$', views.DynDNS12Update.as_view(), name='dyndns12update'),
url(r'^donation/', views.DonationList.as_view(), name='donation'),
url(r'^unlock/user/(?P<email>.+)$', views.unlock, name='unlock/byEmail'),
url(r'^unlock/done', views.unlock_done, name='unlock/done'),
]
urlpatterns = [
url(r'^api/v1/auth/', include(auth_urls)),
url(r'^api/v1/', include(api_urls)),
]
| from django.conf.urls import include, url
from rest_framework.urlpatterns import format_suffix_patterns
from desecapi import views
from rest_framework.routers import SimpleRouter
tokens_router = SimpleRouter()
tokens_router.register(r'', views.TokenViewSet, base_name='token')
tokens_urls = tokens_router.urls
apiurls = [
url(r'^$', views.Root.as_view(), name='root'),
url(r'^domains/$', views.DomainList.as_view(), name='domain-list'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/$', views.DomainDetail.as_view(), name='domain-detail'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/rrsets/$', views.RRsetList.as_view(), name='rrsets'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/rrsets/(?P<subname>(\*)?[a-zA-Z\.\-_0-9=]*)\.\.\./(?P<type>[A-Z][A-Z0-9]*)/$', views.RRsetDetail.as_view(), name='rrset'),
url(r'^domains/(?P<name>[a-zA-Z\.\-_0-9]+)/rrsets/(?P<subname>[*@]|[a-zA-Z\.\-_0-9=]+)/(?P<type>[A-Z][A-Z0-9]*)/$', views.RRsetDetail.as_view(), name='rrset@'),
url(r'^dns$', views.DnsQuery.as_view(), name='dns-query'),
url(r'^dyndns/update$', views.DynDNS12Update.as_view(), name='dyndns12update'),
url(r'^donation/', views.DonationList.as_view(), name='donation'),
url(r'^unlock/user/(?P<email>.+)$', views.unlock, name='unlock/byEmail'),
url(r'^unlock/done', views.unlock_done, name='unlock/done'),
]
urlpatterns = [
url(r'^api/v1/auth/users/create/$', views.UserCreateView.as_view(), name='user-create'), # deprecated
url(r'^api/v1/auth/token/create/$', views.TokenCreateView.as_view(), name='token-create'), # deprecated
url(r'^api/v1/auth/token/destroy/$', views.TokenDestroyView.as_view(), name='token-destroy'), # deprecated
url(r'^api/v1/auth/users/$', views.UserCreateView.as_view(), name='register'),
url(r'^api/v1/auth/token/login/$', views.TokenCreateView.as_view(), name='login'),
url(r'^api/v1/auth/token/logout/$', views.TokenDestroyView.as_view(), name='logout'),
url(r'^api/v1/auth/tokens/', include(tokens_urls)),
url(r'^api/v1/auth/', include('djoser.urls')),
url(r'^api/v1/auth/', include('djoser.urls.authtoken')),
url(r'^api/v1/', include(apiurls)),
]
| mit | Python |
a49e09ab00311a08296ce830443cc1b06338a502 | Update comments | etgalloway/powershellmagic | powershellmagic.py | powershellmagic.py | """IPython magics for Windows PowerShell.
"""
__version__ = '0.1'
import atexit
import os
from subprocess import Popen, PIPE
import sys
import tempfile
from IPython.core.magic import (cell_magic, Magics, magics_class)
from IPython.core.magic_arguments import (
argument, magic_arguments, parse_argstring)
@magics_class
class PowerShellMagics(Magics):
    """IPython magics class for Windows PowerShell.
    """
    # This class is patterned after
    # IPython.core.magics.script.ScriptMagics.
    def __init__(self, shell=None):
        super(PowerShellMagics, self).__init__(shell=shell)
        # Create one named .ps1 scratch file and reuse it for every cell;
        # delete=False so PowerShell can open it by path, and the atexit hook
        # removes it when the interpreter shuts down.
        tf = tempfile.NamedTemporaryFile(suffix='.ps1', delete=False)
        self._input_file_name = tf.name
        atexit.register(self._delete_powershell_input_file)

    def _delete_powershell_input_file(self):
        """Delete PowerShell input file."""
        os.remove(self._input_file_name)

    @magic_arguments()
    @argument(
        '--out',
        type=str,
        help="Redirect stdout to a variable."
    )
    @argument(
        '--err',
        type=str,
        help="Redirect stderr to a variable."
    )
    @cell_magic
    def powershell(self, line, cell):
        """Execute a cell written in PowerShell by spawning a process
        that invokes the command:
        PowerShell -ExecutionPolicy RemoteSigned -File tempfile.ps1
        where the argument to '-File' is a file that contains the contents
        of the cell.
        """
        # This function is patterned after
        # IPython.core.magics.ScriptMagics.shebang.
        args = parse_argstring(self.powershell, line)
        # Rewrite the scratch script with this cell's contents.
        with open(self._input_file_name, mode='w') as f:
            f.write(cell)
        cmd = 'PowerShell -ExecutionPolicy RemoteSigned -File {}\r\n'
        cmd = cmd.format(self._input_file_name)
        # NOTE(review): cmd.split() breaks if the temp-file path contains
        # spaces -- confirm, and consider building the argument list directly.
        p = Popen(cmd.split(), stdout=PIPE, stderr=PIPE, stdin=PIPE)
        out, err = p.communicate()
        out = out.decode()
        err = err.decode()
        # --out/--err capture the streams into user-namespace variables
        # instead of echoing them.
        if args.out:
            self.shell.user_ns[args.out] = out
        else:
            sys.stdout.write(out)
            sys.stdout.flush()
        if args.err:
            self.shell.user_ns[args.err] = err
        else:
            sys.stderr.write(err)
            sys.stderr.flush()
def load_ipython_extension(ip):
    """Entry point for ``%load_ext``: register the PowerShell magics with *ip*."""
    ip.register_magics(PowerShellMagics)
| """IPython magics for Windows PowerShell.
"""
__version__ = '0.1'
import atexit
import os
from subprocess import Popen, PIPE
import sys
import tempfile
from IPython.core.magic import (cell_magic, Magics, magics_class)
from IPython.core.magic_arguments import (
argument, magic_arguments, parse_argstring)
@magics_class
class PowerShellMagics(Magics):
"""IPython magics class for Windows PowerShell.
This class is patterned after IPython.core.magics.script.ScriptMagics.
"""
def __init__(self, shell=None):
super(PowerShellMagics, self).__init__(shell=shell)
tf = tempfile.NamedTemporaryFile(suffix='.ps1', delete=False)
self._input_file_name = tf.name
atexit.register(self._delete_powershell_input_file)
def _delete_powershell_input_file(self):
"""Delete PowerShell input file."""
os.remove(self._input_file_name)
@magic_arguments()
@argument('--out', type=str)
@argument('--err', type=str)
@cell_magic
def powershell(self, line, cell):
"""Execute a cell body using Powershell.
This function is patterned after
IPython.core.magics.ScriptMagics.shebang.
"""
args = parse_argstring(self.powershell, line)
with open(self._input_file_name, mode='w') as f:
f.write(cell)
cmd = 'powershell -ExecutionPolicy RemoteSigned -File {}\r\n'
cmd = cmd.format(self._input_file_name)
p = Popen(cmd.split(), stdout=PIPE, stderr=PIPE, stdin=PIPE)
out, err = p.communicate()
out = out.decode()
err = err.decode()
if args.out:
self.shell.user_ns[args.out] = out
else:
sys.stdout.write(out)
sys.stdout.flush()
if args.err:
self.shell.user_ns[args.err] = err
else:
sys.stderr.write(err)
sys.stderr.flush()
def load_ipython_extension(ip):
"""Load PowerShellMagics extension"""
ip.register_magics(PowerShellMagics)
| bsd-3-clause | Python |
d156f31e901887beb444d9b3eb6a3f5da1ec3394 | remove periodic task from indicators until we can fix it | dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq | mvp/tasks.py | mvp/tasks.py | from celery.schedules import crontab, schedule
from celery.task import periodic_task, task
from mvp.management.commands import mvp_update_existing
# NOTE(review): the periodic schedule is deliberately commented out -- the
# task is currently invoked manually; confirm before re-enabling.
#@periodic_task(run_every=crontab(minute=0, hour=[0, 12]))
def update_mvp_indicators():
    """Re-run the ``mvp_update_existing`` management command to refresh indicators."""
    update_existing = mvp_update_existing.Command()
    update_existing.handle()
| from celery.schedules import crontab, schedule
from celery.task import periodic_task, task
from mvp.management.commands import mvp_update_existing
@periodic_task(run_every=crontab(minute=0, hour=[0, 12]))
def update_mvp_indicators():
update_existing = mvp_update_existing.Command()
update_existing.handle()
| bsd-3-clause | Python |
abd1e46a0c3862977a1333cb1ce567c28c02a0a6 | Make files attachments not required | Hackfmi/Diaphanum,Hackfmi/Diaphanum | projects/models.py | projects/models.py | # -*- encoding:utf-8 -*-
from datetime import date
from django.db import models
class Project(models.Model):
    """A project proposal submitted by a member, tracked through review.

    ``status`` follows the review workflow below; review metadata
    (``discussed_at``, ``attitute``, ``number``) is filled in once the
    proposal has been considered.
    """

    # Review workflow states (Bulgarian labels shown in forms/admin).
    STATUS = (
        ('unrevised', u'Неразгледан'),
        ('returned', u'Върнат за корекция'),
        ('pending', u'Предстои да бъде разгледан на СИС'),
        ('approved', u'Разгледан и одобрен на СИС'),
        ('rejected', u'Разгледан и неодобрен на СИС'))

    user = models.ForeignKey('members.User', related_name='projects')
    name = models.CharField(max_length=100)
    # Financially liable person for the project.
    flp = models.ForeignKey('members.User', related_name='flp')
    team = models.ManyToManyField('members.User', related_name='team')
    description = models.TextField()
    targets = models.TextField()
    tasks = models.TextField()
    target_group = models.TextField()
    schedule = models.TextField()
    resources = models.TextField()
    finance_description = models.TextField()
    partners = models.TextField(blank=True, null=True)
    # Attachments are optional (blank=True).
    files = models.ManyToManyField('attachments.Attachment', blank=True)
    status = models.CharField(max_length=50,
                              choices=STATUS,
                              default='unrevised')
    discussed_at = models.DateField(blank=True, null=True)
    attitute = models.TextField(blank=True, null=True)
    number = models.CharField(max_length=30, blank=True, null=True)
    # Pass the callable itself: ``default=date.today()`` would be evaluated
    # once at import time, freezing that date for every row created later.
    created_at = models.DateField(default=date.today)

    def __unicode__(self):
        return self.name
| # -*- encoding:utf-8 -*-
from datetime import date
from django.db import models
class Project(models.Model):
STATUS = (
('unrevised', u'Неразгледан'),
('returned', u'Върнат за корекция'),
('pending', u'Предстои да бъде разгледан на СИС'),
('approved', u'Разгледан и одобрен на СИС'),
('rejected', u'Разгледан и неодобрен на СИС'))
user = models.ForeignKey('members.User', related_name='projects')
name = models.CharField(max_length=100)
flp = models.ForeignKey('members.User', related_name='flp')
team = models.ManyToManyField('members.User', related_name='team')
description = models.TextField()
targets = models.TextField()
tasks = models.TextField()
target_group = models.TextField()
schedule = models.TextField()
resources = models.TextField()
finance_description = models.TextField()
partners = models.TextField(blank=True, null=True)
files = models.ManyToManyField('attachments.Attachment')
status = models.CharField(max_length=50,
choices=STATUS,
default='unrevised')
discussed_at = models.DateField(blank=True, null=True)
attitute = models.TextField(blank=True, null=True)
number = models.CharField(max_length=30, blank=True, null=True)
created_at = models.DateField(default=date.today())
def __unicode__(self):
return self.name
| mit | Python |
4e30a21a4e17304582724714f0be6c7a8e2c1852 | Add image field to project | City-of-Helsinki/devheldev,terotic/devheldev,terotic/devheldev,terotic/devheldev,City-of-Helsinki/devheldev,City-of-Helsinki/devheldev | projects/models.py | projects/models.py | from django.db import models
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel, InlinePanel
from wagtail.wagtailsearch import index
from modelcluster.fields import ParentalKey
class ProjectPage(Orderable, Page):
    """Wagtail page describing a single project and its delivery status."""

    # Allowed delivery phases for the status field.
    STATUSES = (
        ('discovery', 'Discovery'),
        ('alpha', 'Alpha'),
        ('beta', 'Beta'),
        ('live', 'LIVE')
    )
    short_description = models.TextField()
    full_description = RichTextField(blank=True)
    image = models.ImageField(upload_to='project_images', blank=True, null=True)
    status = models.CharField(max_length=20, choices=STATUSES, default='discovery')

    def save(self, *args, **kwargs):
        # Default the page title from the related project's name.
        # NOTE(review): no ``project`` field is defined on this model, so
        # ``self.project`` would raise AttributeError when title is empty --
        # confirm where this attribute is meant to come from.
        if not self.title:
            if self.project:
                self.title = self.project.name
        return super().save(*args, **kwargs)

    # Make both description fields full-text searchable.
    search_fields = Page.search_fields + (
        index.SearchField('short_description'),
        index.SearchField('full_description'),
    )

    # Admin edit form layout, including the inline child models below.
    content_panels = Page.content_panels + [
        FieldPanel('status'),
        FieldPanel('short_description'),
        FieldPanel('full_description'),
        FieldPanel('image'),
        InlinePanel('kpis', label="Key performance indicators"),
        InlinePanel('roles', label="Contact us"),
        InlinePanel('links', label="Links"),
    ]
class ProjectRole(models.Model):
    """Links a person to a project in a named role (shown as "Contact us")."""
    TYPES = (
        ('owner', 'Product owner'),
        ('tech', 'Tech lead'),
    )
    project = ParentalKey('projects.ProjectPage', related_name='roles')
    type = models.CharField(max_length=20, choices=TYPES)
    person = ParentalKey('aboutus.PersonPage', related_name='roles')
class ProjectKPI(models.Model):
    """A single key-performance-indicator figure shown on a project page."""
    project = ParentalKey('projects.ProjectPage', related_name='kpis')
    name = models.CharField(max_length=20)
    description = models.CharField(max_length=200, null=True, blank=True)
    value = models.CharField(max_length=200)
class ProjectLink(models.Model):
    """An external link (main site, GitHub, ...) attached to a project page."""
    TYPES = (
        ('main', 'Main'),
        ('github', 'GitHub'),
    )
    project = ParentalKey('projects.ProjectPage', related_name='links')
    type = models.CharField(max_length=20, choices=TYPES)
    description = models.CharField(max_length=200, null=True, blank=True)
    url = models.URLField()

    def __str__(self):
        # e.g. "Some project / github"
        return "{0} / {1}".format(str(self.project), self.type)
class ProjectIndexPage(Page):
    """Listing page; only ProjectPage children may be created beneath it."""
    subpage_types = ['projects.ProjectPage']

    def projects(self):
        # Only published (live) project pages are listed.
        return ProjectPage.objects.live()
| from django.db import models
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel, InlinePanel
from wagtail.wagtailsearch import index
from modelcluster.fields import ParentalKey
class ProjectPage(Orderable, Page):
STATUSES = (
('discovery', 'Discovery'),
('alpha', 'Alpha'),
('beta', 'Beta'),
('live', 'LIVE')
)
short_description = models.TextField()
full_description = RichTextField(blank=True)
status = models.CharField(max_length=20, choices=STATUSES, default='discovery')
def save(self, *args, **kwargs):
if not self.title:
if self.project:
self.title = self.project.name
return super().save(*args, **kwargs)
search_fields = Page.search_fields + (
index.SearchField('short_description'),
index.SearchField('full_description'),
)
content_panels = Page.content_panels + [
FieldPanel('status'),
FieldPanel('short_description'),
FieldPanel('full_description'),
InlinePanel('kpis', label="Key performance indicators"),
InlinePanel('roles', label="Contact us"),
InlinePanel('links', label="Links"),
]
class ProjectRole(models.Model):
TYPES = (
('owner', 'Product owner'),
('tech', 'Tech lead'),
)
project = ParentalKey('projects.ProjectPage', related_name='roles')
type = models.CharField(max_length=20, choices=TYPES)
person = ParentalKey('aboutus.PersonPage', related_name='roles')
class ProjectKPI(models.Model):
project = ParentalKey('projects.ProjectPage', related_name='kpis')
name = models.CharField(max_length=20)
description = models.CharField(max_length=200, null=True, blank=True)
value = models.CharField(max_length=200)
class ProjectLink(models.Model):
TYPES = (
('main', 'Main'),
('github', 'GitHub'),
)
project = ParentalKey('projects.ProjectPage', related_name='links')
type = models.CharField(max_length=20, choices=TYPES)
description = models.CharField(max_length=200, null=True, blank=True)
url = models.URLField()
def __str__(self):
return "{0} / {1}".format(str(self.project), self.type)
class ProjectIndexPage(Page):
subpage_types = ['projects.ProjectPage']
def projects(self):
return ProjectPage.objects.live()
| agpl-3.0 | Python |
3f71420b2711bdaf721ca6db87d806093811bb5b | Fix detection of make(1) result | mjhanninen/oldfart,mjhanninen/oldfart,mjhanninen/oldfart | py/oldfart/make.py | py/oldfart/make.py | import os
import re
import subprocess
__all__ = ['NOTHING_DONE', 'SUCCESS', 'NO_RULE', 'FAILURE', 'Maker']
NOTHING_DONE = 1
SUCCESS = 2
NO_RULE = 3
FAILURE = 4
def _occur(fmt, needle, haystack):
return bool(re.search(('^' + fmt + '$').format(needle),
haystack, re.MULTILINE))
class Maker(object):
    """Thin wrapper that runs ``make(1)`` against one project's makefile."""

    def __init__(self, project_dir='.', makefile='Makefile'):
        # Resolve both paths up front so make() works regardless of the
        # caller's current working directory.
        self.project_dir = os.path.abspath(project_dir)
        self.makefile = os.path.abspath(os.path.join(project_dir, makefile))

    def make(self, target):
        """Run `make(1)` on `target`, returning a tuple `(status, output)`
        where `status` is one of:
        - `make.SUCCESS`: the target was successfully generated
        - `make.NOTHING_DONE`: the target was already up-to-date
        - `make.NO_RULE`: there is no rule to build the requested target
        - `make.FAILURE`: `make(1)` exited otherwise with a non-zero error code
        Returned `output` contains always the mixed output from `stdout` and
        `stderr`.
        """
        try:
            # stderr is folded into stdout so callers get one combined log.
            capture = subprocess.check_output(
                ['make', '--makefile=' + self.makefile, target],
                cwd=self.project_dir, stderr=subprocess.STDOUT,
                universal_newlines=True)
            # Exit code 0: distinguish "already up to date" from real work
            # by scanning make's output.
            if _occur("make: `{:s}' is up to date.", target, capture):
                return (NOTHING_DONE, capture)
            else:
                return (SUCCESS, capture)
        except subprocess.CalledProcessError as e:
            # Non-zero exit: check the captured output for the no-rule message.
            if _occur(r"make: \*\*\* No rule to make target `{:s}'. Stop.",
                      target, e.output):
                return (NO_RULE, e.output)
            else:
                return (FAILURE, e.output)
| import os
import re
import subprocess
__all__ = ['NOTHING_DONE', 'SUCCESS', 'NO_RULE', 'FAILURE', 'Maker']
NOTHING_DONE = 1
SUCCESS = 2
NO_RULE = 3
FAILURE = 4
class Maker(object):
def __init__(self, project_dir='.', makefile='Makefile'):
self.project_dir = os.path.abspath(project_dir)
self.makefile = os.path.abspath(os.path.join(project_dir, makefile))
def make(self, target):
"""Runs `make(1)` on `target` and returning a tuple `(status, output)`
where `status` is one of:
- `make.SUCCESS`: the target was successfully generated
- `make.NOTHING_DONE`: the target was already up-to-date
- `make.NO_RULE`: there is no rule to build the requested target
- `make.FAILURE`: `make(1)` exited otherwise with a non-zero error code
Returned `output` contains always the mixed output from `stdout` and
`stderr`.
"""
try:
capture = subprocess.check_output(
['make', '--makefile=' + self.makefile, target],
cwd=self.project_dir, stderr=subprocess.STDOUT,
universal_newlines=True)
if re.match(r"make: `[^']*' is up to date.", capture):
return (NOTHING_DONE, capture)
else:
return (SUCCESS, capture)
except subprocess.CalledProcessError as e:
if re.match(r"make: \*\*\* No rule to make target `{:s}'. Stop."
.format(target), e.output):
return (NO_RULE, e.output)
else:
return (FAILURE, e.output)
| bsd-3-clause | Python |
5961a841acbfac7aed64056f9be8078ed2338410 | Update the version ID. | jeremiedecock/pyax12,jeremiedecock/pyax12 | pyax12/__init__.py | pyax12/__init__.py | # PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# Generic release markers:
# X.Y
# X.Y.Z # For bugfix releases
#
# Admissible pre-release markers:
# X.YaN # Alpha release
# X.YbN # Beta release
# X.YrcN # Release Candidate
# X.Y # Final release
#
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = '0.4.dev3'
__all__ = ['connection',
'instruction_packet',
'packet',
'status_packet',
'utils']
| # PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# Generic release markers:
# X.Y
# X.Y.Z # For bugfix releases
#
# Admissible pre-release markers:
# X.YaN # Alpha release
# X.YbN # Beta release
# X.YrcN # Release Candidate
# X.Y # Final release
#
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = '0.4.dev2'
__all__ = ['connection',
'instruction_packet',
'packet',
'status_packet',
'utils']
| mit | Python |
e0a212bd9c0729a635ca1adfaa3e18d79e601c03 | Bump version to 1.5.2 | cloudControl/pycclib,cloudControl/pycclib | pycclib/version.py | pycclib/version.py | # -*- coding: utf-8 -*-
__version__ = '1.5.2'
| # -*- coding: utf-8 -*-
__version__ = '1.5.1'
| apache-2.0 | Python |
230ce1c38e1bba7c69fd774d83d17a8014cd4aeb | Use a generator instead of a list in get_unused_ips | agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft | pycroft/lib/net.py | pycroft/lib/net.py | # Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from itertools import islice
from ipaddr import IPv4Address, IPv6Address, IPv4Network, IPv6Network
import sys
class SubnetFullException(Exception):
    """Raised when no candidate subnet has a free IP address left."""
    message = "Subnet full"
class MacExistsException(Exception):
    """Raised to signal that a MAC address already exists."""
    message = "MAC exists"
def get_unused_ips(subnets):
    """Map each subnet in *subnets* to a generator of its free host IPs.

    Reserved ranges at the bottom/top of each subnet are skipped, as are
    addresses already allocated in ``subnet.ips``.
    """
    def _available(hosts, used):
        # Bind *used* per subnet here.  A bare generator expression inside
        # the loop below would look up ``used_ips`` lazily when consumed,
        # filtering every subnet against the *last* subnet's allocations.
        return (ip for ip in hosts if ip not in used)

    unused = dict()
    for subnet in subnets:
        reserved_bottom = subnet.reserved_addresses_bottom or 0
        reserved_top = subnet.reserved_addresses_top or 0
        used_ips = frozenset(ip.address for ip in subnet.ips)
        unreserved = islice(
            subnet.address.iterhosts(), reserved_bottom,
            # Stop argument must be None or an integer: 0 <= x <= sys.maxsize.
            # IPv6 subnets can exceed this boundary on 32 bit python builds.
            min(subnet.address.numhosts - reserved_top - 2, sys.maxsize))
        unused[subnet] = _available(unreserved, used_ips)
    return unused
def get_free_ip(subnets):
    """Return ``(ip, subnet)`` for the first free address found in *subnets*.

    :raises SubnetFullException: if every subnet is exhausted.
    """
    unused = get_unused_ips(subnets)
    for subnet, ips in unused.items():
        try:
            ip = next(ips)
            if ip is not None and subnet is not None:
                return ip, subnet
        except StopIteration:
            # This subnet has no free addresses; try the next one.
            continue
    raise SubnetFullException()
def ptr_name(network, ip_address):
    """Return the reverse-DNS (PTR) label for *ip_address* within *network*.

    Only the host part of the address is emitted: enough octets (IPv4) or
    hex nibbles (IPv6) to cover the network's host bits, least significant
    first, dot-separated.

    :param IPv4Network|IPv6Network network:
    :param IPv4Address|IPv6Address ip_address:
    :rtype: str
    :return:
    :raises TypeError: if *ip_address* is neither IPv4 nor IPv6.
    """
    hostbits = network.max_prefixlen - network.prefixlen
    if isinstance(ip_address, IPv4Address):
        # Ceil-divide host bits into octets, emitting at least one.  The
        # previous ``min((hostbits + 7 // 8), 1)`` reduced to
        # ``min(hostbits, 1)`` via operator precedence, truncating every
        # result to a single octet.
        num_octets = max((hostbits + 7) // 8, 1)
        reversed_octets = reversed(ip_address.exploded.split('.'))
        return '.'.join(islice(reversed_octets, num_octets))
    elif isinstance(ip_address, IPv6Address):
        # Same, with 4-bit nibbles for ip6.arpa-style names.
        num_chars = max((hostbits + 3) // 4, 1)
        reversed_chars = reversed(ip_address.exploded.replace(':', ''))
        return '.'.join(islice(reversed_chars, num_chars))
    raise TypeError()
| # Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from itertools import islice
from ipaddr import IPv4Address, IPv6Address, IPv4Network, IPv6Network
import sys
class SubnetFullException(Exception):
message = "Subnet full"
class MacExistsException(Exception):
message = "MAC exists"
def get_unused_ips(subnets):
unused = dict()
for subnet in subnets:
reserved_bottom = subnet.reserved_addresses_bottom or 0
reserved_top = subnet.reserved_addresses_top or 0
used_ips = frozenset(ip.address for ip in subnet.ips)
unreserved = islice(
subnet.address.iterhosts(), reserved_bottom,
# Stop argument must be None or an integer: 0 <= x <= sys.maxsize.
# IPv6 subnets can exceed this boundary on 32 bit python builds.
min(subnet.address.numhosts - reserved_top - 2, sys.maxsize))
unused[subnet] = list(ip for ip in unreserved if ip not in used_ips)
return unused
def get_free_ip(subnets):
unused = get_unused_ips(subnets)
for subnet, ips in unused.items():
try:
ip = next(ips)
if ip is not None and subnet is not None:
return ip, subnet
except StopIteration:
continue
raise SubnetFullException()
def ptr_name(network, ip_address):
"""
:param IPv4Network|IPv6Network network:
:param IPv4Address|IPv6Address ip_address:
:rtype: str
:return:
"""
hostbits = network.max_prefixlen - network.prefixlen
if isinstance(ip_address, IPv4Address):
num_octets = min((hostbits + 7 // 8), 1)
reversed_octets = reversed(ip_address.exploded.split('.'))
return '.'.join(islice(reversed_octets, num_octets))
elif isinstance(ip_address, IPv6Address):
num_chars = min((hostbits + 3 // 4), 1)
reversed_chars = reversed(ip_address.exploded.replace(':', ''))
return '.'.join(islice(reversed_chars, num_chars))
raise TypeError()
| apache-2.0 | Python |
d1ce4a7aba39e76e2c8ab2d4d5cca75b1a55889a | Increase version number. | alejandroautalan/pygubu,alejandroautalan/pygubu | pygubu/__init__.py | pygubu/__init__.py | # encoding: utf8
from __future__ import unicode_literals
__all__ = ['Builder', 'TkApplication', 'BuilderObject', 'register_widget',
'register_property', 'remove_binding', 'ApplicationLevelBindManager']
import pygubu.builder.builderobject
from pygubu.builder import Builder
from pygubu.builder.builderobject import BuilderObject, register_widget
from pygubu.binding import remove_binding, ApplicationLevelBindManager
__version__ = '0.10.4'
def register_property(name, description):
    """Module-level shortcut delegating to ``pygubu.builder.builderobject.register_property``."""
    return pygubu.builder.builderobject.register_property(name, description)
class TkApplication:
    """Minimal application base class wrapping a Tk toplevel window.

    Subclasses override the ``_init_before``/``_create_ui``/``_init_after``
    hooks; the window is hidden while they run and shown once they finish.
    """

    def __init__(self, master=None):
        self.master = master
        self.toplevel = master.winfo_toplevel()
        # Hide the window while the subclass hooks build the UI.
        self.toplevel.withdraw()
        self._init_before()
        self._create_ui()
        self._init_after()
        self.toplevel.deiconify()

    def _init_before(self):
        # Hook: runs before the UI is created.
        pass

    def _create_ui(self):
        # Hook: build the widget tree here.
        pass

    def _init_after(self):
        # Hook: runs after the UI is created.
        pass

    def run(self):
        """Execute the main loop."""
        self.toplevel.protocol("WM_DELETE_WINDOW", self.__on_window_close)
        self.toplevel.mainloop()

    def set_resizable(self):
        # Let the single top-level grid cell grow with the window.
        self.toplevel.rowconfigure(0, weight=1)
        self.toplevel.columnconfigure(0, weight=1)

    def set_title(self, title):
        """Set the window title."""
        self.toplevel.title(title)

    def set_menu(self, menu):
        """Set the main menu."""
        self.toplevel.config(menu=menu)

    def __on_window_close(self):
        """Manage WM_DELETE_WINDOW protocol."""
        if self.on_close_execute():
            self.toplevel.destroy()

    def on_close_execute(self):
        """Determine if the application is ready for quit,
        return boolean."""
        return True

    def quit(self):
        """Exit the app if it is ready for quit."""
        self.__on_window_close()

    def set_size(self, geom):
        """Set the window geometry, e.g. ``"800x600"``."""
        self.toplevel.geometry(geom)
| # encoding: utf8
from __future__ import unicode_literals
__all__ = ['Builder', 'TkApplication', 'BuilderObject', 'register_widget',
'register_property', 'remove_binding', 'ApplicationLevelBindManager']
import pygubu.builder.builderobject
from pygubu.builder import Builder
from pygubu.builder.builderobject import BuilderObject, register_widget
from pygubu.binding import remove_binding, ApplicationLevelBindManager
__version__ = '0.10.3'
def register_property(name, description):
return pygubu.builder.builderobject.register_property(name, description)
class TkApplication:
def __init__(self, master=None):
self.master = master
self.toplevel = master.winfo_toplevel()
self.toplevel.withdraw()
self._init_before()
self._create_ui()
self._init_after()
self.toplevel.deiconify()
def _init_before(self):
pass
def _create_ui(self):
pass
def _init_after(self):
pass
def run(self):
"""Ejecute the main loop."""
self.toplevel.protocol("WM_DELETE_WINDOW", self.__on_window_close)
self.toplevel.mainloop()
def set_resizable(self):
self.toplevel.rowconfigure(0, weight=1)
self.toplevel.columnconfigure(0, weight=1)
def set_title(self, title):
"""Set the window title."""
self.toplevel.title(title)
def set_menu(self, menu):
"""Set the main menu."""
self.toplevel.config(menu=menu)
def __on_window_close(self):
"""Manage WM_DELETE_WINDOW protocol."""
if self.on_close_execute():
self.toplevel.destroy()
def on_close_execute(self):
"""Determine if if the application is ready for quit,
return boolean."""
return True
def quit(self):
"""Exit the app if it is ready for quit."""
self.__on_window_close()
def set_size(self, geom):
self.toplevel.geometry(geom)
| mit | Python |
36a9bec0d616d3d0e9ea1aaa6285226492817005 | Comment clarifications in examples/basic_volume_usage.py | joferkington/python-geoprobe | examples/basic_volume_usage.py | examples/basic_volume_usage.py | """
A quick example of viewing data stored in a geoprobe volume file.
"""
import os
import numpy as np
import matplotlib.pyplot as plt
import geoprobe
def main():
    """Load the example volume, print its metadata, then show example plots."""
    # Path to the example data dir relative to the location of this script.
    # This is just so that the script can be called from a different directory
    datadir = os.path.dirname(__file__) + '/data/'
    # Read an existing geoprobe volume
    vol = geoprobe.volume(datadir + 'Volumes/example.vol')
    # Print some info
    print_info(vol)
    # Example plots
    plot(vol)
def plot(vol):
    """Plot the first inline and first crossline in "vol", a geoprobe.volume
    instance."""
    # Plot the first inline in the volume
    plt.figure()
    plt.imshow(vol.XSlice(vol.xmin))
    # Note: instead of vol.XSlice, we could have used vol.data[0,:,:].T
    plt.title('Inline %i' % vol.xmin)

    # Plot the first crossline in the volume
    plt.figure()
    plt.imshow(vol.YSlice(vol.ymin))
    # Note: instead of vol.YSlice, we could have used vol.data[:,0,:].T
    # NOTE(review): the title below uses vol.xmin for a crossline -- this
    # presumably should be vol.ymin; confirm before changing behavior.
    plt.title('Crossline %i' % vol.xmin)
    plt.show()
def print_info(vol):
    """Print some basic information about "vol", a geoprobe.volume instance."""
    # Print out some basic information
    print 'The volume has dimensions of (nx, ny, nz):', vol.nx, vol.ny, vol.nz
    print 'The inline coordinates range from', vol.xmin, 'to', vol.xmax
    print 'The inline spacing is:', vol.dxW, 'world units'
    print 'The crossline coordinates range from', vol.ymin, 'to', vol.ymax
    print 'The crossline spacing is:', vol.dyW, 'world units'
    print 'The depth/time coordinates range from', vol.zmin, 'to', vol.zmax

    # Determine the locations of the corners
    print 'The world coordinates of the corners of the volume are:'
    print ' Lower-left:', vol.model2world(vol.xmin, vol.ymin)
    print ' Upper-left:', vol.model2world(vol.xmin, vol.ymax)
    print ' Upper-right:', vol.model2world(vol.xmax, vol.ymax)
    print ' Lower-right:', vol.model2world(vol.xmax, vol.ymin)
if __name__ == '__main__':
main()
| """
A quick example of viewing data stored in a geoprobe volume file.
"""
import os
import numpy as np
import matplotlib.pyplot as plt
import geoprobe
def main():
# Demo driver: load the bundled example volume, dump header info, plot slices.
# Path to the example data dir relative to the location of this script.
datadir = os.path.dirname(__file__) + '/data/'
# Read an existing geoprobe volume
vol = geoprobe.volume(datadir + 'Volumes/example.vol')
# Print some info
print_info(vol)
# Example plots
plot(vol)
def plot(vol):
"""Plot the first inline and first crossline in "vol", a geoprobe.volume
instance."""
# Plot the first inline in the volume
plt.figure()
plt.imshow(vol.XSlice(vol.xmin))
# Note: instead of vol.XSlice, we could have used vol.data[0,:,:].T
plt.title('Inline %i' % vol.xmin)
# Plot the first crossline in the volume
plt.figure()
plt.imshow(vol.YSlice(vol.ymin))
# Note: instead of vol.XSlice, we could have used vol.data[:,0,:].T
# NOTE(review): the crossline title interpolates vol.xmin although the slice
# is taken at vol.ymin -- looks like a copy/paste slip; confirm before fixing.
plt.title('Crossline %i' % vol.xmin)
plt.show()
def print_info(vol):
"""Print some basic information about "vol", a geoprobe.volume instance."""
# Python 2 print statements: this example predates print_function.
# Print out some basic information
print 'The volume has dimensions of (nx, ny, nz):', vol.nx, vol.ny, vol.nz
print 'The inline coordinates range from', vol.xmin, 'to', vol.xmax
print 'The inline spacing is:', vol.dxW, 'world units'
print 'The crossline coordinates range from', vol.ymin, 'to', vol.ymax
print 'The crossline spacing is:', vol.dyW, 'world units'
print 'The depth/time coordinates range from', vol.zmin, 'to', vol.zmax
# Determine the locations of the corners
print 'The world coordinates of the corners of the volume are:'
print ' Lower-left:', vol.model2world(vol.xmin, vol.ymin)
print ' Upper-left:', vol.model2world(vol.xmin, vol.ymax)
print ' Upper-right:', vol.model2world(vol.xmax, vol.ymax)
print ' Lower-right:', vol.model2world(vol.xmax, vol.ymin)
if __name__ == '__main__':
main()
| mit | Python |
9da303e48820e95e1bfd206f1c0372f896dac6ec | Allow enum to be created more easily | springload/draftjs_exporter,springload/draftjs_exporter,springload/draftjs_exporter | draftjs_exporter/constants.py | draftjs_exporter/constants.py | from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
    """Minimal string-backed pseudo-enum.

    Built from a variadic list of names; attribute access returns the
    attribute name itself, e.g. ``Enum('LINK').LINK == 'LINK'``.  Accessing
    an unregistered name raises :class:`AttributeError`.
    """

    def __init__(self, *elements):
        # ``elements`` already arrives as a tuple via *args.
        self.elements = elements

    def __getattr__(self, name):
        # Only invoked for names absent from the instance dict, so the
        # ``elements`` attribute itself never recurses through here.
        if name in self.elements:
            return name
        raise AttributeError("'Enum' has no attribute '{}'".format(name))
# https://github.com/draft-js-utils/draft-js-utils/blob/master/src/Constants.js
class BLOCK_TYPES:
# Draft.js block-type identifiers, kept as plain string constants so they
# can be compared directly against raw ContentState data.
UNSTYLED = 'unstyled'
HEADER_ONE = 'header-one'
HEADER_TWO = 'header-two'
HEADER_THREE = 'header-three'
HEADER_FOUR = 'header-four'
HEADER_FIVE = 'header-five'
HEADER_SIX = 'header-six'
UNORDERED_LIST_ITEM = 'unordered-list-item'
ORDERED_LIST_ITEM = 'ordered-list-item'
BLOCKQUOTE = 'blockquote'
PULLQUOTE = 'pullquote'
CODE = 'code-block'
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
# Closed name sets: Enum returns the attribute name itself on access and
# raises AttributeError for anything not listed.
ENTITY_TYPES = Enum('LINK', 'IMAGE', 'TOKEN')
INLINE_STYLES = Enum('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE')
| from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
    """String-backed pseudo-enum built from an iterable of names.

    Attribute access echoes the name back when it is one of the registered
    elements (``Enum(('A',)).A == 'A'``); unknown names raise
    :class:`AttributeError`.
    """

    def __init__(self, tuple_list):
        # Stored as given, under the historical attribute name.
        self.tuple_list = tuple_list

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails, so reading
        # ``self.tuple_list`` above does not recurse.
        if name in self.tuple_list:
            return name
        raise AttributeError("'Enum' has no attribute '{}'".format(name))
# https://github.com/draft-js-utils/draft-js-utils/blob/master/src/Constants.js
class BLOCK_TYPES:
# Draft.js block-type identifiers as plain string constants.
UNSTYLED = 'unstyled'
HEADER_ONE = 'header-one'
HEADER_TWO = 'header-two'
HEADER_THREE = 'header-three'
HEADER_FOUR = 'header-four'
HEADER_FIVE = 'header-five'
HEADER_SIX = 'header-six'
UNORDERED_LIST_ITEM = 'unordered-list-item'
ORDERED_LIST_ITEM = 'ordered-list-item'
BLOCKQUOTE = 'blockquote'
PULLQUOTE = 'pullquote'
CODE = 'code-block'
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
# Enum here takes a single tuple argument (not *args); it echoes registered
# names back on attribute access and raises AttributeError otherwise.
ENTITY_TYPES = Enum(('LINK', 'IMAGE', 'TOKEN'))
INLINE_STYLES = Enum(('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE'))
| mit | Python |
6cb3d7e9e95cc009579260d67a4525f17c8508cd | comment out the code | yarden-livnat/regulus | regulus/alg/alg.py | regulus/alg/alg.py | # from regulus.core.cache import Cache
# from regulus.models import NullModel
#
#
# def model_cache(model):
# return Cache(key=lambda n: n.data.id,
# factory=lambda n: model(n.data) if n.data.id is not -1 else NullModel())
#
#
# def compute_model(dataset, model, cache=None):
# if cache is None:
# cache = model_cache(model)
#
# for node in dataset.tree:
# cache[node] = model(node.data)
# return cache
#
#
# def apply_model(model_name, model, regulus):
# regulus.attrs[model_name] = compute_model(model, regulus.tree)
#
#
# def apply_measure(measure, tree):
# local = tree.attrs
# context = tree.regulus.attrs
# for node in tree:
# measure(node, local, context)
#
#
# def compute_measure(func, tree):
# local = tree.attrs
# context = tree.regulus.attrs
# cache = dict()
# for node in tree:
# cache[node.id] = func(node, local, context)
# return cache
#
| from regulus.topo.cache import Cache
from regulus.models import NullModel
# from regulus.tree import reduce_tree Node
# from regulus.topo import Partition
def model_cache(model):
# Build a Cache keyed by partition id; the sentinel id -1 maps to NullModel.
# NOTE(review): ``n.data.id is not -1`` relies on CPython small-int
# interning; an equality test (``!= -1``) is what is meant -- confirm.
return Cache(key=lambda n: n.data.id,
factory=lambda n: model(n.data) if n.data.id is not -1 else NullModel())
def compute_model(dataset, model, cache=None):
# Evaluate ``model`` on every tree node's data and memoise the results.
# A pre-existing cache may be supplied; the (filled) cache is returned.
if cache is None:
cache = model_cache(model)
for node in dataset.tree:
cache[node] = model(node.data)
return cache
# def compute_measure(dataset, measure, models, cache=None):
# if cache is None:
# cache = Cache()
# for node in dataset.tree:
# measure(node, cache, models)
# return cache
def apply_model(model_name, model, regulus):
# Store computed models on the regulus object under ``model_name``.
# NOTE(review): compute_model's signature is (dataset, model, cache) but it
# is called here as compute_model(model, regulus.tree) -- the two arguments
# appear swapped; verify intent before relying on this helper.
regulus.attrs[model_name] = compute_model(model, regulus.tree)
def apply_measure(measure, tree):
# Invoke ``measure(node, local_attrs, regulus_attrs)`` for each node; the
# measure is expected to record results via side effects on the attrs.
local = tree.attrs
context = tree.regulus.attrs
for node in tree:
measure(node, local, context)
def compute_measure(func, tree):
# Like apply_measure, but collect ``func``'s return values keyed by node id.
local = tree.attrs
context = tree.regulus.attrs
cache = dict()
for node in tree:
cache[node.id] = func(node, local, context)
return cache
| bsd-3-clause | Python |
2d3b27d7d4f787513a31b5a3650febd15ffa98ed | fix missing key in repo templates | ghxandsky/ceph-deploy,rtulke/ceph-deploy,zhouyuan/ceph-deploy,branto1/ceph-deploy,alfredodeza/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,alfredodeza/ceph-deploy,codenrhoden/ceph-deploy,zhouyuan/ceph-deploy,isyippee/ceph-deploy,shenhequnying/ceph-deploy,trhoden/ceph-deploy,ghxandsky/ceph-deploy,SUSE/ceph-deploy,branto1/ceph-deploy,ddiss/ceph-deploy,ktdreyer/ceph-deploy,SUSE/ceph-deploy,ceph/ceph-deploy,trhoden/ceph-deploy,ddiss/ceph-deploy,osynge/ceph-deploy,ktdreyer/ceph-deploy,imzhulei/ceph-deploy,isyippee/ceph-deploy,Vicente-Cheng/ceph-deploy,jumpstarter-io/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,rtulke/ceph-deploy,codenrhoden/ceph-deploy,Vicente-Cheng/ceph-deploy,osynge/ceph-deploy,ceph/ceph-deploy,jumpstarter-io/ceph-deploy,imzhulei/ceph-deploy,shenhequnying/ceph-deploy | ceph_deploy/util/templates.py | ceph_deploy/util/templates.py |
# Template for a ceph .repo yum file.  The {repo_url} / {gpg_url} holes are
# presumably filled via str.format by the caller -- confirm; $basearch is a
# yum variable expanded by yum itself, not by Python.
ceph_repo = """
[ceph]
name=Ceph packages for $basearch
baseurl={repo_url}/$basearch
enabled=1
gpgcheck=1
priority=1
type=rpm-md
gpgkey={gpg_url}
[ceph-noarch]
name=Ceph noarch packages
baseurl={repo_url}/noarch
enabled=1
gpgcheck=1
priority=1
type=rpm-md
gpgkey={gpg_url}
[ceph-source]
name=Ceph source packages
baseurl={repo_url}/SRPMS
enabled=0
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
"""
def custom_repo(**kw):
    """
    Build the text of a yum ``.repo`` entry from keyword arguments,
    emitting a line only for keys that carry a real value.

    A plain ``format()`` template cannot drop a line whose value is
    missing, which used to yield broken entries such as::

        [ceph repo]
        name= ceph repo
        proxy=
        gpgcheck=

    Instead, an ordered template is walked key by key and a line is added
    only when the keyword is present and neither ``None`` nor ``''``.
    Accepted keywords, in output order: ``reponame`` (rendered as the
    ``[section]`` header), ``name``, ``baseurl``, ``enabled``,
    ``gpgcheck``, ``_type`` (rendered as ``type=``), ``gpgkey``,
    ``proxy`` and ``priority``.
    """
    # (key, line-template) pairs as a tuple: a dict would lose the ordering,
    # and the file must start with the [reponame] section header.
    tmpl = (
        ('reponame', '[%s]'),
        ('name', 'name=%s'),
        ('baseurl', 'baseurl=%s'),
        ('enabled', 'enabled=%s'),
        ('gpgcheck', 'gpgcheck=%s'),
        ('_type', 'type=%s'),
        ('gpgkey', 'gpgkey=%s'),
        ('proxy', 'proxy=%s'),
        ('priority', 'priority=%s'),
    )
    # Skip absent keys as well as explicit None / empty-string values.
    rendered = [
        line_fmt % kw.get(key)
        for key, line_fmt in tmpl
        if key in kw and kw.get(key) not in (None, '')
    ]
    return '\n'.join(rendered)
|
# Template for a ceph .repo yum file.  The {repo_url} / {gpg_url} holes are
# presumably filled via str.format by the caller -- confirm; $basearch is a
# yum variable expanded by yum itself, not by Python.
ceph_repo = """
[ceph]
name=Ceph packages for $basearch
baseurl={repo_url}/$basearch
enabled=1
gpgcheck=1
priority=1
type=rpm-md
gpgkey={gpg_url}
[ceph-noarch]
name=Ceph noarch packages
baseurl={repo_url}/noarch
enabled=1
gpgcheck=1
priority=1
type=rpm-md
gpgkey={gpg_url}
[ceph-source]
name=Ceph source packages
baseurl={repo_url}/SRPMS
enabled=0
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
"""
def custom_repo(**kw):
    """
    Repo files need special care in that a whole line should not be present
    if there is no value for it. Because we were using `format()` we could
    not conditionally add a line for a repo file. So the end result would
    contain a key with a missing value (say if we were passing `None`).
    For example, it could look like::

        [ceph repo]
        name= ceph repo
        proxy=
        gpgcheck=

    Which breaks. This function allows us to conditionally add lines,
    preserving an order and be more careful.

    Accepted keywords (one emitted line each, in this order): ``reponame``
    (rendered as the ``[section]`` header), ``name``, ``baseurl``,
    ``enabled``, ``gpgcheck``, ``_type`` (rendered as ``type=``),
    ``gpgkey``, ``proxy`` and ``priority``.  A keyword that is absent,
    ``None`` or ``''`` produces no line at all.
    """
    lines = []
    # by using tuples (vs a dict) we preserve the order of what we want to
    # return, like starting with a [repo name]
    tmpl = (
        ('reponame', '[%s]'),
        # BUGFIX: the 'name' entry was missing from this template, so a
        # supplied repo name was silently dropped from the generated file.
        ('name', 'name=%s'),
        ('baseurl', 'baseurl=%s'),
        ('enabled', 'enabled=%s'),
        ('gpgcheck', 'gpgcheck=%s'),
        ('_type', 'type=%s'),
        ('gpgkey', 'gpgkey=%s'),
        ('proxy', 'proxy=%s'),
        ('priority', 'priority=%s'),
    )
    for line in tmpl:
        tmpl_key, tmpl_value = line  # key values from tmpl
        # ensure that there is an actual value (not None nor empty string)
        if tmpl_key in kw and kw.get(tmpl_key) not in (None, ''):
            lines.append(tmpl_value % kw.get(tmpl_key))
    return '\n'.join(lines)
| mit | Python |
61abbfb28ceb134f514ed2b3fd2757366df01246 | attach isoline filter via timer | RebeccaWPerry/vispy,jdreaver/vispy,QuLogic/vispy,drufat/vispy,inclement/vispy,bollu/vispy,dchilds7/Deysha-Star-Formation,Eric89GXL/vispy,bollu/vispy,srinathv/vispy,jdreaver/vispy,ghisvail/vispy,ghisvail/vispy,QuLogic/vispy,ghisvail/vispy,RebeccaWPerry/vispy,inclement/vispy,inclement/vispy,kkuunnddaannkk/vispy,RebeccaWPerry/vispy,jdreaver/vispy,kkuunnddaannkk/vispy,michaelaye/vispy,srinathv/vispy,QuLogic/vispy,julienr/vispy,Eric89GXL/vispy,dchilds7/Deysha-Star-Formation,drufat/vispy,michaelaye/vispy,michaelaye/vispy,julienr/vispy,drufat/vispy,dchilds7/Deysha-Star-Formation,kkuunnddaannkk/vispy,bollu/vispy,julienr/vispy,srinathv/vispy,Eric89GXL/vispy | examples/basics/scene/contour.py | examples/basics/scene/contour.py | # -*- coding: utf-8 -*-
# vispy: gallery 30
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
Simple use of SceneCanvas to display an Image.
"""
import sys
from vispy import scene, app
from vispy.visuals.filters import IsolineFilter
from vispy.io import load_data_file, read_png
# Interactive demo: show an image with an isoline (contour) filter attached.
canvas = scene.SceneCanvas(keys='interactive')
canvas.size = 600, 800
canvas.show()
# Set up a viewbox to display the image with interactive pan/zoom
view = canvas.central_widget.add_view()
interpolation = 'bicubic'
img_data = read_png(load_data_file('mona_lisa/mona_lisa_sm.png'))
image = scene.visuals.Image(img_data, interpolation=interpolation,
parent=view.scene, method='impostor')
level = 10
iso = IsolineFilter(level=level, width=1., color='white')
# Set 2D camera (the camera will scale to the contents in the scene)
view.camera = scene.PanZoomCamera(aspect=1)
# flip y-axis to have correct alignment
view.camera.flip = (0, 1, 0)
# select face part
view.camera.rect = (160, 130, 240, 200)
canvas.title = ('Spatial Filtering using %s Filter - Isoline %d level'
% (image.interpolation, iso.level))
# get interpolation functions from Image
names = image.interpolation_functions
act = names.index(interpolation)
# Implement key presses: Left/Right cycle interpolation, Up/Down the level.
@canvas.events.key_press.connect
def on_key_press(event):
# NOTE(review): ``first`` is declared global but never defined or used, and
# ``level``/``interpolation`` are never reassigned here -- leftovers?
global act, level, first, interpolation
if event.key in ['Left', 'Right']:
if event.key == 'Right':
step = 1
else:
step = -1
act = (act + step) % len(names)
image.interpolation = names[act]
if event.key in ['Up', 'Down']:
iso.level += 1 if event.key == 'Up' else -1
canvas.title = ('Spatial Filtering using %s Filter - Isoline %d level'
% (image.interpolation, iso.level))
canvas.update()
# attaching of isoline filter via timer (one-shot, fired after startup)
def on_timer1(event):
image.attach(iso)
canvas.update()
timer1 = app.Timer('auto', iterations=1, connect=on_timer1, start=True)
# Run the event loop only when executed as a script, not in interactive mode.
if __name__ == '__main__' and sys.flags.interactive == 0:
app.run()
| # -*- coding: utf-8 -*-
# vispy: gallery 30
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
Simple use of SceneCanvas to display an Image.
"""
import sys
from vispy import scene, app
from vispy.visuals.filters import IsolineFilter
from vispy.io import load_data_file, read_png
# Interactive demo: show an image with an isoline (contour) filter attached.
canvas = scene.SceneCanvas(keys='interactive')
canvas.size = 600, 800
canvas.show()
# Set up a viewbox to display the image with interactive pan/zoom
view = canvas.central_widget.add_view()
interpolation = 'bicubic'
img_data = read_png(load_data_file('mona_lisa/mona_lisa_sm.png'))
image = scene.visuals.Image(img_data, interpolation=interpolation,
parent=view.scene, method='impostor')
level = 10
# NOTE(review): the filter is created with level=2 while the ``level``
# variable above (10) is unused -- confirm which value is intended.
iso = IsolineFilter(level=2, width=1., color='white')
image.attach(iso)
# Set 2D camera (the camera will scale to the contents in the scene)
view.camera = scene.PanZoomCamera(aspect=1)
# flip y-axis to have correct alignment
view.camera.flip = (0, 1, 0)
# select face part
view.camera.rect = (160, 130, 240, 200)
canvas.title = ('Spatial Filtering using %s Filter - Isoline %d level'
% (image.interpolation, iso.level))
# get interpolation functions from Image
names = image.interpolation_functions
act = names.index(interpolation)
# Implement key presses: Left/Right cycle interpolation, Up/Down the level.
@canvas.events.key_press.connect
def on_key_press(event):
global act, level, first, interpolation
if event.key in ['Left', 'Right']:
if event.key == 'Right':
step = 1
else:
step = -1
act = (act + step) % len(names)
image.interpolation = names[act]
if event.key in ['Up', 'Down']:
iso.level += 1 if event.key == 'Up' else -1
canvas.title = ('Spatial Filtering using %s Filter - Isoline %d level'
% (image.interpolation, iso.level))
canvas.update()
# Run the event loop only when executed as a script, not in interactive mode.
if __name__ == '__main__' and sys.flags.interactive == 0:
app.run()
| bsd-3-clause | Python |
a8e51f540cbfd3ab8800f6d2c5de98200b03d028 | remove redundant components from service | inveniosoftware/invenio-communities,inveniosoftware/invenio-communities,inveniosoftware/invenio-communities,inveniosoftware/invenio-communities | invenio_communities/communities/service_config.py | invenio_communities/communities/service_config.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
# Copyright (C) 2020 Northwestern University.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Invenio Communities Service API config."""
from flask_babelex import gettext as _
from invenio_records_resources.services.records.links import RecordLink, \
pagination_links
from invenio_records_resources.services.records.config import \
RecordServiceConfig, SearchOptions as SearchOptionsBase
from invenio_records_resources.services.records.search import terms_filter
from invenio_communities.communities.records.api import CommunityBase
from .permissions import CommunityPermissionPolicy
from .schema import CommunitySchema
class SearchOptions(SearchOptionsBase):
"""Search options for the communities search endpoint.
Declares terms aggregations on ``metadata.type`` and ``metadata.domain``
plus the matching post-filters, so results can be faceted and filtered
by community type and domain.
"""
facets_options = dict(
aggs={
'type': {
'terms': {'field': 'metadata.type'},
},
'domain': {
'terms': {'field': 'metadata.domain'},
},
},
post_filters={
'type': terms_filter('metadata.type'),
'domain': terms_filter('metadata.domain'),
}
)
class CommunityServiceConfig(RecordServiceConfig):
"""Communities service configuration.
Wires the permission policy, record API class, search options, service
schema and link templates for the community records service.
"""
# Common configuration
permission_policy_cls = CommunityPermissionPolicy
# Record specific configuration
record_cls = CommunityBase
# Search configuration
search = SearchOptions
# Service schema
schema = CommunitySchema
# Item links: canonical REST URL of a single community.
links_item = {
"self": RecordLink("{+api}/communities/{id}"),
}
# Search links: paginated listing endpoint.
links_search = pagination_links("{+api}/communities{?args*}")
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
# Copyright (C) 2020 Northwestern University.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Invenio Communities Service API config."""
from flask_babelex import gettext as _
from invenio_records_resources.services.records.links import RecordLink, \
pagination_links
from invenio_records_resources.services.records.components import DataComponent
from invenio_records_resources.services.records.config import \
RecordServiceConfig, SearchOptions as SearchOptionsBase
from invenio_records_resources.services.records.search import terms_filter
from invenio_communities.communities.records.api import CommunityBase
from .permissions import CommunityPermissionPolicy
from .schema import CommunitySchema
class SearchOptions(SearchOptionsBase):
"""Search options for the communities search endpoint.
Declares terms aggregations on ``metadata.type`` and ``metadata.domain``
plus the matching post-filters, so results can be faceted and filtered
by community type and domain.
"""
facets_options = dict(
aggs={
'type': {
'terms': {'field': 'metadata.type'},
},
'domain': {
'terms': {'field': 'metadata.domain'},
},
},
post_filters={
'type': terms_filter('metadata.type'),
'domain': terms_filter('metadata.domain'),
}
)
class CommunityServiceConfig(RecordServiceConfig):
"""Communities service configuration.
Wires the permission policy, record API class, search options, service
schema, link templates and service components for the community records
service.
"""
# Common configuration
permission_policy_cls = CommunityPermissionPolicy
# Record specific configuration
record_cls = CommunityBase
# Search configuration
search = SearchOptions
# Service schema
schema = CommunitySchema
# Item links: canonical REST URL of a single community.
links_item = {
"self": RecordLink("{+api}/communities/{id}"),
}
# Search links: paginated listing endpoint.
links_search = pagination_links("{+api}/communities{?args*}")
# Service components: DataComponent prepended to the base configuration's
# component list.
components = [
DataComponent
] + RecordServiceConfig.components
| mit | Python |
722c17992ea9cd7949a2ca02d7aa1c8a2d10a312 | Fix path for pathfinder test files. | Bismarrck/pymatgen,setten/pymatgen,mbkumar/pymatgen,tallakahath/pymatgen,dongsenfo/pymatgen,matk86/pymatgen,ndardenne/pymatgen,davidwaroquiers/pymatgen,montoyjh/pymatgen,aykol/pymatgen,richardtran415/pymatgen,richardtran415/pymatgen,mbkumar/pymatgen,fraricci/pymatgen,nisse3000/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,montoyjh/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,gpetretto/pymatgen,dongsenfo/pymatgen,johnson1228/pymatgen,fraricci/pymatgen,nisse3000/pymatgen,ndardenne/pymatgen,setten/pymatgen,gVallverdu/pymatgen,mbkumar/pymatgen,aykol/pymatgen,davidwaroquiers/pymatgen,montoyjh/pymatgen,mbkumar/pymatgen,fraricci/pymatgen,tschaume/pymatgen,tschaume/pymatgen,johnson1228/pymatgen,Bismarrck/pymatgen,xhqu1981/pymatgen,gmatteo/pymatgen,czhengsci/pymatgen,czhengsci/pymatgen,czhengsci/pymatgen,nisse3000/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,richardtran415/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,matk86/pymatgen,blondegeek/pymatgen,davidwaroquiers/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,xhqu1981/pymatgen,johnson1228/pymatgen,aykol/pymatgen,davidwaroquiers/pymatgen,vorwerkc/pymatgen,Bismarrck/pymatgen,tallakahath/pymatgen,gpetretto/pymatgen,setten/pymatgen,gVallverdu/pymatgen,gpetretto/pymatgen,gVallverdu/pymatgen,tschaume/pymatgen,setten/pymatgen,ndardenne/pymatgen,dongsenfo/pymatgen,vorwerkc/pymatgen,matk86/pymatgen,Bismarrck/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen,blondegeek/pymatgen,gmatteo/pymatgen,tschaume/pymatgen,xhqu1981/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,blondegeek/pymatgen | pymatgen/analysis/tests/test_path_finder.py | pymatgen/analysis/tests/test_path_finder.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import unittest
from pymatgen.analysis.path_finder import NEBPathfinder, ChgcarPotential
from pymatgen.io.vasp import Poscar, Chgcar, Element
__author__ = 'Ziqin (Shaun) Rong'
__version__ = '0.1'
__maintainer__ = 'Ziqin (Shaun) Rong'
__email__ = 'rongzq08@gmail.com'
class PathFinderTest(unittest.TestCase):
"""
Uses Li migration in LiFePO4
"""
def setUp(self):
# Resolve bundled test files relative to this module (CWD-independent).
module_dir = os.path.dirname(os.path.abspath(__file__))
test_file_dir = os.path.join(module_dir, "..", "..", "..", "test_files",
"path_finder")
# Endpoint structures and the charge density used as the NEB potential.
self.start_s = Poscar.from_file(os.path.join(test_file_dir, 'LFP_POSCAR_s')).structure
self.end_s = Poscar.from_file(os.path.join(test_file_dir, 'LFP_POSCAR_e')).structure
self.chg = Chgcar.from_file(os.path.join(test_file_dir, 'LFP_CHGCAR'))
moving_cation_specie = Element('Li')
# Indices of the migrating Li sites; only these are allowed to relax.
self.relax_sites = []
for site_i, site in enumerate(self.start_s.sites):
if site.specie == moving_cation_specie:
self.relax_sites.append(site_i)
self.pf = NEBPathfinder(self.start_s, self.end_s, relax_sites=self.relax_sites,
v=ChgcarPotential(self.chg).get_v(), n_images=(8 * 3))
# Keep every third image; the test below asserts this yields 9 snapshots
# (presumably pf.images includes an endpoint image -- confirm against
# NEBPathfinder).
self.images = []
for i, image in enumerate(self.pf.images):
if i % 3 == 0:
self.images.append(image)
def test_image_num(self):
self.assertEqual(len(self.images), 9)
# BUGFIX: the guard compared ``__file__`` (the module's file path) against
# '__main__', which is never true, so running this file directly executed no
# tests.  The conventional ``__name__`` check is intended here.
if __name__ == '__main__':
    unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import unittest
from pymatgen.analysis.path_finder import NEBPathfinder, ChgcarPotential
from pymatgen.io.vasp import Poscar, Chgcar, Element
__author__ = 'Ziqin (Shaun) Rong'
__version__ = '0.1'
__maintainer__ = 'Ziqin (Shaun) Rong'
__email__ = 'rongzq08@gmail.com'
class PathFinderTest(unittest.TestCase):
    """
    Uses Li migration in LiFePO4
    """
    def setUp(self):
        # BUGFIX: the test-file directory was the CWD-relative string
        # "../../../test_files/path_finder", which only worked when the
        # test runner was launched from this module's directory.  Resolve
        # it relative to the module instead, so the test also passes when
        # run from the repository root.
        module_dir = os.path.dirname(os.path.abspath(__file__))
        test_file_dir = os.path.join(module_dir, "..", "..", "..",
                                     "test_files", "path_finder")
        # Endpoint structures and the charge density used as the NEB potential.
        self.start_s = Poscar.from_file(os.path.join(test_file_dir, 'LFP_POSCAR_s')).structure
        self.end_s = Poscar.from_file(os.path.join(test_file_dir, 'LFP_POSCAR_e')).structure
        self.chg = Chgcar.from_file(os.path.join(test_file_dir, 'LFP_CHGCAR'))
        moving_cation_specie = Element('Li')
        # Indices of the migrating Li sites; only these are allowed to relax.
        self.relax_sites = []
        for site_i, site in enumerate(self.start_s.sites):
            if site.specie == moving_cation_specie:
                self.relax_sites.append(site_i)
        self.pf = NEBPathfinder(self.start_s, self.end_s, relax_sites=self.relax_sites,
                                v=ChgcarPotential(self.chg).get_v(), n_images=(8 * 3))
        # Keep every third image; the test below asserts this yields 9
        # snapshots.
        self.images = []
        for i, image in enumerate(self.pf.images):
            if i % 3 == 0:
                self.images.append(image)

    def test_image_num(self):
        self.assertEqual(len(self.images), 9)
# BUGFIX: the guard compared ``__file__`` (the module's file path) against
# '__main__', which is never true, so running this file directly executed no
# tests.  The conventional ``__name__`` check is intended here.
if __name__ == '__main__':
    unittest.main()
| mit | Python |
4f49394a5bf457de2952194a1900726c3908b6da | build python eggs, too | ImmobilienScout24/python-cloudwatchlogs-logging | build.py | build.py | # CloudWatchLogs Logging
# Copyright 2015 Immobilien Scout GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pybuilder.core import use_plugin, init, Author
use_plugin('python.core')
use_plugin('python.install_dependencies')
use_plugin('python.distutils')
use_plugin('python.flake8')
use_plugin('python.unittest')
use_plugin('python.coverage')
use_plugin('copy_resources')
# PyBuilder project metadata; consumed by the plugins activated above.
default_task = ['analyze', 'publish']
name = 'cloudwatchlogs-logging'
version = '0.0.1'
summary = 'Handler for easy logging to AWS CloudWatchLogs.'
description = """
Handler for easy logging to AWS CloudWatchLogs.
"""
authors = [Author('Arne Hilmann', 'arne.hilmann@gmail.com')]
url = 'https://github.com/ImmobilienScout24/python-cloudwatchlogs-logging'
# NOTE: module-level ``license`` shadows the ``license`` builtin; harmless in
# a build script but worth knowing.
license = 'Apache License 2.0'
# Runs during PyBuilder's initialization phase to declare dependencies and
# packaging/lint settings.
@init
def set_properties(project):
project.set_property("verbose", True)
project.depends_on("docopt")
project.depends_on("boto")
project.build_depends_on("mock")
project.set_property("flake8_include_test_sources", True)
project.set_property('coverage_break_build', False)
project.set_property("install_dependencies_upgrade", True)
project.set_property('copy_resources_target', '$dir_dist')
project.get_property('copy_resources_glob').append('setup.cfg')
project.set_property('dir_dist_scripts', 'scripts')
# Also build a python egg in addition to the default sdist.
project.get_property('distutils_commands').append('bdist_egg')
project.set_property('distutils_classifiers', [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python',
'Topic :: System :: Networking',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration'
])
# CI-only overrides: append the build number to the version and point the
# dependency installer at the internal PyPI proxy.
@init(environments='teamcity')
def set_properties_for_teamcity_builds(project):
import os
project.version = '%s-%s' % (
project.version, os.environ.get('BUILD_NUMBER', 0))
project.default_task = ['install_build_dependencies', 'publish']
project.set_property(
'install_dependencies_index_url', os.environ.get('PYPIPROXY_URL'))
project.set_property('install_dependencies_use_mirrors', False)
project.set_property('teamcity_output', True)
| # CloudWatchLogs Logging
# Copyright 2015 Immobilien Scout GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pybuilder.core import use_plugin, init, Author
use_plugin('python.core')
use_plugin('python.install_dependencies')
use_plugin('python.distutils')
use_plugin('python.flake8')
use_plugin('python.unittest')
use_plugin('python.coverage')
use_plugin('copy_resources')
# PyBuilder project metadata; consumed by the plugins activated above.
default_task = ['analyze', 'publish']
name = 'cloudwatchlogs-logging'
version = '0.0.1'
summary = 'Handler for easy logging to AWS CloudWatchLogs.'
description = """
Handler for easy logging to AWS CloudWatchLogs.
"""
authors = [Author('Arne Hilmann', 'arne.hilmann@gmail.com')]
url = 'https://github.com/ImmobilienScout24/python-cloudwatchlogs-logging'
# NOTE: module-level ``license`` shadows the ``license`` builtin; harmless in
# a build script but worth knowing.
license = 'Apache License 2.0'
# Runs during PyBuilder's initialization phase to declare dependencies and
# packaging/lint settings.
@init
def set_properties(project):
project.set_property("verbose", True)
project.depends_on("docopt")
project.depends_on("boto")
project.build_depends_on("mock")
project.set_property("flake8_include_test_sources", True)
project.set_property('coverage_break_build', False)
project.set_property("install_dependencies_upgrade", True)
project.set_property('copy_resources_target', '$dir_dist')
project.get_property('copy_resources_glob').append('setup.cfg')
project.set_property('dir_dist_scripts', 'scripts')
project.set_property('distutils_classifiers', [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python',
'Topic :: System :: Networking',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration'
])
# CI-only overrides: append the build number to the version and point the
# dependency installer at the internal PyPI proxy.
@init(environments='teamcity')
def set_properties_for_teamcity_builds(project):
import os
project.version = '%s-%s' % (
project.version, os.environ.get('BUILD_NUMBER', 0))
project.default_task = ['install_build_dependencies', 'publish']
project.set_property(
'install_dependencies_index_url', os.environ.get('PYPIPROXY_URL'))
project.set_property('install_dependencies_use_mirrors', False)
project.set_property('teamcity_output', True)
| apache-2.0 | Python |
d1104d581b0b28f086461a826753dc6fa8ac5db0 | Change project name | hekima/zahpee-api-python-client | build.py | build.py | from pybuilder.core import init, use_plugin, task, depends, description
# Core build plugins
use_plugin("python.core")
use_plugin("python.distutils")
use_plugin("python.install_dependencies")
use_plugin("exec")
# Testing plugins
use_plugin("python.unittest")
use_plugin("python.integrationtest")
use_plugin("python.coverage")
use_plugin("python.pytddmon")
# Linting plugins
use_plugin("python.frosted")
use_plugin("python.flake8")
# use_plugin("python.pychecker")
# use_plugin("python.pylint")
# IDE plugins
use_plugin("python.pydev")
use_plugin("python.pycharm")
default_task = "publish"
# Runs during PyBuilder's initialization phase: project identity, build-time
# dependencies and lint/test configuration.
@init
def initialize(project):
# NOTE(review): the project name ('zahpeeapi') no longer matches the
# dist directory below ('zahpee-api-client'); confirm whether dir_dist
# should be renamed to match.
project.name = 'zahpeeapi'
project.version = '0.0.1'
# Build dependencies
project.build_depends_on('pytest')
project.build_depends_on('mockito-without-hardcoded-distribute-version')
# Core Configuration
project.set_property('dir_dist', '$dir_target/dist/zahpee-api-client')
# Flake8 Configuration
project.set_property('flake8_break_build', True)
project.set_property('flake8_include_test_sources', True)
project.set_property('flake8_max_line_length', 120)
project.set_property('coverage_break_build', False)
# flake8_include_test_source Integration test Configuration
project.set_property('integrationtest_additional_commandline', '--with-xunit')
project.set_property('integrationtest_parallel', True)
project.set_property('clean_propagate_stdout', True)
project.set_property('clean_propagate_stderr', True)
| from pybuilder.core import init, use_plugin, task, depends, description
# Core build plugins
use_plugin("python.core")
use_plugin("python.distutils")
use_plugin("python.install_dependencies")
use_plugin("exec")
# Testing plugins
use_plugin("python.unittest")
use_plugin("python.integrationtest")
use_plugin("python.coverage")
use_plugin("python.pytddmon")
# Linting plugins
use_plugin("python.frosted")
use_plugin("python.flake8")
# use_plugin("python.pychecker")
# use_plugin("python.pylint")
# IDE plugins
use_plugin("python.pydev")
use_plugin("python.pycharm")
default_task = "publish"
# Runs during PyBuilder's initialization phase: project identity, build-time
# dependencies and lint/test configuration.
@init
def initialize(project):
project.name = 'zahpee-api-client'
project.version = '0.0.1'
# Build dependencies
project.build_depends_on('pytest')
project.build_depends_on('mockito-without-hardcoded-distribute-version')
# Core Configuration
project.set_property('dir_dist', '$dir_target/dist/zahpee-api-client')
# Flake8 Configuration
project.set_property('flake8_break_build', True)
project.set_property('flake8_include_test_sources', True)
project.set_property('flake8_max_line_length', 120)
project.set_property('coverage_break_build', False)
# flake8_include_test_source Integration test Configuration
project.set_property('integrationtest_additional_commandline', '--with-xunit')
project.set_property('integrationtest_parallel', True)
project.set_property('clean_propagate_stdout', True)
project.set_property('clean_propagate_stderr', True)
| mit | Python |
f45e38cba0a61815b29ecc0ec9d503e289343961 | add lru_cache import, bump version | arnehilmann/yum-repos,arnehilmann/yumrepos,arnehilmann/yum-repos,arnehilmann/yumrepos | build.py | build.py | from pybuilder.core import use_plugin, init, Author, task
use_plugin("python.core")
use_plugin("python.unittest")
use_plugin("python.integrationtest")
use_plugin("python.install_dependencies")
use_plugin("python.flake8")
use_plugin("python.coverage")
use_plugin("python.distutils")
use_plugin('copy_resources')
use_plugin('filter_resources')
name = "yum-repos"
summary = "yum-repos: simple yum repositories with minimal rest api"
url = "https://github.com/arnehilmann/yum-repos"
version = "0.7.12"
authors = [Author('Arne Hilmann', 'arne.hilmann@gmail.com')]
description = """yum-repos
- serve yum repositories as simple folders
- ... via web server
- offer rest api for
- create/remove/link of repositories
- upload/stage/remove of rpms
"""
default_task = ["clean", "analyze", "publish"]
@task
def gittag(project, logger):
    """Print (not run) the git commands that tag and publish a release."""
    logger.info("The following commands create a new release, triggering all the fun stuff:")
    logger.info("git tag -a v{0} -m v{0}".format(project.version))
    logger.info("git push --tags")
@init
def set_properties(project):
    """Wire up runtime/build dependencies and resource copying/filtering."""
    project.build_depends_on('requests')
    project.depends_on("flask")
    # Depend on the lru_cache backport only when the stdlib lacks it
    # (functools.lru_cache exists from Python 3.2 on).  Bug fix: lru_cache is
    # an *attribute* of functools, not a submodule, so
    # `import functools.lru_cache` raised ImportError on every interpreter
    # and the backport was always pulled in; probe with `from ... import`.
    try:
        from functools import lru_cache  # noqa: F401
    except ImportError:
        project.depends_on("backports.functools_lru_cache")
    project.set_property('copy_resources_target', '$dir_dist')
    project.get_property('copy_resources_glob').extend(['setup.*cfg'])
    project.get_property('filter_resources_glob').extend(['**/setup.*cfg'])
| from pybuilder.core import use_plugin, init, Author, task
use_plugin("python.core")
use_plugin("python.unittest")
use_plugin("python.integrationtest")
use_plugin("python.install_dependencies")
use_plugin("python.flake8")
use_plugin("python.coverage")
use_plugin("python.distutils")
use_plugin('copy_resources')
use_plugin('filter_resources')
name = "yum-repos"
summary = "yum-repos: simple yum repositories with minimal rest api"
url = "https://github.com/arnehilmann/yum-repos"
version = "0.7.11"
authors = [Author('Arne Hilmann', 'arne.hilmann@gmail.com')]
description = """yum-repos
- serve yum repositories as simple folders
- ... via web server
- offer rest api for
- create/remove/link of repositories
- upload/stage/remove of rpms
"""
default_task = ["clean", "analyze", "publish"]
@task
def gittag(project, logger):
logger.info("The following commands create a new release, triggering all the fun stuff:")
logger.info("git tag -a v{0} -m v{0}".format(project.version))
logger.info("git push --tags")
@init
def set_properties(project):
project.build_depends_on('requests')
project.depends_on("flask")
project.set_property('copy_resources_target', '$dir_dist')
project.get_property('copy_resources_glob').extend(['setup.*cfg'])
project.get_property('filter_resources_glob').extend(['**/setup.*cfg'])
| apache-2.0 | Python |
3c029ad25c2066ecb484ea6dfb7c9887ea0a2101 | Raise exception on docker-build error | dincamihai/salt-toaster,dincamihai/salt-toaster | build.py | build.py | import re
import argparse
from utils import build_docker_image
def main():
    """Build the docker image and pretty-print its streamed build output."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--nocache', action='store_true', default=False)
    parser.add_argument('--nopull', action='store_true', default=False)
    args = parser.parse_args()
    content = ''
    stream = build_docker_image(nocache=args.nocache, pull=not args.nopull)
    for item in stream:
        # Abort as soon as the docker daemon reports a build error.
        if 'error' in item:
            raise Exception(item['error'])
        buff = item.get('stream', item.get('status', ''))
        # Progress fragments end with '[' followed by dots/spaces; keep
        # accumulating until the line is complete, then print and reset.
        if not content or re.search('.+\[[. ]*$', content):
            content += buff
        if not re.search('.+\[[. ]*$', content):
            print(content)
            content = ''
if __name__ == '__main__':
main()
| import re
import argparse
from utils import build_docker_image
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--nocache', action='store_true', default=False)
parser.add_argument('--nopull', action='store_true', default=False)
args = parser.parse_args()
content = ''
stream = build_docker_image(nocache=args.nocache, pull=not args.nopull)
for item in stream:
buff = item.get('stream', item.get('status', ''))
if not content or re.search('.+\[[. ]*$', content):
content += buff
if not re.search('.+\[[. ]*$', content):
print(content)
content = ''
if __name__ == '__main__':
main()
| mit | Python |
b3a57443f58caf6be930d00e7f8805fd0e64e80a | bump the version | ceph/remoto,alfredodeza/remoto | remoto/__init__.py | remoto/__init__.py | from .connection import Connection
__version__ = '0.0.10'
| from .connection import Connection
__version__ = '0.0.9'
| mit | Python |
fbccef017f48628c3f2e1457f21c38ba4d4fa90c | fix HAIL_GENETICS_IMAGES variable (#11117) | hail-is/hail,hail-is/hail,hail-is/hail,hail-is/hail,hail-is/hail,hail-is/hail,hail-is/hail,hail-is/hail | hail/python/hailtop/batch/hail_genetics_images.py | hail/python/hailtop/batch/hail_genetics_images.py | HAIL_GENETICS = 'hailgenetics/'
# Fully-qualified hailgenetics Docker image names.  Kept as a list (not a
# generator expression) so the collection can be iterated any number of times.
HAIL_GENETICS_IMAGES = [
    HAIL_GENETICS + name
    for name in ('hail', 'genetics', 'python-dill')]
| HAIL_GENETICS = 'hailgenetics/'
HAIL_GENETICS_IMAGES = (
HAIL_GENETICS + name
for name in ('hail', 'genetics', 'python-dill'))
| mit | Python |
1c28a7cd116363e769dffc17d9b9ae951f0bcf20 | Add test for Literal value property | vmuriart/python-sql | sql/tests/test_literal.py | sql/tests/test_literal.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2013, Cédric Krier
# Copyright (c) 2011-2013, B2CK
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from sql import Literal
class TestLiteral(unittest.TestCase):
    """Unit tests for sql.Literal."""

    def test_literal(self):
        literal = Literal(1)
        # A literal renders as a placeholder; the value travels in params
        # and is also exposed through the `value` property.
        self.assertEqual(str(literal), '%s')
        self.assertEqual(literal.params, (1,))
        self.assertEqual(literal.value, 1)
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2013, Cédric Krier
# Copyright (c) 2011-2013, B2CK
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from sql import Literal
class TestLiteral(unittest.TestCase):
def test_literal(self):
literal = Literal(1)
self.assertEqual(str(literal), '%s')
self.assertEqual(literal.params, (1,))
| bsd-3-clause | Python |
e9f4f2ae5ab6585b246668628b0f0eed332b76b4 | Fix error message of assert_one to report the correct expected value | spodkowinski/cassandra-dtest,beobal/cassandra-dtest,riptano/cassandra-dtest,mambocab/cassandra-dtest,iamaleksey/cassandra-dtest,bdeggleston/cassandra-dtest,stef1927/cassandra-dtest,blerer/cassandra-dtest,mambocab/cassandra-dtest,spodkowinski/cassandra-dtest,iamaleksey/cassandra-dtest,aweisberg/cassandra-dtest,carlyeks/cassandra-dtest,thobbs/cassandra-dtest,tjake/cassandra-dtest,blerer/cassandra-dtest,krummas/cassandra-dtest,carlyeks/cassandra-dtest,pcmanus/cassandra-dtest,krummas/cassandra-dtest,thobbs/cassandra-dtest,beobal/cassandra-dtest,riptano/cassandra-dtest,pauloricardomg/cassandra-dtest,yukim/cassandra-dtest,snazy/cassandra-dtest,pauloricardomg/cassandra-dtest,aweisberg/cassandra-dtest,snazy/cassandra-dtest,bdeggleston/cassandra-dtest,stef1927/cassandra-dtest | assertions.py | assertions.py | import re
from cassandra import InvalidRequest, Unavailable, ConsistencyLevel, WriteTimeout, ReadTimeout
from cassandra.query import SimpleStatement
from tools import rows_to_list
def assert_unavailable(fun, *args):
    """Assert that calling `fun` fails with an availability/timeout error.

    With no extra arguments the callable is invoked with a single None,
    mirroring the historical calling convention of these helpers.
    """
    try:
        if args:
            fun(*args)
        else:
            fun(None)
    except (Unavailable, WriteTimeout, ReadTimeout):
        return
    except Exception as exc:
        assert False, "Expecting unavailable exception, got: " + str(exc)
    assert False, "Expecting unavailable exception but no exception was raised"
def assert_invalid(session, query, matching=None, expected=InvalidRequest):
    """Assert that executing `query` raises `expected` (default InvalidRequest).

    If `matching` is given, the exception message must also match that regex.
    """
    try:
        res = session.execute(query)
        assert False, "Expecting query to be invalid: got %s" % res
    except AssertionError as e:
        # Re-raise our own "query unexpectedly succeeded" failure above;
        # without this it would be swallowed when expected == AssertionError's
        # ancestor or a broad type.
        raise e
    except expected as e:
        msg = str(e)
        if matching is not None:
            assert re.search(matching, msg), "Error message does not contain " + matching + " (error = " + msg + ")"
def assert_one(cursor, query, expected, cl=ConsistencyLevel.ONE):
    """Assert `query` returns exactly the single row `expected`."""
    simple_query = SimpleStatement(query, consistency_level=cl)
    res = cursor.execute(simple_query)
    list_res = rows_to_list(res)
    # Report [expected] -- the value actually compared -- in the message.
    assert list_res == [expected], "Expected %s from %s, but got %s" % ([expected], query, list_res)


def assert_none(cursor, query, cl=ConsistencyLevel.ONE):
    """Assert `query` returns no rows."""
    simple_query = SimpleStatement(query, consistency_level=cl)
    res = cursor.execute(simple_query)
    list_res = rows_to_list(res)
    assert list_res == [], "Expected nothing from %s, but got %s" % (query, list_res)


def assert_all(cursor, query, expected, cl=ConsistencyLevel.ONE):
    """Assert `query` returns exactly the list of rows `expected`."""
    simple_query = SimpleStatement(query, consistency_level=cl)
    res = cursor.execute(simple_query)
    list_res = rows_to_list(res)
    assert list_res == expected, "Expected %s from %s, but got %s" % (expected, query, list_res)
def assert_almost_equal(*args, **kwargs):
    """Assert every value lies within `error` (default 16%) of the largest.

    Keyword-only `error` is a fraction, e.g. 0.16 allows vmin down to
    84% of vmax.
    """
    tolerance = kwargs.get('error', 0.16)
    largest = max(args)
    smallest = min(args)
    assert smallest > largest * (1.0 - tolerance), \
        "values not within %.2f%% of the max: %s" % (tolerance * 100, args)
def assert_row_count(cursor, table_name, expected):
    """ Function to validate the row count expected in table_name """
    # NOTE(review): table_name is interpolated directly into CQL; callers are
    # trusted test code, so no quoting/escaping is applied here.
    query = "SELECT count(*) FROM {};".format(table_name)
    res = cursor.execute(query)
    count = res[0][0]
    assert count == expected, "Expected a row count of {} in table '{}', but got {}".format(
        expected, table_name, count
    )
| import re
from cassandra import InvalidRequest, Unavailable, ConsistencyLevel, WriteTimeout, ReadTimeout
from cassandra.query import SimpleStatement
from tools import rows_to_list
def assert_unavailable(fun, *args):
try:
if len(args) == 0:
fun(None)
else:
fun(*args)
except (Unavailable, WriteTimeout, ReadTimeout) as e:
pass
except Exception as e:
assert False, "Expecting unavailable exception, got: " + str(e)
else:
assert False, "Expecting unavailable exception but no exception was raised"
def assert_invalid(session, query, matching=None, expected=InvalidRequest):
try:
res = session.execute(query)
assert False, "Expecting query to be invalid: got %s" % res
except AssertionError as e:
raise e
except expected as e:
msg = str(e)
if matching is not None:
assert re.search(matching, msg), "Error message does not contain " + matching + " (error = " + msg + ")"
def assert_one(cursor, query, expected, cl=ConsistencyLevel.ONE):
    """Assert `query` returns exactly the single row `expected`."""
    simple_query = SimpleStatement(query, consistency_level=cl)
    res = cursor.execute(simple_query)
    list_res = rows_to_list(res)
    # Bug fix: the comparison is against [expected], so the failure message
    # must report [expected] too, not the bare row.
    assert list_res == [expected], "Expected %s from %s, but got %s" % ([expected], query, list_res)
def assert_none(cursor, query, cl=ConsistencyLevel.ONE):
simple_query = SimpleStatement(query, consistency_level=cl)
res = cursor.execute(simple_query)
list_res = rows_to_list(res)
assert list_res == [], "Expected nothing from %s, but got %s" % (query, list_res)
def assert_all(cursor, query, expected, cl=ConsistencyLevel.ONE):
simple_query = SimpleStatement(query, consistency_level=cl)
res = cursor.execute(simple_query)
list_res = rows_to_list(res)
assert list_res == expected, "Expected %s from %s, but got %s" % (expected, query, list_res)
def assert_almost_equal(*args, **kwargs):
try:
error = kwargs['error']
except KeyError:
error = 0.16
vmax = max(args)
vmin = min(args)
assert vmin > vmax * (1.0 - error), "values not within %.2f%% of the max: %s" % (error * 100, args)
def assert_row_count(cursor, table_name, expected):
""" Function to validate the row count expected in table_name """
query = "SELECT count(*) FROM {};".format(table_name)
res = cursor.execute(query)
count = res[0][0]
assert count == expected, "Expected a row count of {} in table '{}', but got {}".format(
expected, table_name, count
)
| apache-2.0 | Python |
bbeaf90677970bf39fad21321f9a4b16418b8894 | Complete recur sol | bowen0701/algorithms_data_structures | lc0116_populating_next_right_pointers_in_each_node.py | lc0116_populating_next_right_pointers_in_each_node.py | """Leetcode 116. Populating Next Right Pointers in Each Node
Medium
URL: https://leetcode.com/problems/populating-next-right-pointers-in-each-node/
You are given a perfect binary tree where all leaves are on the same level,
and every parent has two children. The binary tree has the following definition:
struct Node {
int val;
Node *left;
Node *right;
Node *next;
}
Populate each next pointer to point to its next right node.
If there is no next right node, the next pointer should be set to NULL.
Initially, all next pointers are set to NULL.
Input: {"$id":"1","left":{"$id":"2","left":{"$id":"3","left":null,"next":null,"right":null,"val":4},"next":null,"right":{"$id":"4","left":null,"next":null,"right":null,"val":5},"val":2},"next":null,"right":{"$id":"5","left":{"$id":"6","left":null,"next":null,"right":null,"val":6},"next":null,"right":{"$id":"7","left":null,"next":null,"right":null,"val":7},"val":3},"val":1}
Output: {"$id":"1","left":{"$id":"2","left":{"$id":"3","left":null,"next":{"$id":"4","left":null,"next":{"$id":"5","left":null,"next":{"$id":"6","left":null,"next":null,"right":null,"val":7},"right":null,"val":6},"right":null,"val":5},"right":null,"val":4},"next":{"$id":"7","left":{"$ref":"5"},"next":null,"right":{"$ref":"6"},"val":3},"right":{"$ref":"4"},"val":2},"next":null,"right":{"$ref":"7"},"val":1}
Explanation: Given the above perfect binary tree (Figure A),
your function should populate each next pointer to point to its next right node,
just like in Figure B.
Note:
- You may only use constant extra space.
- Recursive approach is fine, implicit stack space does not count as extra space
for this problem.
"""
# Definition for a Node.
class Node(object):
    """Binary-tree node carrying a `next` pointer to its right neighbour."""

    def __init__(self, val, left, right, next):
        self.val = val
        self.left = left
        self.right = right
        self.next = next
class SolutionRecur(object):
    """Wire up `next` pointers in a perfect binary tree by recursion."""

    def _preorder(self, node):
        """Link the children of `node`, then descend into both subtrees."""
        if not (node and node.left and node.right):
            return
        # The left child's successor is always its sibling.
        node.left.next = node.right
        # The right child's successor is the left child of the parent's
        # neighbour; at the right edge node.next is None and we leave the
        # pointer untouched (it is already None in a fresh tree).
        if node.next:
            node.right.next = node.next.left
        self._preorder(node.left)
        self._preorder(node.right)

    def connect(self, root):
        """
        :type root: Node
        :rtype: Node
        """
        self._preorder(root)
        return root
def main():
    """Smoke-test connect() on the sample perfect tree (Python 2 prints)."""
    root = Node(1, None, None, None)
    root.left = Node(2, None, None, None)
    root.right = Node(3, None, None, None)
    root.left.left = Node(4, None, None, None)
    root.left.right = Node(5, None, None, None)
    root.right.left = Node(6, None, None, None)
    root.right.right = Node(7, None, None, None)
    SolutionRecur().connect(root)
    print root.next  # Ans: None
    print root.left.next.val  # Ans: 3
    print root.right.next  # Ans: None
    print root.left.left.next.val  # Ans: 5
    print root.left.right.next.val  # Ans: 6
    print root.right.left.next.val  # Ans: 7
    print root.right.right.next  # Ans: None
if __name__ == '__main__':
main()
| """Leetcode 116. Populating Next Right Pointers in Each Node
Medium
URL: https://leetcode.com/problems/populating-next-right-pointers-in-each-node/
You are given a perfect binary tree where all leaves are on the same level,
and every parent has two children. The binary tree has the following definition:
struct Node {
int val;
Node *left;
Node *right;
Node *next;
}
Populate each next pointer to point to its next right node.
If there is no next right node, the next pointer should be set to NULL.
Initially, all next pointers are set to NULL.
Input: {"$id":"1","left":{"$id":"2","left":{"$id":"3","left":null,"next":null,"right":null,"val":4},"next":null,"right":{"$id":"4","left":null,"next":null,"right":null,"val":5},"val":2},"next":null,"right":{"$id":"5","left":{"$id":"6","left":null,"next":null,"right":null,"val":6},"next":null,"right":{"$id":"7","left":null,"next":null,"right":null,"val":7},"val":3},"val":1}
Output: {"$id":"1","left":{"$id":"2","left":{"$id":"3","left":null,"next":{"$id":"4","left":null,"next":{"$id":"5","left":null,"next":{"$id":"6","left":null,"next":null,"right":null,"val":7},"right":null,"val":6},"right":null,"val":5},"right":null,"val":4},"next":{"$id":"7","left":{"$ref":"5"},"next":null,"right":{"$ref":"6"},"val":3},"right":{"$ref":"4"},"val":2},"next":null,"right":{"$ref":"7"},"val":1}
Explanation: Given the above perfect binary tree (Figure A),
your function should populate each next pointer to point to its next right node,
just like in Figure B.
Note:
- You may only use constant extra space.
- Recursive approach is fine, implicit stack space does not count as extra space
for this problem.
"""
# Definition for a Node.
class Node(object):
def __init__(self, val, left, right, next):
self.val = val
self.left = left
self.right = right
self.next = next
class Solution(object):
def connect(self, root):
"""
:type root: Node
:rtype: Node
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
217cd5e0847031df41274acc2bb5463e09870219 | Add help to memcached | gelbander/blues,andreif/blues,chrippa/blues,chrippa/blues,andreif/blues,Sportamore/blues,jocke-l/blues,adisbladis/blues,jocke-l/blues,chrippa/blues,andreif/blues,adisbladis/blues,Sportamore/blues,5monkeys/blues,Sportamore/blues,gelbander/blues,gelbander/blues,5monkeys/blues,adisbladis/blues,jocke-l/blues,5monkeys/blues | blues/memcached.py | blues/memcached.py | """
Memcached
settings:
memcached:
size: 256 # Cache size in mb (Default: 64)
bind: 1.2.3.4 # Force memcached bind to address (Default: listen to all)
"""
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo, silent
from refabric.contrib import blueprints
from . import debian
__all__ = ['start', 'stop', 'restart', 'status', 'setup', 'configure', 'flush']
blueprint = blueprints.get(__name__)
start = debian.service_task('memcached', 'start')
stop = debian.service_task('memcached', 'stop')
restart = debian.service_task('memcached', 'restart')
status = debian.service_task('memcached', 'status')
@task
def setup():
    """
    Install memcached
    """
    install()
    configure()


def install():
    # Package installation requires root.
    with sudo():
        debian.apt_get('install', 'memcached')


@task
def configure():
    """
    Configure memcached
    """
    # Render the memcached config template with the cache size (MB) and an
    # optional bind address (None -> listen on all interfaces).
    context = {
        'size': blueprint.get('size', 64),
        'bind': blueprint.get('bind', None)
    }
    blueprint.upload('memcached', '/etc/', context)


@task
def flush():
    """
    Delete all cached keys
    """
    info('Flushing Memcached...')
    with sudo(), silent():
        # memcached's "flush_all" invalidates every key; sent via netcat to
        # the default local port 11211.
        run('echo "flush_all" | /bin/netcat -q 2 127.0.0.1 11211')
    info('Down the drain!')
| from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo, silent
from refabric.contrib import blueprints
from . import debian
__all__ = ['start', 'stop', 'restart', 'status', 'setup', 'configure', 'flush']
blueprint = blueprints.get(__name__)
start = debian.service_task('memcached', 'start')
stop = debian.service_task('memcached', 'stop')
restart = debian.service_task('memcached', 'restart')
status = debian.service_task('memcached', 'status')
@task
def setup():
"""
Install memcached
"""
install()
configure()
def install():
with sudo():
debian.apt_get('install', 'memcached')
@task
def configure():
"""
Configure memcached
"""
context = {
'size': blueprint.get('size', 64),
'bind': blueprint.get('bind', None)
}
blueprint.upload('memcached', '/etc/', context)
@task
def flush():
"""
Delete all cached keys
"""
info('Flushing Memcached...')
with sudo(), silent():
run('echo "flush_all" | /bin/netcat -q 2 127.0.0.1 11211')
info('Down the drain!')
| mit | Python |
6b7d7c1ae04cd00f06f05d8c5d55b2f8776bb1b2 | Fix skp listing | bpsinc-native/src_third_party_trace-viewer,bpsinc-native/src_third_party_trace-viewer,bpsinc-native/src_third_party_trace-viewer,bpsinc-native/src_third_party_trace-viewer | run_dev_server.py | run_dev_server.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from trace_viewer import trace_viewer_project
import tvcm
def do_GET_json_examples(request):
    """Serve the sorted list of file names under test_data/ as a JSON array."""
    examples_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'test_data'))
    data_files = sorted(
        name
        for _dirpath, _dirnames, filenames in os.walk(examples_dir)
        for name in filenames)
    payload = json.dumps(data_files)
    request.send_response(200)
    request.send_header('Content-Type', 'application/json')
    request.send_header('Content-Length', len(payload))
    request.end_headers()
    request.wfile.write(payload)
def do_GET_json_examples_skp(request):
    """Serve the sorted list of file names under skp_data/ as a JSON array."""
    skp_data_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'skp_data'))
    data_files = []
    for dirpath, dirnames, filenames in os.walk(skp_data_path):
        for f in filenames:
            data_files.append(f)
    data_files.sort()
    files_as_json = json.dumps(data_files)
    request.send_response(200)
    request.send_header('Content-Type', 'application/json')
    request.send_header('Content-Length', len(files_as_json))
    request.end_headers()
    request.wfile.write(files_as_json)
def Main(port, args):
    """Start the trace-viewer dev server with the JSON listing endpoints."""
    project = trace_viewer_project.TraceViewerProject()
    server = tvcm.DevServer(port=port, project=project)
    server.AddPathHandler('/json/examples', do_GET_json_examples)
    server.AddPathHandler('/json/examples/skp', do_GET_json_examples_skp)
    server.AddSourcePathMapping(project.trace_viewer_path)
    server.AddTestLink('/examples/skia_debugger.html', 'Skia Debugger')
    server.AddTestLink('/examples/trace_viewer.html', 'Trace File Viewer')
    server.serve_forever()
if __name__ == '__main__':
sys.exit(Main(port=8003, args=sys.argv[1:]))
| #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from trace_viewer import trace_viewer_project
import tvcm
def do_GET_json_examples(request):
test_data_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'test_data'))
data_files = []
for dirpath, dirnames, filenames in os.walk(test_data_path):
for f in filenames:
data_files.append(f)
data_files.sort()
files_as_json = json.dumps(data_files)
request.send_response(200)
request.send_header('Content-Type', 'application/json')
request.send_header('Content-Length', len(files_as_json))
request.end_headers()
request.wfile.write(files_as_json)
def do_GET_json_examples_skp(request):
    """Serve the sorted list of file names under skp_data/ as a JSON array.

    Fix: skp_data_path was referenced without ever being assigned, so every
    request died with a NameError.  Derive the path relative to this file,
    matching do_GET_json_examples.
    """
    skp_data_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'skp_data'))
    data_files = []
    for dirpath, dirnames, filenames in os.walk(skp_data_path):
        for f in filenames:
            data_files.append(f)
    data_files.sort()
    files_as_json = json.dumps(data_files)
    request.send_response(200)
    request.send_header('Content-Type', 'application/json')
    request.send_header('Content-Length', len(files_as_json))
    request.end_headers()
    request.wfile.write(files_as_json)
def Main(port, args):
project = trace_viewer_project.TraceViewerProject()
server = tvcm.DevServer(port=port, project=project)
server.AddPathHandler('/json/examples', do_GET_json_examples)
server.AddPathHandler('/json/examples/skp', do_GET_json_examples_skp)
server.AddSourcePathMapping(project.trace_viewer_path)
server.AddTestLink('/examples/skia_debugger.html', 'Skia Debugger')
server.AddTestLink('/examples/trace_viewer.html', 'Trace File Viewer')
server.serve_forever()
if __name__ == '__main__':
sys.exit(Main(port=8003, args=sys.argv[1:]))
| bsd-3-clause | Python |
5cc1ff3f4e15fa8a369da6e85dd7c0e01a93c1ee | Improve comments/documentation for driver script | djc/runa,djc/runa,djc/runa,djc/runa | runac/__main__.py | runac/__main__.py | #!/usr/bin/env python
import optparse, sys, os
from runac import util
import runac
def tokens(fn, opts):
'''Print a list of tokens and location info'''
with open(fn) as f:
for x in runac.lex(f.read()):
print (x.name, x.value, (x.source_pos.lineno, x.source_pos.colno))
def parse(fn, opts):
'''Print the syntax tree resulting from parsing the source'''
print runac.parse(fn)
def show(fn, opts):
'''Print syntax tree after processing the pass specified by --last'''
for name, ast in runac.show(fn, opts.last).items():
print ast
def generate(fn, opts):
'''Print LLVM IR as generated by the code generation process'''
ir = runac.ir(fn)
if not opts.test:
print ir
def compile(fn, opts):
'''Compile the given program to a binary of the same name'''
ir = runac.ir(fn)
runac.compile(ir, os.path.basename(fn).rsplit('.rns')[0])
COMMANDS = {
'tokens': tokens,
'parse': parse,
'show': show,
'generate': generate,
'compile': compile,
}
def find(cmd):
    """Resolve `cmd` to a command function, allowing unique abbreviations.

    Ambiguous or unknown names print a diagnostic and resolve to a no-op
    taking the same (fn, opts) arguments.
    """
    if cmd in COMMANDS:
        return COMMANDS[cmd]
    candidates = sorted(name for name in COMMANDS if name.startswith(cmd))
    if len(candidates) == 1:
        return COMMANDS[candidates[0]]
    if candidates:
        print('ambiguous command: %r' % cmd)
    else:
        print('no command found: %r' % cmd)
    return lambda x, y: None
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option('--last', help='last pass', default='destruct')
parser.add_option('--test', help='no output', action='store_true')
parser.add_option('--traceback', help='show full traceback',
action='store_true')
opts, args = parser.parse_args()
if len(args) < 1:
print 'The Runa compiler. A command takes a single file as an argument.'
print '\nCommands:\n'
for cmd, fun in sorted(COMMANDS.items()):
print '%s: %s' % (cmd, fun.__doc__)
print '\nAny unique command abbrevation will also work.'
parser.print_help()
sys.exit(1)
try:
find(args[0])(args[1], opts)
except util.Error as e:
if opts.traceback:
raise
sys.stderr.write(e.show())
except util.ParseError as e:
if opts.traceback:
raise
sys.stderr.write(e.show())
| #!/usr/bin/env python
import optparse, sys, os
from runac import util
import runac
def tokens(fn, opts):
with open(fn) as f:
for x in runac.lex(f.read()):
print (x.name, x.value, (x.source_pos.lineno, x.source_pos.colno))
def parse(fn, opts):
print runac.parse(fn)
def show(fn, opts):
for name, ast in runac.show(fn, opts.last).items():
print ast
def generate(fn, opts):
ir = runac.ir(fn)
if not opts.test:
print ir
def compile(fn, opts):
ir = runac.ir(fn)
runac.compile(ir, os.path.basename(fn).rsplit('.rns')[0])
COMMANDS = {
'tokens': tokens,
'parse': parse,
'show': show,
'generate': generate,
'compile': compile,
}
def find(cmd):
if cmd in COMMANDS: return COMMANDS[cmd]
matched = sorted(i for i in COMMANDS if i.startswith(cmd))
if len(matched) == 1:
return COMMANDS[matched[0]]
elif len(matched) > 1:
print 'ambiguous command: %r' % cmd
return lambda x, y: None
else:
print 'no command found: %r' % cmd
return lambda x, y: None
if __name__ == '__main__':
cmdlist = ', '.join(COMMANDS)
description = 'The Runa compiler. Available commands: %s; ' % cmdlist
description += 'any unique abbreviation also works. Each takes a single '
description += 'file name as an argument.'
parser = optparse.OptionParser(description=description)
parser.add_option('--last', help='last pass', default='destruct')
parser.add_option('--test', help='no output', action='store_true')
parser.add_option('--traceback', help='show full traceback',
action='store_true')
opts, args = parser.parse_args()
if len(args) < 1:
parser.print_help()
sys.exit(1)
try:
find(args[0])(args[1], opts)
except util.Error as e:
if opts.traceback:
raise
sys.stderr.write(e.show())
except util.ParseError as e:
if opts.traceback:
raise
sys.stderr.write(e.show())
| mit | Python |
7636d5e054bf77e94942e3ab3019d5633896f185 | Remove duplicate item info. | nriley/LBHue | Hue.lbaction/Contents/Scripts/hue.py | Hue.lbaction/Contents/Scripts/hue.py | #!/Users/nicholas/Documents/Development/Hue/bin/python
__all__ = ('lights', 'light', 'item_for_light', 'toggle_item_for_light')
# XXX replace this with either nothing or something that isn't GPLv2
import collections, qhue
bridge = qhue.Bridge('192.168.0.14', 'USERNAME')
def lights():
    """Return reachable lights as an OrderedDict of name -> (id, info),
    sorted by light name."""
    lights = dict((light_info['name'], (int(light_id), light_info))
                  for (light_id, light_info) in bridge.lights().iteritems()
                  if light_info['state']['reachable'])
    return collections.OrderedDict(sorted(lights.items()))


def light(light_id):
    """Return the qhue resource for a single light id."""
    return bridge.lights[light_id]
def item_for_light(light_id, light_info):
    """Build the base LaunchBar item dict for a Hue light.

    `light_id` is accepted for signature parity with callers but unused here.
    """
    base_item = {
        'title': light_info['name'],
        'icon': 'font-awesome:fa-lightbulb-o',
        'iconIsTemplate': True,
    }
    return base_item
def toggle_item_for_light(light_id, light_info):
    """Build a LaunchBar item whose action toggles the light's on/off state."""
    item = item_for_light(light_id, light_info)
    on = bool(light_info['state']['on'])
    # The action URL requests the opposite of the current state.
    want_on = 0 if on else 1
    url = 'x-launchbar:action/net.sabi.LaunchBar.action.Hue/light?id=%d&on=%d' % (light_id, want_on)
    item.update(
        action='action.py',
        actionReturnsItems=True,
        url=url)
    if on:
        item['badge'] = 'ON'
    return item
| #!/Users/nicholas/Documents/Development/Hue/bin/python
__all__ = ('lights', 'light', 'item_for_light', 'toggle_item_for_light')
# XXX replace this with either nothing or something that isn't GPLv2
import collections, qhue
bridge = qhue.Bridge('192.168.0.14', 'USERNAME')
def lights():
lights = dict((light_info['name'], (int(light_id), light_info))
for (light_id, light_info) in bridge.lights().iteritems()
if light_info['state']['reachable'])
return collections.OrderedDict(sorted(lights.items()))
def light(light_id):
return bridge.lights[light_id]
def item_for_light(light_id, light_info):
item = dict(
title=light_info['name'],
icon='font-awesome:fa-lightbulb-o',
iconIsTemplate=True)
return item
def toggle_item_for_light(light_id, light_info):
item = item_for_light(light_id, light_info)
on = bool(light_info['state']['on'])
want_on = 0 if on else 1
url = 'x-launchbar:action/net.sabi.LaunchBar.action.Hue/light?id=%d&on=%d' % (light_id, want_on)
item.update(
title=light_info['name'],
icon='font-awesome:fa-lightbulb-o',
iconIsTemplate=True,
action='action.py',
actionReturnsItems=True,
url=url)
if on:
item['badge'] = 'ON'
return item
| apache-2.0 | Python |
7ab650ff4180ce263d789d2266391a93daa7298f | Use master_config function | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | saltapi/config.py | saltapi/config.py | '''
Manage configuration files in salt-cloud
'''
# Import python libs
import os
# Import salt libs
import salt.config
def api_config(path):
    '''
    Read in the salt master config file and add additional configs that
    need to be stubbed out for cloudapi
    '''
    # master_config() returns a fully-populated dict, so the previous
    # `opts = {}` pre-initialization was dead code and has been removed.
    opts = salt.config.master_config(path)
    # Only the `include` directive still needs explicit expansion here.
    if 'include' in opts:
        opts = salt.config.include_config(opts, path)
    return opts
| '''
Manage configuration files in salt-cloud
'''
# Import python libs
import os
# Import salt libs
import salt.config
def api_config(path):
'''
Read in the salt master config file and add additional configs that
need to be stubbed out for cloudapi
'''
opts = {
'extension_modules': [],
}
salt.config.load_config(opts, path, 'SALT_MASTER_CONFIG')
if 'include' in opts:
opts = salt.config.include_config(opts, path)
return opts
| apache-2.0 | Python |
0c403a18fa12a3bda3a2f51c7d4bbc37c7fb3960 | Patch Release | SylvainCorlay/bqplot,SylvainCorlay/bqplot,ssunkara1/bqplot,ssunkara1/bqplot,ChakriCherukuri/bqplot,SylvainCorlay/bqplot,dmadeka/bqplot,ChakriCherukuri/bqplot,bloomberg/bqplot,dmadeka/bqplot,bloomberg/bqplot,ChakriCherukuri/bqplot,bloomberg/bqplot | bqplot/_version.py | bqplot/_version.py | version_info = (0, 7, 1)
__version__ = '.'.join(map(str, version_info))
| version_info = (0, 8, 0, 'dev0')
__version__ = '.'.join(map(str, version_info))
| apache-2.0 | Python |
2f8e030a879855d8a6a3d7ba6fd881b1341b64ea | Add test for location_sequences method | MikeVasmer/GreenGraphCoursework | greengraph/test/test_graph.py | greengraph/test/test_graph.py | from greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
from nose.tools import assert_almost_equal
import os
import yaml
start = "London"
end = "Durham"
def test_Greengraph_init():
#Test instance of Greengraph class instantiated correctly
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
def test_geolocate():
#Test that the geolocate method returns the correct latitude and longitude coordinates for various places
with open(os.path.join(os.path.dirname(__file__),"fixtures","geolocate.yaml")) as fixtures_file:
test_Greengraph = Greengraph(start,end)
fixtures = yaml.load(fixtures_file)
for fixture in fixtures:
assert_almost_equal(
test_Greengraph.geolocate(fixture.pop("place")),(fixture.pop("lat"),fixture.pop("lon"))
)
def test_location_sequence():
#Test that the location_sequence method computes the steps between two coordinates correctly
test_Greengraph = Greengraph(start,end)
test_sequence = test_Greengraph.location_sequence((0,0),(50,50),6)
expected_results = [0.,10.,20.,30.,40.,50.]
i = 0
for result in expected_results:
test_Greengraph = Greengraph(start,end)
assert_equal(test_sequence[i][0],result)
assert_equal(test_sequence[i][1],result)
i += 1
| from greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
from nose.tools import assert_almost_equal
import os
import yaml
start = "London"
end = "Durham"
def test_Greengraph_init():
#Test instance of Greengraph class instantiated correctly
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
def test_geolocate():
#Test that the geolocate method returns the correct latitude and longitude coordinates for various places
with open(os.path.join(os.path.dirname(__file__),"fixtures","geolocate.yaml")) as fixtures_file:
test_Greengraph = Greengraph(start,end)
fixtures = yaml.load(fixtures_file)
for fixture in fixtures:
assert_almost_equal(
test_Greengraph.geolocate(fixture.pop("place")),(fixture.pop("lat"),fixture.pop("lon"))
)
| mit | Python |
50c306aead2376ba8b307a1731eb952e985ff138 | Move clock imports | california-civic-data-coalition/django-calaccess-downloads-website,california-civic-data-coalition/django-calaccess-downloads-website,california-civic-data-coalition/django-calaccess-downloads-website | clock.py | clock.py | from apscheduler.schedulers.blocking import BlockingScheduler
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=3)
def timed_job():
print('This job is run every three minutes.')
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
import django
django.setup()
from django.core.management import call_command
call_command("check")
sched.start()
| import django
django.setup()
from django.core.management import call_command
from apscheduler.schedulers.blocking import BlockingScheduler
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=3)
def timed_job():
print('This job is run every three minutes.')
call_command("check")
sched.start()
| mit | Python |
bf6247615d6f1799e70171bdd7e174ca2061a296 | Update docstring for Coins | PaulEcoffet/megamachineacaoua | coins.py | coins.py | from collections import Counter
class Coins(Counter):
"""
Class that represents Coins
Usage:
```
> coins = Coins({200: 0, 100: 2, 50: 4, 20: 3, 10: 1})
> coins.value
470
> coins2 = Coins({200:0, 100:0, 50:0, 20:2, 10:0})
> coins + coins2
Coins({200:0, 100:2, 50:4, 20:5, 10:1})
> coins - coins2
Coins({200:0, 100:2, 50:4, 20:1, 10:1})
```
Warning: Negative coins amount is not forbidden (yet)
In place addition and subtraction are also allowed
```
> coins.add(coins2)
> coins
Coins({200:0, 100:2, 50:4, 20:5, 10:1})
```
"""
def __init__(self, coins=None):
if coins:
if isinstance(coins, dict):
if (all(isinstance(key, int) for key in coins)
and all(isinstance(value, int)
for value in coins.values())):
super().__init__(coins)
else:
raise ValueError('Coins works only with integers')
elif isinstance(coins, list):
if all(isinstance(value, int) for value in coins):
super().__init__(coins)
else:
raise ValueError('Coins works only with integers')
else:
raise ValueError('Coins works only with integers')
else:
super().__init__()
@property
def value(self):
total = 0
for value, amount in self.items():
total += value * amount
return total
add = Counter.update # Update for addition is not clear, addition of
# add for symetry with subtract
| from collections import Counter
class Coins(Counter):
"""
Class that represents Coins
Usage:
```
> coins = Coins([0, 2, 4, 3, 1], [200, 100, 50, 20, 10])
> coins.value
470
> coins2 = Coins([0, 0, 0, 2, 0], [200, 100, 50, 20, 10])
> coins + coins2
Coins({200:0, 100:2, 50:4, 20:5, 10:1})
> coins - coins2
Coins({200:0, 100:2, 50:4, 20:1, 10:1})
```
Warning: Negative coins amount is not forbidden (yet)
In place addition and subtraction are also allowed
```
> coins.add(coins2)
> coins
Coins({200:0, 100:2, 50:4, 20:5, 10:1})
```
"""
def __init__(self, coins=None):
if coins:
if isinstance(coins, dict):
if (all(isinstance(key, int) for key in coins)
and all(isinstance(value, int)
for value in coins.values())):
super().__init__(coins)
else:
raise ValueError('Coins works only with integers')
elif isinstance(coins, list):
if all(isinstance(value, int) for value in coins):
super().__init__(coins)
else:
raise ValueError('Coins works only with integers')
else:
raise ValueError('Coins works only with integers')
else:
super().__init__()
@property
def value(self):
total = 0
for value, amount in self.items():
total += value * amount
return total
add = Counter.update # Update for addition is not clear, addition of
# add for symetry with subtract
| apache-2.0 | Python |
3b23a7a3bba469487a4694e0653341740e64cd7a | Update Python path | xieweiAlex/English_Learning,xieweiAlex/English_Learning | count.py | count.py | #!/usr/bin/python3
import sys
import re
import os
line = sys.argv[1]
# line = "the feature **bloated** IDE Emacs is rather **off-putting**"
# line = "**bloated** IDE Emacs is"
# line = "asdfasdf asdf sad fasdf as dfasd f"
# print ('The line is: ', line)
pattern = "\*\*[^*]*\*\*"
matches = re.findall(pattern, line)
# print("the matches is: ", matches)
count = len(matches)
print(count)
# os._exit(count)
| #!/usr/local/bin/python3
import sys
import re
import os
line = sys.argv[1]
# line = "the feature **bloated** IDE Emacs is rather **off-putting**"
# line = "**bloated** IDE Emacs is"
# line = "asdfasdf asdf sad fasdf as dfasd f"
# print ('The line is: ', line)
pattern = "\*\*[^*]*\*\*"
matches = re.findall(pattern, line)
# print("the matches is: ", matches)
count = len(matches)
print(count)
# os._exit(count)
| mit | Python |
19c601bbc36f1e049ee146439d767edd8004008a | add new line at end of file for toy_text __init__ | Farama-Foundation/Gymnasium,Farama-Foundation/Gymnasium | gym/envs/toy_text/__init__.py | gym/envs/toy_text/__init__.py | from gym.envs.toy_text.blackjack import BlackjackEnv
from gym.envs.toy_text.roulette import RouletteEnv
from gym.envs.toy_text.frozen_lake import FrozenLakeEnv
from gym.envs.toy_text.nchain import NChainEnv
from gym.envs.toy_text.hotter_colder import HotterColder
from gym.envs.toy_text.guessing_game import GuessingGame
from gym.envs.toy_text.kellycoinflip import KellyCoinflipEnv
from gym.envs.toy_text.kellycoinflip import KellyCoinflipGeneralizedEnv
from gym.envs.toy_text.cliffwalking import CliffWalkingEnv
| from gym.envs.toy_text.blackjack import BlackjackEnv
from gym.envs.toy_text.roulette import RouletteEnv
from gym.envs.toy_text.frozen_lake import FrozenLakeEnv
from gym.envs.toy_text.nchain import NChainEnv
from gym.envs.toy_text.hotter_colder import HotterColder
from gym.envs.toy_text.guessing_game import GuessingGame
from gym.envs.toy_text.kellycoinflip import KellyCoinflipEnv
from gym.envs.toy_text.kellycoinflip import KellyCoinflipGeneralizedEnv
from gym.envs.toy_text.cliffwalking import CliffWalkingEnv | mit | Python |
d6cae7d5cc88539cb9bd310e5f81ee5c43f338bf | add Proxy xiaoice_storage | WEIZIBIN/PersonalWebsite,WEIZIBIN/PersonalWebsite,WEIZIBIN/PersonalWebsite | flask_website/xiaoice_storage.py | flask_website/xiaoice_storage.py | dict_xiaoice = {}
class Xiaoice():
def __init__(self, weibo):
self._weibo = weibo
self.client_id = None
def getWeibo(self):
return self._weibo
def post_msg(self, msg):
self._weibo.post_msg_to_xiaoice(msg)
def is_avail(self):
if self._weibo.im_ready:
return True
def get_xiaoice_by_username(username):
return dict_xiaoice[username]
def add_xiaoice(weibo):
xiaoice = Xiaoice(weibo)
dict_xiaoice[xiaoice.getWeibo().username] = xiaoice
def get_all_xiaoice():
return dict_xiaoice
def get_avail_xiaoice():
for username, xiaoice in dict_xiaoice.items():
if xiaoice.is_avail():
return xiaoice
| dict_xiaoice = {}
def get_xiaoice_by_username(username):
return dict_xiaoice[username]
def add_xiaoice(xiaoice):
dict_xiaoice[xiaoice.username] = xiaoice
def get_all_xiaoice():
return dict_xiaoice
def get_avail_xiaoice():
# todo check avail
for username, xiaoice in dict_xiaoice.items():
return xiaoice | mit | Python |
a55cfa7870fb821680b132c46011ac92179df0ce | Fix that one test. | jeremycline/fmn,jeremycline/fmn,jeremycline/fmn | fmn/lib/tests/test_recipients.py | fmn/lib/tests/test_recipients.py | from nose.tools import eq_, assert_not_equals
import os
import fmn.lib.models
import fmn.lib.tests
class TestRecipients(fmn.lib.tests.Base):
def create_user_and_context_data(self):
user1 = fmn.lib.models.User.get_or_create(self.sess, username="ralph")
user2 = fmn.lib.models.User.get_or_create(self.sess, username="toshio")
context1 = fmn.lib.models.Context.create(
self.sess, name="irc", description="Internet Relay Chat")
context2 = fmn.lib.models.Context.create(
self.sess, name="gcm", description="Google Cloud Messaging")
def create_preference_data_basic(self):
user = fmn.lib.models.User.get(self.sess, username="ralph")
context = fmn.lib.models.Context.get(self.sess, name="irc")
preference = fmn.lib.models.Preference.create(
self.sess,
user=user,
context=context,
delivery_detail=dict(
ircnick="threebean",
)
)
def test_empty_recipients_list(self):
self.create_user_and_context_data()
incoming_message = {
"wat": "blah",
}
recipients = fmn.lib.recipients(self.sess, incoming_message)
expected_keys = set(['irc', 'gcm'])
eq_(set(recipients.keys()), expected_keys)
eq_(list(recipients['irc']), [])
eq_(list(recipients['gcm']), [])
def test_basic_recipients_list(self):
self.create_user_and_context_data()
self.create_preference_data_basic()
msg = {
"wat": "blah",
}
recipients = fmn.lib.recipients_for_context(self.sess, 'irc', msg)
eq_(list(recipients), [dict(ircnick="threebean", user="ralph")])
def test_miss_recipients_list(self):
self.create_user_and_context_data()
self.create_preference_data_basic()
msg = {
"wat": "blah",
}
recipients = fmn.lib.recipients_for_context(self.sess, 'gcm', msg)
eq_(list(recipients), [])
| from nose.tools import eq_, assert_not_equals
import os
import fmn.lib.models
import fmn.lib.tests
class TestRecipients(fmn.lib.tests.Base):
def create_user_and_context_data(self):
user1 = fmn.lib.models.User.get_or_create(self.sess, username="ralph")
user2 = fmn.lib.models.User.get_or_create(self.sess, username="toshio")
context1 = fmn.lib.models.Context.create(
self.sess, name="irc", description="Internet Relay Chat")
context2 = fmn.lib.models.Context.create(
self.sess, name="gcm", description="Google Cloud Messaging")
def create_preference_data_basic(self):
user = fmn.lib.models.User.get(self.sess, username="ralph")
context = fmn.lib.models.Context.get(self.sess, name="irc")
preference = fmn.lib.models.Preference.create(
self.sess,
user=user,
context=context,
delivery_detail=dict(
ircnick="threebean",
)
)
def test_empty_recipients_list(self):
self.create_user_and_context_data()
incoming_message = {
"wat": "blah",
}
recipients = fmn.lib.recipients(self.sess, incoming_message)
expected_keys = set(['irc', 'gcm'])
eq_(set(recipients.keys()), expected_keys)
eq_(list(recipients['irc']), [])
eq_(list(recipients['gcm']), [])
def test_basic_recipients_list(self):
self.create_user_and_context_data()
self.create_preference_data_basic()
msg = {
"wat": "blah",
}
recipients = fmn.lib.recipients_for_context(self.sess, 'irc', msg)
eq_(list(recipients), [dict(ralph=dict(ircnick="threebean"))])
def test_miss_recipients_list(self):
self.create_user_and_context_data()
self.create_preference_data_basic()
msg = {
"wat": "blah",
}
recipients = fmn.lib.recipients_for_context(self.sess, 'gcm', msg)
eq_(list(recipients), [])
| lgpl-2.1 | Python |
3b7a3755a43c21471aa39c89914179862c66bf8a | Add Sample code | jervisfm/GoogleDrive | gdrive.py | gdrive.py | #!/usr/bin/python
__author__ = 'Jervis Muindi'
__date__ = 'November 2013'
class GFile(object):
"""Encapsulates a GFile Object"""
def __init__(self):
pass
class GDriveAuth(object):
"""Encapsulates OAUTH2 authentication details for Google Drive API. """
def __init__(self, client_id, client_secret, oauth_scope, redirect_uri):
self.client_id = client_id
self.client_secret = client_secret
self.oauth_scope = oauth_scope
self.redirect_uri = redirect_uri
class GDrive(object):
"""Represents a Google Drive object. """
def __init__(self):
pass
def upload(src_file, dest_path=None):
"""Uploads the 'src_file' to the destination file.
Args:
src_file: the source file to be uploaded.
dest_path: the destination folder path."""
pass
def download(src_file, dest_file):
"""Downloads the specified file from Drive onto a local file.
Args:
src_file: the source file to be uploaded
dest_file: the destination file to save downloaded file to.
"""
pass
def list(file_path):
"""Lists files in the given path."""
pass
################################################
# Sample Test Code borrowed from
# https://developers.google.com/drive/quickstart-python#step_3_set_up_the_sample
import httplib2
import pprint
from apiclient.discovery import build
from apiclient.http import MediaFileUpload
from oauth2client.client import OAuth2WebServerFlow
# Copy your credentials from the console
CLIENT_ID = 'YOUR_CLIENT_ID'
CLIENT_SECRET = 'YOUR_CLIENT_SECRET'
# Check https://developers.google.com/drive/scopes for all available scopes
OAUTH_SCOPE = 'https://www.googleapis.com/auth/drive'
# Redirect URI for installed apps
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
# Path to the file to upload
FILENAME = 'document.txt'
# Run through the OAuth flow and retrieve credentials
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, OAUTH_SCOPE, REDIRECT_URI)
authorize_url = flow.step1_get_authorize_url()
print 'Go to the following link in your browser: ' + authorize_url
code = raw_input('Enter verification code: ').strip()
credentials = flow.step2_exchange(code)
# Create an httplib2.Http object and authorize it with our credentials
http = httplib2.Http()
http = credentials.authorize(http)
drive_service = build('drive', 'v2', http=http)
# Insert a file
media_body = MediaFileUpload(FILENAME, mimetype='text/plain', resumable=True)
body = {
'title': 'My document',
'description': 'A test document',
'mimeType': 'text/plain'
}
file = drive_service.files().insert(body=body, media_body=media_body).execute()
pprint.pprint(file)
| #!/usr/bin/python
__author__ = 'Jervis Muindi'
__date__ = 'November 2013'
class GFile(object):
"""Encapsulates a GFile Object"""
def __init__(self):
pass
class GDriveAuth(object):
"""Encapsulates OAUTH2 authentication details for Google Drive API. """
def __init__(self, client_id, client_secret, oauth_scope, redirect_uri):
self.client_id = client_id
self.client_secret = client_secret
self.oauth_scope = oauth_scope
self.redirect_uri = redirect_uri
class GDrive(object):
"""Represents a Google Drive object. """
def __init__(self):
pass
def upload(src_file, dest_path=None):
"""Uploads the 'src_file' to the destination file.
Args:
src_file: the source file to be uploaded.
dest_path: the destination folder path."""
pass
def download(src_file, dest_file):
"""Downloads the specified file from Drive onto a local file.
Args:
src_file: the source file to be uploaded
dest_file: the destination file to save downloaded file to.
"""
pass
def list(file_path):
"""Lists files in the given path."""
pass
| bsd-3-clause | Python |
c9b035e576459673c2034aa0dc09aa67dde23886 | Add some logging to the getter lambda | bwinterton/alexa-xkcd,bwinterton/alexa-xkcd | getter.py | getter.py | import requests
import xmltodict
import boto3
import botocore
import re
def get_latest_info():
r = requests.get("https://xkcd.com/rss.xml")
rss = xmltodict.parse(r.text)
post = dict()
post["url"] = rss["rss"]["channel"]["item"][0]["link"]
post["num"] = re.search(r"xkcd.com\/([0-9]+)",
rss["rss"]["channel"]["item"][0]["link"]).group(1)
post["image_url"] = re.search(r"<img src=\"(.*)\" title=",
rss["rss"]["channel"]["item"][0]["description"]).group(1)
post["alt"] = re.search(r"alt=\"(.*)\"",
rss["rss"]["channel"]["item"][0]["description"]).group(1)
post["name"] = rss["rss"]["channel"]["item"][0]["title"]
return post
def s3_file_exists(bucket, filename):
try:
obj = boto3.resource('s3').Object(bucket, filename).get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "NoSuchKey":
return False
else:
raise e
else:
return true
def push_to_s3(latest):
filename = latest["num"] + ".png"
path = "/tmp/" + filename
# If the file already exists, then don't re-upload
if s3_file_exists("xkcd-lambda", filename):
print "Latest XKCD has already been cached. Exiting now..."
return
r = requests.get(latest["image_url"], stream=True)
with open(path , 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
bucket = boto3.resource('s3').Bucket("xkcd-lambda")
extraArgs = {
'Metadata' : {
'Title' : latest["name"],
'Alt' : latest["alt"],
"URL" : latest["url"]
}
}
bucket.upload_file(path, filename, extraArgs)
print "Latest XKCD Cached successfully!"
return
def lambda_handler(event, context):
main()
def main():
latest = get_latest_info()
print "Latest XKCD is number " + latest["num"] + ". Caching now..."
push_to_s3(latest)
if __name__ == "__main__":
main()
| import requests
import xmltodict
import boto3
import botocore
import re
def get_latest_info():
r = requests.get("https://xkcd.com/rss.xml")
rss = xmltodict.parse(r.text)
post = dict()
post["url"] = rss["rss"]["channel"]["item"][0]["link"]
post["num"] = re.search(r"xkcd.com\/([0-9]+)",
rss["rss"]["channel"]["item"][0]["link"]).group(1)
post["image_url"] = re.search(r"<img src=\"(.*)\" title=",
rss["rss"]["channel"]["item"][0]["description"]).group(1)
post["alt"] = re.search(r"alt=\"(.*)\"",
rss["rss"]["channel"]["item"][0]["description"]).group(1)
post["name"] = rss["rss"]["channel"]["item"][0]["title"]
return post
def s3_file_exists(bucket, filename):
try:
obj = boto3.resource('s3').Object(bucket, filename).get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "NoSuchKey":
return False
else:
raise e
else:
return true
def push_to_s3(latest):
filename = latest["num"] + ".png"
path = "/tmp/" + filename
# If the file already exists, then don't re-upload
if s3_file_exists("xkcd-lambda", filename):
return
r = requests.get(latest["image_url"], stream=True)
with open(path , 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
bucket = boto3.resource('s3').Bucket("xkcd-lambda")
extraArgs = {
'Metadata' : {
'Title' : latest["name"],
'Alt' : latest["alt"],
"URL" : latest["url"]
}
}
bucket.upload_file(path, filename, extraArgs)
return
def lambda_handler(event, context):
main()
def main():
latest = get_latest_info()
push_to_s3(latest)
if __name__ == "__main__":
main()
| mit | Python |
53a95088e0a0ecbca50c370a7803724a5cc67c6f | Bump version to 18.04.15-4 | charlievieth/GoSubl,charlievieth/GoSubl | gosubl/about.py | gosubl/about.py | import re
import sublime
# GoSublime Globals
ANN = 'a18.04.15-4'
VERSION = 'r18.04.15-4'
VERSION_PAT = re.compile(r'\d{2}[.]\d{2}[.]\d{2}-\d+', re.IGNORECASE)
DEFAULT_GO_VERSION = 'go?'
GO_VERSION_OUTPUT_PAT = re.compile(r'go\s+version\s+(\S+(?:\s+[+]\w+|\s+\([^)]+)?)', re.IGNORECASE)
GO_VERSION_NORM_PAT = re.compile(r'[^\w.+-]+', re.IGNORECASE)
PLATFORM = '%s-%s' % (sublime.platform(), sublime.arch())
MARGO_EXE_PREFIX = 'gosublime.margo_'
MARGO_EXE_SUFFIX = '.exe'
MARGO_EXE = MARGO_EXE_PREFIX+VERSION+'_'+DEFAULT_GO_VERSION+MARGO_EXE_SUFFIX
MARGO_EXE_PAT = re.compile(r'^gosublime\.margo.*\.exe$', re.IGNORECASE)
# CEV: Dev Globals
FORCE_INSTALL = False
| import re
import sublime
# GoSublime Globals
ANN = 'a18.04.15-3'
VERSION = 'r18.04.15-3'
VERSION_PAT = re.compile(r'\d{2}[.]\d{2}[.]\d{2}-\d+', re.IGNORECASE)
DEFAULT_GO_VERSION = 'go?'
GO_VERSION_OUTPUT_PAT = re.compile(r'go\s+version\s+(\S+(?:\s+[+]\w+|\s+\([^)]+)?)', re.IGNORECASE)
GO_VERSION_NORM_PAT = re.compile(r'[^\w.+-]+', re.IGNORECASE)
PLATFORM = '%s-%s' % (sublime.platform(), sublime.arch())
MARGO_EXE_PREFIX = 'gosublime.margo_'
MARGO_EXE_SUFFIX = '.exe'
MARGO_EXE = MARGO_EXE_PREFIX+VERSION+'_'+DEFAULT_GO_VERSION+MARGO_EXE_SUFFIX
MARGO_EXE_PAT = re.compile(r'^gosublime\.margo.*\.exe$', re.IGNORECASE)
# CEV: Dev Globals
FORCE_INSTALL = False
| mit | Python |
47497dee7fd10ebad084638b2f15f8e50e088737 | fix imports | publica-io/django-publica-images,publica-io/django-publica-images | images/models.py | images/models.py | # -*- coding: utf-8 -*-
from django.db import models
from django.contrib.contenttypes import generic
from entropy.mixins import EnabledMixin, OrderingMixin
from .settings import CONTENT_MODELS
class Image(models.Model):
'''
Image URLs that reference an external source; such as FilePicker / S3
[{
"url":"https://www.filepicker.io/api/file/3d6OxllbQi2bfkLhGSrg",
"filename":"m10.png",
"mimetype":"image/png",
"size":166680,
"key":"y5dz1osWQaC89JT8dUJG_m10.png",
"container":"m10-staging","isWriteable":true
}]
'''
title = models.CharField(blank=True, max_length=1024)
alt = models.CharField(blank=True, max_length=1024)
url = models.CharField(max_length=1024)
filename = models.CharField(max_length=1024)
mimetype = models.CharField(max_length=64)
caption = models.TextField(blank=True, default='')
def image_instances(self):
return [
image_instance for image_instance in
self.imageinstance_set.enabled().prefetch_related('content_object')
]
class ImageInstance(EnabledMixin, OrderingMixin):
'''Content for Image'''
# enabled
# order
image = models.ForeignKey('Image')
content_type = models.ForeignKey(
'contenttypes.ContentType',
limit_choices_to={'model__in': CONTENT_MODELS},
)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
is_icon = models.BooleanField(default=False)
is_listing = models.BooleanField(default=False)
_caption = models.TextField('caption', blank=True, default='')
@property
def caption(self):
if self._caption:
return self._caption
return self.image.caption
| # -*- coding: utf-8 -*-
from django.db import models
from django.contrib.contenttypes import generic
from entropy.base import EnabledMixin, OrderingMixin
from .settings import CONTENT_MODELS
class Image(models.Model):
'''
Image URLs that reference an external source; such as FilePicker / S3
[{
"url":"https://www.filepicker.io/api/file/3d6OxllbQi2bfkLhGSrg",
"filename":"m10.png",
"mimetype":"image/png",
"size":166680,
"key":"y5dz1osWQaC89JT8dUJG_m10.png",
"container":"m10-staging","isWriteable":true
}]
'''
title = models.CharField(blank=True, max_length=1024)
alt = models.CharField(blank=True, max_length=1024)
url = models.CharField(max_length=1024)
filename = models.CharField(max_length=1024)
mimetype = models.CharField(max_length=64)
caption = models.TextField(blank=True, default='')
def image_instances(self):
return [
image_instance for image_instance in
self.imageinstance_set.enabled().prefetch_related('content_object')
]
class ImageInstance(EnabledMixin, OrderingMixin):
'''Content for Image'''
# enabled
# order
image = models.ForeignKey('Image')
content_type = models.ForeignKey(
'contenttypes.ContentType',
limit_choices_to={'model__in': CONTENT_MODELS},
)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
is_icon = models.BooleanField(default=False)
is_listing = models.BooleanField(default=False)
_caption = models.TextField('caption', blank=True, default='')
@property
def caption(self):
if self._caption:
return self._caption
return self.image.caption
| bsd-3-clause | Python |
2b0fc1690da9de9d20901531979e760edd9ec023 | Fix cuda.test() | jriehl/numba,stonebig/numba,stefanseefeld/numba,stuartarchibald/numba,pombredanne/numba,seibert/numba,gmarkall/numba,pombredanne/numba,IntelLabs/numba,stonebig/numba,cpcloud/numba,stonebig/numba,seibert/numba,jriehl/numba,numba/numba,numba/numba,pitrou/numba,seibert/numba,pitrou/numba,sklam/numba,pombredanne/numba,jriehl/numba,sklam/numba,stuartarchibald/numba,numba/numba,IntelLabs/numba,sklam/numba,pitrou/numba,sklam/numba,pitrou/numba,stefanseefeld/numba,sklam/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,seibert/numba,jriehl/numba,pombredanne/numba,cpcloud/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,stonebig/numba,stefanseefeld/numba,numba/numba,IntelLabs/numba,jriehl/numba,stuartarchibald/numba,pitrou/numba,pombredanne/numba,cpcloud/numba,gmarkall/numba,numba/numba,cpcloud/numba,stefanseefeld/numba,gmarkall/numba,gmarkall/numba,stefanseefeld/numba,stuartarchibald/numba,stonebig/numba | numba/cuda/__init__.py | numba/cuda/__init__.py | from __future__ import print_function, absolute_import, division
from numba import config
import numba.testing
if config.ENABLE_CUDASIM:
from .simulator_init import *
else:
from .device_init import *
from .device_init import _auto_device
def test(*args, **kwargs):
if not is_available():
raise cuda_error()
return numba.testing.test("numba.cuda.tests", *args, **kwargs)
| from __future__ import print_function, absolute_import, division
from numba import config
if config.ENABLE_CUDASIM:
from .simulator_init import *
else:
from .device_init import *
from .device_init import _auto_device
def test():
if not is_available():
raise cuda_error()
from .tests.cudapy.runtests import test as test_cudapy
from .tests.cudadrv.runtests import test as test_cudadrv
testseq = [("cudadrv", test_cudadrv),
("cudapy", test_cudapy)]
for name, udt in testseq:
print("Running", name)
if not udt():
print("Test failed", name)
return False
return True
| bsd-2-clause | Python |
20a8b81e1e73417c4a5efed2d65c1819adb41391 | Make the API in glyphslib.py a little more useful. | googlei18n/glyphsLib,googlefonts/glyphsLib | glyphslib.py | glyphslib.py | #!/usr/bin/python
__all__ = [
"load_to_rfonts", "build_instances", "load", "loads",
]
import json
import sys
from parser import Parser
from casting import cast_data, cast_noto_data
from torf import to_robofab
def load(fp, dict_type=dict):
"""Read a .glyphs file. 'fp' should be (readable) file object.
Return the unpacked root object (which usually is a dictionary).
"""
return loads(fp.read(), dict_type=dict_type)
def loads(value, dict_type=dict):
"""Read a .glyphs file from a bytes object.
Return the unpacked root object (which usually is a dictionary).
"""
p = Parser(dict_type=dict_type)
print '>>> Parsing .glyphs file'
data = p.parse(value)
print '>>> Casting parsed values'
cast_data(data)
cast_noto_data(data)
return data
def load_to_rfonts(filename, italic=False, include_instances=False):
"""Load an unpacked .glyphs object to a RoboFab RFont."""
data = load(open(filename, 'rb'))
print '>>> Loading to RFonts'
return to_robofab(data, italic=italic, include_instances=include_instances)
def save_ufo(font):
"""Save an RFont as a UFO."""
if font.path:
print '>>> Compiling %s' % font.path
font.save()
else:
ofile = font.info.postscriptFullName + '.ufo'
print '>>> Compiling %s' % ofile
font.save(ofile)
def save_otf(font):
"""Save an RFont as an OTF, using ufo2fdk."""
from ufo2fdk import OTFCompiler
ofile = font.info.postscriptFullName + '.otf'
print '>>> Compiling ' + ofile
compiler = OTFCompiler()
reports = compiler.compile(font, ofile)
print reports['makeotf']
def save_ttf(font):
"""Save an RFont as a TTF, using the Roboto toolchain."""
from fontbuild.convertCurves import glyphCurvesToQuadratic
from fontbuild.outlineTTF import OutlineTTFCompiler
ofile = font.info.postscriptFullName + '.ttf'
print '>>> Compiling %s' % ofile
for glyph in font:
glyphCurvesToQuadratic(glyph)
compiler = OutlineTTFCompiler(font, ofile)
compiler.compile()
def build_master_files(filename, italic=False):
"""Generate UFOs from the masters defined in a .glyphs file."""
for f in load_to_rfonts(filename, italic):
save_ufo(f)
def build_instance_files(filename, italic=False):
"""Generate UFOs from the instances defined in a .glyphs file."""
from interpolation import build_instances
masters, instance_data = load_to_rfonts(filename, italic, True)
for f in build_instances(masters, instance_data, italic):
save_ufo(f)
def main(argv):
filename = sys.argv[1]
build_instance_files(filename, 'Italic' in filename)
if __name__ == '__main__':
main(sys.argv)
| #!/usr/bin/python
__all__ = [
"load_to_rfonts", "build_instances", "load", "loads",
]
import json
import sys
from fontbuild.convertCurves import glyphCurvesToQuadratic
from fontbuild.outlineTTF import OutlineTTFCompiler
from parser import Parser
from casting import cast_data, cast_noto_data
from interpolation import build_instances
from torf import to_robofab
def load(fp, dict_type=dict):
"""Read a .glyphs file. 'fp' should be (readable) file object.
Return the unpacked root object (which usually is a dictionary).
"""
return loads(fp.read(), dict_type=dict_type)
def loads(value, dict_type=dict):
"""Read a .glyphs file from a bytes object.
Return the unpacked root object (which usually is a dictionary).
"""
p = Parser(dict_type=dict_type)
print '>>> Parsing .glyphs file'
data = p.parse(value)
print '>>> Casting parsed values'
cast_data(data)
cast_noto_data(data)
return data
def load_to_rfonts(filename, italic):
"""Load an unpacked .glyphs object to a RoboFab RFont."""
data = load(open(filename, 'rb'))
print '>>> Loading to RFonts'
return to_robofab(data, italic=italic, include_instances=True)
#return to_robofab(data, debug=True)
def save_ufo(font):
"""Save an RFont as a UFO."""
if font.path:
print '>>> Compiling %s' % font.path
font.save()
else:
ofile = font.info.postscriptFullName + '.ufo'
print '>>> Compiling %s' % ofile
font.save(ofile)
def save_ttf(font):
"""Save an RFont as a TTF."""
ofile = font.info.postscriptFullName + '.ttf'
print '>>> Compiling %s' % ofile
for glyph in font:
glyphCurvesToQuadratic(glyph)
compiler = OutlineTTFCompiler(font, ofile)
compiler.compile()
def main(argv):
#print json.dumps(load(open(sys.argv[1], 'rb')), indent=2, sort_keys=True)
filename = sys.argv[1]
italic = 'Italic' in filename
masters, instance_data = load_to_rfonts(filename, italic)
instances = build_instances(masters, instance_data, italic)
for f in instances:
save_ufo(f)
save_ttf(f)
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | Python |
8eba46a73280c290aa03f5282da2b423facb9e6b | Add test for "archive list" | basak/glacier-cli,mhubig/glacier-cli,basak/glacier-cli,mhubig/glacier-cli | glacier_test.py | glacier_test.py | #!/usr/bin/env python
# Copyright (c) 2013 Robie Basak
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from __future__ import print_function
import sys
import unittest
from mock import Mock, patch, sentinel
import glacier
class TestCase(unittest.TestCase):
def init_app(self, args):
self.connection = Mock()
self.cache = Mock()
self.app = glacier.App(
args=args,
connection=self.connection,
cache=self.cache)
def run_app(self, args):
self.init_app(args)
self.app.main()
def test_vault_list(self):
self.init_app(['vault', 'list'])
mock_vault = Mock()
mock_vault.name = sentinel.vault_name
self.connection.list_vaults.return_value = [mock_vault]
print_mock = Mock()
with patch('__builtin__.print', print_mock):
self.app.main()
print_mock.assert_called_once_with(sentinel.vault_name, sep=u'\n')
def test_vault_create(self):
self.run_app(['vault', 'create', 'vault_name'])
self.connection.create_vault.assert_called_once_with('vault_name')
def test_archive_list(self):
self.init_app(['archive', 'list', 'vault_name'])
archive_list = [sentinel.archive_one, sentinel.archive_two]
self.cache.get_archive_list.return_value = archive_list
print_mock = Mock()
with patch('__builtin__.print', print_mock):
self.app.main()
print_mock.assert_called_once_with(*archive_list, sep="\n")
def test_stdin_upload(self):
self.run_app(['archive', 'upload', 'vault_name', '-'])
self.connection.get_vault.assert_called_once_with('vault_name')
vault = self.connection.get_vault.return_value
vault.create_archive_from_file.assert_called_once_with(
file_obj=sys.stdin, description='<stdin>')
| #!/usr/bin/env python
# Copyright (c) 2013 Robie Basak
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from __future__ import print_function
import sys
import unittest
from mock import Mock, patch, sentinel
import glacier
class TestCase(unittest.TestCase):
def init_app(self, args):
self.connection = Mock()
self.cache = Mock()
self.app = glacier.App(
args=args,
connection=self.connection,
cache=self.cache)
def run_app(self, args):
self.init_app(args)
self.app.main()
def test_vault_list(self):
self.init_app(['vault', 'list'])
mock_vault = Mock()
mock_vault.name = sentinel.vault_name
self.connection.list_vaults.return_value = [mock_vault]
print_mock = Mock()
with patch('__builtin__.print', print_mock):
self.app.main()
print_mock.assert_called_once_with(sentinel.vault_name, sep=u'\n')
def test_vault_create(self):
self.run_app(['vault', 'create', 'vault_name'])
self.connection.create_vault.assert_called_once_with('vault_name')
def test_stdin_upload(self):
self.run_app(['archive', 'upload', 'vault_name', '-'])
self.connection.get_vault.assert_called_once_with('vault_name')
vault = self.connection.get_vault.return_value
vault.create_archive_from_file.assert_called_once_with(
file_obj=sys.stdin, description='<stdin>')
| mit | Python |
37340d0119254399517920dd9a85d295aee16cab | add newline at end | analysiscenter/dataset | dataset/models/tf/__init__.py | dataset/models/tf/__init__.py | """ Contains tensorflow models and functions """
from .base import TFModel
from .vgg import VGG, VGG16, VGG19, VGG7
from .linknet import LinkNet
from .unet import UNet
from .vnet import VNet
from .fcn import FCN, FCN32, FCN16, FCN8
from .resnet import ResNet, ResNet18, ResNet34, ResNet50, ResNet101, ResNet152
from .inception_v1 import Inception_v1
from .inception_v3 import Inception_v3
from .inception_v4 import Inception_v4
from .squeezenet import SqueezeNet
from .mobilenet import MobileNet
from .densenet import DenseNet, DenseNet121, DenseNet169, DenseNet201, DenseNet264
from .faster_rcnn import FasterRCNN
from .resattention import ResNetAttention, ResNetAttention56, ResNetAttention92
from .densenet_fc import DenseNetFC, DenseNetFC56, DenseNetFC67, DenseNetFC103
| """ Contains tensorflow models and functions """
from .base import TFModel
from .vgg import VGG, VGG16, VGG19, VGG7
from .linknet import LinkNet
from .unet import UNet
from .vnet import VNet
from .fcn import FCN, FCN32, FCN16, FCN8
from .resnet import ResNet, ResNet18, ResNet34, ResNet50, ResNet101, ResNet152
from .inception_v1 import Inception_v1
from .inception_v3 import Inception_v3
from .inception_v4 import Inception_v4
from .squeezenet import SqueezeNet
from .mobilenet import MobileNet
from .densenet import DenseNet, DenseNet121, DenseNet169, DenseNet201, DenseNet264
from .faster_rcnn import FasterRCNN
from .resattention import ResNetAttention, ResNetAttention56, ResNetAttention92
from .densenet_fc import DenseNetFC, DenseNetFC56, DenseNetFC67, DenseNetFC103 | apache-2.0 | Python |
021af9e73e6960347e47cf783c2d90d9e399a8c5 | bump version | charlievieth/GoSubl,charlievieth/GoSubl | gosubl/about.py | gosubl/about.py | import re
import sublime
# GoSublime Globals
ANN = 'a18.04.15-7'
VERSION = 'r18.04.15-7'
VERSION_PAT = re.compile(r'\d{2}[.]\d{2}[.]\d{2}-\d+', re.IGNORECASE)
DEFAULT_GO_VERSION = 'go?'
GO_VERSION_OUTPUT_PAT = re.compile(r'go\s+version\s+(\S+(?:\s+[+]\w+|\s+\([^)]+)?)', re.IGNORECASE)
GO_VERSION_NORM_PAT = re.compile(r'[^\w.+-]+', re.IGNORECASE)
PLATFORM = '%s-%s' % (sublime.platform(), sublime.arch())
MARGO_EXE_PREFIX = 'gosublime.margo_'
MARGO_EXE_SUFFIX = '.exe'
MARGO_EXE = MARGO_EXE_PREFIX+VERSION+'_'+DEFAULT_GO_VERSION+MARGO_EXE_SUFFIX
MARGO_EXE_PAT = re.compile(r'^gosublime\.margo.*\.exe$', re.IGNORECASE)
# CEV: Dev Globals
FORCE_INSTALL = False
| import re
import sublime
# GoSublime Globals
ANN = 'a18.04.15-6'
VERSION = 'r18.04.15-6'
VERSION_PAT = re.compile(r'\d{2}[.]\d{2}[.]\d{2}-\d+', re.IGNORECASE)
DEFAULT_GO_VERSION = 'go?'
GO_VERSION_OUTPUT_PAT = re.compile(r'go\s+version\s+(\S+(?:\s+[+]\w+|\s+\([^)]+)?)', re.IGNORECASE)
GO_VERSION_NORM_PAT = re.compile(r'[^\w.+-]+', re.IGNORECASE)
PLATFORM = '%s-%s' % (sublime.platform(), sublime.arch())
MARGO_EXE_PREFIX = 'gosublime.margo_'
MARGO_EXE_SUFFIX = '.exe'
MARGO_EXE = MARGO_EXE_PREFIX+VERSION+'_'+DEFAULT_GO_VERSION+MARGO_EXE_SUFFIX
MARGO_EXE_PAT = re.compile(r'^gosublime\.margo.*\.exe$', re.IGNORECASE)
# CEV: Dev Globals
FORCE_INSTALL = False
| mit | Python |
86fd9ad2035ab9ce5b7d3e6790b19c1d8e9bf756 | revert changed word | ywryoo/GP2 | gp2-core/app.py | gp2-core/app.py | # -*- coding: utf-8 -*-
"""
app.py
flask application that serves webpages
---
Written by Yangwook Ryoo, 2017
MIT License: see LICENSE at root directory
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def route_root():
return 'Hello, World!'
| # -*- coding: utf-8 -*-
"""
app.py
flask application that serves webpages
---
Written by Yangwook Ryoo, 2017
MIT License: see LICENSE at root directory
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def route_root():
return 'Hello, WWWWWWWWorld!'
| mit | Python |
e33307999d804cb65c60459b74d76a10511f1e63 | Fix an indent in Day 14 | icydoge/AdventOfCodeSolutions | day14.py | day14.py | #Advent of Code December 14
#Written by icydoge - icydoge AT gmail dot com
def reindeer_fly(reindeer, time):
#Give the distance the reindeer covered until that time
cycles = time / (reindeer[2] + reindeer[3])
remainder = time % (reindeer[2] + reindeer[3])
distance = cycles * reindeer[1] * reindeer[2] #Distance travelled in full fly-break cycles
#Distance travelled in the remainder time
if remainder <= reindeer[2]:
distance += remainder * reindeer[1]
else:
distance += reindeer[2] * reindeer[1]
return distance
def look_up_reindeer(name, reindeers):
#Return the index for the reindeer with that name
for reindeer in reindeers:
if reindeer[0] == name:
return reindeers.index(reindeer)
with open('reindeer.txt') as f:
content = f.read().splitlines()
reindeers = []
for reindeer in content:
line = reindeer.split(' ')
reindeers.append([line[0], int(line[3]), int(line[6]), int(line[13])])
#Part One
fly_time = 2503
best_distance = -1
for reindeer in reindeers:
if reindeer_fly(reindeer, fly_time) > best_distance:
best_distance = reindeer_fly(reindeer, fly_time)
part_one_answer = best_distance
print "Distance the winning reindeer travelled (Part One):", part_one_answer
#Part Two
for reindeer in reindeers:
reindeer.append(0) #points
for t in range(1, fly_time+1):
best_distance = -1
best_reindeers = []
for reindeer in reindeers:
flying = reindeer_fly(reindeer, t)
if flying > best_distance: #Only one reindeer in the lead
best_distance = flying
best_reindeers = [reindeer[0]]
elif flying == best_distance: #multiple reindeers in the lead
best_reindeers.append(reindeer[0])
for i in best_reindeers:
reindeers[look_up_reindeer(i,reindeers)][4] += 1 #Add point to reindeer(s) in the lead.
#Find the reindeer with the highest point
highest_point = -1
for reindeer in reindeers:
if reindeer[4] > highest_point:
highest_point = reindeer[4]
part_two_answer = highest_point
print "Points of the winning reindeer (Part Two):", part_two_answer
| #Advent of Code December 14
#Written by icydoge - icydoge AT gmail dot com
def reindeer_fly(reindeer, time):
#Give the distance the reindeer covered until that time
cycles = time / (reindeer[2] + reindeer[3])
remainder = time % (reindeer[2] + reindeer[3])
distance = cycles * reindeer[1] * reindeer[2] #Distance travelled in full fly-break cycles
#Distance travelled in the remainder time
if remainder <= reindeer[2]:
distance += remainder * reindeer[1]
else:
distance += reindeer[2] * reindeer[1]
return distance
def look_up_reindeer(name, reindeers):
#Return the index for the reindeer with that name
for reindeer in reindeers:
if reindeer[0] == name:
return reindeers.index(reindeer)
with open('reindeer.txt') as f:
content = f.read().splitlines()
reindeers = []
for reindeer in content:
line = reindeer.split(' ')
reindeers.append([line[0], int(line[3]), int(line[6]), int(line[13])])
#Part One
fly_time = 2503
best_distance = -1
for reindeer in reindeers:
if reindeer_fly(reindeer, fly_time) > best_distance:
best_distance = reindeer_fly(reindeer, fly_time)
part_one_answer = best_distance
print "Distance the winning reindeer travelled (Part One):", part_one_answer
#Part Two
for reindeer in reindeers:
reindeer.append(0) #points
for t in range(1, fly_time+1):
best_distance = -1
best_reindeers = []
for reindeer in reindeers:
flying = reindeer_fly(reindeer, t)
if flying > best_distance: #Only one reindeer in the lead
best_distance = flying
best_reindeers = [reindeer[0]]
elif flying == best_distance: #multiple reindeers in the lead
best_reindeers.append(reindeer[0])
for i in best_reindeers:
reindeers[look_up_reindeer(i,reindeers)][4] += 1 #Add point to reindeer(s) in the lead.
#Find the reindeer with the highest point
highest_point = -1
for reindeer in reindeers:
if reindeer[4] > highest_point:
highest_point = reindeer[4]
part_two_answer = highest_point
print "Points of the winning reindeer (Part Two):", part_two_answer
| mit | Python |
f7090bfc325f7f343b0dfdf29c943d33a61be092 | Adjust setup file to include library | CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI | openVulnQuery/setup.py | openVulnQuery/setup.py | from setuptools import setup, find_packages
setup(name='OpenVulnQuery',
version='1.25',
description='A python-based module(s) to query the Cisco PSIRT openVuln API.',
url='https://github.com/CiscoPSIRT/openVulnAPI/tree/master/openVulnQuery',
author='Bradley Korabik, Parash Ghimire',
author_email='bkorabik@cisco.com, pghimire@cisco.com',
license='The MIT License (MIT)',
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts':
['openVulnQuery=openVulnQuery.main:main']
},
install_requires=[
'argparse>=1.4.0',
'requests>=2.10.0'
],
zip_safe=False,)
| from setuptools import setup
setup(name='OpenVulnQuery',
version='1.25',
description='A python-based module(s) to query the Cisco PSIRT openVuln API.',
url='https://github.com/CiscoPSIRT/openVulnAPI/tree/master/openVulnQuery',
author='Bradley Korabik, Parash Ghimire',
author_email='bkorabik@cisco.com, pghimire@cisco.com',
license='The MIT License (MIT)',
packages=['openVulnQuery'],
entry_points={
'console_scripts':
['openVulnQuery=openVulnQuery.main:main']
},
install_requires=[
'argparse>=1.4.0',
'requests>=2.10.0'
],
zip_safe=False,)
| mit | Python |
4bd3f2167b342a1d4a31d85923a07bc0cd2e203e | Update setup.py | CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI,CiscoPSIRT/openVulnAPI | openVulnQuery/setup.py | openVulnQuery/setup.py | from setuptools import setup, find_packages
setup(name='OpenVulnQuery',
version='1.26',
description='A python-based module(s) to query the Cisco PSIRT openVuln API.',
url='https://github.com/CiscoPSIRT/openVulnAPI/tree/master/openVulnQuery',
author='Bradley Korabik, Parash Ghimire, Omar Santos',
author_email='bkorabik@cisco.com, pghimire@cisco.com, os@cisco.com',
license='The MIT License (MIT)',
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts':
['openVulnQuery=openVulnQuery.main:main']
},
install_requires=[
'argparse>=1.4.0',
'requests>=2.10.0'
],
zip_safe=False,)
| from setuptools import setup, find_packages
setup(name='OpenVulnQuery',
version='1.25',
description='A python-based module(s) to query the Cisco PSIRT openVuln API.',
url='https://github.com/CiscoPSIRT/openVulnAPI/tree/master/openVulnQuery',
author='Bradley Korabik, Parash Ghimire',
author_email='bkorabik@cisco.com, pghimire@cisco.com',
license='The MIT License (MIT)',
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts':
['openVulnQuery=openVulnQuery.main:main']
},
install_requires=[
'argparse>=1.4.0',
'requests>=2.10.0'
],
zip_safe=False,)
| mit | Python |
e5c92700533ea021807971a4b2276d405e621160 | make UserUtilityModel extract_features pass | yw374cornell/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,yw374cornell/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,joshzarrabi/e-mission-server,e-mission/e-mission-server | CFC_DataCollector/recommender/user_utility_model.py | CFC_DataCollector/recommender/user_utility_model.py | # Phase 1: Build a model for User Utility Function (per Vij, Shankari)
# First, for each trip, we must obtain alternatives through some method
# (currently Google Maps API), alongside the actual trips which were taken.
# Once we have these alternatives, we can extract the features from each of the
# possible trips. Now, we utilize logistic regression with either the simple
# model given by Vij and Shankari, or utilizing their more complicated method.
# At the end, we want user-specific weights for their trip utility function.
# Phase 2: Choose optimal future trips
# Once we have determined a utility function for the user, we can evaluate
# future trips through this metric. This should look similar to our Phase 1,
# where we can obtain a set of alternative routes, and then choose an optimal
# trip based on the user's utility function. However, the key difference is that
# these trips are no longer static--in Phase 1, we can augment each trip with a
# set of alternative routes. In Phase 2, this learning is time-dependent (think traffic).
from get_database import get_utility_model_db
from sklearn import linear_model as lm
class UserUtilityModel(object):
# return user-specific weights for a given user based on logistic regression on
# their past trips and potential alternatives
def __init__(self, user_id, trips, alternatives): # assuming alternatives a list of lists
# TODO: Using list() here removes the performance benefits of an iterator.
# Consider removing/relaxing the assert
print len(list(trips)), len(alternatives)
assert(len(list(trips)) == len(alternatives))
self.user_id = user_id
self.regression = lm.LogisticRegression()
self.update(trips, alternatives)
# update existing model using existing trips
# for now, just create a new model and return it
def update(self, trips = [], alternatives = []):
assert(len(list(trips)) == len(alternatives))
for i in range(len(alternatives)):
trip_features = self.extract_features(trips[i])
alt_features = [self.extract_features(alt) for alt in alternatives[i]]
target_vector = [1] + ([0] * len(alternatives))
print trip_features, alt_features
# TODO: ValueError: X and y have incompatible shapes.
# X has 1 samples, but y has 23.
# self.regression.fit(trip_features + alt_features, target_vector)
# TODO: AttributeError: 'LogisticRegression' object has no attribute 'coef_'
# self.coefficients = self.regression.coef_
# calculate the utility of trip using the model
def predict_utility(self, trip):
trip_features = extract_features(trip)
utility = sum(f * c for f, c in zip(trip_features, self.coefficients))
return utility
# find model params from DB and construct a model using these params
@staticmethod
def find_from_db(user_id):
db_model = get_utility_model_db().find_one({'user_id': user_id})
# contruct and return model using params from DB
# store the object with the correct extracted features in the database
# must be filled out in subclass
def store_in_db(self):
pass
# return an array of feature values for the given trip
# must be filled out in subclass
def extract_features(self, trip):
pass
| # Phase 1: Build a model for User Utility Function (per Vij, Shankari)
# First, for each trip, we must obtain alternatives through some method
# (currently Google Maps API), alongside the actual trips which were taken.
# Once we have these alternatives, we can extract the features from each of the
# possible trips. Now, we utilize logistic regression with either the simple
# model given by Vij and Shankari, or utilizing their more complicated method.
# At the end, we want user-specific weights for their trip utility function.
# Phase 2: Choose optimal future trips
# Once we have determined a utility function for the user, we can evaluate
# future trips through this metric. This should look similar to our Phase 1,
# where we can obtain a set of alternative routes, and then choose an optimal
# trip based on the user's utility function. However, the key difference is that
# these trips are no longer static--in Phase 1, we can augment each trip with a
# set of alternative routes. In Phase 2, this learning is time-dependent (think traffic).
from get_database import get_utility_model_db
from sklearn import linear_model as lm
class UserUtilityModel(object):
# return user-specific weights for a given user based on logistic regression on
# their past trips and potential alternatives
def __init__(self, user_id, trips, alternatives): # assuming alternatives a list of lists
# TODO: Using list() here removes the performance benefits of an iterator.
# Consider removing/relaxing the assert
print len(list(trips)), len(alternatives)
assert(len(list(trips)) == len(alternatives))
self.user_id = user_id
self.regression = lm.LogisticRegression()
self.update(trips, alternatives)
# update existing model using existing trips
# for now, just create a new model and return it
def update(self, trips = [], alternatives = []):
assert(len(list(trips)) == len(alternatives))
for i in range(len(alternatives)):
trip_features = self.extract_features(trips[i])
alt_features = [self.extract_features(alt) for alt in alternatives[i]]
target_vector = [1] + ([0] * len(alternatives))
print trip_features, alt_features
# TODO: ValueError: X and y have incompatible shapes.
# X has 1 samples, but y has 23.
# self.regression.fit(trip_features + alt_features, target_vector)
# TODO: AttributeError: 'LogisticRegression' object has no attribute 'coef_'
# self.coefficients = self.regression.coef_
# calculate the utility of trip using the model
def predict_utility(self, trip):
trip_features = extract_features(trip)
utility = sum(f * c for f, c in zip(trip_features, self.coefficients))
return utility
# find model params from DB and construct a model using these params
@staticmethod
def find_from_db(user_id):
db_model = get_utility_model_db().find_one({'user_id': user_id})
# contruct and return model using params from DB
# store the object with the correct extracted features in the database
# must be filled out in subclass
def store_in_db(self):
pass
# return an array of feature values for the given trip
# must be filled out in subclass
def extract_features(self, trip):
# TODO: Change the pipeline test to use an concrete subclass instead of the
# abstract superclass so that we can go back to passing here
# pass
return list(range(5))
| bsd-3-clause | Python |
dacad103e6f02f1953c6d5a9837e347793d1c52a | Fix cache folder issue. | ggordan/GutterColor,ggordan/GutterColor | gutter_color.py | gutter_color.py | from .file import File
from sublime_plugin import EventListener
from sublime import load_settings
def plugin_loaded():
"""
If the folder exists, and has more than 5MB of icons in the cache, delete
it to clear all the icons then recreate it.
"""
from os.path import getsize, join, isfile, exists
from os import makedirs, listdir
from sublime import cache_path
from shutil import rmtree
# The icon cache path
icon_path = join(cache_path(), "GutterColor")
# The maximum amount of space to take up
limit = 5242880 # 5 MB
if exists(icon_path):
size = sum(getsize(join(icon_path, f)) for f in listdir(icon_path) if isfile(join(icon_path, f)))
if size > limit: rmtree(icon_path)
if not exists(icon_path): makedirs(icon_path)
class GutterColorEventListener(EventListener):
"""Scan the view when it gains focus, and when it is saved."""
def on_activated_async(self, view):
"""Scan file when it gets focus"""
if syntax(view) in settings().get('supported_syntax'):
File(view)
def on_modified(self, view):
"""Scan file when it is modified"""
if syntax(view) in settings().get('supported_syntax'):
File(view, 'update')
def on_pre_save_async(self, view):
"""Scan file before it is saved"""
if syntax(view) in settings().get('supported_syntax'):
File(view, 'update')
def settings():
"""Shortcut to the settings"""
return load_settings("GutterColor.sublime-settings")
def syntax(view):
"""Return the view syntax"""
return view.settings().get('syntax').split('/')[-1].split('.')[0].lower()
| from .file import File
from sublime_plugin import EventListener
from sublime import load_settings
def plugin_loaded():
"""
If the folder exists, and has more than 5MB of icons in the cache, delete
it to clear all the icons then recreate it.
"""
from os.path import getsize, join, isfile, exists
from os import makedirs, listdir
from sublime import cache_path
from shutil import rmtree
# The icon cache path
icon_path = join(cache_path(), "GutterColor")
# The maximum amount of space to take up
limit = 5242880 # 5 MB
# Get the size of the cache folder
size = sum(getsize(join(icon_path, f)) for f in listdir(icon_path) if isfile(join(icon_path, f)))
if size > limit: rmtree(icon_path)
if not exists(icon_path): makedirs(icon_path)
class GutterColorEventListener(EventListener):
"""Scan the view when it gains focus, and when it is saved."""
def on_activated_async(self, view):
"""Scan file when it gets focus"""
if syntax(view) in settings().get('supported_syntax'):
File(view)
def on_modified(self, view):
"""Scan file when it is modified"""
if syntax(view) in settings().get('supported_syntax'):
File(view, 'update')
def on_pre_save_async(self, view):
"""Scan file before it is saved"""
if syntax(view) in settings().get('supported_syntax'):
File(view, 'update')
def settings():
"""Shortcut to the settings"""
return load_settings("GutterColor.sublime-settings")
def syntax(view):
"""Return the view syntax"""
return view.settings().get('syntax').split('/')[-1].split('.')[0].lower()
| mit | Python |
b040c3783a179618d1e87d52df6d5799f782d5b1 | Bump version to 0.9.0+dev | python-hyper/h11 | h11/_version.py | h11/_version.py | # This file must be kept very simple, because it is consumed from several
# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
# We use a simple scheme:
# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
# where the +dev versions are never released into the wild, they're just what
# we stick into the VCS in between releases.
#
# This is compatible with PEP 440:
# http://legacy.python.org/dev/peps/pep-0440/
# via the use of the "local suffix" "+dev", which is disallowed on index
# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
# 1.0.0.)
__version__ = "0.9.0+dev"
| # This file must be kept very simple, because it is consumed from several
# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
# We use a simple scheme:
# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
# where the +dev versions are never released into the wild, they're just what
# we stick into the VCS in between releases.
#
# This is compatible with PEP 440:
# http://legacy.python.org/dev/peps/pep-0440/
# via the use of the "local suffix" "+dev", which is disallowed on index
# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
# 1.0.0.)
__version__ = "0.9.0"
| mit | Python |
fc42655e356c5f74fbfb86b28e59f6ecb4b601fb | fix None problem different version of 3.0 | osmanbaskaya/semeval14-task3,osmanbaskaya/semeval14-task3 | run/wn-baseline.py | run/wn-baseline.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Osman Baskaya"
"""
"""
import sys
from nltk.corpus import wordnet as wn
from itertools import product
import task3_utils
import numpy as np
from wn_utils import get_synsets_for_sents_tuple
from collections import defaultdict as dd
import nltk
test_f = sys.stdin
sentences = task3_utils.get_sentences(test_f)
metric_name = sys.argv[1]
if metric_name == "lch":
metric = wn.lch_similarity
elif metric_name == "lin":
metric = wn.lin_similarity
elif metric_name == "jcn":
metric = wn.jcn_similarity
elif metric_name == "path":
metric = wn.path_similarity
elif metric_name == "wup":
metric = wn.wup_similarity
elif metric_name == "res":
metric = wn.res_similarity
else:
sys.stderr.write("No such similarity metric in WN module.\n")
exit(-1)
sys.stderr.write("Metric: {0}\n".format(metric.func_name))
def score1(closest):
score = 0
dists = [dist for t1, (t2, (dist, s1, s2)) in closest if dist is not None]
if len(closest) != 0:
score = sum(dists) / float(len(dists))
return score
syns_sents = get_synsets_for_sents_tuple(sentences)
unav = set(['a', 's', 'r'])
IC = nltk.corpus.wordnet_ic.ic('ic-brown.dat')
scores = []
for i, (s1, s2) in enumerate(syns_sents):
#print sentences[i][0]
#print sentences[i][1]
longer, shorter = (s1, s2) if len(s1) >= len(s2) else (s2, s1)
d = dd(dict)
for syns1, syns2 in product(shorter, longer):
#print syns1, syns2
for syn1, syn2 in product(syns1, syns2):
p1, p2 = syn1.pos, syn2.pos
if p1 == p2 and p1 not in unav and p2 not in unav:
sim = metric(syn1, syn2, IC)
d[syn1.offset][syn2.offset] = (sim, syn1, syn2)
#FIXME bu hatali olabilir
closest = [(t1, max(d[t1].iteritems(), key=lambda t: t[1][0])) for t1 in d]
score = score1(closest)
scores.append(score)
print >> sys.stderr, "{0} line processed".format(i+1)
#sys.stderr.write("{}/{} (miss/attempt).\n".format(miss, comparison))
scores = np.array(scores)
print '\n'.join(scores.astype(str))
| #! /usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Osman Baskaya"
"""
"""
import sys
from nltk.corpus import wordnet as wn
from itertools import product
import task3_utils
import numpy as np
from wn_utils import get_synsets_for_sents_tuple
from collections import defaultdict as dd
import nltk
test_f = sys.stdin
sentences = task3_utils.get_sentences(test_f)
metric_name = sys.argv[1]
if metric_name == "lch":
metric = wn.lch_similarity
elif metric_name == "lin":
metric = wn.lin_similarity
elif metric_name == "jcn":
metric = wn.jcn_similarity
elif metric_name == "path":
metric = wn.path_similarity
elif metric_name == "wup":
metric = wn.wup_similarity
elif metric_name == "res":
metric = wn.res_similarity
else:
sys.stderr.write("No such similarity metric in WN module.\n")
exit(-1)
sys.stderr.write("Metric: {0}\n".format(metric.func_name))
def score1(closest):
score = 0
dists = [dist for t1, (t2, (dist, s1, s2)) in closest]
if len(closest) != 0:
score = sum(dists) / float(len(dists))
return score
syns_sents = get_synsets_for_sents_tuple(sentences)
unav = set(['a', 's', 'r'])
IC = nltk.corpus.wordnet_ic.ic('ic-brown.dat')
scores = []
for i, (s1, s2) in enumerate(syns_sents):
#print sentences[i][0]
#print sentences[i][1]
longer, shorter = (s1, s2) if len(s1) >= len(s2) else (s2, s1)
d = dd(dict)
for syns1, syns2 in product(shorter, longer):
#print syns1, syns2
for syn1, syn2 in product(syns1, syns2):
p1, p2 = syn1.pos, syn2.pos
if p1 == p2 and p1 not in unav and p2 not in unav:
sim = metric(syn1, syn2, IC)
d[syn1.offset][syn2.offset] = (sim, syn1, syn2)
#FIXME bu hatali olabilir
closest = [(t1, max(d[t1].iteritems(), key=lambda t: t[1][0])) for t1 in d]
score = score1(closest)
scores.append(score)
print >> sys.stderr, "{0} line processed".format(i+1)
#sys.stderr.write("{}/{} (miss/attempt).\n".format(miss, comparison))
scores = np.array(scores)
print '\n'.join(scores.astype(str))
| mit | Python |
70a251ba27641e3c0425c659bb900e17f0f423dd | Enable initial user via service so that an event gets written | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps | scripts/create_initial_admin_user.py | scripts/create_initial_admin_user.py | #!/usr/bin/env python
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.services.user import creation_service as user_creation_service
from byceps.services.user import service as user_service
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address, password)
user_service.enable_user(user.id, user.id)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address, password):
try:
return user_creation_service \
.create_basic_user(screen_name, email_address, password)
except ValueError as e:
raise click.UsageError(e)
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
| #!/usr/bin/env python
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.user import creation_service as user_creation_service
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address, password)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address, password):
try:
user = user_creation_service \
.create_basic_user(screen_name, email_address, password)
except ValueError as e:
raise click.UsageError(e)
user.enabled = True
db.session.add(user)
db.session.commit()
return user
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
| bsd-3-clause | Python |
e524ea3db737ee92bb3ba486240dd60928781eaf | Fix tests. | iphoting/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,iphoting/healthchecks | hc/front/tests/test_add_pd.py | hc/front/tests/test_add_pd.py | from hc.api.models import Channel
from hc.test import BaseTestCase
class AddPdTestCase(BaseTestCase):
url = "/integrations/add_pd/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "incident management system")
def test_it_works(self):
# Integration key is 32 characters long
form = {"value": "12345678901234567890123456789012"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pd")
self.assertEqual(c.value, "12345678901234567890123456789012")
def test_it_trims_whitespace(self):
form = {"value": " 123456 "}
self.client.login(username="alice@example.org", password="password")
self.client.post(self.url, form)
c = Channel.objects.get()
self.assertEqual(c.value, "123456")
| from hc.api.models import Channel
from hc.test import BaseTestCase
class AddPdTestCase(BaseTestCase):
url = "/integrations/add_pd/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "incident management system")
def test_it_works(self):
# Integration key is 32 characters long
form = {"value": "12345678901234567890123456789012"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pd")
self.assertEqual(c.value, "123456")
def test_it_trims_whitespace(self):
form = {"value": " 123456 "}
self.client.login(username="alice@example.org", password="password")
self.client.post(self.url, form)
c = Channel.objects.get()
self.assertEqual(c.value, "123456")
| bsd-3-clause | Python |
65ae8fc33a1fa7297d3e68f7c67ca5c2678e81b7 | Set up Flask-User to provide user auth | interactomix/iis,interactomix/iis | app/__init__.py | app/__init__.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Configure user model for Flask-User
from app.models import User
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from app import views, models
| from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
from app import views, models
| agpl-3.0 | Python |
d0a6183b31b417b0ff11a1f74b1480b24fb558bb | Change encoding | openxc/openxc-python,openxc/openxc-python,openxc/openxc-python | openxc/formats/json.py | openxc/formats/json.py | """JSON formatting utilities."""
import json
from openxc.formats.base import VehicleMessageStreamer
class JsonStreamer(VehicleMessageStreamer):
SERIALIZED_COMMAND_TERMINATOR = b"\x00"
def parse_next_message(self):
parsed_message = None
remainder = self.message_buffer
message = ""
if self.SERIALIZED_COMMAND_TERMINATOR in self.message_buffer:
message, _, remainder = self.message_buffer.partition(
self.SERIALIZED_COMMAND_TERMINATOR)
try:
parsed_message = JsonFormatter.deserialize(message)
if not isinstance(parsed_message, dict):
raise ValueError()
except ValueError:
pass
self.message_buffer = remainder
return parsed_message
def serialize_for_stream(self, message):
return JsonFormatter.serialize(
message) + self.SERIALIZED_COMMAND_TERMINATOR
class JsonFormatter(object):
@classmethod
def deserialize(cls, message):
return json.loads(message)
@classmethod
def serialize(cls, data):
return json.dumps(data).encode("utf8")
@classmethod
def _validate(cls, message):
"""Confirm the validitiy of a given dict as an OpenXC message.
Returns:
``True`` if the message contains at least a ``name`` and ``value``.
"""
valid = False
if(('name' in message and 'value' in message) or
('id' in message and 'data' in message)):
valid = True
return valid
| """JSON formatting utilities."""
import json
from openxc.formats.base import VehicleMessageStreamer
class JsonStreamer(VehicleMessageStreamer):
SERIALIZED_COMMAND_TERMINATOR = b"\x00"
def parse_next_message(self):
parsed_message = None
remainder = self.message_buffer
message = ""
if self.SERIALIZED_COMMAND_TERMINATOR in self.message_buffer:
message, _, remainder = self.message_buffer.partition(
self.SERIALIZED_COMMAND_TERMINATOR)
try:
parsed_message = JsonFormatter.deserialize(message)
if not isinstance(parsed_message, dict):
raise ValueError()
except ValueError:
pass
self.message_buffer = remainder
return parsed_message
def serialize_for_stream(self, message):
return JsonFormatter.serialize(
message) + self.SERIALIZED_COMMAND_TERMINATOR
class JsonFormatter(object):
@classmethod
def deserialize(cls, message):
return json.loads(message.decode("utf8"))
@classmethod
def serialize(cls, data):
return json.dumps(data).encode("utf8")
@classmethod
def _validate(cls, message):
"""Confirm the validitiy of a given dict as an OpenXC message.
Returns:
``True`` if the message contains at least a ``name`` and ``value``.
"""
valid = False
if(('name' in message and 'value' in message) or
('id' in message and 'data' in message)):
valid = True
return valid
| bsd-3-clause | Python |
1e8d5cd1fc76527c650d2e47794ef3af3992dea7 | Fix broken test 😑 | UrLab/DocHub,UrLab/DocHub,UrLab/DocHub,UrLab/beta402,UrLab/beta402,UrLab/beta402,UrLab/DocHub | users/tests/auth_backend_authenticate_test.py | users/tests/auth_backend_authenticate_test.py | from django.test.client import RequestFactory
import pytest
import responses
from users.authBackend import NetidBackend
from users.models import User
pytestmark = pytest.mark.django_db
@responses.activate
def test_auth():
sid = "this-is-a-sid"
uid = "this-is-a-uid-and-is-longer"
xml = open("users/tests/xml-fixtures/nimarcha.xml").read()
responses.add(
responses.GET,
f"https://www.ulb.ac.be/commons/check?_type=normal&_sid={sid}&_uid={uid}",
body=xml,
status=200,
match_querystring=True,
)
rf = RequestFactory()
user = NetidBackend().authenticate(rf.get("/does-not-matter"), sid=sid, uid=uid)
assert len(responses.calls) == 1
assert isinstance(user, User)
assert user.netid == "nimarcha"
assert User.objects.filter(netid="nimarcha").count() == 1
assert open(f"/tmp/netids/{sid}__{uid}").read() == xml
assert user.inscription_set.count() == 4
| import pytest
import responses
from users.authBackend import NetidBackend
from users.models import User
pytestmark = pytest.mark.django_db
@responses.activate
def test_auth():
sid = "this-is-a-sid"
uid = 'this-is-a-uid-and-is-longer'
xml = open("users/tests/xml-fixtures/nimarcha.xml").read()
responses.add(
responses.GET,
f'https://www.ulb.ac.be/commons/check?_type=normal&_sid={sid}&_uid={uid}',
body=xml, status=200,
match_querystring=True
)
user = NetidBackend().authenticate(sid=sid, uid=uid)
assert len(responses.calls) == 1
assert isinstance(user, User)
assert user.netid == 'nimarcha'
assert User.objects.filter(netid='nimarcha').count() == 1
assert open(f"/tmp/netids/{sid}__{uid}").read() == xml
assert user.inscription_set.count() == 4
| agpl-3.0 | Python |
32f409756af68e4500c1de310c24c2636366e133 | Remove from flask_alembic | plenario/plenario,plenario/plenario,plenario/plenario | app/__init__.py | app/__init__.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
alembic = Alembic()
alembic.init_app(app)
from app import views, models | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
alembic = Alembic()
alembic.init_app(app)
from app import views, models | mit | Python |
b53bbce8cfffbcf926f5b9951cb89be9cdb8b276 | update debugger | miketwo/euler | debug.py | debug.py | # Useful debug
from time import time
START = 0
FINISH = 0
ACCUMULATOR = 0
DELTA = 0
def start():
global START, DELTA
START = time()
DELTA = START
def finish():
t = time()
if ACCUMULATOR != 0:
print "Acumulated time: {}".format(ACCUMULATOR)
else:
global FINISH
FINISH = t
print "Total time: {}".format(FINISH - START)
def accumulate():
global ACCUMULATOR, DELTA
t = time()
ACCUMULATOR += t - DELTA
DELTA = t
| # Useful debug
from time import time
START = 0
FINISH = 0
def start():
global START
START = time()
def finish():
global FINISH
FINISH = time()
print FINISH - START
| mit | Python |
d2e82419a8f1b7ead32a43e6a03ebe8093374840 | Set slug field readonly after channel create | williamroot/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps | opps/channels/forms.py | opps/channels/forms.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
def __init__(self, *args, **kwargs):
super(ChannelAdminForm, self).__init__(*args, **kwargs)
instance = getattr(self, 'instance', None)
if instance and instance.pk:
self.fields['slug'].widget.attrs['readonly'] = True
class Meta:
model = Channel
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
class Meta:
model = Channel
| mit | Python |
c9284827eeec90a253157286214bc1d17771db24 | Remove skip of service-type management API test | NeCTAR-RC/neutron,apporc/neutron,takeshineshiro/neutron,mmnelemane/neutron,barnsnake351/neutron,glove747/liberty-neutron,sasukeh/neutron,SamYaple/neutron,dhanunjaya/neutron,swdream/neutron,noironetworks/neutron,bgxavier/neutron,chitr/neutron,eonpatapon/neutron,glove747/liberty-neutron,paninetworks/neutron,antonioUnina/neutron,wenhuizhang/neutron,klmitch/neutron,wolverineav/neutron,suneeth51/neutron,eayunstack/neutron,igor-toga/local-snat,shahbazn/neutron,jerryz1982/neutron,cloudbase/neutron,bigswitch/neutron,vivekanand1101/neutron,wolverineav/neutron,jumpojoy/neutron,JianyuWang/neutron,cisco-openstack/neutron,paninetworks/neutron,openstack/neutron,watonyweng/neutron,bigswitch/neutron,skyddv/neutron,mattt416/neutron,dims/neutron,neoareslinux/neutron,JianyuWang/neutron,huntxu/neutron,skyddv/neutron,yanheven/neutron,adelina-t/neutron,cisco-openstack/neutron,eonpatapon/neutron,SmartInfrastructures/neutron,igor-toga/local-snat,apporc/neutron,mandeepdhami/neutron,antonioUnina/neutron,SmartInfrastructures/neutron,sebrandon1/neutron,bgxavier/neutron,MaximNevrov/neutron,chitr/neutron,SamYaple/neutron,mahak/neutron,jumpojoy/neutron,shahbazn/neutron,asgard-lab/neutron,jacknjzhou/neutron,asgard-lab/neutron,mattt416/neutron,huntxu/neutron,takeshineshiro/neutron,silenci/neutron,JioCloud/neutron,mandeepdhami/neutron,javaos74/neutron,noironetworks/neutron,MaximNevrov/neutron,jerryz1982/neutron,adelina-t/neutron,swdream/neutron,silenci/neutron,barnsnake351/neutron,JioCloud/neutron,mahak/neutron,openstack/neutron,wenhuizhang/neutron,yanheven/neutron,dhanunjaya/neutron,eayunstack/neutron,mmnelemane/neutron,cloudbase/neutron,suneeth51/neutron,sasukeh/neutron,NeCTAR-RC/neutron,klmitch/neutron,vivekanand1101/neutron,jacknjzhou/neutron,watonyweng/neutron,mahak/neutron,sebrandon1/neutron,openstack/neutron,javaos74/neutron,neoareslinux/neutron,dims/neutron | 
neutron/tests/api/test_service_type_management.py | neutron/tests/api/test_service_type_management.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.tests.api import base
from neutron.tests.tempest import test
class ServiceTypeManagementTest(base.BaseNetworkTest):
@classmethod
def resource_setup(cls):
super(ServiceTypeManagementTest, cls).resource_setup()
if not test.is_extension_enabled('service-type', 'network'):
msg = "Neutron Service Type Management not enabled."
raise cls.skipException(msg)
@test.attr(type='smoke')
@test.idempotent_id('2cbbeea9-f010-40f6-8df5-4eaa0c918ea6')
def test_service_provider_list(self):
body = self.client.list_service_providers()
self.assertIsInstance(body['service_providers'], list)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import decorators
from neutron.tests.api import base
from neutron.tests.tempest import test
class ServiceTypeManagementTestJSON(base.BaseNetworkTest):
@classmethod
def resource_setup(cls):
super(ServiceTypeManagementTestJSON, cls).resource_setup()
if not test.is_extension_enabled('service-type', 'network'):
msg = "Neutron Service Type Management not enabled."
raise cls.skipException(msg)
@decorators.skip_because(bug="1400370")
@test.attr(type='smoke')
@test.idempotent_id('2cbbeea9-f010-40f6-8df5-4eaa0c918ea6')
def test_service_provider_list(self):
body = self.client.list_service_providers()
self.assertIsInstance(body['service_providers'], list)
| apache-2.0 | Python |
c75a244247988dbce68aa7985241712d8c94a24a | Fix how we set 'build_dir' and 'install_dir' options from 'install' options -- irrelevant because this file is about to go away, but oh well. | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/distutils/command/install_ext.py | Lib/distutils/command/install_ext.py | """install_ext
Implement the Distutils "install_ext" command to install extension modules."""
# created 1999/09/12, Greg Ward
__revision__ = "$Id$"
from distutils.core import Command
from distutils.util import copy_tree
class install_ext (Command):
description = "install C/C++ extension modules"
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
]
def initialize_options (self):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
def finalize_options (self):
self.set_undefined_options ('install',
('build_lib', 'build_dir'),
('install_lib', 'install_dir'))
def run (self):
# Make sure we have built all extension modules first
self.run_peer ('build_ext')
# Dump the entire "build/platlib" directory (or whatever it really
# is; "build/platlib" is the default) to the installation target
# (eg. "/usr/local/lib/python1.5/site-packages"). Note that
# putting files in the right package dir is already done when we
# build.
outfiles = self.copy_tree (self.build_dir, self.install_dir)
# class InstallExt
| """install_ext
Implement the Distutils "install_ext" command to install extension modules."""
# created 1999/09/12, Greg Ward
__revision__ = "$Id$"
from distutils.core import Command
from distutils.util import copy_tree
class install_ext (Command):
description = "install C/C++ extension modules"
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
]
def initialize_options (self):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
def finalize_options (self):
self.set_undefined_options ('install',
('build_platlib', 'build_dir'),
('install_platlib', 'install_dir'))
def run (self):
# Make sure we have built all extension modules first
self.run_peer ('build_ext')
# Dump the entire "build/platlib" directory (or whatever it really
# is; "build/platlib" is the default) to the installation target
# (eg. "/usr/local/lib/python1.5/site-packages"). Note that
# putting files in the right package dir is already done when we
# build.
outfiles = self.copy_tree (self.build_dir, self.install_dir)
# class InstallExt
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.