commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
62a978256476754a7f604b2f872b7bd221930ac2 | add test_debian_repo and test_nested_debian_repo | merfi/tests/test_repocollector.py | merfi/tests/test_repocollector.py | from merfi.collector import RepoCollector, DebRepo
from os.path import join, dirname
class TestRepoCollector(object):
def setup(self):
self.repos = RepoCollector(path='/', _eager=False)
def test_simple_tree(self, deb_repotree):
repos = RepoCollector(path=deb_repotree)
# The root of the deb_repotree fixture is itself a repository.
assert [r.path for r in repos] == [deb_repotree]
def test_path_is_absolute(self):
assert self.repos._abspath('/') == '/'
def test_path_is_not_absolute(self):
assert self.repos._abspath('directory').startswith('/')
def test_debian_repo(self, deb_repotree):
repos = RepoCollector(deb_repotree)
# The root of the deb_repotree fixture is itself a repository.
assert repos == [DebRepo(deb_repotree)]
def test_debian_release_files(self, deb_repotree):
repos = RepoCollector(deb_repotree)
release_files = repos.debian_release_files
# The root of the deb_repotree fixture is itself a repository.
expected = [
join(deb_repotree, 'dists', 'trusty', 'Release'),
join(deb_repotree, 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
def test_nested_debian_repo(self, nested_deb_repotree):
# go one level up
path = dirname(nested_deb_repotree)
repos = RepoCollector(path)
# Verify that we found the two repo trees.
expected = [DebRepo(join(path, 'jewel')),
DebRepo(join(path, 'luminous'))]
assert repos == expected
def test_debian_nested_release_files(self, nested_deb_repotree):
# go one level up
path = dirname(nested_deb_repotree)
repos = RepoCollector(path)
release_files = repos.debian_release_files
expected = [
join(path, 'jewel', 'dists', 'trusty', 'Release'),
join(path, 'jewel', 'dists', 'xenial', 'Release'),
join(path, 'luminous', 'dists', 'trusty', 'Release'),
join(path, 'luminous', 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
| from merfi.collector import RepoCollector
from os.path import join, dirname
class TestRepoCollector(object):
def setup(self):
self.repos = RepoCollector(path='/', _eager=False)
def test_simple_tree(self, deb_repotree):
repos = RepoCollector(path=deb_repotree)
# The root of the deb_repotree fixture is itself a repository.
assert [r.path for r in repos] == [deb_repotree]
def test_path_is_absolute(self):
assert self.repos._abspath('/') == '/'
def test_path_is_not_absolute(self):
assert self.repos._abspath('directory').startswith('/')
def test_debian_release_files(self, deb_repotree):
repos = RepoCollector(deb_repotree)
release_files = repos.debian_release_files
# The root of the deb_repotree fixture is itself a repository.
expected = [
join(deb_repotree, 'dists', 'trusty', 'Release'),
join(deb_repotree, 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
def test_debian_nested_release_files(self, nested_deb_repotree):
# go one level up
path = dirname(nested_deb_repotree)
repos = RepoCollector(path)
release_files = repos.debian_release_files
expected = [
join(path, 'jewel', 'dists', 'trusty', 'Release'),
join(path, 'jewel', 'dists', 'xenial', 'Release'),
join(path, 'luminous', 'dists', 'trusty', 'Release'),
join(path, 'luminous', 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
| Python | 0 |
ecece212605bb588212a70588dc7fd4b67e85cc9 | Corrected first two lines | roles/common/tests/test_default.py | roles/common/tests/test_default.py | from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
def test_hosts_file(File):
f = File('/etc/hosts')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
| import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
def test_hosts_file(File):
f = File('/etc/hosts')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
| Python | 0.999691 |
19fd0b75e07311bb3eb863d132125325e3478424 | Fix typo in docstring | byceps/services/user_avatar/models.py | byceps/services/user_avatar/models.py | """
byceps.services.user_avatar.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from collections import namedtuple
from datetime import datetime
from pathlib import Path
from flask import current_app, url_for
from sqlalchemy.ext.hybrid import hybrid_property
from ...database import db, generate_uuid
from ...util.image.models import ImageType
from ...util.instances import ReprBuilder
class Avatar(db.Model):
"""An avatar image uploaded by a user."""
__tablename__ = 'user_avatars'
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
creator_id = db.Column(db.Uuid, db.ForeignKey('users.id'), nullable=False)
_image_type = db.Column('image_type', db.Unicode(4), nullable=False)
def __init__(self, creator_id, image_type):
self.creator_id = creator_id
self.image_type = image_type
@hybrid_property
def image_type(self):
image_type_str = self._image_type
if image_type_str is not None:
return ImageType[image_type_str]
@image_type.setter
def image_type(self, image_type):
self._image_type = image_type.name if (image_type is not None) else None
@property
def filename(self):
name_without_suffix = str(self.id)
suffix = '.' + self.image_type.name
return Path(name_without_suffix).with_suffix(suffix)
@property
def path(self):
path = current_app.config['PATH_USER_AVATAR_IMAGES']
return path / self.filename
@property
def url(self):
path = 'users/avatars/{}'.format(self.filename)
return url_for('global_file', filename=path)
def __repr__(self):
return ReprBuilder(self) \
.add_with_lookup('id') \
.add('image_type', self.image_type.name) \
.build()
class AvatarSelection(db.Model):
"""The selection of an avatar image to be used for a user."""
__tablename__ = 'user_avatar_selections'
user_id = db.Column(db.Uuid, db.ForeignKey('users.id'), primary_key=True)
user = db.relationship('User', backref=db.backref('avatar_selection', uselist=False))
avatar_id = db.Column(db.Uuid, db.ForeignKey('user_avatars.id'), unique=True, nullable=False)
avatar = db.relationship(Avatar)
def __init__(self, user_id, avatar_id):
self.user_id = user_id
self.avatar_id = avatar_id
AvatarCreationTuple = namedtuple('AvatarCreationTuple', 'created_at, url')
| """
byceps.services.user_avatar.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from collections import namedtuple
from datetime import datetime
from pathlib import Path
from flask import current_app, url_for
from sqlalchemy.ext.hybrid import hybrid_property
from ...database import db, generate_uuid
from ...util.image.models import ImageType
from ...util.instances import ReprBuilder
class Avatar(db.Model):
"""A avatar image uploaded by a user."""
__tablename__ = 'user_avatars'
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
creator_id = db.Column(db.Uuid, db.ForeignKey('users.id'), nullable=False)
_image_type = db.Column('image_type', db.Unicode(4), nullable=False)
def __init__(self, creator_id, image_type):
self.creator_id = creator_id
self.image_type = image_type
@hybrid_property
def image_type(self):
image_type_str = self._image_type
if image_type_str is not None:
return ImageType[image_type_str]
@image_type.setter
def image_type(self, image_type):
self._image_type = image_type.name if (image_type is not None) else None
@property
def filename(self):
name_without_suffix = str(self.id)
suffix = '.' + self.image_type.name
return Path(name_without_suffix).with_suffix(suffix)
@property
def path(self):
path = current_app.config['PATH_USER_AVATAR_IMAGES']
return path / self.filename
@property
def url(self):
path = 'users/avatars/{}'.format(self.filename)
return url_for('global_file', filename=path)
def __repr__(self):
return ReprBuilder(self) \
.add_with_lookup('id') \
.add('image_type', self.image_type.name) \
.build()
class AvatarSelection(db.Model):
"""The selection of an avatar image to be used for a user."""
__tablename__ = 'user_avatar_selections'
user_id = db.Column(db.Uuid, db.ForeignKey('users.id'), primary_key=True)
user = db.relationship('User', backref=db.backref('avatar_selection', uselist=False))
avatar_id = db.Column(db.Uuid, db.ForeignKey('user_avatars.id'), unique=True, nullable=False)
avatar = db.relationship(Avatar)
def __init__(self, user_id, avatar_id):
self.user_id = user_id
self.avatar_id = avatar_id
AvatarCreationTuple = namedtuple('AvatarCreationTuple', 'created_at, url')
| Python | 0.013244 |
ef03541b2b25ab9cf34deec554a19a32dad7fbec | Add new line to end of init file for Meta Writer application | tools/python/odin_data/meta_writer/__init__.py | tools/python/odin_data/meta_writer/__init__.py | from pkg_resources import require
require('pygelf==0.3.1')
require("h5py==2.8.0")
require('pyzmq==16.0.2')
| from pkg_resources import require
require('pygelf==0.3.1')
require("h5py==2.8.0")
require('pyzmq==16.0.2') | Python | 0 |
5eaf4ed148f36f6cf578c9d943ee32652628de64 | Fix broken tests | xero/exceptions.py | xero/exceptions.py | from six.moves.urllib.parse import parse_qs
from xml.dom.minidom import parseString
import json
class XeroException(Exception):
def __init__(self, response, msg=None):
self.response = response
super(XeroException, self).__init__(msg)
class XeroNotVerified(Exception):
# Credentials haven't been verified
pass
class XeroBadRequest(XeroException):
# HTTP 400: Bad Request
def __init__(self, response):
if response.headers['content-type'].startswith('application/json'):
data = json.loads(response.text)
msg = "%s: %s" % (data['Type'], data['Message'])
self.errors = [err['Message']
for elem in data['Elements']
for err in elem['ValidationErrors']
]
self.problem = self.errors[0]
super(XeroBadRequest, self).__init__(response, msg=msg)
elif response.headers['content-type'].startswith('text/html'):
payload = parse_qs(response.text)
self.errors = [
payload['oauth_problem'][0],
]
self.problem = self.errors[0]
super(XeroBadRequest, self).__init__(response, payload['oauth_problem_advice'][0])
else:
# Extract the messages from the text.
# parseString takes byte content, not unicode.
dom = parseString(response.text.encode(response.encoding))
messages = dom.getElementsByTagName('Message')
msg = messages[0].childNodes[0].data
self.errors = [
m.childNodes[0].data for m in messages[1:]
]
self.problem = self.errors[0]
super(XeroBadRequest, self).__init__(response, msg)
class XeroUnauthorized(XeroException):
# HTTP 401: Unauthorized
def __init__(self, response):
payload = parse_qs(response.text)
self.problem = payload['oauth_problem'][0]
super(XeroUnauthorized, self).__init__(response, payload['oauth_problem_advice'][0])
class XeroForbidden(XeroException):
# HTTP 403: Forbidden
def __init__(self, response):
super(XeroForbidden, self).__init__(response, response.text)
class XeroNotFound(XeroException):
# HTTP 404: Not Found
def __init__(self, response):
super(XeroNotFound, self).__init__(response, response.text)
class XeroUnsupportedMediaType(XeroException):
# HTTP 415: UnsupportedMediaType
def __init__(self, response):
super(XeroUnsupportedMediaType, self).__init__(response, response.text)
class XeroInternalError(XeroException):
# HTTP 500: Internal Error
def __init__(self, response):
super(XeroInternalError, self).__init__(response, response.text)
class XeroNotImplemented(XeroException):
# HTTP 501
def __init__(self, response):
# Extract the useful error message from the text.
# parseString takes byte content, not unicode.
dom = parseString(response.text.encode(response.encoding))
messages = dom.getElementsByTagName('Message')
msg = messages[0].childNodes[0].data
super(XeroNotImplemented, self).__init__(response, msg)
class XeroRateLimitExceeded(XeroException):
# HTTP 503 - Rate limit exceeded
def __init__(self, response, payload):
self.problem = payload['oauth_problem'][0]
super(XeroRateLimitExceeded, self).__init__(response, payload['oauth_problem_advice'][0])
class XeroNotAvailable(XeroException):
# HTTP 503 - Not available
def __init__(self, response):
super(XeroNotAvailable, self).__init__(response, response.text)
class XeroExceptionUnknown(XeroException):
# Any other exception.
pass
| from six.moves.urllib.parse import parse_qs
from xml.dom.minidom import parseString
import json
class XeroException(Exception):
def __init__(self, response, msg=None):
self.response = response
super(XeroException, self).__init__(msg)
class XeroNotVerified(Exception):
# Credentials haven't been verified
pass
class XeroBadRequest(XeroException):
# HTTP 400: Bad Request
def __init__(self, response):
if response.headers['content-type'].startswith('application/json'):
data = json.loads(response.text)
msg = "%s: %s" % (data['Type'], data['Message'])
self.errors = [err['Message']
for elem in data['Elements']
for err in elem['ValidationErrors']
]
super(XeroBadRequest, self).__init__(response, msg=msg)
elif response.headers['content-type'].startswith('text/html'):
payload = parse_qs(response.text)
self.errors = [
payload['oauth_problem'][0],
]
super(XeroBadRequest, self).__init__(response, payload['oauth_problem_advice'][0])
else:
# Extract the messages from the text.
# parseString takes byte content, not unicode.
dom = parseString(response.text.encode(response.encoding))
messages = dom.getElementsByTagName('Message')
msg = messages[0].childNodes[0].data
self.errors = [
m.childNodes[0].data for m in messages[1:]
]
super(XeroBadRequest, self).__init__(response, msg)
class XeroUnauthorized(XeroException):
# HTTP 401: Unauthorized
def __init__(self, response):
payload = parse_qs(response.text)
self.problem = payload['oauth_problem'][0]
super(XeroUnauthorized, self).__init__(response, payload['oauth_problem_advice'][0])
class XeroForbidden(XeroException):
# HTTP 403: Forbidden
def __init__(self, response):
super(XeroForbidden, self).__init__(response, response.text)
class XeroNotFound(XeroException):
# HTTP 404: Not Found
def __init__(self, response):
super(XeroNotFound, self).__init__(response, response.text)
class XeroUnsupportedMediaType(XeroException):
# HTTP 415: UnsupportedMediaType
def __init__(self, response):
super(XeroUnsupportedMediaType, self).__init__(response, response.text)
class XeroInternalError(XeroException):
# HTTP 500: Internal Error
def __init__(self, response):
super(XeroInternalError, self).__init__(response, response.text)
class XeroNotImplemented(XeroException):
# HTTP 501
def __init__(self, response):
# Extract the useful error message from the text.
# parseString takes byte content, not unicode.
dom = parseString(response.text.encode(response.encoding))
messages = dom.getElementsByTagName('Message')
msg = messages[0].childNodes[0].data
super(XeroNotImplemented, self).__init__(response, msg)
class XeroRateLimitExceeded(XeroException):
# HTTP 503 - Rate limit exceeded
def __init__(self, response, payload):
self.problem = payload['oauth_problem'][0]
super(XeroRateLimitExceeded, self).__init__(response, payload['oauth_problem_advice'][0])
class XeroNotAvailable(XeroException):
# HTTP 503 - Not available
def __init__(self, response):
super(XeroNotAvailable, self).__init__(response, response.text)
class XeroExceptionUnknown(XeroException):
# Any other exception.
pass
| Python | 0.000555 |
514074dee639b30fb56ec664804bdd3f533befda | Apply `cacheonceproperty` on props of Tree & Chunk. | xmlpumpkin/tree.py | xmlpumpkin/tree.py | # encoding: utf-8
from lxml import etree
from .utils import cacheonceproperty
XML_ENCODING = 'utf-8'
class Tree(object):
"""Tree accessor for CaboCha xml."""
def __init__(self, cabocha_xml):
self._element = etree.fromstring(
cabocha_xml.encode(XML_ENCODING),
)
@cacheonceproperty
def chunks(self):
chunk_elems = self._element.findall('.//chunk')
chunks = tuple([Chunk(elem, self) for elem in chunk_elems])
return chunks
@cacheonceproperty
def root(self):
for chunk in self.chunks:
if chunk.link_to_id == -1:
return chunk
return None
def chunk_by_id(self, chunk_id):
for chunk in self.chunks:
if chunk.id == chunk_id:
return chunk
return None
class Chunk(object):
"""CaboCha chunk object representation."""
def __init__(self, element, parent):
self._element = element
self._parent = parent
def __eq__(self, other):
return self._element == other._element
@cacheonceproperty
def id(self):
return int(self._element.attrib['id'])
@cacheonceproperty
def link_to_id(self):
return int(self._element.attrib['link'])
@cacheonceproperty
def linked_from_ids(self):
return tuple([chunk.id for chunk in self.linked])
@cacheonceproperty
def func_id(self):
return int(self._element.attrib['func'])
@cacheonceproperty
def dep(self):
return self._parent.chunk_by_id(self.link_to_id)
@cacheonceproperty
def linked(self):
to_id = self.id
return [
chunk for chunk
in self._parent.chunks
if chunk.link_to_id == to_id
]
@cacheonceproperty
def surface(self):
tokens = self._tokens()
texts = [t.text for t in tokens]
return u''.join(texts)
@cacheonceproperty
def func_surface(self):
tid = self.func_id
tokens = self._tokens()
for tok in tokens:
if int(tok.attrib['id']) == tid:
return tok.text
def _tokens(self):
return self._element.findall('.//tok')
| # encoding: utf-8
from lxml import etree
XML_ENCODING = 'utf-8'
class Tree(object):
"""Tree accessor for CaboCha xml."""
def __init__(self, cabocha_xml):
self._element = etree.fromstring(
cabocha_xml.encode(XML_ENCODING),
)
@property
def chunks(self):
chunk_elems = self._element.findall('.//chunk')
chunks = tuple([Chunk(elem, self) for elem in chunk_elems])
return chunks
@property
def root(self):
for chunk in self.chunks:
if chunk.link_to_id == -1:
return chunk
return None
def chunk_by_id(self, chunk_id):
for chunk in self.chunks:
if chunk.id == chunk_id:
return chunk
return None
class Chunk(object):
"""CaboCha chunk object representation."""
def __init__(self, element, parent):
self._element = element
self._parent = parent
def __eq__(self, other):
return self._element == other._element
@property
def id(self):
return int(self._element.attrib['id'])
@property
def link_to_id(self):
return int(self._element.attrib['link'])
@property
def linked_from_ids(self):
return tuple([chunk.id for chunk in self.linked])
@property
def func_id(self):
return int(self._element.attrib['func'])
@property
def dep(self):
return self._parent.chunk_by_id(self.link_to_id)
@property
def linked(self):
to_id = self.id
return [
chunk for chunk
in self._parent.chunks
if chunk.link_to_id == to_id
]
@property
def surface(self):
tokens = self._tokens()
texts = [t.text for t in tokens]
return u''.join(texts)
@property
def func_surface(self):
tid = self.func_id
tokens = self._tokens()
for tok in tokens:
if int(tok.attrib['id']) == tid:
return tok.text
def _tokens(self):
return self._element.findall('.//tok')
| Python | 0 |
7aae3f244f15d31e4d5a0c844df5cbbb5a594e84 | update mongostring | mongo.py | mongo.py | import os
import sys
import pymongo
from bson import BSON
from bson import json_util
MONGODB_URI_LOCAL = 'mongodb://aps:aps@127.0.0.1:27017/aps'
def getlast3():
try:
client = pymongo.MongoClient(MONGODB_URI_LOCAL)
except:
print('Error: Unable to Connect')
connection = None
db = client['aps']
cursor = db.entries.find({'type':'cal'}).sort('date', -1).limit(3)
for doc in cursor:
print (doc)
client.close()
if __name__ == '__main__':
getlast3()
| import os
import sys
import pymongo
from bson import BSON
from bson import json_util
MONGODB_URI_REMOTE = 'mongodb://Lars_2009:Lars65535@euve76271.serverprofi24.de:21060/larscgmtest'
MONGODB_URI_LOCAL = 'mongodb://aps:aps@127.0.0.1:27017/aps'
def getlast3():
try:
client = pymongo.MongoClient(MONGODB_URI_LOCAL)
except:
print('Error: Unable to Connect')
connection = None
db = client['aps']
cursor = db.entries.find({'type':'cal'}).sort('date', -1).limit(3)
for doc in cursor:
print (doc)
client.close()
if __name__ == '__main__':
getlast3()
| Python | 0.000001 |
de3b4775b7dbcecc9c42e18c59b35485f83ca74a | Update max-chunks-to-make-sorted-i.py | Python/max-chunks-to-make-sorted-i.py | Python/max-chunks-to-make-sorted-i.py | # Time: O(n)
# Space: O(1)
# Given an array arr that is a permutation of [0, 1, ..., arr.length - 1],
# we split the array into some number of "chunks" (partitions), and individually sort each chunk.
# After concatenating them, the result equals the sorted array.
#
# What is the most number of chunks we could have made?
#
# Example 1:
#
# Input: arr = [4,3,2,1,0]
# Output: 1
# Explanation:
# Splitting into two or more chunks will not return the required result.
# For example, splitting into [4, 3], [2, 1, 0] will result in [3, 4, 0, 1, 2], which isn't sorted.
#
# Example 2:
#
# Input: arr = [1,0,2,3,4]
# Output: 4
# Explanation:
# We can split into two chunks, such as [1, 0], [2, 3, 4].
# However, splitting into [1, 0], [2], [3], [4] is the highest number of chunks possible.
#
# Note:
# - arr will have length in range [1, 10].
# - arr[i] will be a permutation of [0, 1, ..., arr.length - 1].
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
result, max_i = 0, 0
for i, v in enumerate(arr):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
| # Time: O(n)
# Space: O(1)
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
result, max_i = 0, 0
for i, v in enumerate(arr):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
| Python | 0.000001 |
32fccb04bac6be7e79f6b05b727e5e847fef498c | Update misc.py | misc/misc.py | misc/misc.py | import discord
from discord.ext import commands
import random
import time
class misc:
"""My custom cog that does stuff"""
def __init__(self, bot):
self.bot = bot
self.bank = Bank(bot, "data/economy/bank.json")
def role_colour():
#Rand between 0 - 256
a = random.randrange(0,256)
b = random.randrange(0,256)
c = random.randrange(0,256)
if a != 0 or b != 0 or c != 0:
choice = random.randrange(1,4)
if choice === 1:
a = 0
if choice === 2:
b = 0
if choice === 3:
c = 0
return a, b, c
def change_colour(r, g, b):
picked_role = bot.role("400618311861272577")
bot.edit_role(role=picked_role, colour=bot.colour(r, g, b))
def colour_loop():
while true:
change_colour(role_colour())
time.sleep(5)
colour_loop()
def setup(bot):
bot.add_cog(Counter(bot))
| Python | 0 | |
4b9948e665c78df468917b0906afc288244fa303 | add doc back in. | osbs/exceptions.py | osbs/exceptions.py | """
Exceptions raised by OSBS
"""
class OsbsException(Exception):
pass
class OsbsResponseException(OsbsException):
""" OpenShift didn't respond with OK (200) status """
def __init__ (self, message, status_code, *args, **kwargs):
super (OsbsResponseException, self).__init__ (message, *args, **kwargs)
self.status_code = status_code
class OsbsNetworkException(OsbsException):
def __init__ (self, url, message, status_code, *args, **kwargs):
super (OsbsNetworkException, self).__init__ (message, *args, **kwargs)
self.url = url
self.status_code = status_code
| """
Exceptions raised by OSBS
"""
class OsbsException(Exception):
pass
class OsbsResponseException(OsbsException):
def __init__ (self, message, status_code, *args, **kwargs):
super (OsbsResponseException, self).__init__ (message, *args, **kwargs)
self.status_code = status_code
class OsbsNetworkException(OsbsException):
def __init__ (self, url, message, status_code, *args, **kwargs):
super (OsbsNetworkException, self).__init__ (message, *args, **kwargs)
self.url = url
self.status_code = status_code
| Python | 0 |
e9a1ee7faef9b208e83173c39c62926553ab6b5f | mark issue as closed if resolution type is finished or fixed | src/survivor/tasks/sync.py | src/survivor/tasks/sync.py | """
Synchronises local database with JIRA.
"""
import argparse
import iso8601
import itertools
from jira.client import JIRA
from survivor import config, init
from survivor.models import User, Issue
# max number of issues to have jira return for the project
MAX_ISSUE_RESULTS = 99999
def create_user(jira_user):
"Creates a `survivor.models.User` from a `jira.resources.User`."
user = User(login=jira_user.name)
user.name = jira_user.displayName
user.email = jira_user.emailAddress
user.avatar_url = jira_user.avatarUrls.__dict__['48x48']
return user.save()
def get_or_create_user(jira_user):
"""
Get or create a `survivor.models.User` from a partially-loaded
`jira.resources.User`.
"""
try:
return User.objects.get(login=jira_user.name)
except User.DoesNotExist:
return create_user(jira_user)
def create_issue(jira_issue):
"Creates a `survivor.models.Issue` from a `jira.resources.Issue`."
issue = Issue(key=jira_issue.key,
title=jira_issue.fields.description,
state=jira_issue.fields.status.name.lower(),
opened=iso8601.parse_date(jira_issue.fields.created),
updated=iso8601.parse_date(jira_issue.fields.updated),
url=jira_issue.self)
issue.reporter = get_or_create_user(jira_issue.fields.reporter)
if jira_issue.fields.resolutiondate:
resolution_type = jira_issue.fields.resolution.name
if resolution_type == "Finished" or resolution_type == "Fixed":
issue.closed = iso8601.parse_date(jira_issue.fields.resolutiondate)
if jira_issue.fields.assignee:
issue.assignee = get_or_create_user(jira_issue.fields.assignee)
# TODO comments, labels
return issue.save()
def sync(types, verbose=False):
"Refresh selected collections from JIRA."
jira_project = config['jira.project']
jira_username = config['jira.username']
jira_password = config['jira.password']
jira_server = config['jira.server']
jira = JIRA(basic_auth=(jira_username, jira_password), options={'server': jira_server})
if 'users' in types:
User.drop_collection()
# FIXME: can this come from config?
for jira_user in jira.search_assignable_users_for_projects('', jira_project):
try:
user = create_user(jira_user)
except:
print 'Error creating user: %s' % jira_user.name
raise
if verbose: print 'created user: %s' % jira_user.name
if 'issues' in types:
Issue.drop_collection()
issues = jira.search_issues(
'project=%s and (status=OPEN or status=CLOSED)' % jira_project,
maxResults=MAX_ISSUE_RESULTS
)
for jira_issue in issues:
try:
issue = create_issue(jira_issue)
except:
print 'Error creating %s' % jira_issue.key
raise
if verbose: print 'created issue: %s' % jira_issue.key
if __name__ == '__main__':
argparser = argparse.ArgumentParser(description='Synchronises local DB with JIRA')
argparser.add_argument('model', nargs='*', help='model types to sync')
argparser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='verbose output')
args = argparser.parse_args()
types = args.model or ('users', 'issues')
init()
sync(types, args.verbose)
| """
Synchronises local database with JIRA.
"""
import argparse
import iso8601
import itertools
from jira.client import JIRA
from survivor import config, init
from survivor.models import User, Issue
# max number of issues to have jira return for the project
MAX_ISSUE_RESULTS = 99999
def create_user(jira_user):
"Creates a `survivor.models.User` from a `jira.resources.User`."
user = User(login=jira_user.name)
user.name = jira_user.displayName
user.email = jira_user.emailAddress
user.avatar_url = jira_user.avatarUrls.__dict__['48x48']
return user.save()
def get_or_create_user(jira_user):
"""
Get or create a `survivor.models.User` from a partially-loaded
`jira.resources.User`.
"""
try:
return User.objects.get(login=jira_user.name)
except User.DoesNotExist:
return create_user(jira_user)
def create_issue(jira_issue):
"Creates a `survivor.models.Issue` from a `jira.resources.Issue`."
issue = Issue(key=jira_issue.key,
title=jira_issue.fields.description,
state=jira_issue.fields.status.name.lower(),
opened=iso8601.parse_date(jira_issue.fields.created),
updated=iso8601.parse_date(jira_issue.fields.updated),
url=jira_issue.self)
issue.reporter = get_or_create_user(jira_issue.fields.reporter)
if jira_issue.fields.resolutiondate:
issue.closed = iso8601.parse_date(jira_issue.fields.resolutiondate)
if jira_issue.fields.assignee:
issue.assignee = get_or_create_user(jira_issue.fields.assignee)
# TODO comments, labels
return issue.save()
def sync(types, verbose=False):
"Refresh selected collections from JIRA."
jira_project = config['jira.project']
jira_username = config['jira.username']
jira_password = config['jira.password']
jira_server = config['jira.server']
jira = JIRA(basic_auth=(jira_username, jira_password), options={'server': jira_server})
if 'users' in types:
User.drop_collection()
# FIXME: can this come from config?
for jira_user in jira.search_assignable_users_for_projects('', jira_project):
try:
user = create_user(jira_user)
except:
print 'Error creating user: %s' % jira_user.name
raise
if verbose: print 'created user: %s' % jira_user.name
if 'issues' in types:
Issue.drop_collection()
issues = jira.search_issues(
'project=%s and (status=OPEN or status=CLOSED)' % jira_project,
maxResults=MAX_ISSUE_RESULTS
)
for jira_issue in issues:
try:
issue = create_issue(jira_issue)
except:
print 'Error creating %s' % jira_issue.key
raise
if verbose: print 'created issue: %s' % jira_issue.key
if __name__ == '__main__':
argparser = argparse.ArgumentParser(description='Synchronises local DB with JIRA')
argparser.add_argument('model', nargs='*', help='model types to sync')
argparser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='verbose output')
args = argparser.parse_args()
types = args.model or ('users', 'issues')
init()
sync(types, args.verbose)
| Python | 0 |
bbf8886a2cbf4fa371f0a67157fdd3df3dfa47dd | Fix broken MLflow DB README link in CLI docs (#2377) | mlflow/db.py | mlflow/db.py | import click
import mlflow.store.db.utils
@click.group("db")
def commands():
"""
Commands for managing an MLflow tracking database.
"""
pass
@commands.command()
@click.argument("url")
def upgrade(url):
"""
Upgrade the schema of an MLflow tracking database to the latest supported version.
**IMPORTANT**: Schema migrations can be slow and are not guaranteed to be transactional -
**always take a backup of your database before running migrations**. The migrations README,
which is located at
https://github.com/mlflow/mlflow/blob/master/mlflow/store/db_migrations/README.md, describes
large migrations and includes information about how to estimate their performance and
recover from failures.
"""
if mlflow.store.db.utils._is_initialized_before_mlflow_1(url):
mlflow.store.db.utils._upgrade_db_initialized_before_mlflow_1(url)
mlflow.store.db.utils._upgrade_db(url)
| import click
import mlflow.store.db.utils
@click.group("db")
def commands():
"""
Commands for managing an MLflow tracking database.
"""
pass
@commands.command()
@click.argument("url")
def upgrade(url):
"""
Upgrade the schema of an MLflow tracking database to the latest supported version.
**IMPORTANT**: Schema migrations can be slow and are not guaranteed to be transactional -
**always take a backup of your database before running migrations**. The migrations README,
which is located at
https://github.com/mlflow/mlflow/blob/master/mlflow/store/db_migrations/README, describes
large migrations and includes information about how to estimate their performance and
recover from failures.
"""
if mlflow.store.db.utils._is_initialized_before_mlflow_1(url):
mlflow.store.db.utils._upgrade_db_initialized_before_mlflow_1(url)
mlflow.store.db.utils._upgrade_db(url)
| Python | 0 |
d5f979236089e7cb3de90b03303e1c3af967331c | add UW-Madison, minor formatting | uw_si2/rest/rester.py | uw_si2/rest/rester.py | from __future__ import division, unicode_literals
import six, bson, os
from bson.json_util import dumps, loads
from mpcontribs.rest.rester import MPContribsRester
from mpcontribs.io.core.utils import get_short_object_id
from mpcontribs.io.archieml.mpfile import MPFile
from pandas import Series
class UWSI2Rester(MPContribsRester):
"""UW/SI2-specific convenience functions to interact with MPContribs REST interface"""
z = loads(open(os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'z.json'
), 'r').read())
def get_uwsi2_contributions(self):
"""
- [<host(pretty-formula)>] <mp_cat_id-linked-to-materials-details-page> <cid-linked-to-contribution-details-page>
|- <solute> <D0-value> <Q-value> <toggle-in-graph>
|- ...
- ...
"""
labels = ["Solute element name", "Solute D0 [cm^2/s]", "Solute Q [eV]"]
data = []
for doc in self.query_contributions(
criteria={'project': {'$in': ['LBNL', 'UW-Madison']}},
projection={'_id': 1, 'mp_cat_id': 1, 'content': 1}
):
mpfile = MPFile.from_contribution(doc)
mp_id = mpfile.ids[0]
table = mpfile.tdata[mp_id]['data_supporting'][labels]
table.columns = ['El.', 'D0 [cm2/s]', 'Q [eV]']
anums = [self.z[el] for el in table['El.']]
table.insert(0, 'Z', Series(anums, index=table.index))
table.sort_values('Z', inplace=True)
table.reset_index(drop=True, inplace=True)
hdata = mpfile.hdata[mp_id]
data.append({
'mp_id': mp_id, 'cid': doc['_id'],
'short_cid': get_short_object_id(doc['_id']),
'formula': hdata['formula'],
'table': table
})
return data
| from __future__ import division, unicode_literals
import six, bson, os
from bson.json_util import dumps, loads
from mpcontribs.rest.rester import MPContribsRester
from mpcontribs.io.core.utils import get_short_object_id
from mpcontribs.io.archieml.mpfile import MPFile
from pandas import Series
class UWSI2Rester(MPContribsRester):
"""UW/SI2-specific convenience functions to interact with MPContribs REST interface"""
z = loads(open(os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'z.json'
), 'r').read())
def get_uwsi2_contributions(self):
"""
- [<host(pretty-formula)>] <mp_cat_id-linked-to-materials-details-page> <cid-linked-to-contribution-details-page>
|- <solute> <D0-value> <Q-value> <toggle-in-graph>
|- ...
- ...
"""
labels = ["Solute element name", "Solute D0 [cm^2/s]", "Solute Q [eV]"]
data = []
for doc in self.query_contributions(
criteria={'project': 'LBNL'},
projection={'_id': 1, 'mp_cat_id': 1, 'content': 1}
):
mpfile = MPFile.from_contribution(doc)
mp_id = mpfile.ids[0]
table = mpfile.tdata[mp_id]['data_supporting'][labels]
table.columns = ['El.', 'D0 [cm^2/s]', 'Q [eV]']
anums = [self.z[el] for el in table['El.']]
table.insert(0, 'Z', Series(anums, index=table.index))
table.sort_values('Z', inplace=True)
table.reset_index(drop=True, inplace=True)
hdata = mpfile.hdata[mp_id]
data.append({
'mp_id': mp_id, 'cid': doc['_id'],
'short_cid': get_short_object_id(doc['_id']),
'formula': hdata['formula'],
'table': table
})
return data
| Python | 0.00243 |
af54f9666b15cd68e5404b60f495f6d51c1470b1 | Fix upload_manual_flac command to add its arguments | WhatManager2/management/commands/upload_manual_flac.py | WhatManager2/management/commands/upload_manual_flac.py | #!/usr/bin/env python
from __future__ import unicode_literals
import requests
import time
from django.core.management.base import BaseCommand
from WhatManager2.utils import wm_unicode
from home.models import get_what_client
from what_transcode.tasks import TranscodeSingleJob
def _add_to_wm_transcode(what_id):
print 'Adding {0} to wm'.format(what_id)
post_data = {
'what_id': what_id,
}
response = requests.post('https://karamanolev.com/wm/transcode/request', data=post_data,
auth=('', ''))
response_json = response.json()
if response_json['message'] != 'Request added.':
raise Exception('Cannot add {0} to wm: {1}'.format(what_id, response_json['message']))
def add_to_wm_transcode(what_id):
for i in range(2):
try:
_add_to_wm_transcode(what_id)
return
except Exception:
print 'Error adding to wm, trying again in 2 sec...'
time.sleep(3)
_add_to_wm_transcode(what_id)
def report_progress(msg):
print msg
class Command(BaseCommand):
help = 'Help you create a torrent and add it to WM'
def add_arguments(self, parser):
parser.add_argument('source_dir', required=True, help='Source directory for the torrent.')
def handle(self, *args, **options):
source_dir = wm_unicode(options['source_dir'])
if not source_dir:
print u'Pass only the source directory.'
return 1
if source_dir.endswith('/'):
source_dir = source_dir[:-1]
what = get_what_client(lambda: None)
job = TranscodeSingleJob(what, None, report_progress, None, None, source_dir)
job.create_torrent()
raw_input('Please upload the torrent and press enter...')
job.move_torrent_to_dest()
add_to_wm_transcode(job.new_torrent['torrent']['id'])
| #!/usr/bin/env python
from __future__ import unicode_literals
import time
from django.core.management.base import BaseCommand
import requests
from WhatManager2.utils import wm_unicode
from home.models import get_what_client
from what_transcode.tasks import TranscodeSingleJob
def _add_to_wm_transcode(what_id):
print 'Adding {0} to wm'.format(what_id)
post_data = {
'what_id': what_id,
}
response = requests.post('https://karamanolev.com/wm/transcode/request', data=post_data,
auth=('', ''))
response_json = response.json()
if response_json['message'] != 'Request added.':
raise Exception('Cannot add {0} to wm: {1}'.format(what_id, response_json['message']))
def add_to_wm_transcode(what_id):
for i in range(2):
try:
_add_to_wm_transcode(what_id)
return
except Exception:
print 'Error adding to wm, trying again in 2 sec...'
time.sleep(3)
_add_to_wm_transcode(what_id)
def report_progress(msg):
print msg
class Command(BaseCommand):
help = 'Help you create a torrent and add it to WM'
def handle(self, *args, **options):
if len(args) != 1:
print u'Pass only the source directory.'
return 1
source_dir = wm_unicode(args[0])
if source_dir.endswith('/'):
source_dir = source_dir[:-1]
what = get_what_client(lambda: None)
job = TranscodeSingleJob(what, None, report_progress, None, None, source_dir)
job.create_torrent()
raw_input('Please upload the torrent and press enter...')
job.move_torrent_to_dest()
add_to_wm_transcode(job.new_torrent['torrent']['id'])
| Python | 0 |
fdeb06bdf33a55413f1f8f8cd780c84438ad2277 | add missing import | src/zeit/content/cp/browser/blocks/av.py | src/zeit/content/cp/browser/blocks/av.py | # Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt
from zeit.content.cp.i18n import MessageFactory as _
import zeit.content.cp.interfaces
import zope.app.pagetemplate
import zope.formlib.form
class EditProperties(zope.formlib.form.SubPageEditForm):
template = zope.app.pagetemplate.ViewPageTemplateFile(
'av.edit-properties.pt')
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IAVBlock).omit('media_type')
close = False
@property
def form(self):
return super(EditProperties, self).template
@zope.formlib.form.action(_('Apply'))
def handle_edit_action(self, action, data):
self.close = True
# XXX: dear zope.formlib, are you serious?!
return super(EditProperties, self).handle_edit_action.success(data)
| # Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.cp.interfaces
import zope.app.pagetemplate
import zope.formlib.form
class EditProperties(zope.formlib.form.SubPageEditForm):
template = zope.app.pagetemplate.ViewPageTemplateFile(
'av.edit-properties.pt')
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IAVBlock).omit('media_type')
close = False
@property
def form(self):
return super(EditProperties, self).template
@zope.formlib.form.action(_('Apply'))
def handle_edit_action(self, action, data):
self.close = True
# XXX: dear zope.formlib, are you serious?!
return super(EditProperties, self).handle_edit_action.success(data)
| Python | 0.000042 |
db1f0556f72eb84e4273ff8925494de81bf21898 | rename paths / meta not needed | src/learn/dev_ben/generate_training_data.py | src/learn/dev_ben/generate_training_data.py | import os
import sgf
from time import strftime
from os.path import dirname, abspath
from src.play.model.Board import Board
size = 9
EMPTY_val = 0 # 0.45
BLACK_val = 1 # -1.35
WHITE_val = -1 # 1.05
data_dir = os.path.join(dirname(dirname(dirname(dirname(abspath(__file__))))), 'data')
sgf_files = [
os.path.join(data_dir, 'game_57083.sgf'),
os.path.join(data_dir, 'game_100672.sgf'),
]
training_data_dir = os.path.join(data_dir, 'training_data')
if not os.path.exists(training_data_dir): # create the folder if it does not exist yet
os.makedirs(training_data_dir)
training_data_file = open(
os.path.join(training_data_dir, str(len(sgf_files)) + '_games_' + strftime('%d-%m-%Y_%H-%M-%S') + '.csv'), 'w')
for path in sgf_files:
sgf_file = open(path, 'r')
training_data_file.write(os.path.basename(path) + '\n')
collection = sgf.parse(sgf_file.read())
game_tree = collection.children[0]
moves = game_tree.nodes[1:]
# meta = game_tree.nodes[0].properties
# see SGF properties here: www.red-bean.com/sgf/properties.html
board = Board([[EMPTY_val] * size] * size)
training_data_file.write(board.matrix2csv() + '\n')
for move in moves:
keys = move.properties.keys()
if 'B' not in keys and 'W' not in keys: # don't know how to deal with special stuff yet
continue
# can't rely on the order in keys(), apparently must extract it like this
player_color = 'B' if 'B' in move.properties.keys() else 'W'
sgf_move = move.properties[player_color][0]
if len(sgf_move) is 2: # otherwise its a pass
loc = ord(sgf_move[1]) - ord('a'), ord(sgf_move[0]) - ord('a')
player_val = BLACK_val if player_color == 'B' else WHITE_val
opponent_val = WHITE_val if player_color == 'B' else BLACK_val
board.place_stone_and_capture_if_applicable(loc, player_val, opponent_val, EMPTY_val)
training_data_file.write(board.matrix2csv() + '\n')
training_data_file.close()
| import os
import sgf
from time import strftime
from os.path import dirname, abspath
from src.play.model.Board import Board
size = 9
EMPTY_val = 0 # 0.45
BLACK_val = 1 # -1.35
WHITE_val = -1 # 1.05
data_dir = os.path.join(dirname(dirname(dirname(dirname(abspath(__file__))))), 'data')
paths = [
os.path.join(data_dir, 'game_57083.sgf'),
os.path.join(data_dir, 'game_100672.sgf'),
]
training_data_dir = os.path.join(data_dir, 'training_data')
if not os.path.exists(training_data_dir): # create the folder if it does not exist yet
os.makedirs(training_data_dir)
training_data_file = open(
os.path.join(training_data_dir, str(len(paths)) + '_games_' + strftime('%d-%m-%Y_%H-%M-%S') + '.csv'), 'w')
for path in paths:
sgf_file = open(path, 'r')
training_data_file.write(os.path.basename(path) + '\n')
collection = sgf.parse(sgf_file.read())
game_tree = collection.children[0]
meta = game_tree.nodes[0].properties
moves = game_tree.nodes[1:]
# see SGF properties here: www.red-bean.com/sgf/properties.html
board = Board([[EMPTY_val] * size] * size)
training_data_file.write(board.matrix2csv() + '\n')
for move in moves:
keys = move.properties.keys()
if 'B' not in keys and 'W' not in keys: # don't know how to deal with special stuff yet
continue
# can't rely on the order in keys(), apparently must extract it like this
player_color = 'B' if 'B' in move.properties.keys() else 'W'
sgf_move = move.properties[player_color][0]
if len(sgf_move) is 2: # otherwise its a pass
loc = ord(sgf_move[1]) - ord('a'), ord(sgf_move[0]) - ord('a')
player_val = BLACK_val if player_color == 'B' else WHITE_val
opponent_val = WHITE_val if player_color == 'B' else BLACK_val
board.place_stone_and_capture_if_applicable(loc, player_val, opponent_val, EMPTY_val)
training_data_file.write(board.matrix2csv() + '\n')
training_data_file.close()
| Python | 0 |
9e86d12e1135d16b32da5f130e14cfde4ffe9a95 | Support CloudFlare "email" protection | module/plugins/hoster/UpleaCom.py | module/plugins/hoster/UpleaCom.py | # -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.internal.XFSHoster import XFSHoster
def decode_cloudflare_email(value):
email = ""
key = int(value[:2], 16)
for i in xrange(2, len(value), 2):
email += chr(int(value[i:i+2], 16) ^ key)
return email
class UpleaCom(XFSHoster):
__name__ = "UpleaCom"
__type__ = "hoster"
__version__ = "0.18"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?uplea\.com/dl/\w{15}'
__config__ = [("activated" , "bool", "Activated" , True),
("use_premium" , "bool", "Use premium account if available" , True),
("fallback" , "bool", "Fallback to free download if premium fails" , True),
("chk_filesize", "bool", "Check file size" , True),
("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
__description__ = """Uplea.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Redleon" , None),
("GammaC0de", None)]
PLUGIN_DOMAIN = "uplea.com"
SIZE_REPLACEMENTS = [('ko','KB'), ('mo','MB'), ('go','GB'), ('Ko','KB'), ('Mo','MB'), ('Go','GB')]
NAME_PATTERN = r'<span class="gold-text">(?P<N>.+?)</span>'
SIZE_PATTERN = r'<span class="label label-info agmd">(?P<S>[\d.,]+) (?P<U>[\w^_]+?)</span>'
OFFLINE_PATTERN = r'>You followed an invalid or expired link'
LINK_PATTERN = r'"(https?://\w+\.uplea\.com/anonym/.*?)"'
PREMIUM_ONLY_PATTERN = r'You need to have a Premium subscription to download this file'
WAIT_PATTERN = r'timeText: ?(\d+),'
STEP_PATTERN = r'<a href="(/step/.+)">'
NAME_REPLACEMENTS = [(r'(<a class="__cf_email__" .+? data-cfemail="(\w+?)".+)',
lambda x: decode_cloudflare_email(x.group(2)))]
def setup(self):
self.multiDL = False
self.chunk_limit = 1
self.resume_download = True
def handle_free(self, pyfile):
m = re.search(self.STEP_PATTERN, self.data)
if m is None:
self.error(_("STEP_PATTERN not found"))
self.data = self.load(urlparse.urljoin("http://uplea.com/", m.group(1)))
m = re.search(self.WAIT_PATTERN, self.data)
if m:
self.wait(m.group(1), True)
self.retry()
m = re.search(self.LINK_PATTERN, self.data)
if m is None:
self.error(_("LINK_PATTERN not found"))
self.link = m.group(1)
m = re.search(r".ulCounter\({'timer':(\d+)}\)", self.data)
if m:
self.wait(m.group(1))
| # -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.internal.XFSHoster import XFSHoster
class UpleaCom(XFSHoster):
__name__ = "UpleaCom"
__type__ = "hoster"
__version__ = "0.17"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?uplea\.com/dl/\w{15}'
__config__ = [("activated" , "bool", "Activated" , True),
("use_premium" , "bool", "Use premium account if available" , True),
("fallback" , "bool", "Fallback to free download if premium fails" , True),
("chk_filesize", "bool", "Check file size" , True),
("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
__description__ = """Uplea.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Redleon" , None),
("GammaC0de", None)]
PLUGIN_DOMAIN = "uplea.com"
SIZE_REPLACEMENTS = [('ko','KB'), ('mo','MB'), ('go','GB'), ('Ko','KB'), ('Mo','MB'), ('Go','GB')]
NAME_PATTERN = r'<span class="gold-text">(?P<N>.+?)</span>'
SIZE_PATTERN = r'<span class="label label-info agmd">(?P<S>[\d.,]+) (?P<U>[\w^_]+?)</span>'
OFFLINE_PATTERN = r'>You followed an invalid or expired link'
LINK_PATTERN = r'"(https?://\w+\.uplea\.com/anonym/.*?)"'
PREMIUM_ONLY_PATTERN = r'You need to have a Premium subscription to download this file'
WAIT_PATTERN = r'timeText: ?(\d+),'
STEP_PATTERN = r'<a href="(/step/.+)">'
def setup(self):
self.multiDL = False
self.chunk_limit = 1
self.resume_download = True
def handle_free(self, pyfile):
m = re.search(self.STEP_PATTERN, self.data)
if m is None:
self.error(_("STEP_PATTERN not found"))
self.data = self.load(urlparse.urljoin("http://uplea.com/", m.group(1)))
m = re.search(self.WAIT_PATTERN, self.data)
if m:
self.wait(m.group(1), True)
self.retry()
m = re.search(self.LINK_PATTERN, self.data)
if m is None:
self.error(_("LINK_PATTERN not found"))
self.link = m.group(1)
m = re.search(r".ulCounter\({'timer':(\d+)}\)", self.data)
if m:
self.wait(m.group(1))
| Python | 0 |
f9e63022eb975c131bef86a81655885ea0563857 | Capitalise constants | saau/sections/geology/elevation.py | saau/sections/geology/elevation.py | # geology-elevation1
from os.path import basename
import cartopy.crs as ccrs
from ..image_provider import ImageProvider
from ...utils.download import get_binary
from ...utils.shape import shape_from_zip
URL = 'http://www.ga.gov.au/corporate_data/48006/48006_shp.zip'
FILENAME = basename(URL)
class ElevationImageProvider(ImageProvider):
def has_required_data(self):
return self.data_dir_exists(FILENAME)
def obtain_data(self):
return get_binary(URL, self.data_dir_join(FILENAME))
def build_image(self):
shp = shape_from_zip(self.data_dir_join(FILENAME))
aus_map = self.services.aus_map.get_map()
aus_map.add_geometries(
[rec.geometry for rec in shp.records()],
crs=ccrs.PlateCarree()
)
return aus_map
| # geology-elevation1
from os.path import basename
import cartopy.crs as ccrs
from ..image_provider import ImageProvider
from ...utils.download import get_binary
from ...utils.shape import shape_from_zip
url = 'http://www.ga.gov.au/corporate_data/48006/48006_shp.zip'
filename = basename(url)
class ElevationImageProvider(ImageProvider):
def has_required_data(self):
return self.data_dir_exists(filename)
def obtain_data(self):
return get_binary(url, self.data_dir_join(filename))
def build_image(self):
shp = shape_from_zip(self.data_dir_join(filename))
aus_map = self.services.aus_map.get_map()
aus_map.add_geometries(
[rec.geometry for rec in shp.records()],
crs=ccrs.PlateCarree()
)
return aus_map
| Python | 0.999886 |
327fcfd4c6b0ad10b25c286f271c577afd741099 | set width for login details to 50 chars. | Source/Hg/wb_hg_credential_dialogs.py | Source/Hg/wb_hg_credential_dialogs.py | '''
====================================================================
Copyright (c) 2016 Barry A Scott. All rights reserved.
This software is licensed as described in the file LICENSE.txt,
which you should have received as part of this distribution.
====================================================================
wb_hg_credential_dialogs.py
'''
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
import wb_dialog_bases
class WbHgGetLoginDialog(wb_dialog_bases.WbDialog):
def __init__( self, parent, url, realm ):
super().__init__( parent )
self.setWindowTitle( T_('Mercurial Credentials') )
self.username = QtWidgets.QLineEdit( '' )
self.password = QtWidgets.QLineEdit()
self.password.setEchoMode( self.password.Password )
self.username.textChanged.connect( self.nameTextChanged )
self.password.textChanged.connect( self.nameTextChanged )
em = self.fontMetrics().width( 'M' )
self.addRow( T_('URL'), url )
self.addRow( T_('Realm'), realm )
self.addRow( T_('Username'), self.username, min_width=50*em )
self.addRow( T_('Password'), self.password )
self.addButtons()
def completeInit( self ):
# set focus
self.username.setFocus()
def nameTextChanged( self, text ):
self.ok_button.setEnabled( self.getUsername() != '' and self.getPassword() != '' )
def getUsername( self ):
return self.username.text().strip()
def getPassword( self ):
return self.password.text().strip()
| '''
====================================================================
Copyright (c) 2016 Barry A Scott. All rights reserved.
This software is licensed as described in the file LICENSE.txt,
which you should have received as part of this distribution.
====================================================================
wb_hg_credential_dialogs.py
'''
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
import wb_dialog_bases
class WbHgGetLoginDialog(wb_dialog_bases.WbDialog):
def __init__( self, parent, url, realm ):
super().__init__( parent )
self.setWindowTitle( T_('Mercurial Credentials') )
self.username = QtWidgets.QLineEdit( '' )
self.password = QtWidgets.QLineEdit()
self.password.setEchoMode( self.password.Password )
self.username.textChanged.connect( self.nameTextChanged )
self.password.textChanged.connect( self.nameTextChanged )
em = self.fontMetrics().width( 'M' )
self.username.setMinimumWidth( 50*em )
self.addRow( T_('URL'), url )
self.addRow( T_('Realm'), realm )
self.addRow( T_('Username'), self.username )
self.addRow( T_('Password'), self.password )
self.addButtons()
def completeInit( self ):
# set focus
self.username.setFocus()
def nameTextChanged( self, text ):
self.ok_button.setEnabled( self.getUsername() != '' and self.getPassword() != '' )
def getUsername( self ):
return self.username.text().strip()
def getPassword( self ):
return self.password.text().strip()
| Python | 0 |
bd66185722417cfc24f348b7538e189636c75352 | Fix full node in VoresourceRendererMixin | daiquiri/core/renderers/voresource.py | daiquiri/core/renderers/voresource.py | from datetime import datetime
from . import XMLRenderer
from .vosi import CapabilitiesRendererMixin, TablesetRendererMixin
class VoresourceRendererMixin(CapabilitiesRendererMixin, TablesetRendererMixin):
def render_voresource(self, metadata):
self.start('ri:Resource', {
'created': self.render_date(metadata.get('created')),
'updated': self.render_date(metadata.get('updated')),
'status': metadata.get('status'),
'xsi:type': metadata.get('type'),
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
'xmlns:ri': 'http://www.ivoa.net/xml/RegistryInterface/v1.0',
'xmlns:vg': 'http://www.ivoa.net/xml/VORegistry/v1.0',
'xmlns:vr': 'http://www.ivoa.net/xml/VOResource/v1.0',
'xmlns:vs': 'http://www.ivoa.net/xml/VODataService/v1.1',
'xsi:schemaLocation': 'http://www.ivoa.net/xml/RegistryInterface/v1.0 http://www.ivoa.net/xml/VORegistry/v1.0 http://www.ivoa.net/xml/VOResource/v1.0 http://www.ivoa.net/xml/VODataService/v1.1'
})
self.node('title', {}, metadata.get('title'))
self.node('identifier', {}, metadata.get('identifier'))
if metadata.get('short_name'):
self.node('shortName', {}, metadata.get('short_name'))
self.render_curation(metadata.get('curation', {}))
self.render_content(metadata.get('content', {}))
for capability in metadata.get('capabilities', []):
self.render_capability(capability)
tableset = metadata.get('tableset', [])
if tableset:
self.start('tableset')
self.render_tableset(tableset)
self.end('tableset')
rights = metadata.get('rights')
if rights:
self.node('rights', {}, metadata.get('rights'))
self.node('full', {}, metadata.get('full', 'false'))
managed_authority = metadata.get('managed_authority')
if managed_authority:
self.node('managedAuthority', {}, managed_authority)
managing_org = metadata.get('managing_org')
if managing_org:
self.node('managingOrg', {}, managing_org)
self.end('ri:Resource')
def render_curation(self, curation_metadata):
self.start('curation')
self.node('publisher', {}, curation_metadata.get('publisher'))
creator = curation_metadata.get('creator')
if creator:
self.start('creator')
self.node('name', {}, creator.get('name'))
self.node('logo', {}, creator.get('logo'))
self.end('creator')
self.node('date', {'role': 'updated'}, self.render_date(curation_metadata.get('date')))
contact = curation_metadata.get('contact')
if contact:
self.start('contact')
self.node('name', {}, contact.get('name'))
self.node('address', {}, contact.get('address'))
self.node('email', {}, contact.get('email'))
self.node('telephone', {}, contact.get('telephone'))
self.end('contact')
self.end('curation')
def render_content(self, content_metadata):
self.start('content')
for subject in content_metadata.get('subjects', []):
self.node('subject', {}, subject)
self.node('description', {}, content_metadata.get('description'))
self.node('referenceURL', {}, content_metadata.get('referenceURL'))
self.node('type', {}, content_metadata.get('type'))
self.end('content')
def render_date(self, date):
return datetime.strptime(date, '%Y-%m-%d').strftime('%Y-%m-%dT%H:%M:%SZ')
class VoresourceRenderer(VoresourceRendererMixin, XMLRenderer):
def render_document(self, data, accepted_media_type=None, renderer_context=None):
self.render_voresource(data)
| from datetime import datetime
from . import XMLRenderer
from .vosi import CapabilitiesRendererMixin, TablesetRendererMixin
class VoresourceRendererMixin(CapabilitiesRendererMixin, TablesetRendererMixin):
def render_voresource(self, metadata):
self.start('ri:Resource', {
'created': self.render_date(metadata.get('created')),
'updated': self.render_date(metadata.get('updated')),
'status': metadata.get('status'),
'xsi:type': metadata.get('type'),
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
'xmlns:ri': 'http://www.ivoa.net/xml/RegistryInterface/v1.0',
'xmlns:vg': 'http://www.ivoa.net/xml/VORegistry/v1.0',
'xmlns:vr': 'http://www.ivoa.net/xml/VOResource/v1.0',
'xmlns:vs': 'http://www.ivoa.net/xml/VODataService/v1.1',
'xsi:schemaLocation': 'http://www.ivoa.net/xml/RegistryInterface/v1.0 http://www.ivoa.net/xml/VORegistry/v1.0 http://www.ivoa.net/xml/VOResource/v1.0 http://www.ivoa.net/xml/VODataService/v1.1'
})
self.node('title', {}, metadata.get('title'))
self.node('identifier', {}, metadata.get('identifier'))
if metadata.get('short_name'):
self.node('shortName', {}, metadata.get('short_name'))
self.render_curation(metadata.get('curation', {}))
self.render_content(metadata.get('content', {}))
for capability in metadata.get('capabilities', []):
self.render_capability(capability)
tableset = metadata.get('tableset', [])
if tableset:
self.start('tableset')
self.render_tableset(tableset)
self.end('tableset')
rights = metadata.get('rights')
if rights:
self.node('rights', {}, metadata.get('rights'))
full = metadata.get('full')
if full:
self.node('full', {}, metadata.get('full'))
managed_authority = metadata.get('managed_authority')
if managed_authority:
self.node('managedAuthority', {}, managed_authority)
managing_org = metadata.get('managing_org')
if managing_org:
self.node('managingOrg', {}, managing_org)
self.end('ri:Resource')
def render_curation(self, curation_metadata):
self.start('curation')
self.node('publisher', {}, curation_metadata.get('publisher'))
creator = curation_metadata.get('creator')
if creator:
self.start('creator')
self.node('name', {}, creator.get('name'))
self.node('logo', {}, creator.get('logo'))
self.end('creator')
self.node('date', {'role': 'updated'}, self.render_date(curation_metadata.get('date')))
contact = curation_metadata.get('contact')
if contact:
self.start('contact')
self.node('name', {}, contact.get('name'))
self.node('address', {}, contact.get('address'))
self.node('email', {}, contact.get('email'))
self.node('telephone', {}, contact.get('telephone'))
self.end('contact')
self.end('curation')
def render_content(self, content_metadata):
self.start('content')
for subject in content_metadata.get('subjects', []):
self.node('subject', {}, subject)
self.node('description', {}, content_metadata.get('description'))
self.node('referenceURL', {}, content_metadata.get('referenceURL'))
self.node('type', {}, content_metadata.get('type'))
self.end('content')
def render_date(self, date):
return datetime.strptime(date, '%Y-%m-%d').strftime('%Y-%m-%dT%H:%M:%SZ')
class VoresourceRenderer(VoresourceRendererMixin, XMLRenderer):
def render_document(self, data, accepted_media_type=None, renderer_context=None):
self.render_voresource(data)
| Python | 0.000001 |
fe7d5ec956f0277d0689dec57d9e145fcd19f79f | Modify svm | mnist_svm.py | mnist_svm.py | import numpy as np
import matplotlib.pyplot as plt
GRAY_SCALE_RANGE = 255
import pickle
data_filename = 'data_deskewed.pkl'
print('Loading data from file \'' + data_filename + '\' ...')
with open(data_filename, 'rb') as f:
train_labels = pickle.load(f)
train_images = pickle.load(f)
test_labels = pickle.load(f)
test_images = pickle.load(f)
num_pixel = pickle.load(f)
print('Data loading complete.')
train_images = np.array(train_images)
train_images.resize(train_images.size // num_pixel, num_pixel)
test_images = np.array(test_images)
test_images.resize(test_images.size // num_pixel, num_pixel)
test_labels = np.array(test_labels)
train_labels = np.array(train_labels)
## normalization
train_images = train_images / GRAY_SCALE_RANGE
test_images = test_images / GRAY_SCALE_RANGE
from sklearn import svm, metrics
# clf = svm.SVC(gamma = 0.001)
clf = svm.SVC(kernel = 'linear')
clf.fit(train_images[:1000], train_labels[:1000])
prediction = clf.predict(test_images)
print("Classification report for classifier %s:\n%s\n"
% (clf, metrics.classification_report(test_labels, prediction)))
print("Confusion matrix:\n%s" % metrics.confusion_matrix(test_labels, prediction)) | import numpy as np
import matplotlib.pyplot as plt
GRAY_SCALE_RANGE = 255
import pickle
data_filename = 'data_deskewed.pkl'
print('Loading data from file \'' + data_filename + '\' ...')
with open(data_filename, 'rb') as f:
train_labels = pickle.load(f)
train_images = pickle.load(f)
test_labels = pickle.load(f)
test_images = pickle.load(f)
num_pixel = pickle.load(f)
print('Data loading complete.')
train_images = np.array(train_images)
train_images.resize(train_images.size // num_pixel, num_pixel)
test_images = np.array(test_images)
test_images.resize(test_images.size // num_pixel, num_pixel)
test_labels = np.array(test_labels)
train_labels = np.array(train_labels)
## normalization
train_images = train_images / GRAY_SCALE_RANGE
test_images = test_images / GRAY_SCALE_RANGE
from sklearn import svm, metrics
clf = svm.SVC(gamma = 0.001)
clf.fit(train_images, train_labels)
prediction = clf.predict(test_images)
print("Classification report for classifier %s:\n%s\n"
% (clf, metrics.classification_report(test_labels, prediction)))
print("Confusion matrix:\n%s" % metrics.confusion_matrix(test_labels, prediction)) | Python | 0.000426 |
ec7411f409f07bd04778c9baf509adb10f446f10 | allow cross origin requests | mock/mock.py | mock/mock.py | import cherrypy
class MockController:
def poi(self, location):
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
with open("poi.json") as poifile:
return poifile.read()
def faq(self, location):
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
with open("faq.json") as faqfile:
return faqfile.read()
def phrasebook(self, location):
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
with open("phrasebook.json") as phrasebookfile:
return phrasebookfile.read()
def setup_routes():
d = cherrypy.dispatch.RoutesDispatcher()
d.connect('mock', '/:action/:location', controller=MockController())
dispatcher = d
return dispatcher
conf = {
'/': {
'request.dispatch': setup_routes()
}
}
if __name__ == '__main__':
app = cherrypy.tree.mount(None, config=conf)
cherrypy.config.update({'server.socket_host': '0.0.0.0'})
cherrypy.quickstart(app)
| import cherrypy
class MockController:
def poi(self, location):
with open("poi.json") as poifile:
return poifile.read()
def faq(self, location):
with open("faq.json") as faqfile:
return faqfile.read()
def phrasebook(self, location):
with open("phrasebook.json") as phrasebookfile:
return phrasebookfile.read()
def setup_routes():
d = cherrypy.dispatch.RoutesDispatcher()
d.connect('mock', '/:action/:location', controller=MockController())
dispatcher = d
return dispatcher
conf = {
'/': {
'request.dispatch': setup_routes()
}
}
if __name__ == '__main__':
app = cherrypy.tree.mount(None, config=conf)
cherrypy.config.update({'server.socket_host': '0.0.0.0'})
cherrypy.quickstart(app) | Python | 0 |
fa58cda42afaf1ed80352d9b59cf473a16706436 | work around, closes #464 | vent/helpers/paths.py | vent/helpers/paths.py | import errno
import os
import platform
from vent.api.templates import Template
class PathDirs:
""" Global path directories for vent """
def __init__(self,
base_dir=os.path.join(os.path.expanduser("~"), ".vent/"),
plugins_dir="plugins/",
meta_dir=os.path.join(os.path.expanduser("~"), ".vent")):
self.base_dir = base_dir
self.plugins_dir = base_dir + plugins_dir
self.meta_dir = meta_dir
self.init_file = base_dir+"vent.init"
# make sure the paths exists, if not create them
self.ensure_dir(self.base_dir)
self.ensure_dir(self.plugins_dir)
self.ensure_dir(self.meta_dir)
@staticmethod
def ensure_dir(path):
""" Tries to create directory, if fails, checks if path already exists """
try:
os.makedirs(path)
except OSError as e: # pragma: no cover
if e.errno == errno.EEXIST and os.path.isdir(path):
return (True, "exists")
else:
return (False, e)
return (True, path)
@staticmethod
def ensure_file(path):
""" Checks if file exists, if fails, tries to create file """
try:
exists = os.path.isfile(path)
if not exists:
with open (path, 'w+') as fname:
fname.write("initialized")
return (True, path)
return (True, "exists")
except OSError as e: # pragma: no cover
return (False, e)
def host_config(self):
""" Ensure the host configuration file exists """
if platform.system() == 'Darwin':
default_file_dir = os.path.join(os.path.expanduser("~"), "vent_files")
else:
default_file_dir = "/tmp/vent_files"
config = Template(template=os.path.join(self.base_dir, "vent.cfg"))
resp = config.section("main")
if resp[0]:
resp = config.option("main", "files")
if not resp[0]:
config.add_option("main", "files", default_file_dir)
self.ensure_dir(default_file_dir)
else:
config.add_option("main", "files", default_file_dir)
self.ensure_dir(default_file_dir)
config.write_config()
return
| import errno
import os
from vent.api.templates import Template
class PathDirs:
""" Global path directories for vent """
def __init__(self,
base_dir=os.path.join(os.path.expanduser("~"), ".vent/"),
plugins_dir="plugins/",
meta_dir=os.path.join(os.path.expanduser("~"), ".vent")):
self.base_dir = base_dir
self.plugins_dir = base_dir + plugins_dir
self.meta_dir = meta_dir
self.init_file = base_dir+"vent.init"
# make sure the paths exists, if not create them
self.ensure_dir(self.base_dir)
self.ensure_dir(self.plugins_dir)
self.ensure_dir(self.meta_dir)
@staticmethod
def ensure_dir(path):
""" Tries to create directory, if fails, checks if path already exists """
try:
os.makedirs(path)
except OSError as e: # pragma: no cover
if e.errno == errno.EEXIST and os.path.isdir(path):
return (True, "exists")
else:
return (False, e)
return (True, path)
@staticmethod
def ensure_file(path):
""" Checks if file exists, if fails, tries to create file """
try:
exists = os.path.isfile(path)
if not exists:
with open (path, 'w+') as fname:
fname.write("initialized")
return (True, path)
return (True, "exists")
except OSError as e: # pragma: no cover
return (False, e)
def host_config(self):
""" Ensure the host configuration file exists """
default_file_dir = "/tmp/vent_files"
config = Template(template=os.path.join(self.base_dir, "vent.cfg"))
resp = config.section("main")
if resp[0]:
resp = config.option("main", "files")
if not resp[0]:
config.add_option("main", "files", default_file_dir)
self.ensure_dir(default_file_dir)
else:
config.add_option("main", "files", default_file_dir)
self.ensure_dir(default_file_dir)
config.write_config()
return
| Python | 0 |
7f248f252b0a846e39c60d66485f796576b2179e | fix doctest | aoc2016/day9.py | aoc2016/day9.py | import re
def parse(lines):
return ''.join([x.strip() for x in lines])
class Marker(object):
def __init__(self, chars, repeats):
self.chars = chars
self.repeats = repeats
@classmethod
def parse(clazz, text):
"""
>>> m, rest = Marker.parse('(10x2)abc')
>>> m.chars
10
>>> m.repeats
2
>>> rest
'abc'
"""
pattern = r"\((\d+)x(\d+)\)"
m = re.match(pattern, text)
if not m:
return None, text
return Marker(int(m.group(1)), int(m.group(2))), text[len(m.group(0)):]
def take(s, n):
return s[:n], s[n:]
def decompress(compressed):
"""
>>> decompress('ADVENT')
'ADVENT'
>>> decompress('A(1x5)BC')
'ABBBBBC'
>>> decompress('(3x3)XYZ')
'XYZXYZXYZ'
"""
result = []
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result.append(c)
else:
s, compressed = take(compressed, m.chars)
result.append(s * m.repeats)
return ''.join(result)
def decompressed_length2(compressed):
"""
>>> decompressed_length2('ADVENT')
6
>>> decompressed_length2('X(8x2)(3x3)ABCY')
20
"""
result = 0
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result += 1
else:
s, compressed = take(compressed, m.chars)
d = decompressed_length2(s)
result += d * m.repeats
return result
def step1(input):
return len(decompress(input))
def step2(input):
return decompressed_length2(input) | import re
def parse(lines):
return ''.join([x.strip() for x in lines])
class Marker(object):
def __init__(self, chars, repeats):
self.chars = chars
self.repeats = repeats
@classmethod
def parse(clazz, text):
"""
>>> m, rest = Marker.parse('(10x2)abc')
>>> m.chars
10
>>> m.repeats
2
>>> rest
'abc'
"""
pattern = r"\((\d+)x(\d+)\)"
m = re.match(pattern, text)
if not m:
return None, text
return Marker(int(m.group(1)), int(m.group(2))), text[len(m.group(0)):]
def take(s, n):
return s[:n], s[n:]
def decompress(compressed):
"""
>>> decompress('ADVENT')
'ADVENT'
>>> decompress('A(1x5)BC')
'ABBBBBC'
>>> decompress('(3x3)XYZ')
'XYZXYZXYZ'
"""
result = []
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result.append(c)
else:
s, compressed = take(compressed, m.chars)
result.append(s * m.repeats)
return ''.join(result)
def decompressed_length2(compressed):
"""
>>> decompress2('ADVENT')
'ADVENT'
>>> decompress2('X(8x2)(3x3)ABCY')
'XABCABCABCABCABCABCY'
"""
result = 0
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result += 1
else:
s, compressed = take(compressed, m.chars)
d = decompressed_length2(s)
result += d * m.repeats
return result
def step1(input):
return len(decompress(input))
def step2(input):
return decompressed_length2(input) | Python | 0.000001 |
cef46656955cca0a5b0a83487418cc733a79e52b | fix profile url (#849) | open_discussions/urls.py | open_discussions/urls.py | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework_jwt.views import refresh_jwt_token
from open_discussions.views import index, saml_metadata
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^status/', include('server_status.urls')),
url(r'', include('authentication.urls')),
url(r'', include('social_django.urls', namespace='social')),
url(r'', include('channels.urls')),
url(r'', include('profiles.urls')),
url(r'', include('notifications.urls')),
url(r'', include('embedly.urls')),
url(r'^api/token/refresh/', refresh_jwt_token),
# React App
url(r'^$', index, name='open_discussions-index'),
url(r'^auth_required/$', index),
url(r'^content_policy/$', index),
url( # so that we can use reverse() to link to this
r'^channel/(?P<channel_name>[A-Za-z0-9_]+)/(?P<post_id>[A-Za-z0-9_]+)/comment/(?P<comment_id>[A-Za-z0-9_]+)/$',
index,
name='channel-post-comment',
),
url( # so that we can use reverse() to link to this
r'^channel/(?P<channel_name>[A-Za-z0-9_]+)/(?P<post_id>[A-Za-z0-9_]+)/$',
index,
name='channel-post',
),
url( # so that we can use reverse() to link to this
r'^channel/(?P<channel_name>[A-Za-z0-9_]+)/$',
index,
name='channel',
),
url(r'^settings/(?P<token>[^/]+)/$', index, name='settings-anon'),
url(r'^channel/', index),
url(r'^manage/', index),
url(r'^create_post/', index),
url(r'^moderation/', index),
url(r'^settings/', index),
url(r'^saml/metadata/', saml_metadata, name='saml-metadata'),
url(r'^profile/[A-Za-z0-9_]+/', index, name='profile'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar # pylint: disable=wrong-import-position, wrong-import-order
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework_jwt.views import refresh_jwt_token
from open_discussions.views import index, saml_metadata
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^status/', include('server_status.urls')),
url(r'', include('authentication.urls')),
url(r'', include('social_django.urls', namespace='social')),
url(r'', include('channels.urls')),
url(r'', include('profiles.urls')),
url(r'', include('notifications.urls')),
url(r'', include('embedly.urls')),
url(r'^api/token/refresh/', refresh_jwt_token),
# React App
url(r'^$', index, name='open_discussions-index'),
url(r'^auth_required/$', index),
url(r'^content_policy/$', index),
url( # so that we can use reverse() to link to this
r'^channel/(?P<channel_name>[A-Za-z0-9_]+)/(?P<post_id>[A-Za-z0-9_]+)/comment/(?P<comment_id>[A-Za-z0-9_]+)/$',
index,
name='channel-post-comment',
),
url( # so that we can use reverse() to link to this
r'^channel/(?P<channel_name>[A-Za-z0-9_]+)/(?P<post_id>[A-Za-z0-9_]+)/$',
index,
name='channel-post',
),
url( # so that we can use reverse() to link to this
r'^channel/(?P<channel_name>[A-Za-z0-9_]+)/$',
index,
name='channel',
),
url(r'^settings/(?P<token>[^/]+)/$', index, name='settings-anon'),
url(r'^channel/', index),
url(r'^manage/', index),
url(r'^create_post/', index),
url(r'^moderation/', index),
url(r'^settings/', index),
url(r'^saml/metadata/', saml_metadata, name='saml-metadata'),
url(r'^profile/(?P<channel_name>[A-Za-z0-9_]+)/', index, name='profile'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar # pylint: disable=wrong-import-position, wrong-import-order
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| Python | 0 |
6b8de33dbd50243d566e095005699f0611a38d8b | add new fail message during commit | netmiko/vyos/vyos_ssh.py | netmiko/vyos/vyos_ssh.py | from __future__ import print_function
from __future__ import unicode_literals
import time
from netmiko.cisco_base_connection import CiscoSSHConnection
class VyOSSSH(CiscoSSHConnection):
"""Implement methods for interacting with VyOS network devices."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read()
self.set_base_prompt()
self.disable_paging(command="set terminal length 0")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def check_enable_mode(self, *args, **kwargs):
"""No enable mode on VyOS."""
pass
def enable(self, *args, **kwargs):
"""No enable mode on VyOS."""
pass
def exit_enable_mode(self, *args, **kwargs):
"""No enable mode on VyOS."""
pass
def check_config_mode(self, check_string='#'):
"""Checks if the device is in configuration mode"""
return super(VyOSSSH, self).check_config_mode(check_string=check_string)
def config_mode(self, config_command='configure', pattern=r'[edit]'):
"""Enter configuration mode."""
return super(VyOSSSH, self).config_mode(config_command=config_command, pattern=pattern)
def exit_config_mode(self, exit_config='exit', pattern=r'exit'):
"""Exit configuration mode"""
output = ""
if self.check_config_mode():
output = self.send_command_timing(exit_config, strip_prompt=False, strip_command=False)
if 'Cannot exit: configuration modified' in output:
output += self.send_command_timing('exit discard', strip_prompt=False,
strip_command=False)
if self.check_config_mode():
raise ValueError("Failed to exit configuration mode")
return output
def commit(self, comment='', delay_factor=.1):
"""
Commit the candidate configuration.
Commit the entered configuration. Raise an error and return the failure
if the commit fails.
default:
command_string = commit
comment:
command_string = commit comment <comment>
"""
delay_factor = self.select_delay_factor(delay_factor)
error_marker = ['Failed to generate committed config', 'Commit failed']
command_string = 'commit'
if comment:
command_string += ' comment "{}"'.format(comment)
output = self.config_mode()
output += self.send_command_expect(command_string, strip_prompt=False,
strip_command=False, delay_factor=delay_factor)
if any(x in output for x in error_marker):
raise ValueError('Commit failed with following errors:\n\n{}'.format(output))
return output
def set_base_prompt(self, pri_prompt_terminator='$', alt_prompt_terminator='#',
delay_factor=1):
"""Sets self.base_prompt: used as delimiter for stripping of trailing prompt in output."""
prompt = super(VyOSSSH, self).set_base_prompt(pri_prompt_terminator=pri_prompt_terminator,
alt_prompt_terminator=alt_prompt_terminator,
delay_factor=delay_factor)
# Set prompt to user@hostname (remove two additional characters)
self.base_prompt = prompt[:-2].strip()
return self.base_prompt
def send_config_set(self, config_commands=None, exit_config_mode=False, delay_factor=1,
max_loops=150, strip_prompt=False, strip_command=False,
config_mode_command=None):
"""Remain in configuration mode."""
return super(VyOSSSH, self).send_config_set(config_commands=config_commands,
exit_config_mode=exit_config_mode,
delay_factor=delay_factor, max_loops=max_loops,
strip_prompt=strip_prompt,
strip_command=strip_command,
config_mode_command=config_mode_command)
def save_config(self, cmd='', confirm=True, confirm_response=''):
"""Not Implemented"""
raise NotImplementedError
| from __future__ import print_function
from __future__ import unicode_literals
import time
from netmiko.cisco_base_connection import CiscoSSHConnection
class VyOSSSH(CiscoSSHConnection):
"""Implement methods for interacting with VyOS network devices."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read()
self.set_base_prompt()
self.disable_paging(command="set terminal length 0")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def check_enable_mode(self, *args, **kwargs):
"""No enable mode on VyOS."""
pass
def enable(self, *args, **kwargs):
"""No enable mode on VyOS."""
pass
def exit_enable_mode(self, *args, **kwargs):
"""No enable mode on VyOS."""
pass
def check_config_mode(self, check_string='#'):
"""Checks if the device is in configuration mode"""
return super(VyOSSSH, self).check_config_mode(check_string=check_string)
def config_mode(self, config_command='configure', pattern=r'[edit]'):
"""Enter configuration mode."""
return super(VyOSSSH, self).config_mode(config_command=config_command, pattern=pattern)
def exit_config_mode(self, exit_config='exit', pattern=r'exit'):
"""Exit configuration mode"""
output = ""
if self.check_config_mode():
output = self.send_command_timing(exit_config, strip_prompt=False, strip_command=False)
if 'Cannot exit: configuration modified' in output:
output += self.send_command_timing('exit discard', strip_prompt=False,
strip_command=False)
if self.check_config_mode():
raise ValueError("Failed to exit configuration mode")
return output
def commit(self, comment='', delay_factor=.1):
"""
Commit the candidate configuration.
Commit the entered configuration. Raise an error and return the failure
if the commit fails.
default:
command_string = commit
comment:
command_string = commit comment <comment>
"""
delay_factor = self.select_delay_factor(delay_factor)
error_marker = 'Failed to generate committed config'
command_string = 'commit'
if comment:
command_string += ' comment "{}"'.format(comment)
output = self.config_mode()
output += self.send_command_expect(command_string, strip_prompt=False,
strip_command=False, delay_factor=delay_factor)
if error_marker in output:
raise ValueError('Commit failed with following errors:\n\n{}'.format(output))
return output
def set_base_prompt(self, pri_prompt_terminator='$', alt_prompt_terminator='#',
delay_factor=1):
"""Sets self.base_prompt: used as delimiter for stripping of trailing prompt in output."""
prompt = super(VyOSSSH, self).set_base_prompt(pri_prompt_terminator=pri_prompt_terminator,
alt_prompt_terminator=alt_prompt_terminator,
delay_factor=delay_factor)
# Set prompt to user@hostname (remove two additional characters)
self.base_prompt = prompt[:-2].strip()
return self.base_prompt
def send_config_set(self, config_commands=None, exit_config_mode=False, delay_factor=1,
max_loops=150, strip_prompt=False, strip_command=False,
config_mode_command=None):
"""Remain in configuration mode."""
return super(VyOSSSH, self).send_config_set(config_commands=config_commands,
exit_config_mode=exit_config_mode,
delay_factor=delay_factor, max_loops=max_loops,
strip_prompt=strip_prompt,
strip_command=strip_command,
config_mode_command=config_mode_command)
def save_config(self, cmd='', confirm=True, confirm_response=''):
"""Not Implemented"""
raise NotImplementedError
| Python | 0 |
eaae2a1e88572e224621e242be1d15e92065f15e | Use new extension setup() API | mopidy_nad/__init__.py | mopidy_nad/__init__.py | from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def setup(self, registry):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
| from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def register_gstreamer_elements(self):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
| Python | 0 |
daf577f1e4bab13f9d5f2e3fdad8765dbab70dfe | refactor settings | openstax/settings/dev.py | openstax/settings/dev.py | from .base import *
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# BASE_URL required for notification emails
BASE_URL = 'http://localhost:8000'
try:
from .local import *
except ImportError:
pass
##################################
# OVERRIDE ACCOUNTS SETTINGS #
##################################
# use default loging and logout urls,
# Needed for selenium test.
ACC_APP_LOGIN_URL = None
ACC_APP_LOGOUT_URL = None
ACC_APP_PROFILE_URL = None
| from .base import *
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# BASE_URL required for notification emails
BASE_URL = 'http://localhost:8000'
try:
from .local import *
except ImportError:
pass
##################################
# ACCOUNTS SETTINGS #
##################################
# Use default login, logout and profile urls
ACC_APP_LOGIN_URL = None
ACC_APP_LOGOUT_URL = None
ACC_APP_PROFILE_URL = None
ACCOUNTS_LOGIN_URL = 'https://accounts-qa.openstax.org/login?'
AUTHORIZATION_URL = 'https://accounts-qa.openstax.org/oauth/authorize'
ACCESS_TOKEN_URL = 'https://accounts-qa.openstax.org/oauth/token'
USER_QUERY = 'https://accounts-qa.openstax.org/api/user?'
SOCIAL_AUTH_OPENSTAX_KEY = '0a3c6b8c21091873805181b4b2a42cdbabeec6f6871332b817f59fac37033537'
SOCIAL_AUTH_OPENSTAX_SECRET = '40035a7f2a7948b33ffce370af3918d692b958a6cc195e8b57b1fbe621a88157'
| Python | 0.000002 |
945c93fa91cb7b3b14f002e37e2a8bd2ee915fdd | Clean the mako cache between runs, because it breaks theme switching | nikola/mako_templates.py | nikola/mako_templates.py | ########################################
# Mako template handlers
########################################
import os
import shutil
from mako import util, lexer
from mako.lookup import TemplateLookup
lookup = None
cache = {}
def get_deps(filename):
text = util.read_file(filename)
lex = lexer.Lexer(text=text, filename=filename)
lex.parse()
deps = []
for n in lex.template.nodes:
if getattr(n, 'keyword', None) == "inherit":
deps.append(n.attributes['file'])
# TODO: include tags are not handled
return deps
def get_template_lookup(directories):
cache_dir = os.path.join('cache', '.mako.tmp')
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
return TemplateLookup(
directories=directories,
module_directory=cache_dir,
output_encoding='utf-8',
)
def render_template(template_name, output_name, context, global_context):
template = lookup.get_template(template_name)
context.update(global_context)
data = template.render_unicode(**context)
if output_name is not None:
try:
os.makedirs(os.path.dirname(output_name))
except:
pass
with open(output_name, 'w+') as output:
output.write(data)
return data
def template_deps(template_name):
# We can cache here because depedencies should
# not change between runs
if cache.get(template_name, None) is None:
template = lookup.get_template(template_name)
dep_filenames = get_deps(template.filename)
deps = [template.filename]
for fname in dep_filenames:
deps += template_deps(fname)
cache[template_name] = tuple(deps)
return list(cache[template_name])
| ########################################
# Mako template handlers
########################################
import os
import shutil
from mako import util, lexer
from mako.lookup import TemplateLookup
lookup = None
cache = {}
def get_deps(filename):
text = util.read_file(filename)
lex = lexer.Lexer(text=text, filename=filename)
lex.parse()
deps = []
for n in lex.template.nodes:
if getattr(n, 'keyword', None) == "inherit":
deps.append(n.attributes['file'])
# TODO: include tags are not handled
return deps
def get_template_lookup(directories):
print "Directories:", directories
cache_dir = os.path.join('cache', '.mako.tmp')
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
return TemplateLookup(
directories=directories,
module_directory=cache_dir,
output_encoding='utf-8',
)
def render_template(template_name, output_name, context, global_context):
template = lookup.get_template(template_name)
print template.filename
context.update(global_context)
data = template.render_unicode(**context)
if output_name is not None:
try:
os.makedirs(os.path.dirname(output_name))
except:
pass
with open(output_name, 'w+') as output:
output.write(data)
return data
def template_deps(template_name):
# We can cache here because depedencies should
# not change between runs
if cache.get(template_name, None) is None:
template = lookup.get_template(template_name)
dep_filenames = get_deps(template.filename)
deps = [template.filename]
for fname in dep_filenames:
deps += template_deps(fname)
cache[template_name] = tuple(deps)
return list(cache[template_name])
| Python | 0 |
c21d7bee740fe27012d9affed27b6c489e5f6cac | add logging types | avalanche/evaluation/metric_results.py | avalanche/evaluation/metric_results.py | ################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 30-12-2020 #
# Author(s): Lorenzo Pellegrini #
# E-mail: contact@continualai.org #
# Website: www.continualai.org #
################################################################################
from dataclasses import dataclass
from typing import List, Optional, TYPE_CHECKING, Tuple, Union
from PIL.Image import Image
from matplotlib.figure import Figure
from torch import Tensor
from enum import Enum
if TYPE_CHECKING:
from .metric_definitions import Metric
MetricResult = Optional[List["MetricValue"]]
class LoggingType(Enum):
"""A type for MetricValues.
It can be used by MetricValues to choose how they want to be visualize.
For example, a 2D tensor could be a line plot or be used to create a
histogram.
"""
ANY = 1 # generic type. The logger will use the value type to decide how
# to serialize it.
IMAGE = 2
FIGURE = 3 # Matplotlib figure.
HISTOGRAM = 4
# you can add others here. All Tensorboard metrics are good candidates:
# https://pytorch.org/docs/stable/tensorboard.html
# just remember to add explicit support to the loggers once you add them.
# If a metric is already printed correctly by the loggers (e.g. scalars)
# there is no need to add it here.
@dataclass
class TensorImage:
image: Tensor
def __array__(self):
return self.image.numpy()
MetricType = Union[float, int, Tensor, Image, TensorImage, Figure]
class AlternativeValues:
"""
A container for alternative representations of the same metric value.
"""
def __init__(self, *alternatives: MetricType):
self.alternatives: Tuple[MetricType] = alternatives
def best_supported_value(
self, *supported_types: type
) -> Optional[MetricType]:
"""
Retrieves a supported representation for this metric value.
:param supported_types: A list of supported value types.
:return: The best supported representation. Returns None if no supported
representation is found.
"""
for alternative in self.alternatives:
if isinstance(alternative, supported_types):
return alternative
return None
class MetricValue(object):
"""
The result of a Metric.
A result has a name, a value and a "x" position in which the metric value
should be plotted.
The "value" field can also be an instance of "AlternativeValues", in which
case it means that alternative representations exist for this value. For
instance, the Confusion Matrix can be represented both as a Tensor and as
an Image. It's up to the Logger, according to its capabilities, decide which
representation to use.
"""
def __init__(
self,
origin: "Metric",
name: str,
value: Union[MetricType, AlternativeValues],
x_plot: int,
logging_type: LoggingType = LoggingType.ANY,
):
"""
Creates an instance of MetricValue.
:param origin: The originating Metric instance.
:param name: The display name of this value. This value roughly
corresponds to the name of the plot in which the value should
be logged.
:param value: The value of the metric. Can be a scalar value,
a PIL Image, or a Tensor. If more than a possible representation
of the same value exist, an instance of :class:`AlternativeValues`
can be passed. For instance, the Confusion Matrix can be represented
both as an Image and a Tensor, in which case an instance of
:class:`AlternativeValues` carrying both the Tensor and the Image
is more appropriate. The Logger instance will then select the most
appropriate way to log the metric according to its capabilities.
:param x_plot: The position of the value. This value roughly corresponds
to the x-axis position of the value in a plot. When logging a
singleton value, pass 0 as a value for this parameter.
:param logging_type: determines how the metric should be logged.
"""
self.origin: "Metric" = origin
self.name: str = name
self.value: Union[MetricType, AlternativeValues] = value
self.x_plot: int = x_plot
self.logging_type = logging_type
__all__ = [
"MetricType",
"MetricResult",
"AlternativeValues",
"MetricValue",
"TensorImage",
]
| ################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 30-12-2020 #
# Author(s): Lorenzo Pellegrini #
# E-mail: contact@continualai.org #
# Website: www.continualai.org #
################################################################################
from dataclasses import dataclass
from typing import List, Optional, TYPE_CHECKING, Tuple, Union
from PIL.Image import Image
from matplotlib.figure import Figure
from torch import Tensor
if TYPE_CHECKING:
from .metric_definitions import Metric
MetricResult = Optional[List["MetricValue"]]
@dataclass
class TensorImage:
image: Tensor
def __array__(self):
return self.image.numpy()
MetricType = Union[float, int, Tensor, Image, TensorImage, Figure]
class AlternativeValues:
"""
A container for alternative representations of the same metric value.
"""
def __init__(self, *alternatives: MetricType):
self.alternatives: Tuple[MetricType] = alternatives
def best_supported_value(
self, *supported_types: type
) -> Optional[MetricType]:
"""
Retrieves a supported representation for this metric value.
:param supported_types: A list of supported value types.
:return: The best supported representation. Returns None if no supported
representation is found.
"""
for alternative in self.alternatives:
if isinstance(alternative, supported_types):
return alternative
return None
class MetricValue(object):
"""
The result of a Metric.
A result has a name, a value and a "x" position in which the metric value
should be plotted.
The "value" field can also be an instance of "AlternativeValues", in which
case it means that alternative representations exist for this value. For
instance, the Confusion Matrix can be represented both as a Tensor and as
an Image. It's up to the Logger, according to its capabilities, decide which
representation to use.
"""
def __init__(
self,
origin: "Metric",
name: str,
value: Union[MetricType, AlternativeValues],
x_plot: int,
):
"""
Creates an instance of MetricValue.
:param origin: The originating Metric instance.
:param name: The display name of this value. This value roughly
corresponds to the name of the plot in which the value should
be logged.
:param value: The value of the metric. Can be a scalar value,
a PIL Image, or a Tensor. If more than a possible representation
of the same value exist, an instance of :class:`AlternativeValues`
can be passed. For instance, the Confusion Matrix can be represented
both as an Image and a Tensor, in which case an instance of
:class:`AlternativeValues` carrying both the Tensor and the Image
is more appropriate. The Logger instance will then select the most
appropriate way to log the metric according to its capabilities.
:param x_plot: The position of the value. This value roughly corresponds
to the x-axis position of the value in a plot. When logging a
singleton value, pass 0 as a value for this parameter.
"""
self.origin: "Metric" = origin
self.name: str = name
self.value: Union[MetricType, AlternativeValues] = value
self.x_plot: int = x_plot
__all__ = [
"MetricType",
"MetricResult",
"AlternativeValues",
"MetricValue",
"TensorImage",
]
| Python | 0.000001 |
4912027d6cb0f27c736e46498231595f50a36cd3 | add cv element | mriqc/classifier/cv.py | mriqc/classifier/cv.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: oesteban
# @Date: 2015-11-19 16:44:27
# @Last Modified by: oesteban
# @Last Modified time: 2016-05-12 17:46:31
"""
MRIQC Cross-validation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import os.path as op
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
import pandas as pd
from sklearn import svm
from sklearn.cross_validation import LeaveOneLabelOut
def main():
"""Entry point"""
parser = ArgumentParser(description='MRI Quality Control',
formatter_class=RawTextHelpFormatter)
g_input = parser.add_argument_group('Inputs')
g_input.add_argument('-X', '--in-training', action='store',
required=True)
g_input.add_argument('-y', '--in-training-labels', action='store',
required=True)
# g_outputs = parser.add_argument_group('Outputs')
opts = parser.parse_args()
with open(opts.in_training, 'r') as fileX:
X_df = pd.read_csv(fileX).sort_values(by=['subject_id'])
with open(opts.in_training_labels, 'r') as fileY:
y_df = pd.read_csv(fileY).sort_values(by=['subject_id'])
# Remove columns that are not IQMs
columns = X_df.columns.ravel().to_list()
columns.remove('subject_id')
columns.remove('session_id')
columns.remove('run_id')
# Remove failed cases from Y, append new columns to X
y_df = y_df[y_df['subject_id'].isin(X_df.subject_id)]
sites = list(y_df.site.values)
X_df['rate'] = y_df.rate.values
# Convert all samples to tuples
X = [tuple(x) for x in X_df[columns].values]
lolo = LeaveOneLabelOut(labels)
clf = svm.SVC()
clf.fit(X, list(y_df.rate.values))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: oesteban
# @Date: 2015-11-19 16:44:27
# @Last Modified by: oesteban
# @Last Modified time: 2016-05-12 17:46:31
"""
MRIQC Cross-validation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import os.path as op
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
import pandas as pd
from sklearn import svm
def main():
"""Entry point"""
parser = ArgumentParser(description='MRI Quality Control',
formatter_class=RawTextHelpFormatter)
g_input = parser.add_argument_group('Inputs')
g_input.add_argument('-X', '--in-training', action='store',
required=True)
g_input.add_argument('-y', '--in-training-labels', action='store',
required=True)
# g_outputs = parser.add_argument_group('Outputs')
opts = parser.parse_args()
with open(opts.in_training, 'r') as fileX:
X_df = pd.read_csv(fileX).sort_values(by=['subject_id'])
with open(opts.in_training_labels, 'r') as fileY:
y_df = pd.read_csv(fileY).sort_values(by=['subject_id'])
# Remove columns that are not IQMs
columns = X_df.columns.ravel().to_list()
columns.remove('subject_id')
columns.remove('session_id')
columns.remove('run_id')
# Remove failed cases from Y, append new columns to X
y_df = y_df[y_df['subject_id'].isin(X_df.subject_id)]
X_df['site'] = y_df.site.values
X_df['rate'] = y_df.rate.values
# Convert all samples to tuples
X = [tuple(x) for x in X_df[columns].values]
clf = svm.SVC()
clf.fit(X, list(y_df.rate.values))
if __name__ == '__main__':
main()
| Python | 0.000001 |
2aeda5c12710e197282f015f7e4b8519f1d8bcc5 | Update tests.py | verification/tests.py | verification/tests.py | """
TESTS is a dict with all you tests.
Keys for this will be categories' names.
Each test is dict with
"input" -- input data for user function
"answer" -- your right answer
"explanation" -- not necessary key, it's using for additional info in animation.
"""
TESTS = {
"Basics": [
{
"input": "$5.34",
"answer": lambda:0
}
]
}
| """
TESTS is a dict with all you tests.
Keys for this will be categories' names.
Each test is dict with
"input" -- input data for user function
"answer" -- your right answer
"explanation" -- not necessary key, it's using for additional info in animation.
"""
TESTS = {
"Basics": [
{
"input": "$5.34",
"two": "$5.34"
}
]
}
| Python | 0.000001 |
711e49f0a49a45d7b7021b5c26137989883c270c | Refresh monitoring.nagios.probes.mssql and fix pylint+pep8. | monitoring/nagios/probes/mssql.py | monitoring/nagios/probes/mssql.py | # -*- coding: utf-8 -*-
# Copyright (C) Vincent BESANCON <besancon.vincent@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""MSSQL probe module."""
import logging as log
from monitoring.nagios.probes import Probe
from monitoring.nagios.plugin.exceptions import PluginError
import pymssql
logger = log.getLogger('monitoring.nagios.probes.mssql')
class ProbeMSSQL(Probe):
"""
A MS SQL Server probe.
:param host: The host to connect to.
:type host: str
:param username: Login user name.
:type username: str
:param password: Login user password.
:type password: str
:param database: Database to connect to, by default selects the database
which is set as default for specific user.
:type database: str
:param query_timeout: Query timeout in seconds, default is 30 secs.
:type query_timeout: int
:param login_timeout: Timeout for connection and login in seconds, default
is 15 secs.
:type login_timeout: int
"""
def __init__(self, host, username, password, database=None,
query_timeout=30, login_timeout=15):
super(ProbeMSSQL, self).__init__(host)
logger.debug('Establishing MS SQL server connection '
'to %s on database %s with user %s...',
host, database, username)
try:
self._db_connection = pymssql.connect(host=self._hostaddress,
user=username,
password=password,
database=database,
timeout=query_timeout,
login_timeout=login_timeout,
as_dict=True)
except pymssql.Error as e:
raise PluginError('Cannot connect to the database %s on server '
'%s !' % (database, host), "\n".join(list(e)))
def _get_cursor(self):
"""
Get connection cursor.
:return: MSSQL Connection Cursor.
:rtype: pymssql.Cursor
"""
return self._db_connection.cursor()
def execute(self, query):
"""
Execute a SQL query.
:param query: SQL query.
:type query: str
:return: pymssql.Cursor
"""
try:
cursor = self._get_cursor()
cursor.execute(query)
return cursor
except pymssql.Error as e:
raise PluginError('Error during query execution !\n'
'Query: %s' % query, e.message)
def close(self):
"""Close the connection."""
self._db_connection.close()
| # -*- coding: utf-8 -*-
#===============================================================================
# Filename : mssql
# Author : Vincent BESANCON <besancon.vincent@gmail.com>
# Description : Module that define a MS SQL Server probe.
#-------------------------------------------------------------------------------
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
import logging as log
from monitoring.nagios.probes import Probe
from monitoring.nagios.plugin.exceptions import PluginError, NagiosUnknown, NagiosCritical
import pymssql
logger = log.getLogger('monitoring.nagios.probes.mssql')
class ProbeMSSQL(Probe):
"""
A MS SQL Server probe.
:param host: The host to connect to.
:type host: str
:param username: Login user name.
:type username: str
:param password: Login user password.
:type password: str
:param database: Database to connect to, by default selects the database which is set as default for specific user.
:type database: str
:param query_timeout: Query timeout in seconds, default is 30 secs.
:type query_timeout: int
:param login_timeout: Timeout for connection and login in seconds, default is 15 secs.
:type login_timeout: int
"""
def __init__(self, host, username, password, database=None, query_timeout=30, login_timeout=15):
super(ProbeMSSQL, self).__init__(host)
logger.debug('Establishing MS SQL server connection to %s on database %s with user %s...' % (host, database,
username))
try:
self._db_connection = pymssql.connect(host=self._hostaddress,
user=username,
password=password,
database=database,
timeout=query_timeout,
login_timeout=login_timeout,
as_dict=True)
except pymssql.Error as e:
raise PluginError('Cannot connect to the database %s on server %s !' % (database, host),
"\n".join(list(e)))
def _get_cursor(self):
"""
Get connection cursor.
:return: MSSQL Connection Cursor.
:rtype: pymssql.Cursor
"""
return self._db_connection.cursor()
def execute(self, query):
"""
Execute a SQL query.
:param query: SQL query.
:type query: str
:return: pymssql.Cursor
"""
try:
cursor = self._get_cursor()
cursor.execute(query)
return cursor
except pymssql.Error as e:
raise PluginError('Error during query execution !\nQuery: %s' % query, e.message)
def close(self):
"""
Close the connection.
"""
self._db_connection.close() | Python | 0 |
decab6827b5dacc21f0263af7e5d895e5a737726 | Update tests.py | verification/tests.py | verification/tests.py | """
TESTS is a dict with all you tests.
Keys for this will be categories' names.
Each test is dict with
"input" -- input data for user function
"answer" -- your right answer
"explanation" -- not necessary key, it's using for additional info in animation.
"""
TESTS = {
"Basics": [
{
"input": "$5.34",
"answer": "$5.34"
},
{
"input": "$5,34",
"answer": "$5.34"
},
{
"input": "$222,100,455.34",
"answer": "$222,100,455.34"
},
{
"input": "$222.100.455,34",
"answer": "$222,100,455.34"
},
{
"input": "$222,100,455",
"answer": "$222,100,455"
},
{
"input": "$222.100.455",
"answer": "$222,100,455"
}
],
"Extra": [
{
"input": "$4,13 + $5,24 = $9,37",
"answer": "$4.13 + $5.24 = $9.37"
},
{
"input": "$4,13 + $1.005,24 = $1.009,37",
"answer": "$4.13 + $1,005.24 = $1,009.37"
},
{
"input": "$8.000 - $8.000 = $0",
"answer": "$8,000 - $8,000 = $0"
},
{
"input": "$4.545,45 is less than $5,454.54.",
"answer": "$4,545.45 is less than $5,454.54."
},
{
"input": "$4,545.45 is less than $5.454,54.",
"answer": "$4,545.45 is less than $5,454.54."
},
{
"input": "Our movie tickets cost $12,20.",
"answer": "Our movie tickets cost $12.20."
},
{
"input": "127.255.255.255",
"answer": "127.255.255.255"
},
{
"input": ("Clayton Kershaw $31.000.000\n"
"Zack Greinke $27.000.000\n"
"Adrian Gonzalez $21.857.143\n"),
"answer": ("Clayton Kershaw $31,000,000\n"
"Zack Greinke $27,000,000\n"
"Adrian Gonzalez $21,857,143\n")
}
]
}
| """
TESTS is a dict with all you tests.
Keys for this will be categories' names.
Each test is dict with
"input" -- input data for user function
"answer" -- your right answer
"explanation" -- not necessary key, it's using for additional info in animation.
"""
TESTS = {
"Basics": [
{
"input": "$5.34",
"answer": "$5.34"
},
{
"input": "$5,34",
"answer": "$5.34"
},
{
"input": "$222,100,455.34",
"answer": "$222,100,455.34"
},
{
"input": "$222.100.455,34",
"answer": "$222,100,455.34"
},
{
"input": "$222,100,455",
"answer": "$222,100,455"
},
{
"input": "$222.100.455",
"answer": "$222,100,455"
}
],
"Extra": [
{
"input": "$4,13 + $5,24 = $9,37",
"answer": "$4.13 + $5.24 = $9.37"
},
{
"input": "$4,13 + $1.005,24 = $1.009,37",
"answer": "$4.13 + $1,005.24 = $1,009.37"
},
{
"input": "$8.000 - $8.000 = $0",
"answer": "$8,000 - $8,000 = $0"
},
{
"input": "$4.545,45 is less than $5,454.54.",
"answer": "$4,545.45 is less than $5,454.54."
},
{
"input": "$4,545.45 is less than $5.454,54.",
"answer": "$4,545.45 is less than $5,454.54."
},
{
"input": "Our movie tickets cost $12,20.",
"answer": "Our movie tickets cost $12.20."
},
{
"input": "127.255.255.255",
"answer": "127.255.255.255"
},
{
"input": ("Clayton Kershaw $31.000.000\n"
"Zack Greinker $27.000.000\n"
"Adrian Gonzalez $21.857.143\n"),
"answer": ("Clayton Kershaw $31,000,000\n"
"Zack Greinker $27,000,000\n"
"Adrian Gonzalez $21,857,143\n")
}
]
}
| Python | 0.000001 |
0626c8db3f2287d78c467c194e01cf004f0c7e78 | Convert simple-mapped results back to Series. | pandas/util/map.py | pandas/util/map.py | import numpy as np
from pandas import _tseries as lib
from pandas import notnull, Series
from functools import wraps
class repeat(object):
def __init__(self, obj):
self.obj = obj
def __getitem__(self, i):
return self.obj
class azip(object):
def __init__(self, *args):
self.cols = []
for a in args:
if np.isscalar(a):
self.cols.append(repeat(a))
else:
self.cols.append(a)
def __getitem__(self, i):
return [col[i] for col in self.cols]
def map_iter_args(arr, f, otherargs, n_otherargs, required, n_results):
'''
Substitute for np.vectorize with pandas-friendly dtype inference
Parameters
----------
arr : ndarray
f : function
Returns
-------
mapped : ndarray
'''
n = len(arr)
result = np.empty((n, n_results), dtype=object)
for i, val in enumerate(arr):
args = otherargs[i]
if notnull(val) and all(notnull(args[r]) for r in required):
result[i] = f(val, *args)
else:
result[i] = [np.nan] * n_results
return [lib.maybe_convert_objects(col, try_float=0) for col in result.T]
def auto_map(arr, f, otherargs, n_results=1, required='all'):
if all(np.isscalar(a) for a in otherargs):
res = lib.map_infer(arr, lambda v: f(v, *otherargs))
return Series(res, index=arr.index, copy=False)
n_otherargs = len(otherargs)
if required == 'all':
required = list(range(n_otherargs))
res = map_iter_args(arr, f, azip(*otherargs), n_otherargs, required, n_results)
res = [Series(col, index=arr.index, copy=False) for col in res]
if n_results == 1:
return res[0]
return res
def mapwrap(f, n_results_default=1, required='all'):
@wraps(f)
def wrapped(arr, *otherargs, n_results=None):
n_results = n_results or n_results_default
return auto_map(arr, f, otherargs, n_results, required)
return wrapped
| import numpy as np
from pandas import _tseries as lib
from pandas import notnull, Series
from functools import wraps
class repeat(object):
def __init__(self, obj):
self.obj = obj
def __getitem__(self, i):
return self.obj
class azip(object):
def __init__(self, *args):
self.cols = []
for a in args:
if np.isscalar(a):
self.cols.append(repeat(a))
else:
self.cols.append(a)
def __getitem__(self, i):
return [col[i] for col in self.cols]
def map_iter_args(arr, f, otherargs, n_otherargs, required, n_results):
'''
Substitute for np.vectorize with pandas-friendly dtype inference
Parameters
----------
arr : ndarray
f : function
Returns
-------
mapped : ndarray
'''
n = len(arr)
result = np.empty((n, n_results), dtype=object)
for i, val in enumerate(arr):
args = otherargs[i]
if notnull(val) and all(notnull(args[r]) for r in required):
result[i] = f(val, *args)
else:
result[i] = [np.nan] * n_results
return [lib.maybe_convert_objects(col, try_float=0) for col in result.T]
def auto_map(arr, f, otherargs, n_results=1, required='all'):
if all(np.isscalar(a) for a in otherargs):
return lib.map_infer(arr, lambda v: f(v, *otherargs))
n_otherargs = len(otherargs)
if required == 'all':
required = list(range(n_otherargs))
res = map_iter_args(arr, f, azip(*otherargs), n_otherargs, required, n_results)
res = [Series(col, index=arr.index, copy=False) for col in res]
if n_results == 1:
return res[0]
return res
def mapwrap(f, n_results_default=1, required='all'):
@wraps(f)
def wrapped(arr, otherargs=(), n_results=None):
n_results = n_results or n_results_default
return auto_map(arr, f, otherargs, n_results, required)
return wrapped
| Python | 0.000129 |
42f74f304d0ac404f17d6489033b6140816cb194 | Implement Stonesplinter Trogg, Burly Rockjaw Trogg, Ship's Cannon | fireplace/cards/gvg/neutral_common.py | fireplace/cards/gvg/neutral_common.py | from ..utils import *
##
# Minions
# Stonesplinter Trogg
class GVG_067:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_067a")
class GVG_067a:
Atk = 1
# Burly Rockjaw Trogg
class GVG_068:
def CARD_PLAYED(self, player, card):
if player is not self.controller and card.type == CardType.SPELL:
self.buff("GVG_068a")
class GVG_068a:
Atk = 2
# Ship's Cannon
class GVG_075:
def OWN_MINION_SUMMONED(self, minion):
if minion.race == Race.PIRATE:
targets = self.controller.getTargets(TARGET_ENEMY_CHARACTERS)
self.hit(random.choice(targets), 2)
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
| from ..utils import *
##
# Minions
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
| Python | 0 |
0bd2fffcab47c79999e5bf20b881a69193855bd9 | Fix install script | dstat_plugins/__init__.py | dstat_plugins/__init__.py | import glob
import shutil
import sys
import os
import os.path
import pkg_resources as pr
def install():
destdir = sys.argv[1]
datadir = pr.resource_filename('dstat_plugins', 'plugins')
try:
os.makedirs(destdir)
except OSError:
if not os.path.isdir(destdir):
sys.stderr.write("{} could not be created and does not "
"exist.\n".format(destdir))
sys.exit(1)
for plugin in glob.glob(os.path.join(datadir, 'dstat_*')):
shutil.copy(plugin, destdir)
| import shutil
import sys
import pkg_resources as pr
def install():
destdir = sys.argv[1]
datadir = pr.resource_filename(__name__, 'plugins/dstat_mysql5_innodb.py')
shutil.copytree(datadir, destdir)
| Python | 0.000001 |
2cc505d3a3c54f3ce1e91941a905c6a298a46d05 | Fix classifiers. | narcissus.hub/setup.py | narcissus.hub/setup.py | # This file is part of Narcissus
# Copyright (C) 2011-2013 Ralph Bean
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
import sys
f = open('README.rst')
long_description = f.read().strip()
f.close()
setup(
name='narcissus.hub',
version='0.9.0.1',
description='Hub components for Narcissus, realtime log visualization',
long_description=long_description,
license="AGPLv3+",
author='Ralph Bean',
author_email='rbean@redhat.com',
url='http://narcissus.ws/',
install_requires=[
"moksha.hub",
"pygeoip",
"geojson",
],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
namespace_packages=['narcissus'],
classifiers=[
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: System :: Logging",
"Topic :: System :: Monitoring",
"Intended Audience :: System Administrators",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
],
entry_points={
'moksha.stream' : (
## Enable this to *test* narcissus. It produces random ips.
#'random_lol = narcissus.hub.producers:RandomIPProducer',
# We used to keep these in an rrd database. That was too heavy.
#'series_pro = narcissus.hub.consumers:TimeSeriesProducer',
),
'moksha.consumer': (
'raw_ip = narcissus.hub.consumers:RawIPConsumer',
'httpdlight = narcissus.hub.consumers:HttpLightConsumer',
'latlon2geo = narcissus.hub.consumers:LatLon2GeoJsonConsumer',
# We used to keep these in an rrd database. That was too heavy.
#'series_con = narcissus.hub.consumers:TimeSeriesConsumer',
),
},
)
| # This file is part of Narcissus
# Copyright (C) 2011-2013 Ralph Bean
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
import sys
f = open('README.rst')
long_description = f.read().strip()
f.close()
setup(
name='narcissus.hub',
version='0.9.0.1',
description='Hub components for Narcissus, realtime log visualization',
long_description=long_description,
license="AGPLv3+",
author='Ralph Bean',
author_email='rbean@redhat.com',
url='http://narcissus.ws/',
install_requires=[
"moksha.hub",
"pygeoip",
"geojson",
],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
namespace_packages=['narcissus'],
classifiers=[
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Topic :: Scientific/Engineering :: Visualization"
"Topic :: System :: Logging"
"Topic :: System :: Monitoring",
"Intended Audience :: System Administrators",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
],
entry_points={
'moksha.stream' : (
## Enable this to *test* narcissus. It produces random ips.
#'random_lol = narcissus.hub.producers:RandomIPProducer',
# We used to keep these in an rrd database. That was too heavy.
#'series_pro = narcissus.hub.consumers:TimeSeriesProducer',
),
'moksha.consumer': (
'raw_ip = narcissus.hub.consumers:RawIPConsumer',
'httpdlight = narcissus.hub.consumers:HttpLightConsumer',
'latlon2geo = narcissus.hub.consumers:LatLon2GeoJsonConsumer',
# We used to keep these in an rrd database. That was too heavy.
#'series_con = narcissus.hub.consumers:TimeSeriesConsumer',
),
},
)
| Python | 0.000007 |
624599bc0172e9166536abfc6be254b5117ac64c | Add error handling in plugin installation process | nailgun/nailgun/plugin/process.py | nailgun/nailgun/plugin/process.py | # -*- coding: utf-8 -*-
import traceback
import time
from multiprocessing import Queue, Process
from sqlalchemy import update
from nailgun.api.models import Task
from nailgun.task.helpers import TaskHelper
from nailgun.logger import logger
from nailgun.db import make_session
import nailgun.plugin.manager
PLUGIN_PROCESSING_QUEUE = None
def get_queue():
global PLUGIN_PROCESSING_QUEUE
if not PLUGIN_PROCESSING_QUEUE:
PLUGIN_PROCESSING_QUEUE = Queue()
return PLUGIN_PROCESSING_QUEUE
class PluginProcessor(Process):
def __init__(self):
Process.__init__(self)
self.db = make_session()
self.plugin_manager = nailgun.plugin.manager.PluginManager(self.db)
self.queue = get_queue()
def run(self):
while True:
task_uuid = None
try:
task_uuid = self.queue.get()
self.plugin_manager.process(task_uuid)
except Exception as exc:
if task_uuid:
self.set_error(task_uuid, exc)
logger.error(traceback.format_exc())
time.sleep(2)
def set_error(self, task_uuid, msg):
self.db.query(Task).filter_by(uuid=task_uuid).update({
'status': 'error',
'progress': 100,
'msg': str(msg)})
| # -*- coding: utf-8 -*-
import traceback
import time
from multiprocessing import Queue, Process
from nailgun.task.helpers import TaskHelper
from nailgun.logger import logger
from nailgun.db import make_session
import nailgun.plugin.manager
PLUGIN_PROCESSING_QUEUE = None
def get_queue():
global PLUGIN_PROCESSING_QUEUE
if not PLUGIN_PROCESSING_QUEUE:
PLUGIN_PROCESSING_QUEUE = Queue()
return PLUGIN_PROCESSING_QUEUE
class PluginProcessor(Process):
def __init__(self):
Process.__init__(self)
self.db = make_session()
self.plugin_manager = nailgun.plugin.manager.PluginManager(self.db)
self.queue = get_queue()
def run(self):
while True:
try:
task_uuid = self.queue.get()
self.plugin_manager.process(task_uuid)
except Exception as exc:
# TaskHelper.set_error(task_uuid, exc)
logger.error(traceback.format_exc())
time.sleep(2)
| Python | 0 |
4cd7bc99509c197e8c8139d6d597ab04bece0cb1 | Use include_paths_relative_to_dir | vim/ycm_extra_conf.py | vim/ycm_extra_conf.py | from itertools import chain, repeat
from subprocess import Popen, PIPE
import os
import re
import ycm_core
extra_flags = [
'-Wall',
'-Wextra',
'-pedantic',
'-DNDEBUG',
'-I', '.',
]
filetype_flags = {
'c': ['-x', 'c', '-std=c11'],
'cpp': ['-x', 'c++', '-std=c++14'],
}
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
compilation_database_folder = 'build/'
if os.path.exists(compilation_database_folder):
database = ycm_core.CompilationDatabase(compilation_database_folder)
else:
database = None
def GetSearchList(filetype):
p = Popen(['clang', '-E'] + filetype_flags[filetype] + ['-', '-v'],
stdout=PIPE, stderr=PIPE)
_, stderr = p.communicate()
search_list = re.search(
'#include <\.\.\.> search starts here:\n(.+)\nEnd of search list',
stderr.decode(),
re.DOTALL)
return [s.strip() for s in search_list.group(1).splitlines()]
def GetDefaultFlags(filetype):
if filetype in filetype_flags:
return filetype_flags[filetype] + list(chain.from_iterable(
zip(repeat('-isystem'), GetSearchList(filetype))))
return []
def IsHeaderFile(filename):
extension = os.path.splitext(filename)[1]
return extension in ['.h', '.hxx', '.hpp', '.hh']
def GetCompilationInfoForFile(filename):
# The compilation_commands.json file generated by CMake does not have
# entries for header files. So we do our best by asking the db for flags
# for a corresponding source file, if any. If one exists, the flags
# for that file should be good enough.
if IsHeaderFile(filename):
basename = os.path.splitext(filename)[0]
for extension in ['.cpp', '.cxx', '.cc', '.c', '.m', '.mm']:
replacement_file = basename + extension
if os.path.exists(replacement_file):
compilation_info = database.GetCompilationInfoForFile(
replacement_file)
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile(filename)
def FlagsForFile(filename, **kwargs):
client_data = kwargs['client_data']
filetype = client_data.get('&filetype', '')
default_flags = GetDefaultFlags(filetype)
if not default_flags:
return None
if not database:
return {
'flags': default_flags + extra_flags,
'include_paths_relative_to_dir': os.path.dirname(os.path.abspath(__file__))
}
compilation_info = GetCompilationInfoForFile(filename)
if not compilation_info:
return None
return {
'flags': default_flags + list(compilation_info.compiler_flags_),
'include_paths_relative_to_dir': compilation_info.compiler_working_dir_
}
| from subprocess import Popen, PIPE
import os
import re
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
default_flags = [
'-Wall',
'-Wextra',
'-pedantic',
'-DNDEBUG',
'-I', '.',
]
filetype_flags = {
'c': ['-x', 'c', '-std=c11'],
'cpp': ['-x', 'c++', '-std=c++14'],
}
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
compilation_database_folder = 'build/'
if os.path.exists(compilation_database_folder):
database = ycm_core.CompilationDatabase(compilation_database_folder)
else:
database = None
def GetDefaultSearchList(flags):
p = Popen(['clang', '-E'] + flags + ['-', '-v'], stdout=PIPE, stderr=PIPE)
_, stderr = p.communicate()
search_list = re.search(
'#include <\.\.\.> search starts here:\n(.+)\nEnd of search list',
stderr.decode(),
re.DOTALL)
return [s.strip() for s in search_list.group(1).splitlines()]
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
if not working_directory:
return list(flags)
new_flags = []
make_next_absolute = False
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith('/'):
new_flag = os.path.join(working_directory, flag)
for path_flag in ['-isystem', '-I', '-iquote', '--sysroot=']:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith(path_flag):
path = flag[len(path_flag):]
new_flag = path_flag + os.path.join(working_directory, path)
break
new_flags.append(new_flag)
return new_flags
def IsHeaderFile(filename):
extension = os.path.splitext(filename)[1]
return extension in ['.h', '.hxx', '.hpp', '.hh']
def GetCompilationInfoForFile(filename):
# The compilation_commands.json file generated by CMake does not have
# entries for header files. So we do our best by asking the db for flags
# for a corresponding source file, if any. If one exists, the flags
# for that file should be good enough.
if IsHeaderFile(filename):
basename = os.path.splitext(filename)[0]
for extension in ['.cpp', '.cxx', '.cc', '.c', '.m', '.mm']:
replacement_file = basename + extension
if os.path.exists(replacement_file):
compilation_info = database.GetCompilationInfoForFile(
replacement_file)
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile(filename)
def FlagsForFile(filename, **kwargs):
flags = []
client_data = kwargs['client_data']
filetype = client_data.get('&filetype', '')
if filetype in filetype_flags:
flags.extend(filetype_flags[filetype])
for path in GetDefaultSearchList(filetype_flags[filetype]):
flags.extend(['-isystem', path])
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile(filename)
if not compilation_info:
return None
flags.extend(compilation_info.compiler_flags_)
workdir = compilation_info.compiler_working_dir_
else:
flags.extend(default_flags)
workdir = os.path.dirname(os.path.abspath(__file__))
return {
'flags': MakeRelativePathsInFlagsAbsolute(flags, workdir),
'do_cache': True
}
| Python | 0.000004 |
49ab81275b0e29281703257000c62a54f9627df8 | fix property usage | polyjit/buildbot/builders/slurm.py | polyjit/buildbot/builders/slurm.py | import sys
from polyjit.buildbot.builders import register
from polyjit.buildbot import slaves
from polyjit.buildbot.utils import (builder, define, git, cmd, trigger, ip,
mkdir, s_sbranch, s_force, s_trigger,
hash_download_from_master, clean_unpack)
from polyjit.buildbot.repos import make_cb, codebases
from buildbot.plugins import util
from buildbot.changes import filter
codebase = make_cb(['benchbuild'])
P = util.Property
BuildFactory = util.BuildFactory
def has_munged(host):
if "has_munged" in host["properties"]:
return host["properties"]["has_munged"]
return False
accepted_builders = slaves.get_hostlist(slaves.infosun, predicate=has_munged)
# yapf: disable
def configure(c):
llvm_dl = hash_download_from_master("public_html/llvm.tar.gz",
"llvm.tar.gz", "llvm")
polyjit_dl = hash_download_from_master("public_html/polyjit.tar.gz",
"polyjit.tar.gz", "polyjit")
steps = [
# trigger(schedulerNames=['trigger-build-llvm', 'trigger-build-jit']),
define("scratch", ip("/scratch/pjtest/%(prop:buildnumber)s/"))
]
steps.extend(llvm_dl)
steps.extend(clean_unpack("llvm.tar.gz", "llvm"))
steps.extend(polyjit_dl)
steps.extend(clean_unpack("polyjit.tar.gz", "polyjit"))
steps.extend([
define("BENCHBUILD_ROOT", ip("%(prop:builddir)s/build/benchbuild/")),
git('benchbuild', 'develop', codebases, workdir=P("BENCHBUILD_ROOT")),
])
steps.extend([
define('benchbuild', ip('%(prop:scratch)s/env/bin/benchbuild')),
define('llvm', ip('%(prop:scratch)s/llvm')),
define('polyjit', ip('%(prop:scratch)s/polyjit')),
mkdir(P("scratch")),
cmd('virtualenv', '-ppython3', ip('%(prop:scratch)s/env/')),
cmd(ip('%(prop:scratch)s/env/bin/pip3'), 'install', '--upgrade', '.',
workdir='build/benchbuild'),
cmd("rsync", "-var", "./", P("scratch")),
cmd(P('benchbuild'), 'bootstrap', env={
'BB_ENV_COMPILER_PATH': ip('%(prop:llvm)s/bin'),
'BB_ENV_COMPILER_LD_LIBRARY_PATH':
ip('%(prop:llvm)s/lib:%(prop:polyjit)s/lib'),
'BB_ENV_LOOKUP_PATH':
ip('%(prop:llvm)s/bin:%(prop:polyjit)s/bin'),
'BB_ENV_LOOKUP_LD_LIBRARY_PATH':
ip('%(prop:llvm)s/lib:%(prop:polyjit)s/lib'),
'BB_LLVM_DIR': ip('%(prop:scratch)s/llvm'),
'BB_LIKWID_PREFIX': '/usr',
'BB_PAPI_INCLUDE': '/usr/include',
'BB_PAPI_LIBRARY': '/usr/lib',
'BB_SRC_DIR': ip('%(prop:scratch)s/benchbuild'),
'BB_UNIONFS_ENABLE': 'false'
},
workdir=P('scratch')),
])
c['builders'].append(builder("build-slurm-set", None, accepted_builders,
factory=BuildFactory(steps)))
# yapf: enable
def schedule(c):
c['schedulers'].extend([
s_sbranch("build-slurm-set-sched", codebase, ["build-slurm-set"],
change_filter=filter.ChangeFilter(branch_re='next|develop'),
treeStableTimer=2 * 60),
s_force("force-build-slurm-set", codebase, ["build-slurm-set"]),
s_trigger("trigger-slurm-set", codebase, ['build-slurm-set'])
])
register(sys.modules[__name__])
| import sys
from polyjit.buildbot.builders import register
from polyjit.buildbot import slaves
from polyjit.buildbot.utils import (builder, define, git, cmd, trigger, ip,
mkdir, s_sbranch, s_force, s_trigger,
hash_download_from_master, clean_unpack)
from polyjit.buildbot.repos import make_cb, codebases
from buildbot.plugins import util
from buildbot.changes import filter
codebase = make_cb(['benchbuild'])
P = util.Property
BuildFactory = util.BuildFactory
def has_munged(host):
if "has_munged" in host["properties"]:
return host["properties"]["has_munged"]
return False
accepted_builders = slaves.get_hostlist(slaves.infosun, predicate=has_munged)
# yapf: disable
def configure(c):
llvm_dl = hash_download_from_master("public_html/llvm.tar.gz",
"llvm.tar.gz", "llvm")
polyjit_dl = hash_download_from_master("public_html/polyjit.tar.gz",
"polyjit.tar.gz", "polyjit")
steps = [
# trigger(schedulerNames=['trigger-build-llvm', 'trigger-build-jit']),
define("scratch", ip("/scratch/pjtest/%(prop:buildnumber)s/"))
]
steps.extend(llvm_dl)
steps.extend(clean_unpack("llvm.tar.gz", "llvm"))
steps.extend(polyjit_dl)
steps.extend(clean_unpack("polyjit.tar.gz", "polyjit"))
steps.extend([
define("BENCHBUILD_ROOT", ip("%(prop:builddir)s/build/benchbuild/")),
git('benchbuild', 'develop', codebases, workdir=P("BENCHBUILD_ROOT")),
])
steps.extend([
define('benchbuild', ip('%(prop:scratch)s/env/bin/benchbuild')),
define('llvm', ip('%(prop:scratch)s/llvm')),
define('polyjit', ip('%(prop:scratch)s/polyjit')),
mkdir(P("scratch")),
cmd('virtualenv', '-ppython3', ip('%(prop:scratch)s/env/')),
cmd(ip('%(prop:scratch)s/env/bin/pip3'), 'install', '--upgrade', '.',
workdir='build/benchbuild'),
cmd("rsync", "-var", "./", P("scratch")),
cmd(P('benchbuild'), 'bootstrap', env={
'BB_ENV_COMPILER_PATH': ip('%(prop:llvm)s/bin'),
'BB_ENV_COMPILER_LD_LIBRARY_PATH':
ip('%(prop:llvm)s/lib:%(prop:polyjit)s/lib'),
'BB_ENV_LOOKUP_PATH':
ip('%(prop:llvm)s/bin:%(prop:polyjit)s/bin'),
'BB_ENV_LOOKUP_LD_LIBRARY_PATH':
ip('%(prop:llvm)s/lib:%(prop:polyjit)s/lib'),
'BB_LLVM_DIR': ip('%(prop:scratch)s/llvm'),
'BB_LIKWID_PREFIX': '/usr',
'BB_PAPI_INCLUDE': '/usr/include',
'BB_PAPI_LIBRARY': '/usr/lib',
'BB_SRC_DIR': ip('%(prop:scratch)s/benchbuild'),
'BB_UNIONFS_ENABLE': 'false'
},
workdir=P('%(prop:scratch)s')),
])
c['builders'].append(builder("build-slurm-set", None, accepted_builders,
factory=BuildFactory(steps)))
# yapf: enable
def schedule(c):
c['schedulers'].extend([
s_sbranch("build-slurm-set-sched", codebase, ["build-slurm-set"],
change_filter=filter.ChangeFilter(branch_re='next|develop'),
treeStableTimer=2 * 60),
s_force("force-build-slurm-set", codebase, ["build-slurm-set"]),
s_trigger("trigger-slurm-set", codebase, ['build-slurm-set'])
])
register(sys.modules[__name__])
| Python | 0.000002 |
3b9954b3a6206d758664084dc24cd83774f8a623 | use flownet to compute flow map | FlowNet/flownet-release/models/flownet/read_video.py | FlowNet/flownet-release/models/flownet/read_video.py | # Simple optical flow algorithm
#
#
# OpenCV version: 2.4.8
#
#
# Contact:
# Min-Hung (Steve) Chen at <cmhungsteve@gatech.edu>
# Chih-Yao Ma at <cyma@gatech.edu>
#
# Last update: 05/16/2016
import numpy as np
import cv2
from scripts.flownet import FlowNet
# read the video file
cap = cv2.VideoCapture('v_Basketball_g01_c01.avi')
# information of the video
# property identifier:
# 1: ?; 2: s/frame; 3: width; 4: height; 6: ?; 7: ?
Fr = int(round(1 / cap.get(2)))
Wd = int(cap.get(3))
Ht = int(cap.get(4))
# Define the codec and create VideoWriter object
# fourcc = cv2.cv.CV_FOURCC('X','V','I','D')
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('out_flow.avi', fourcc, Fr, (Wd, Ht))
# read the first frame
ret, prvs = cap.read()
# save in HSV (because of the optical flow algorithm we used)
hsv = np.zeros_like(prvs)
hsv[..., 1] = 255
indFrame = 1
while(cap.isOpened):
# Capture frame-by-frame
ret, next = cap.read()
if (indFrame % 7) == 0:
if ret == True:
# Get frame sizes
height, width, channels = prvs.shape
cv2.imshow('Frame 1', prvs)
cv2.imshow('Frame 2', next)
# save the frames into png files for FlowNet to read
# TODO: this maybe stupid but is the easiest way without reconfigure
# the FlowNet and possible re-train the model
cv2.imwrite('data/frame1.png', prvs)
cv2.imwrite('data/frame2.png', next)
# compute the optical flow from two adjacent frames
FlowNet.run(prvs) # the FlowNet will save a .flo file
# read the .flo file
fileName = 'flownetc-pred-0000000.flo'
flowMapSize = np.fromfile(fileName, np.float32, count=1)
if flowMapSize != 202021.25:
print 'Dimension incorrect. Invalid .flo file'
else:
data = np.fromfile(fileName, np.float32,
count=2 * width * height)
flow = np.resize(data, (height, width, 2))
for index, x in np.ndenumerate(flow):
if x > 100:
flow[index] = 0
# show in RGB for visualization
mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
hsv[..., 0] = ang * 180 / np.pi / 2
hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
frameProc = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
# write the processed frame
out.write(frameProc)
# Display the resulting frame
cv2.imshow('Processed frame', frameProc)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
prvs = next
else:
break
indFrame = indFrame + 1
# When everything done, release the capture
cap.release()
out.release()
cv2.destroyAllWindows()
| # Simple optical flow algorithm
#
#
# OpenCV version: 2.4.8
#
#
# Contact:
# Min-Hung (Steve) Chen at <cmhungsteve@gatech.edu>
# Chih-Yao Ma at <cyma@gatech.edu>
#
# Last update: 05/13/2016
import numpy as np
import cv2
from scripts.flownet import FlowNet
cap = cv2.VideoCapture('v_Basketball_g01_c01.avi')
# information of the video
# property identifier:
# 1: ?; 2: s/frame; 3: width; 4: height; 6: ?; 7: ?
Fr = int(round(1/cap.get(2)))
#Fr = 25
Wd = int(cap.get(3))
Ht = int(cap.get(4))
print Fr
print Wd
print Ht
# Define the codec and create VideoWriter object
# fourcc = cv2.cv.CV_FOURCC('X','V','I','D')
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('out_flow.avi',fourcc, Fr, (Wd,Ht))
# read the first frame
ret, frame1 = cap.read()
# prvs = cv2.cvtColor(frame1,cv2.COLOR_BGR2GRAY) # convert to gray scale
prvs = frame1
# save in HSV (because of the optical flow algorithm we used)
hsv = np.zeros_like(frame1)
hsv[...,1] = 255
while(cap.isOpened):
# Capture frame-by-frame
ret, frame2 = cap.read()
if ret==True:
# Get frame sizes
height, width, channels = prvs.shape
next = frame2
cv2.imshow('Frame 1',prvs)
cv2.imshow('Frame 2',next)
# save the frames into png files for FlowNet to read
# TODO: this maybe stupid but is the easiest way without reconfigure
# the FlowNet and possible re-train the model
cv2.imwrite('data/frame1.png',prvs)
cv2.imwrite('data/frame2.png',next)
# compute the optical flow from two adjacent frames
FlowNet.run(prvs) # the FlowNet will save a .flo file
# read the .flo file
fileName = 'flownetc-pred-0000000.flo'
flowMapSize = np.fromfile(fileName,np.float32, count = 1)
if flowMapSize != 202021.25:
print 'Dimension incorrect. Invalid .flo file'
else:
data = np.fromfile(fileName, np.float32, count = 2*width*height)
flow = np.resize(data, (height, width, 2))
# show in RGB for visualization
# TODO: there is something wrong here...
mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
hsv[..., 0] = ang * 180 / np.pi / 2
hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
frameProc = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
out.write(frameProc)
# Display the resulting frame
cv2.imshow('Processed frame',frameProc)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
prvs = next
else:
break
# When everything done, release the capture
cap.release()
out.release()
cv2.destroyAllWindows()
| Python | 0 |
3a910621b36f0555b4a16f22582313333e162093 | Check for icons when displaying thumbnails | paw/admin.py | paw/admin.py | from django.contrib import admin
from paw.models import TextLink, IconLink, IconFolder, Page, PageTextLink, PageIconDisplay, IconFolderIcon, EntryPoint
from adminsortable.admin import NonSortableParentAdmin, SortableStackedInline, SortableTabularInline, SortableAdmin
class PageTextLinkInline(SortableStackedInline):
model = PageTextLink
extra = 1
class PageIconDisplayInline(SortableTabularInline):
model = PageIconDisplay
extra = 1
fields = ['icon', 'icon_thumbnail']
readonly_fields = ['icon_thumbnail']
def icon_thumbnail(self, o):
if o.icon.display_icon:
return '<img src="{url:}" />'.format(url=o.icon.display_icon.thumbnail['50x50'].url)
icon_thumbnail.short_description = 'Thumbnail'
icon_thumbnail.allow_tags = True
class IconFolderIconInline(SortableTabularInline):
model = IconFolderIcon
extra = 1
def icon_thumbnail(self, o):
if o.icon.display_icon:
return '<img src="{url:}" />'.format(url=o.icon.display_icon.thumbnail['50x50'].url)
icon_thumbnail.short_description = 'Thumbnail'
icon_thumbnail.allow_tags = True
fields = ['icon', 'icon_thumbnail']
readonly_fields = ['icon_thumbnail']
class PageAdmin(NonSortableParentAdmin):
inlines = [PageIconDisplayInline, PageTextLinkInline]
prepopulated_fields = {"slug": ("title",)}
class IconFolderAdmin(NonSortableParentAdmin):
inlines = [IconFolderIconInline]
list_display = ['title', 'admin_icon']
readonly_fields = ['form_icon']
def admin_icon(self, o):
if o.display_icon:
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['50x50'].url)
def form_icon(self, o):
if o.display_icon:
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['100x100'].url)
admin_icon.short_description = 'Icon'
admin_icon.allow_tags = True
form_icon.short_description = 'Thumbnail'
form_icon.allow_tags = True
fields = ['display_icon', 'form_icon', 'title', 'internal_description']
class IconLinkAdmin(admin.ModelAdmin):
list_display=['title', 'admin_icon']
fields = ['display_icon', 'form_icon', 'title', 'internal_description', 'url', 'check_url', 'start_hidden', 'mac_pc_only']
readonly_fields = ['form_icon']
def admin_icon(self, o):
if o.display_icon:
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['50x50'].url)
def form_icon(self, o):
if o.display_icon:
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['100x100'].url)
admin_icon.short_description = 'Icon'
admin_icon.allow_tags = True
form_icon.short_description = 'Thumbnail'
form_icon.allow_tags = True
class EntryPointAdmin(admin.ModelAdmin):
list_display=['__str__', 'page']
# Register your models here.
admin.site.register(TextLink)
admin.site.register(Page, PageAdmin)
admin.site.register(IconLink, IconLinkAdmin)
admin.site.register(IconFolder, IconFolderAdmin)
admin.site.register(EntryPoint, EntryPointAdmin)
| from django.contrib import admin
from paw.models import TextLink, IconLink, IconFolder, Page, PageTextLink, PageIconDisplay, IconFolderIcon, EntryPoint
from adminsortable.admin import NonSortableParentAdmin, SortableStackedInline, SortableTabularInline, SortableAdmin
class PageTextLinkInline(SortableStackedInline):
model = PageTextLink
extra = 1
class PageIconDisplayInline(SortableTabularInline):
model = PageIconDisplay
extra = 1
fields = ['icon', 'icon_thumbnail']
readonly_fields = ['icon_thumbnail']
def icon_thumbnail(self, o):
return '<img src="{url:}" />'.format(url=o.icon.display_icon.thumbnail['50x50'].url)
icon_thumbnail.short_description = 'Thumbnail'
icon_thumbnail.allow_tags = True
class IconFolderIconInline(SortableTabularInline):
model = IconFolderIcon
extra = 1
def icon_thumbnail(self, o):
return '<img src="{url:}" />'.format(url=o.icon.display_icon.thumbnail['50x50'].url)
icon_thumbnail.short_description = 'Thumbnail'
icon_thumbnail.allow_tags = True
fields = ['icon', 'icon_thumbnail']
readonly_fields = ['icon_thumbnail']
class PageAdmin(NonSortableParentAdmin):
inlines = [PageIconDisplayInline, PageTextLinkInline]
prepopulated_fields = {"slug": ("title",)}
class IconFolderAdmin(NonSortableParentAdmin):
inlines = [IconFolderIconInline]
list_display = ['title', 'admin_icon']
readonly_fields = ['form_icon']
def admin_icon(self, o):
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['50x50'].url)
def form_icon(self, o):
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['100x100'].url)
admin_icon.short_description = 'Icon'
admin_icon.allow_tags = True
form_icon.short_description = 'Thumbnail'
form_icon.allow_tags = True
fields = ['display_icon', 'form_icon', 'title', 'internal_description']
class IconLinkAdmin(admin.ModelAdmin):
list_display=['title', 'admin_icon']
fields = ['display_icon', 'form_icon', 'title', 'internal_description', 'url', 'check_url', 'start_hidden', 'mac_pc_only']
readonly_fields = ['form_icon']
def admin_icon(self, o):
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['50x50'].url)
def form_icon(self, o):
return '<img src="{url:}" />'.format(url=o.display_icon.thumbnail['100x100'].url)
admin_icon.short_description = 'Icon'
admin_icon.allow_tags = True
form_icon.short_description = 'Thumbnail'
form_icon.allow_tags = True
class EntryPointAdmin(admin.ModelAdmin):
list_display=['__str__', 'page']
# Register your models here.
admin.site.register(TextLink)
admin.site.register(Page, PageAdmin)
admin.site.register(IconLink, IconLinkAdmin)
admin.site.register(IconFolder, IconFolderAdmin)
admin.site.register(EntryPoint, EntryPointAdmin)
| Python | 0 |
00556c84e23dd86eb4ca08ba4c6238425a3eba7e | Create Preparation model | project_fish/whats_fresh/models.py | project_fish/whats_fresh/models.py | from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='%Y/%m/%d')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
class Product(models.Model):
"""
The Product model holds the information for a product, including the
origin, season, market price, and availability.
In addition, it holds a foreign key to the image and story related to the
product.
"""
name = models.TextField()
variety = models.TextField()
alt_name = models.TextField()
description = models.TextField()
origin = models.TextField()
season = models.TextField()
available = models.NullBooleanField()
market_price = models.TextField()
link = models.URLField()
image_id = models.ForeignKey('Image')
story_id = models.ForeignKey('Story')
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Story(models.Model):
pass
class Preparation(models.Model):
"""
The Preparation model contains possible preparations of product, to be
associated many-to-many with product (a product can have one or more
preparations, preparations apply to many products). Preparations may be
things like 'frozen', 'dried', 'fresh', 'live', etc, to be defined by
Sea Grant data input.
"""
name = models.TextField()
description = models.TextField()
additional_info = models.TextField()
| from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='%Y/%m/%d')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
class Product(models.Model):
"""
The Product model holds the information for a product, including the
origin, season, market price, and availability.
In addition, it holds a foreign key to the image and story related to the
product.
"""
name = models.TextField()
variety = models.TextField()
alt_name = models.TextField()
description = models.TextField()
origin = models.TextField()
season = models.TextField()
available = models.NullBooleanField()
market_price = models.TextField()
link = models.URLField()
image_id = models.ForeignKey('Image')
story_id = models.ForeignKey('Story')
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Story(models.Model):
pass
class Preparation(models.Model):
"""
The Preparation model contains possible preparations of product, to be
associated many-to-many with product (a product can have one or more
preparations, preparations apply to many products). Preparations may be
things like 'frozen', 'dried', 'fresh', 'live', etc, to be defined by
Sea Grant data input.
"""
pass
| Python | 0 |
5d9f83c06e3418cbb4bd5314136bd4700d7e26c3 | Remove print statement | paasta_tools/cli/cmds/performance_check.py | paasta_tools/cli/cmds/performance_check.py | #!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
from service_configuration_lib import read_extra_service_information
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import timeout
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
'performance-check',
description='Performs a performance check',
help='Performs a performance check',
)
list_parser.add_argument(
'-s', '--service',
help='Name of service for which you wish to check. Leading "services-", as included in a '
'Jenkins job name, will be stripped.',
)
list_parser.add_argument(
'-d', '--soa-dir',
dest='soa_dir',
metavar='SOA_DIR',
default=DEFAULT_SOA_DIR,
help='Define a different soa config directory',
)
list_parser.set_defaults(command=perform_performance_check)
def load_performance_check_config(service, soa_dir):
return read_extra_service_information(
service_name=service,
extra_info='performance-check',
soa_dir=soa_dir,
)
def submit_performance_check_job(service, soa_dir):
performance_check_config = load_performance_check_config(service, soa_dir)
if not performance_check_config:
print "No performance-check.yaml. Skipping performance-check."
return
endpoint = performance_check_config.pop('endpoint')
r = requests.post(
url=endpoint,
params=performance_check_config,
)
r.raise_for_status()
print "Posted a submission to the PaaSTA performance-check service."
print "Endpoint: {}".format(endpoint)
print "Parameters: {}".format(performance_check_config)
@timeout()
def perform_performance_check(args):
service = args.service
if service.startswith('services-'):
service = service.split('services-', 1)[1]
validate_service_name(service, args.soa_dir)
try:
submit_performance_check_job(
service=service,
soa_dir=args.soa_dir,
)
except Exception as e:
print "Something went wrong with the performance check. Safely bailing. No need to panic."
print "Here was the error:"
print str(e)
| #!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
from service_configuration_lib import read_extra_service_information
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import timeout
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
'performance-check',
description='Performs a performance check',
help='Performs a performance check',
)
list_parser.add_argument(
'-s', '--service',
help='Name of service for which you wish to check. Leading "services-", as included in a '
'Jenkins job name, will be stripped.',
)
list_parser.add_argument(
'-d', '--soa-dir',
dest='soa_dir',
metavar='SOA_DIR',
default=DEFAULT_SOA_DIR,
help='Define a different soa config directory',
)
list_parser.set_defaults(command=perform_performance_check)
def load_performance_check_config(service, soa_dir):
return read_extra_service_information(
service_name=service,
extra_info='performance-check',
soa_dir=soa_dir,
)
def submit_performance_check_job(service, soa_dir):
performance_check_config = load_performance_check_config(service, soa_dir)
if not performance_check_config:
print "No performance-check.yaml. Skipping performance-check."
return
endpoint = performance_check_config.pop('endpoint')
r = requests.post(
url=endpoint,
params=performance_check_config,
)
r.raise_for_status()
print "Posted a submission to the PaaSTA performance-check service."
print "Endpoint: {}".format(endpoint)
print "Parameters: {}".format(performance_check_config)
@timeout()
def perform_performance_check(args):
service = args.service
if service.startswith('services-'):
service = service.split('services-', 1)[1]
validate_service_name(service, args.soa_dir)
print service
try:
submit_performance_check_job(
service=service,
soa_dir=args.soa_dir,
)
except Exception as e:
print "Something went wrong with the performance check. Safely bailing. No need to panic."
print "Here was the error:"
print str(e)
| Python | 0.007015 |
0de2aace2a493d0d760b1ceec3b67f5a6c3f86e6 | fix exceptin 'TypeError: a float is required' | vmchecker/coursedb.py | vmchecker/coursedb.py | #!/usr/bin/env python
"""Manage the course database"""
from __future__ import with_statement
import sqlite3
from contextlib import contextmanager, closing
class CourseDb(object):
"""A class to encapsulate the logic behind updates and querries of
the course's db"""
def __init__(self, db_cursor):
self.db_cursor = db_cursor
def create_tables(self):
"""Create the tables needed for vmchecker"""
self.db_cursor.executescript("""
CREATE TABLE assignments (id INTEGER PRIMARY KEY, name TEXT);
CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT);
CREATE TABLE grades (assignment_id INTEGER,
user_id INTEGER,
grade TEXT,
mtime TIMESTAMP NOT NULL,
PRIMARY KEY(assignment_id, user_id));""")
def add_assignment(self, assignment):
"""Creates an id of the homework and returns it."""
self.db_cursor.execute('INSERT INTO assignments (name) values (?)',
(assignment,))
self.db_cursor.execute('SELECT last_insert_rowid()')
assignment_id, = self.db_cursor.fetchone()
return assignment_id
def get_assignment_id(self, assignment):
"""Returns the id of the assignment"""
self.db_cursor.execute('SELECT id FROM assignments WHERE name=?',
(assignment,))
result = self.db_cursor.fetchone()
if result is None:
return self.add_assignment(assignment)
return result[0]
def add_user(self, user):
"""Creates an id of the user and returns it."""
self.db_cursor.execute('INSERT INTO users (name) values (?)', (user,))
self.db_cursor.execute('SELECT last_insert_rowid()')
user_id, = self.db_cursor.fetchone()
return user_id
def get_user_id(self, user):
"""Returns the id of the user"""
self.db_cursor.execute('SELECT id FROM users WHERE name=?', (user,))
result = self.db_cursor.fetchone()
if result is None:
return self.add_user(user)
return result[0]
def get_grade_mtime(self, assignment_id, user_id):
"""Returns the mtime of a grade"""
self.db_cursor.execute('SELECT mtime FROM grades '
'WHERE assignment_id = ? and user_id = ?',
(assignment_id, user_id))
result = self.db_cursor.fetchone()
if result is not None:
return result[0]
def save_grade(self, assignment_id, user_id, grade, mtime):
"""Save the grade into the database
If the grade identified by (assignment_id, user_id)
exists then update the DB, else inserts a new entry.
"""
self.db_cursor.execute('INSERT OR REPLACE INTO grades '
'(grade, mtime, assignment_id, user_id) '
'VALUES (?, ?, ?, ?) ',
(grade, mtime, assignment_id, user_id))
@contextmanager
def opening_course_db(db_file, isolation_level=None):
"""Context manager ensuring that the database resources are
propperly closed upon either success or exception.
On success the latest changes must be commited, while on failure
they must be rolled back.
"""
db_conn = sqlite3.connect(db_file, isolation_level=isolation_level)
try:
with closing(db_conn.cursor()) as db_cursor:
course_db = CourseDb(db_cursor)
yield course_db
except:
db_conn.rollback()
raise
else:
db_conn.commit()
finally:
db_conn.close()
def create_db_tables(db_file):
"""Create vmchecker's tables inside the given db_file"""
with opening_course_db(db_file) as course_db:
course_db.create_tables()
| #!/usr/bin/env python
"""Manage the course database"""
from __future__ import with_statement
import sqlite3
from contextlib import contextmanager, closing
class CourseDb(object):
"""A class to encapsulate the logic behind updates and querries of
the course's db"""
def __init__(self, db_cursor):
self.db_cursor = db_cursor
def create_tables(self):
"""Create the tables needed for vmchecker"""
self.db_cursor.executescript("""
CREATE TABLE assignments (id INTEGER PRIMARY KEY, name TEXT);
CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT);
CREATE TABLE grades (assignment_id INTEGER,
user_id INTEGER,
grade TEXT,
mtime TIMESTAMP NOT NULL,
PRIMARY KEY(assignment_id, user_id));""")
def add_assignment(self, assignment):
"""Creates an id of the homework and returns it."""
self.db_cursor.execute('INSERT INTO assignments (name) values (?)',
(assignment,))
self.db_cursor.execute('SELECT last_insert_rowid()')
assignment_id, = self.db_cursor.fetchone()
return assignment_id
def get_assignment_id(self, assignment):
"""Returns the id of the assignment"""
self.db_cursor.execute('SELECT id FROM assignments WHERE name=?',
(assignment,))
result = self.db_cursor.fetchone()
if result is None:
return self.add_assignment(assignment)
return result[0]
def add_user(self, user):
"""Creates an id of the user and returns it."""
self.db_cursor.execute('INSERT INTO users (name) values (?)', (user,))
self.db_cursor.execute('SELECT last_insert_rowid()')
user_id, = self.db_cursor.fetchone()
return user_id
def get_user_id(self, user):
"""Returns the id of the user"""
self.db_cursor.execute('SELECT id FROM users WHERE name=?', (user,))
result = self.db_cursor.fetchone()
if result is None:
return self.add_user(user)
return result[0]
def get_grade_mtime(self, assignment_id, user_id):
"""Returns the mtime of a grade"""
self.db_cursor.execute('SELECT mtime FROM grades '
'WHERE assignment_id = ? and user_id = ?',
(assignment_id, user_id))
result = self.db_cursor.fetchone()
if result is not None:
return result[0]
def save_grade(self, assignment_id, user_id, grade, mtime):
"""Save the grade into the database
If the grade identified by (assignment_id, user_id)
exists then update the DB, else inserts a new entry.
"""
self.db_cursor.execute('INSERT OR REPLACE INTO grades '
'(grade, mtime, assignment_id, user_id) '
'VALUES (?, ?, ?, ?) ',
(grade, mtime, assignment_id, user_id))
@contextmanager
def opening_course_db(db_file, isolation_level=None):
"""Context manager ensuring that the database resources are
propperly closed upon either success or exception.
On success the latest changes must be commited, while on failure
they must be rolled back.
"""
db_conn = sqlite3.connect(db_file, isolation_level)
try:
with closing(db_conn.cursor()) as db_cursor:
course_db = CourseDb(db_cursor)
yield course_db
except:
db_conn.rollback()
raise
else:
db_conn.commit()
finally:
db_conn.close()
def create_db_tables(db_file):
"""Create vmchecker's tables inside the given db_file"""
with opening_course_db(db_file) as course_db:
course_db.create_tables()
| Python | 0 |
c0259abdd1b34cd195e3f1ffcb7fb5479d76a0fe | bump version to 1.0.0 | vncdotool/__init__.py | vncdotool/__init__.py | __version__ = "1.0.0"
| __version__ = "1.0.0dev"
| Python | 0 |
4e6207361d7ef08a20e343cb5dab500c2c9cdf28 | Update AppleApple!.py | AppleApple!/AppleApple!.py | AppleApple!/AppleApple!.py | import pygame, random
from pygame.locals import *
costPerTree = 0
class Item(object):
def __init__(self, itemName, isMaterial, isFood, isWeapon, isCraftable, cost, recipe=()):
self.name = str(itemName)
self.isMaterial = isMaterial
self.isFood = isFood
self.isWeapon = isWeapon
self.Craftable = isCraftable
self.cost = cost
self.recipe = recipe
class Weapon(Item):
def __init__(self, itemName, harm, cost, recipe):
super(Weapon, self).__init__(itemName, False, False, True, True, cost, recipe) # all weapons are craftable
self.harm = harm
class Food(Item):
def __init__(self, itemName, fullness, craftable, cost, isPotion=False, potionType=None, useDegree=None):
super(Food,self).__init__(itemName, False, True, False, craftable, cost)
self.fullness = fullness
self.isPotion = isPotion
self.type = potionType
self.degree = useDegree
class Material(Item):
def __init__(self, itemName, isCraftable, cost, recipe=()):
super(Material,self).__init__(itemName, True, False, False, isCraftable, cost, recipe)
class Mob(object):
def __init__(self, name, blood, damage, trophies):
self.blood = blood
self.damage = damage
self.trophie = trophies
self.name = name
# class Tool(Item):
# def __init__(self,):
# Coming "soon"!!
def pickApple(appleTree):
tuple = (False, False, True)
doExtra = random.choice(tuple)
if doExtra:
applePerTree = 5
else:
applePerTree = 3
return appleTree * applePerTree
def buyJustice(money, thing):
if thing.cost > money:
return False
else:
return True
def plantTreeJustice(num, apple):
if num*costPerTree < apple:
return True
else:
return False
# materials
wood = Material('wood', False, 3)
stick = Material('stick', True, 1, (wood,))
rock = Material('rock', False, 2)
copper = Material('copper ingot', !
True, 5, (rock, rock))
iron = Material('iron ingot', True, 12, (copper, copper, copper))
gold = Material('gold ingot', True, 27, (iron, iron, iron, iron))
diamond = Material('diamond!', True, 58, (gold, gold, gold, gold, gold))
# foods
flesh = Food('flesh', 2, False, 1)
berry = Food('blue berry',5,False,2)
egg = Food('egg',
cake = Food('cake',20,True,10,(egg,egg,milk,flour))
# mobs
zombie = Mob('zombie', 20, 1, (flesh,))
tree = Mob('tree', 10, 0.5, (wood, stick))
stone = Mob('stone', 30, 0.5, (rock,))
# weapons
wooden_sword = Weapon('wooden sword', 2, 5, (wood, wood, stick))
stone_sword = Weapon('stone sword', 6, 12, (rock, rock, stick))
iron_sword = Weapon('iron sword', 18, 26, (iron, iron, stick))
golden_sword = Weapon('golden sword', 54, 54, (gold, gold, stick))
diamond_sword = Weapon('diamond sword', 162, 110, (diamond, diamond, stick))
better_wooden_sword = Weapon('better wooden sword', 10, 10, (wooden_sword, wooden_sword)) # and so on...
placeToMobs = {'forest':(tree,tree,tree,tree,tree)} | import pygame, random
from pygame.locals import *
costPerTree = 0
class Item(object):
def __init__(self, itemName, isMaterial, isFood, isWeapon, isCraftable, cost, recipe=()):
self.name = str(itemName)
self.isMaterial = isMaterial
self.isFood = isFood
self.isWeapon = isWeapon
self.Craftable = isCraftable
self.cost = cost
self.recipe = recipe
class Weapon(Item):
def __init__(self, itemName, harm, cost, recipe):
super(Weapon, self).__init__(itemName, False, False, True, True, cost, recipe) # all weapons are craftable
self.harm = harm
class Food(Item):
def __init__(self, itemName, fullness, craftable, cost, isPotion=False, potionType=None, useDegree=None):
super(Food,self).__init__(itemName, False, True, False, craftable, cost)
self.fullness = fullness
self.isPotion = isPotion
self.type = potionType
self.degree = useDegree
class Material(Item):
def __init__(self, itemName, isCraftable, cost, recipe=()):
super(Material,self).__init__(itemName, True, False, False, isCraftable, cost, recipe)
class Mob(object):
def __init__(self, name, blood, damage, trophies):
self.blood = blood
self.damage = damage
self.trophie = trophies
self.name = name
# class Tool(Item):
# def __init__(self,):
# Coming "soon"!!
def pickApple(appleTree):
tuple = (False, False, True)
doExtra = random.choice(tuple)
if doExtra:
applePerTree = 5
else:
applePerTree = 3
return appleTree * applePerTree
def buyJustice(money, thing):
if thing.cost > money:
return False
else:
return True
def plantTreeJustice(num, apple):
if num*costPerTree < apple:
return True
else:
return False
# materials
wood = Material('wood', False, 3)
stick = Material('stick', True, 1, (wood,))
rock = Material('rock', False, 2)
copper = Material('copper ingot', True, 5, (rock, rock))
iron = Material('iron ingot', True, 12, (copper, copper, copper))
gold = Material('gold ingot', True, 27, (iron, iron, iron, iron))
diamond = Material('diamond!', True, 58, (gold, gold, gold, gold, gold))
# foods
flesh = Food('flesh', 2, False, 2)
# mobs
zombie = Mob('zombie', 20, 1, (flesh,))
tree = Mob('tree', 10, 0.5, (wood, stick))
stone = Mob('stone', 30, 0.5, (rock,))
# weapons
wooden_sword = Weapon('wooden sword', 2, 5, (wood, wood, stick))
stone_sword = Weapon('stone sword', 6, 12, (rock, rock, stick))
iron_sword = Weapon('iron sword', 18, 26, (iron, iron, stick))
golden_sword = Weapon('golden sword', 54, 54, (gold, gold, stick))
diamond_sword = Weapon('diamond sword', 162, 110, (diamond, diamond, stick))
better_wooden_sword = Weapon('better wooden sword', 10, 10, (wooden_sword, wooden_sword)) # and so on...
placeToMobs = {'forest':(tree,tree,tree,tree,tree)} | Python | 0.000001 |
7d1463fc732cdc6aef3299c6d2bbe916418e6d6e | Add full_name field to API | hkisaml/api.py | hkisaml/api.py | from django.contrib.auth.models import User
from rest_framework import permissions, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
if obj.first_name and obj.last_name:
ret['full_name'] = '%s %s' % (obj.first_name, obj.last_name)
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
| from django.contrib.auth.models import User
from rest_framework import permissions, routers, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
| Python | 0.000001 |
7fb0e28ad6ef1190e61fc38bfb19744739b2e096 | Remove unused deps from admin view | scoring_engine/web/views/admin.py | scoring_engine/web/views/admin.py | from flask import Blueprint, redirect, render_template, url_for
from flask_login import current_user, login_required
from operator import itemgetter
from scoring_engine.models.user import User
from scoring_engine.models.team import Team
mod = Blueprint('admin', __name__)
@mod.route('/admin')
@mod.route('/admin/status')
@login_required
def status():
if current_user.is_white_team:
return render_template('admin/status.html')
else:
return redirect(url_for('auth.unauthorized'))
@mod.route('/admin/manage')
@login_required
def manage():
if current_user.is_white_team:
users = User.query.with_entities(User.id, User.username).all()
teams = Team.query.with_entities(Team.id, Team.name).all()
return render_template('admin/manage.html', users=sorted(users, key=itemgetter(0)), teams=teams)
else:
return redirect(url_for('auth.unauthorized'))
@mod.route('/admin/stats')
@login_required
def stats():
if current_user.is_white_team:
return render_template('admin/stats.html')
else:
return redirect(url_for('auth.unauthorized'))
| from flask import Blueprint, flash, redirect, render_template, request, url_for,
from flask_login import current_user, login_required
from operator import itemgetter
from scoring_engine.models.user import User
from scoring_engine.models.team import Team
mod = Blueprint('admin', __name__)
@mod.route('/admin')
@mod.route('/admin/status')
@login_required
def status():
if current_user.is_white_team:
return render_template('admin/status.html')
else:
return redirect(url_for('auth.unauthorized'))
@mod.route('/admin/manage')
@login_required
def manage():
if current_user.is_white_team:
users = User.query.with_entities(User.id, User.username).all()
teams = Team.query.with_entities(Team.id, Team.name).all()
return render_template('admin/manage.html', users=sorted(users, key=itemgetter(0)), teams=teams)
else:
return redirect(url_for('auth.unauthorized'))
@mod.route('/admin/stats')
@login_required
def stats():
if current_user.is_white_team:
return render_template('admin/stats.html')
else:
return redirect(url_for('auth.unauthorized'))
| Python | 0 |
a900501804a5a07ed9cea77d5d5348be5e100d67 | Use Acapela TTS if available | src/robots/actions/speech.py | src/robots/actions/speech.py | # coding=utf-8
import logging; logger = logging.getLogger("robot." + __name__)
logger.setLevel(logging.DEBUG)
from robots.action import *
@action
def say(robot, msg, callback = None, feedback =None):
""" Says loudly the message.
Several TTS systems are tested:
- first, try the Acapela TTS (through the acapela-ros Genom module)
- then the ROS 'sound_play' node
- eventually, the Genom 'textos' module
:param msg: a text to say.
"""
def execute(robot):
logger.info("Robot says: " + msg)
if robot.hasROS():
import roslib; roslib.load_manifest('sound_play')
import rospy, os, sys
from sound_play.libsoundplay import SoundClient
soundhandle = SoundClient()
soundhandle.say(msg)
return (True, None)
elif robot.hasmodule("textos"):
return robot.execute([
genom_request(
"textos",
"Say",
[msg],
wait_for_completion = False if callback else True,
callback = callback)])
else:
logger.warning("No ROS, no textos module: can not do speech synthesis.")
return (True, None)
if robot.hasROS():
import rosnode
nodes = rosnode.get_node_names()
if "/acapela" in nodes:
import actionlib
from acapela.msg import SayGoal, SayAction
# use Acapela TTS
client = actionlib.SimpleActionClient('/acapela/Say', SayAction)
ok = client.wait_for_server()
if not ok:
print("Could not connect to the Acapela ROS action server! Aborting action")
return
# Creates a goal to send to the action server.
goal = SayGoal()
goal.message = msg
return [ros_request(client,
goal,
wait_for_completion = False if callback else True,
callback = callback,
feedback=feedback
)] # Return a non-blocking action. Useful to be able to cancel it later!
return [python_request(execute)]
| # coding=utf-8
import logging; logger = logging.getLogger("robot." + __name__)
logger.setLevel(logging.DEBUG)
from robots.action import *
@action
def say(robot, msg):
""" Says loudly the message.
Speech synthesis relies on the ROS wrapper around Festival.
:param msg: a text to say.
"""
def execute(robot):
logger.info("Robot says: " + msg)
if robot.hasROS():
import roslib; roslib.load_manifest('sound_play')
import rospy, os, sys
from sound_play.msg import SoundRequest
from sound_play.libsoundplay import SoundClient
soundhandle = SoundClient()
soundhandle.say(msg)
return (True, None)
elif robot.hasmodule("textos"):
return robot.execute([
genom_request(
"textos",
"Say",
[msg],
wait_for_completion = False if callback else True,
callback = callback)])
else:
logger.warning("No ROS, no textos module: can not do speech synthesis.")
return (True, None)
return [python_request(execute)]
| Python | 0 |
b00ae9a1023bb649171776f9cfdbf8675621272d | Use of `@api.multi`. | base_custom_info/models/custom_info.py | base_custom_info/models/custom_info.py | # -*- coding: utf-8 -*-
# © 2015 Antiun Ingeniería S.L. - Sergio Teruel
# © 2015 Antiun Ingeniería S.L. - Carlos Dauden
# © 2015 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, fields, models
class CustomInfoTemplate(models.Model):
"""Defines custom properties expected for a given database object."""
_name = "custom.info.template"
_description = "Custom information template"
_sql_constraints = [
("name_model",
"UNIQUE (name, model_id)",
"Another template with that name exists for that model."),
]
name = fields.Char(translate=True)
model_id = fields.Many2one(comodel_name='ir.model', string='Model')
info_ids = fields.One2many(
comodel_name='custom.info.property',
inverse_name='template_id',
string='Properties')
class CustomInfoProperty(models.Model):
"""Name of the custom information property."""
_name = "custom.info.property"
_description = "Custom information property"
_sql_constraints = [
("name_template",
"UNIQUE (name, template_id)",
"Another property with that name exists for that template."),
]
name = fields.Char(translate=True)
template_id = fields.Many2one(
comodel_name='custom.info.template',
string='Template')
info_value_ids = fields.One2many(
comodel_name="custom.info.value",
inverse_name="property_id",
string="Property Values")
class CustomInfoValue(models.Model):
_name = "custom.info.value"
_description = "Custom information value"
_rec_name = 'value'
_sql_constraints = [
("property_model_res",
"UNIQUE (property_id, model, res_id)",
"Another property with that name exists for that resource."),
]
model_id = fields.Many2one("ir.model", "Model", required=True)
res_id = fields.Integer("Resource ID", index=True, required=True)
property_id = fields.Many2one(
comodel_name='custom.info.property',
required=True,
string='Property')
name = fields.Char(related='property_id.name')
value = fields.Char(translate=True)
class CustomInfo(models.AbstractModel):
_name = "custom.info"
_description = "Inheritable abstract model to add custom info in any model"
custom_info_template_id = fields.Many2one(
comodel_name='custom.info.template',
string='Custom Information Template')
custom_info_ids = fields.One2many(
comodel_name='custom.info.value',
inverse_name='res_id',
domain=lambda self: [
("model_id", "=",
self.env["ir.model"].search([("model", "=", self._name)]).id)],
auto_join=True,
string='Custom Properties')
@api.multi
@api.onchange('custom_info_template_id')
def _onchange_custom_info_template_id(self):
if not self.custom_info_template_id:
self.custom_info_ids = False
else:
info_list = self.custom_info_ids.mapped('property_id')
for info_name in self.custom_info_template_id.info_ids:
if info_name not in info_list:
self.custom_info_ids |= self.custom_info_ids.new({
'model': self._name,
'property_id': info_name.id,
})
@api.multi
def unlink(self):
info_values = self.mapped('custom_info_ids')
res = super(CustomInfo, self).unlink()
if res:
info_values.unlink()
return res
| # -*- coding: utf-8 -*-
# © 2015 Antiun Ingeniería S.L. - Sergio Teruel
# © 2015 Antiun Ingeniería S.L. - Carlos Dauden
# © 2015 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, fields, models
class CustomInfoTemplate(models.Model):
"""Defines custom properties expected for a given database object."""
_name = "custom.info.template"
_description = "Custom information template"
_sql_constraints = [
("name_model",
"UNIQUE (name, model_id)",
"Another template with that name exists for that model."),
]
name = fields.Char(translate=True)
model_id = fields.Many2one(comodel_name='ir.model', string='Model')
info_ids = fields.One2many(
comodel_name='custom.info.property',
inverse_name='template_id',
string='Properties')
class CustomInfoProperty(models.Model):
"""Name of the custom information property."""
_name = "custom.info.property"
_description = "Custom information property"
_sql_constraints = [
("name_template",
"UNIQUE (name, template_id)",
"Another property with that name exists for that template."),
]
name = fields.Char(translate=True)
template_id = fields.Many2one(
comodel_name='custom.info.template',
string='Template')
info_value_ids = fields.One2many(
comodel_name="custom.info.value",
inverse_name="property_id",
string="Property Values")
class CustomInfoValue(models.Model):
_name = "custom.info.value"
_description = "Custom information value"
_rec_name = 'value'
_sql_constraints = [
("property_model_res",
"UNIQUE (property_id, model, res_id)",
"Another property with that name exists for that resource."),
]
model_id = fields.Many2one("ir.model", "Model", required=True)
res_id = fields.Integer("Resource ID", index=True, required=True)
property_id = fields.Many2one(
comodel_name='custom.info.property',
required=True,
string='Property')
name = fields.Char(related='property_id.name')
value = fields.Char(translate=True)
class CustomInfo(models.AbstractModel):
_name = "custom.info"
_description = "Inheritable abstract model to add custom info in any model"
custom_info_template_id = fields.Many2one(
comodel_name='custom.info.template',
string='Custom Information Template')
custom_info_ids = fields.One2many(
comodel_name='custom.info.value',
inverse_name='res_id',
domain=lambda self: [
("model_id", "=",
self.env["ir.model"].search([("model", "=", self._name)]).id)],
auto_join=True,
string='Custom Properties')
@api.onchange('custom_info_template_id')
def _onchange_custom_info_template_id(self):
if not self.custom_info_template_id:
self.custom_info_ids = False
else:
info_list = self.custom_info_ids.mapped('property_id')
for info_name in self.custom_info_template_id.info_ids:
if info_name not in info_list:
self.custom_info_ids |= self.custom_info_ids.new({
'model': self._name,
'property_id': info_name.id,
})
@api.multi
def unlink(self):
info_values = self.mapped('custom_info_ids')
res = super(CustomInfo, self).unlink()
if res:
info_values.unlink()
return res
| Python | 0 |
74101a9f24b218c036cf32c540cfb911e601080b | fix freeze of ppo2 (#849) | baselines/common/mpi_adam_optimizer.py | baselines/common/mpi_adam_optimizer.py | import numpy as np
import tensorflow as tf
from baselines.common import tf_util as U
from baselines.common.tests.test_with_mpi import with_mpi
try:
from mpi4py import MPI
except ImportError:
MPI = None
class MpiAdamOptimizer(tf.train.AdamOptimizer):
"""Adam optimizer that averages gradients across mpi processes."""
def __init__(self, comm, **kwargs):
self.comm = comm
tf.train.AdamOptimizer.__init__(self, **kwargs)
def compute_gradients(self, loss, var_list, **kwargs):
grads_and_vars = tf.train.AdamOptimizer.compute_gradients(self, loss, var_list, **kwargs)
grads_and_vars = [(g, v) for g, v in grads_and_vars if g is not None]
flat_grad = tf.concat([tf.reshape(g, (-1,)) for g, v in grads_and_vars], axis=0)
shapes = [v.shape.as_list() for g, v in grads_and_vars]
sizes = [int(np.prod(s)) for s in shapes]
num_tasks = self.comm.Get_size()
buf = np.zeros(sum(sizes), np.float32)
countholder = [0] # Counts how many times _collect_grads has been called
stat = tf.reduce_sum(grads_and_vars[0][1]) # sum of first variable
def _collect_grads(flat_grad, np_stat):
self.comm.Allreduce(flat_grad, buf, op=MPI.SUM)
np.divide(buf, float(num_tasks), out=buf)
if countholder[0] % 100 == 0:
check_synced(np_stat, self.comm)
countholder[0] += 1
return buf
avg_flat_grad = tf.py_func(_collect_grads, [flat_grad, stat], tf.float32)
avg_flat_grad.set_shape(flat_grad.shape)
avg_grads = tf.split(avg_flat_grad, sizes, axis=0)
avg_grads_and_vars = [(tf.reshape(g, v.shape), v)
for g, (_, v) in zip(avg_grads, grads_and_vars)]
return avg_grads_and_vars
def check_synced(localval, comm=None):
"""
It's common to forget to initialize your variables to the same values, or
(less commonly) if you update them in some other way than adam, to get them out of sync.
This function checks that variables on all MPI workers are the same, and raises
an AssertionError otherwise
Arguments:
comm: MPI communicator
localval: list of local variables (list of variables on current worker to be compared with the other workers)
"""
comm = comm or MPI.COMM_WORLD
vals = comm.gather(localval)
if comm.rank == 0:
assert all(val==vals[0] for val in vals[1:])
@with_mpi(timeout=5)
def test_nonfreeze():
np.random.seed(0)
tf.set_random_seed(0)
a = tf.Variable(np.random.randn(3).astype('float32'))
b = tf.Variable(np.random.randn(2,5).astype('float32'))
loss = tf.reduce_sum(tf.square(a)) + tf.reduce_sum(tf.sin(b))
stepsize = 1e-2
# for some reason the session config with inter_op_parallelism_threads was causing
# nested sess.run calls to freeze
config = tf.ConfigProto(inter_op_parallelism_threads=1)
sess = U.get_session(config=config)
update_op = MpiAdamOptimizer(comm=MPI.COMM_WORLD, learning_rate=stepsize).minimize(loss)
sess.run(tf.global_variables_initializer())
losslist_ref = []
for i in range(100):
l,_ = sess.run([loss, update_op])
print(i, l)
losslist_ref.append(l)
| import numpy as np
import tensorflow as tf
from mpi4py import MPI
class MpiAdamOptimizer(tf.train.AdamOptimizer):
"""Adam optimizer that averages gradients across mpi processes."""
def __init__(self, comm, **kwargs):
self.comm = comm
tf.train.AdamOptimizer.__init__(self, **kwargs)
def compute_gradients(self, loss, var_list, **kwargs):
grads_and_vars = tf.train.AdamOptimizer.compute_gradients(self, loss, var_list, **kwargs)
grads_and_vars = [(g, v) for g, v in grads_and_vars if g is not None]
flat_grad = tf.concat([tf.reshape(g, (-1,)) for g, v in grads_and_vars], axis=0)
shapes = [v.shape.as_list() for g, v in grads_and_vars]
sizes = [int(np.prod(s)) for s in shapes]
num_tasks = self.comm.Get_size()
buf = np.zeros(sum(sizes), np.float32)
sess = tf.get_default_session()
assert sess is not None
countholder = [0] # Counts how many times _collect_grads has been called
stat = tf.reduce_sum(grads_and_vars[0][1]) # sum of first variable
def _collect_grads(flat_grad):
self.comm.Allreduce(flat_grad, buf, op=MPI.SUM)
np.divide(buf, float(num_tasks), out=buf)
if countholder[0] % 100 == 0:
check_synced(sess, self.comm, stat)
countholder[0] += 1
return buf
avg_flat_grad = tf.py_func(_collect_grads, [flat_grad], tf.float32)
avg_flat_grad.set_shape(flat_grad.shape)
avg_grads = tf.split(avg_flat_grad, sizes, axis=0)
avg_grads_and_vars = [(tf.reshape(g, v.shape), v)
for g, (_, v) in zip(avg_grads, grads_and_vars)]
return avg_grads_and_vars
def check_synced(sess, comm, tfstat):
"""
Check that 'tfstat' evaluates to the same thing on every MPI worker
"""
localval = sess.run(tfstat)
vals = comm.gather(localval)
if comm.rank == 0:
assert all(val==vals[0] for val in vals[1:])
| Python | 0 |
a2982804011e808bd8bf8d9781d9b7bb20328ddc | remove import test line | noteorganiser/tests/test_utils.py | noteorganiser/tests/test_utils.py | """tests for utilities"""
import os
import shutil
import datetime
from PySide import QtGui
from PySide import QtCore
#utils to test
from ..utils import fuzzySearch
from .custom_fixtures import parent
def test_fuzzySearch():
### these should return True
#starts with the searchstring
assert fuzzySearch('g', 'git got gut')
#starts with the (longer) searchstring
assert fuzzySearch('git', 'git got gut')
#searchstring not at the start
assert fuzzySearch('got', 'git got gut')
#multiple substrings (separated by a space) found somewhere in the string
assert fuzzySearch('gi go', 'git got gut')
#empty string
assert fuzzySearch('', 'git got gut')
#strange whitespace
assert fuzzySearch('gi go', 'git got gut')
assert fuzzySearch('gi go', 'git got gut')
### these should return False
#searchstring not found
assert not fuzzySearch('bot', 'git got gut')
#searchstring not found
assert not fuzzySearch('gran', 'this is a great neat thing')
| """tests for utilities"""
import os
import shutil
import datetime
from PySide import QtGui
from PySide import QtCore
import test
#utils to test
from ..utils import fuzzySearch
from .custom_fixtures import parent
def test_fuzzySearch():
### these should return True
#starts with the searchstring
assert fuzzySearch('g', 'git got gut')
#starts with the (longer) searchstring
assert fuzzySearch('git', 'git got gut')
#searchstring not at the start
assert fuzzySearch('got', 'git got gut')
#multiple substrings (separated by a space) found somewhere in the string
assert fuzzySearch('gi go', 'git got gut')
#empty string
assert fuzzySearch('', 'git got gut')
#strange whitespace
assert fuzzySearch('gi go', 'git got gut')
assert fuzzySearch('gi go', 'git got gut')
### these should return False
#searchstring not found
assert not fuzzySearch('bot', 'git got gut')
#searchstring not found
assert not fuzzySearch('gran', 'this is a great neat thing')
| Python | 0.000001 |
6e42e355d6ae60f115c9027ff6fcb17814b346c2 | use mah special charm helpers | hooks/setup.py | hooks/setup.py | import subprocess
def pre_install():
"""
Do any setup required before the install hook.
"""
install_charmhelpers()
def install_charmhelpers():
"""
Install the charmhelpers library, if not present.
"""
try:
import charmhelpers # noqa
except ImportError:
subprocess.check_call(['apt-get', 'install', '-y', 'python-pip'])
subprocess.check_call(['pip', 'install', '-e', 'git+https://github.com/whitmo/charmhelpers.git#egg=charmhelpers'])
| import subprocess
def pre_install():
"""
Do any setup required before the install hook.
"""
install_charmhelpers()
def install_charmhelpers():
"""
Install the charmhelpers library, if not present.
"""
try:
import charmhelpers # noqa
except ImportError:
subprocess.check_call(['apt-get', 'install', '-y', 'python-pip'])
subprocess.check_call(['pip', 'install', 'charmhelpers'])
| Python | 0 |
a02624cdbacd666d4e0cdba6230e2ee67837f874 | add AsText to __all__ list | geoalchemy2/functions.py | geoalchemy2/functions.py | from sqlalchemy.sql import functions
from . import types
__all__ = [
'GenericFunction',
'GeometryType',
'AsText',
'Buffer'
]
class GenericFunction(functions.GenericFunction):
def __init__(self, *args, **kwargs):
expr = kwargs.pop('expr', None)
if expr is not None:
args = (expr,) + args
functions.GenericFunction.__init__(self, *args, **kwargs)
# Functions are classified as in the PostGIS doc.
# <http://www.postgis.org/documentation/manual-svn/reference.html>
#
# Geometry Accessors
#
class GeometryType(GenericFunction):
name = 'ST_GeometryType'
#
# Geometry Outputs
#
class AsText(GenericFunction):
name = 'ST_AsText'
#
# Geometry Processing
#
class Buffer(GenericFunction):
name = 'ST_Buffer'
type = types.Geometry
| from sqlalchemy.sql import functions
from . import types
__all__ = [
'GenericFunction', 'GeometryType', 'Buffer'
]
class GenericFunction(functions.GenericFunction):
def __init__(self, *args, **kwargs):
expr = kwargs.pop('expr', None)
if expr is not None:
args = (expr,) + args
functions.GenericFunction.__init__(self, *args, **kwargs)
# Functions are classified as in the PostGIS doc.
# <http://www.postgis.org/documentation/manual-svn/reference.html>
#
# Geometry Accessors
#
class GeometryType(GenericFunction):
name = 'ST_GeometryType'
#
# Geometry Outputs
#
class AsText(GenericFunction):
name = 'ST_AsText'
#
# Geometry Processing
#
class Buffer(GenericFunction):
name = 'ST_Buffer'
type = types.Geometry
| Python | 0.000861 |
28b2d0d4c92656b2b1fb7a519cb0f33657048e0c | improve plotting and update to pyIEM | scripts/current/q3_today_total.py | scripts/current/q3_today_total.py | """
Create a plot of today's estimated precipitation based on the Q3 data
"""
import datetime
import numpy as np
import os
import sys
sys.path.insert(0, '../mrms')
import util
import pytz
import gzip
from pyiem.plot import MapPlot
def doday(ts, realtime):
"""
Create a plot of precipitation stage4 estimates for some day
"""
# Start at 1 AM
now = ts.replace(hour=1, minute=0)
ets = now + datetime.timedelta(hours=24)
interval = datetime.timedelta(hours=1)
precip = np.zeros( (3500,7000) )
lastts = None
while now < ets:
gmt = now.astimezone(pytz.timezone("UTC"))
# Only need tile 1 and 2 to sufficiently do Iowa
for tile in range(1,3):
fn = util.get_fn('1hrad', gmt, tile)
if os.path.isfile(fn):
lastts = now
tilemeta, val = util.reader(fn)
ysz, xsz = np.shape(val)
if tile == 1:
x0 = 0
y0 = 1750
if tile == 2:
x0 = 3500
y0 = 1750
precip[y0:(y0+ysz),x0:(x0+xsz)] += val
else:
print 'Missing 1HRAD MRMS for q3_today_total', fn
now += interval
lastts = lastts - datetime.timedelta(minutes=1)
subtitle = "Total between 12:00 AM and %s" % (
lastts.strftime("%H:%M %p %Z"),)
routes = 'ac'
if not realtime:
routes = 'a'
pqstr = "plot %s %s00 iowa_q2_1d.png iowa_q2_1d.png png" % (routes,
ts.strftime("%Y%m%d%H"), )
m = MapPlot(title="%s NMQ Q3 Today's Precipitation" % (
ts.strftime("%-d %b %Y"),),
subtitle=subtitle)
clevs = np.arange(0,0.2,0.05)
clevs = np.append(clevs, np.arange(0.2, 1.0, 0.1))
clevs = np.append(clevs, np.arange(1.0, 5.0, 0.25))
clevs = np.append(clevs, np.arange(5.0, 10.0, 1.0))
clevs[0] = 0.01
x,y = np.meshgrid(util.XAXIS, util.YAXIS)
m.pcolormesh(x, y, precip / 24.5, clevs, units='inch')
#map.drawstates(zorder=2)
m.drawcounties()
m.postprocess(pqstr=pqstr)
if __name__ == "__main__":
''' This is how we roll '''
if len(sys.argv) == 4:
date = datetime.datetime(int(sys.argv[1]), int(sys.argv[2]),
int(sys.argv[3]), 12, 0)
realtime = False
else:
date = datetime.datetime.now()
date = date - datetime.timedelta(minutes=60)
date = date.replace(hour=12, minute=0, second=0, microsecond=0)
realtime = True
# Stupid pytz timezone dance
date = date.replace(tzinfo=pytz.timezone("UTC"))
date = date.astimezone(pytz.timezone("America/Chicago"))
doday(date, realtime)
| """
Create a plot of today's estimated precipitation based on the Q3 data
"""
import datetime
import numpy as np
import os
import sys
sys.path.insert(0, '../mrms')
import util
import pytz
import gzip
from iem.plot import MapPlot
def doday(ts):
"""
Create a plot of precipitation stage4 estimates for some day
"""
# Start at 1 AM
now = ts.replace(hour=1, minute=0)
interval = datetime.timedelta(hours=1)
precip = np.zeros( (3500,7000) )
while now < ts:
gmt = now.astimezone(pytz.timezone("UTC"))
# Only need tile 1 and 2 to sufficiently do Iowa
for tile in range(1,3):
fn = util.get_fn('1hrad', gmt, tile)
if os.path.isfile(fn):
tilemeta, val = util.reader(fn)
ysz, xsz = np.shape(val)
if tile == 1:
x0 = 0
y0 = 1750
if tile == 2:
x0 = 3500
y0 = 1750
precip[y0:(y0+ysz),x0:(x0+xsz)] += val
else:
print 'Missing 1HRAD MRMS for q3_today_total', fn
now += interval
subtitle = 'Total up to %s' % (ts.strftime("%d %B %Y %I:%M %p %Z"),)
pqstr = "plot ac %s00 iowa_q2_1d.png iowa_q2_1d.png png" % (
ts.strftime("%Y%m%d%H"), )
m = MapPlot(title="NMQ Q3 Today's Precipitation [inch]",
subtitle=subtitle, pqstr=pqstr)
clevs = np.arange(0,0.2,0.05)
clevs = np.append(clevs, np.arange(0.2, 1.0, 0.1))
clevs = np.append(clevs, np.arange(1.0, 5.0, 0.25))
clevs = np.append(clevs, np.arange(5.0, 10.0, 1.0))
clevs[0] = 0.01
m.contourf(util.XAXIS, util.YAXIS, precip / 24.5, clevs)
#map.drawstates(zorder=2)
m.drawcounties()
m.postprocess()
if __name__ == "__main__":
''' This is how we roll '''
if len(sys.argv) == 4:
ts = datetime.datetime(int(sys.argv[1]),
int(sys.argv[2]), int(sys.argv[3]), 23, 55)
else:
ts = datetime.datetime.now()
ts = pytz.timezone("America/Chicago").localize(ts)
doday(ts)
| Python | 0 |
9fb1c2781582e52c6618b61d4a8a60c3363ee711 | bump controller API to v1.1 | api/__init__.py | api/__init__.py | """
The **api** Django app presents a RESTful web API for interacting with the **deis** system.
"""
__version__ = '1.1.0'
| """
The **api** Django app presents a RESTful web API for interacting with the **deis** system.
"""
__version__ = '1.0.0'
| Python | 0.000001 |
60e60c9d7c5551701eafbfe15dd3931d45b594b6 | Handle Accept headers | api/__init__.py | api/__init__.py | # try and keep Flask imports to a minimum, going to refactor later to use
# just werkzeug, for now, prototype speed is king
from flask import Flask, request
import yaml
import os
import re
from datetime import datetime
from api.config import config, ConfigException
import api.repo
import api.utils
app = Flask(__name__)
app.config.from_object('api.config')
@app.route('/schema', defaults={'path': ''}, methods=['GET'])
@app.route('/schema/<path:path>', methods=['GET'])
def get_schemas(path):
# TODO serve schemas
return utils.json_response({'win': 'scheme away'}, 200)
@app.route('/', defaults={'path': ''}, methods=['GET'])
@app.route('/<path:path>', methods=['GET'])
def get_data(path):
"""Handle all GET requests to the api"""
metadata = {}
file_path = os.path.join('data', path)
latest_version = repo.get_latest_commit()
# check if the request specified a version via header
accept_pattern = re.compile('application/(.+)\+json')
match = accept_pattern.match(request.headers['Accept'])
if match is None:
match = request.accept_mimetypes.best_match(['application/json'])
if match is None:
return utils.err(406)
else:
if match.group(2) is None:
version = latest_version
else:
cid = match.group(2)
try:
version = repo.get_commit(cid)
except (KeyError, ValueError) as e:
return utils.err(406)
if repo.path_files(file_path + config['DATA_FILE_EXT'], version.id) is None:
# .yml file doesn't exist, check if path matches a directory
f_list = repo.path_files(file_path, version.id)
if f_list is None:
return utils.err(404)
data = utils.file_list_to_links(f_list, request.host_url, 'data/')
metadata['data_type'] = 'directory listing'
else:
raw = repo.file_contents(file_path + config['DATA_FILE_EXT'], version.id)
data = yaml.load(raw)
data = utils.refs_to_links(data, request.host_url)
metadata['data_type'] = 'file content'
metadata['version'] = {
'id': str(version.id),
'date': datetime.fromtimestamp(version.commit_time).isoformat()
}
if version.id != latest_version.id:
metadata['latest_version'] = {
'id': str(latest_version.id),
'date': datetime.fromtimestamp(latest_version.commit_time).isoformat()
}
ret_obj = {
'data': data,
'metadata': metadata
}
return utils.json_response(ret_obj)
| # try and keep Flask imports to a minimum, going to refactor later to use
# just werkzeug, for now, prototype speed is king
from flask import Flask, request
import yaml
import os
import re
from datetime import datetime
from api.config import config, ConfigException
import api.repo
import api.utils
app = Flask(__name__)
app.config.from_object('api.config')
@app.route('/schema', defaults={'path': ''}, methods=['GET'])
@app.route('/schema/<path:path>', methods=['GET'])
def get_schemas(path):
# TODO serve schemas
return utils.json_response({'win': 'scheme away'}, 200)
@app.route('/', defaults={'path': ''}, methods=['GET'])
@app.route('/<path:path>', methods=['GET'])
def get_data(path):
"""Handle all GET requests to the api"""
metadata = {}
file_path = os.path.join('data', path)
latest_version = repo.get_latest_commit()
# check if the request specified a version via header
accept_pattern = re.compile('application/(.+)\+json')
match = accept_pattern.match(request.headers['Accept'])
if match is not None:
cid = match.group(1)
try:
version = repo.get_commit(cid)
except KeyError as e:
return utils.err(406)
else:
version = latest_version
if repo.path_files(file_path + config['DATA_FILE_EXT'], version.id) is None:
# .yml file doesn't exist, check if path matches a directory
f_list = repo.path_files(file_path, version.id)
if f_list is None:
return utils.err(404)
data = utils.file_list_to_links(f_list, request.host_url, 'data/')
metadata['data_type'] = 'directory listing'
else:
raw = repo.file_contents(file_path + config['DATA_FILE_EXT'], version.id)
data = yaml.load(raw)
data = utils.refs_to_links(data, request.host_url)
metadata['data_type'] = 'file content'
metadata['version'] = {
'id': str(version.id),
'date': datetime.fromtimestamp(version.commit_time).isoformat()
}
if version.id != latest_version.id:
metadata['latest_version'] = {
'id': str(latest_version.id),
'date': datetime.fromtimestamp(latest_version.commit_time).isoformat()
}
ret_obj = {
'data': data,
'metadata': metadata
}
return utils.json_response(ret_obj)
| Python | 0 |
a652e43ca73eacda7e42e27afb0d91d75000b4df | Fix typing errors | gerber_to_scad/vector.py | gerber_to_scad/vector.py | # Basic vector maths class
import math
class V(object):
def __init__(self, x=0, y=0):
self.x = float(x)
self.y = float(y)
def __unicode__(self):
return "(%s, %s)" % (self.x, self.y)
__repr__ = __unicode__
@classmethod
def from_tuple(cls, coordinates):
x, y = coordinates
return V(x, y)
def as_tuple(self):
return (self.x, self.y)
@classmethod
def intersection(cls, o1, d1, o2, d2):
"""Find intersection of two vectors, if any"""
try:
l2 = ((o2.x - o1.x) * d1.y / d1.x - o2.y + o1.y) / (
d2.y - d2.x * d1.y / d1.x
)
return o2 + d2 * l2
except ZeroDivisionError:
return None
@classmethod
def point_line_projection(cls, v1, v2, p, limit_to_segment=False):
"""Returns the projection of the point p on the line defined
by the two endpoints v1 and v2
"""
d = v2 - v1
l2 = d.abs_sq()
# If v1 and v2 are equal, simply return v1 (the line direction is undefined)
if l2 == 0:
return v1
# Get the projection factor
a = ((p - v1) * d) / l2
# Limit the projection to be limited to stay between v1 and v2, if requested
if limit_to_segment:
if a < 0:
return v1
if a > 1:
return v2
return v1 + d * a
def abs_sq(self):
"""Square of absolute value of vector self"""
return abs(self.x * self.x + self.y * self.y)
def consume_tuple(self, other):
if isinstance(other, tuple) or isinstance(other, list):
return V(other[0], other[1])
return other
def cross(self, other):
"""cross product"""
return V(self.x * other.y - other.x * self.y)
def rotate(self, theta, as_degrees=False):
"""Adapted from https://gist.github.com/mcleonard/5351452.
Rotate this vector by theta in degrees.
"""
if as_degrees:
theta = math.radians(theta)
dc, ds = math.cos(theta), math.sin(theta)
x, y = dc * self.x - ds * self.y, ds * self.x + dc * self.y
return V(x, y)
def __abs__(self):
return math.sqrt(self.abs_sq())
def __cmp__(self, other):
other = self.consume_tuple(other)
if self.x == other.x and self.y == other.y:
return 0
if abs(self) < abs(other):
return -1
return 1
def __nonzero__(self):
if self.x or self.y:
return True
return False
def __neg__(self):
return V(-self.x, -self.y)
def __add__(self, other):
other = self.consume_tuple(other)
return V(self.x + other.x, self.y + other.y)
def __sub__(self, other):
other = self.consume_tuple(other)
return V(self.x - other.x, self.y - other.y)
def __mul__(self, other) -> "V":
other = self.consume_tuple(other)
if isinstance(other, V):
return self.x * other.x + self.y * other.y
return V(other * self.x, other * self.y)
def __div__(self, other):
if not other:
raise Exception("Division by zero")
other = float(other)
return V(self.x / other, self.y / other)
__truediv__ = __div__
| # Basic vector maths class
import math
class V(object):
def __init__(self, x=0, y=0):
self.x = float(x)
self.y = float(y)
def __unicode__(self):
return "(%s, %s)" % (self.x, self.y)
__repr__ = __unicode__
@classmethod
def from_tuple(cls, coordinates):
x, y = coordinates
return V(x, y)
def as_tuple(self):
return (self.x, self.y)
@classmethod
def intersection(cls, o1, d1, o2, d2):
""" Find intersection of two vectors, if any """
try:
l2 = ((o2.x - o1.x) * d1.y / d1.x - o2.y + o1.y) / (d2.y - d2.x * d1.y / d1.x)
return o2 + d2 * l2
except ZeroDivisionError:
return None
@classmethod
def point_line_projection(cls, v1, v2, p, limit_to_segment=False):
""" Returns the projection of the point p on the line defined
by the two endpoints v1 and v2
"""
d = v2 - v1
l2 = d.abs_sq()
# If v1 and v2 are equal, simply return v1 (the line direction is undefined)
if l2 == 0:
return v1
# Get the projection factor
a = ((p - v1) * d) / l2
# Limit the projection to be limited to stay between v1 and v2, if requested
if limit_to_segment:
if a < 0:
return v1
if a > 1:
return v2
return v1 + d * a
def abs_sq(self):
""" Square of absolute value of vector self """
return abs(self.x * self.x + self.y * self.y)
def consume_tuple(self, other):
if isinstance(other, tuple) or isinstance(other, list):
return V(other[0], other[1])
return other
def cross(self, other):
""" cross product """
return V(self.x * other.y - other.x * self.y)
def rotate(self, theta, as_degrees=False):
""" Adapted from https://gist.github.com/mcleonard/5351452.
Rotate this vector by theta in degrees.
"""
if as_degrees:
theta = math.radians(theta)
dc, ds = math.cos(theta), math.sin(theta)
x, y = dc*self.x - ds*self.y, ds*self.x + dc*self.y
return V(x, y)
def __abs__(self):
return math.sqrt(self.abs_sq())
def __cmp__(self, other):
other = self.consume_tuple(other)
if self.x == other.x and self.y == other.y:
return 0
if self.abs() < other.abs():
return -1
return 1
def __nonzero__(self):
if self.x or self.y:
return True
return False
def __neg__(self):
return V(-self.x, -self.y)
def __add__(self, other):
other = self.consume_tuple(other)
return V(self.x + other.x, self.y + other.y)
def __sub__(self, other):
other = self.consume_tuple(other)
return V(self.x - other.x, self.y - other.y)
def __mul__(self, other):
other = self.consume_tuple(other)
if isinstance(other, V):
return (self.x * other.x + self.y * other.y)
return V(other * self.x, other * self.y)
def __div__(self, other):
if not other:
raise Exception("Division by zero")
other = float(other)
return V(self.x / other, self.y / other)
__truediv__ = __div__
| Python | 0.00611 |
ab3f331246e844812fd91b51908a0d0972a9793f | improve run_bin (#885) | gfauto/gfauto/run_bin.py | gfauto/gfauto/run_bin.py | # -*- coding: utf-8 -*-
# Copyright 2019 The GraphicsFuzz Project Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs a binary from the given binary name and settings file."""
import argparse
import subprocess
import sys
from pathlib import Path
from typing import List
from gfauto import binaries_util, settings_util
from gfauto.gflogging import log
def main() -> int:
parser = argparse.ArgumentParser(
description="Runs a binary given the binary name and settings.json file. "
"Use -- to separate args to run_bin and your binary. "
)
parser.add_argument(
"--settings",
help="Path to the settings JSON file for this instance.",
default=str(settings_util.DEFAULT_SETTINGS_FILE_PATH),
)
parser.add_argument(
"binary_name",
help="The name of the binary to run. E.g. spirv-opt, glslangValidator",
type=str,
)
parser.add_argument(
"arguments",
metavar="arguments",
type=str,
nargs="*",
help="The arguments to pass to the binary",
)
parsed_args = parser.parse_args(sys.argv[1:])
# Args.
settings_path: Path = Path(parsed_args.settings)
binary_name: str = parsed_args.binary_name
arguments: List[str] = parsed_args.arguments
try:
settings = settings_util.read_or_create(settings_path)
except settings_util.NoSettingsFile:
log(f"Settings file {str(settings_path)} was created for you; using this.")
settings = settings_util.read_or_create(settings_path)
binary_manager = binaries_util.get_default_binary_manager(settings=settings)
cmd = [str(binary_manager.get_binary_path_by_name(binary_name).path)]
cmd.extend(arguments)
return subprocess.run(cmd, check=False).returncode
if __name__ == "__main__":
sys.exit(main())
| # -*- coding: utf-8 -*-
# Copyright 2019 The GraphicsFuzz Project Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs a binary from the given binary name and settings file."""
import argparse
import subprocess
import sys
from pathlib import Path
from typing import List
from gfauto import binaries_util, settings_util
def main() -> int:
parser = argparse.ArgumentParser(
description="Runs a binary given the binary name and settings.json file."
)
parser.add_argument(
"--settings",
help="Path to the settings JSON file for this instance.",
default=str(settings_util.DEFAULT_SETTINGS_FILE_PATH),
)
parser.add_argument(
"binary_name",
help="The name of the binary to run. E.g. spirv-opt, glslangValidator",
type=str,
)
parser.add_argument(
"arguments",
metavar="arguments",
type=str,
nargs="*",
help="The arguments to pass to the binary",
)
parsed_args = parser.parse_args(sys.argv[1:])
# Args.
settings_path: Path = Path(parsed_args.settings)
binary_name: str = parsed_args.binary_name
arguments: List[str] = parsed_args.arguments
settings = settings_util.read_or_create(settings_path)
binary_manager = binaries_util.get_default_binary_manager(settings=settings)
cmd = [str(binary_manager.get_binary_path_by_name(binary_name).path)]
cmd.extend(arguments)
return subprocess.run(cmd, check=False).returncode
if __name__ == "__main__":
sys.exit(main())
| Python | 0 |
15f22d7c0ac9ddce6cb14cb0cbb35c4d630605d2 | Remove period so input corresponds to output. | api/ud_helper.py | api/ud_helper.py | import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
# Adding a period improves detection on especially short sentences
period_added = False
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
period_added = True
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
# Remove the period to make sure input corresponds to output
if period_added:
processed = "\n".join(processed.rstrip().split("\n")[:-1]) + "\n\n"
return processed
class ParserException(Exception):
pass
| import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
MODELS = {
"swe": "data/swedish-ud-2.0-170801.udpipe",
}
def __init__(self, language):
model_path = self.MODELS.get(language, None)
if not model_path:
raise ParserException("Cannot find model for language '%s'" % language)
model = Model.load(model_path)
if not model:
raise ParserException("Cannot load model from file '%s'\n" % model_path)
self.model = model
def parse(self, text):
text = text.strip()
last_character = text.strip()[-1]
if re.match(r"\w", last_character, flags=re.UNICODE):
text += "."
pipeline = Pipeline(
self.model,
"tokenize",
Pipeline.DEFAULT,
Pipeline.DEFAULT,
"conllu"
)
error = ProcessingError()
processed = pipeline.process(text, error)
if error.occurred():
raise ParserException(error.message)
return processed
class ParserException(Exception):
pass
| Python | 0.999999 |
ccef871b45f78845a12c3209b463e861244a107e | Fix the moin parser. | external/moin-parser.py | external/moin-parser.py | # -*- coding: utf-8 -*-
"""
The Pygments MoinMoin Parser
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a MoinMoin parser plugin that renders source code to HTML via
Pygments; you need Pygments 0.7 or newer for this parser to work.
To use it, set the options below to match your setup and put this file in
the data/plugin/parser subdirectory of your Moin instance, and give it the
name that the parser directive should have. For example, if you name the
file ``code.py``, you can get a highlighted Python code sample with this
Wiki markup::
{{{
#!code python
[...]
}}}
Additionally, if you set ATTACHMENTS below to True, Pygments will also be
called for all attachments for whose filenames there is no other parser
registered.
You are responsible for including CSS rules that will map the Pygments CSS
classes to colors. You can output a stylesheet file with `pygmentize`, put
it into the `htdocs` directory of your Moin instance and then include it in
the `stylesheets` configuration option in the Moin config, e.g.::
stylesheets = [('screen', '/htdocs/pygments.css')]
If you do not want to do that and are willing to accept larger HTML
output, you can set the INLINESTYLES option below to True.
:copyright: 2007 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
# Options
# ~~~~~~~
# Set to True if you want to highlight attachments, in addition to
# {{{ }}} blocks.
ATTACHMENTS = True
# Set to True if you want inline CSS styles instead of classes
INLINESTYLES = False
import sys
from pygments import highlight
from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, TextLexer
from pygments.formatters import HtmlFormatter
from pygments.util import ClassNotFound
# wrap lines in <span>s so that the Moin-generated line numbers work
class MoinHtmlFormatter(HtmlFormatter):
def wrap(self, source, outfile):
for line in source:
yield 1, '<span class="line">' + line[1] + '</span>'
htmlformatter = MoinHtmlFormatter(noclasses=INLINESTYLES)
textlexer = TextLexer()
codeid = [0]
class Parser:
"""
MoinMoin Pygments parser.
"""
if ATTACHMENTS:
extensions = '*'
else:
extensions = []
Dependencies = []
def __init__(self, raw, request, **kw):
self.raw = raw
self.req = request
if "format_args" in kw:
# called from a {{{ }}} block
try:
self.lexer = get_lexer_by_name(kw['format_args'].strip())
except ClassNotFound:
self.lexer = textlexer
return
if "filename" in kw:
# called for an attachment
filename = kw['filename']
else:
# called for an attachment by an older moin
# HACK: find out the filename by peeking into the execution
# frame which might not always work
try:
frame = sys._getframe(1)
filename = frame.f_locals['filename']
except:
filename = 'x.txt'
try:
self.lexer = get_lexer_for_filename(filename)
except ClassNotFound:
self.lexer = textlexer
def format(self, formatter):
codeid[0] += 1
id = "pygments_%s" % codeid[0]
w = self.req.write
w(formatter.code_area(1, id, start=1, step=1))
w(formatter.rawHTML(highlight(self.raw, self.lexer, htmlformatter)))
w(formatter.code_area(0, id))
| # -*- coding: utf-8 -*-
"""
The Pygments MoinMoin Parser
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a MoinMoin parser plugin that renders source code to HTML via
Pygments; you need Pygments 0.7 or newer for this parser to work.
To use it, set the options below to match your setup and put this file in
the data/plugin/parser subdirectory of your Moin instance, and give it the
name that the parser directive should have. For example, if you name the
file ``code.py``, you can get a highlighted Python code sample with this
Wiki markup::
{{{
#!code python
[...]
}}}
Additionally, if you set ATTACHMENTS below to True, Pygments will also be
called for all attachments for whose filenames there is no other parser
registered.
You are responsible for including CSS rules that will map the Pygments CSS
classes to colors. You can output a stylesheet file with `pygmentize`, put
it into the `htdocs` directory of your Moin instance and then include it in
the `stylesheets` configuration option in the Moin config, e.g.::
stylesheets = [('screen', '/htdocs/pygments.css')]
If you do not want to do that and are willing to accept larger HTML
output, you can set the INLINESTYLES option below to True.
:copyright: 2007 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
# Options
# ~~~~~~~
# Set to True if you want to highlight attachments, in addition to
# {{{ }}} blocks.
ATTACHMENTS = True
# Set to True if you want inline CSS styles instead of classes
INLINESTYLES = False
import sys
from pygments import highlight
from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, TextLexer
from pygments.formatters import HtmlFormatter
from pygments.util import ObjectNotFound
# wrap lines in <span>s so that the Moin-generated line numbers work
class MoinHtmlFormatter(HtmlFormatter):
def wrap(self, source, outfile):
for line in source:
yield 1, '<span class="line">' + line[1] + '</span>'
htmlformatter = MoinHtmlFormatter(noclasses=INLINESTYLES)
textlexer = TextLexer()
codeid = [0]
class Parser:
"""
MoinMoin Pygments parser.
"""
if ATTACHMENTS:
extensions = '*'
else:
extensions = []
Dependencies = []
def __init__(self, raw, request, **kw):
self.raw = raw
self.req = request
if "format_args" in kw:
# called from a {{{ }}} block
try:
self.lexer = get_lexer_by_name(kw['format_args'].strip())
except ObjectNotFound:
self.lexer = textlexer
return
if "filename" in kw:
# called for an attachment
filename = kw['filename']
else:
# called for an attachment by an older moin
# HACK: find out the filename by peeking into the execution
# frame which might not always work
try:
frame = sys._getframe(1)
filename = frame.f_locals['filename']
except:
filename = 'x.txt'
try:
self.lexer = get_lexer_for_filename(filename)
except ObjectNotFound:
self.lexer = textlexer
def format(self, formatter):
codeid[0] += 1
id = "pygments_%s" % codeid[0]
w = self.req.write
w(formatter.code_area(1, id, start=1, step=1))
w(formatter.rawHTML(highlight(self.raw, self.lexer, htmlformatter)))
w(formatter.code_area(0, id))
| Python | 0.000001 |
dffcfa42fbf4f200a22b739a0cd24f36317b054c | Fix so that /api/login/ follow the specified api documentation. | api/userview.py | api/userview.py | from flask import abort, request, jsonify, make_response, session
from datetime import datetime, timedelta
from api import app
from api.user import *
@require_csrf_token
@app.route('/api/signup/', methods = ['POST'])
def api_user_signup():
generate_csrf_token(session)
status = {}
httpcode = 200
if 'email' in request.json and 'password' in request.json:
if register_user(request.json['email'], request.json['password']):
status['code'] = 0
status['message'] = 'Success'
else:
status['code'] = 1
status['message'] = 'Could not register user, maybe user already exists?'
else:
status['code'] = 2
status['message'] = 'Missing paramter(s)'
httpcode = 400
return make_response(jsonify({ 'csrf_token': session['csrf'], 'status': status }), httpcode)
@require_csrf_token
@app.route('/api/login/', methods = ['POST'])
def api_user_login():
generate_csrf_token(session)
status = {}
httpcode = 200
if 'email' in request.json and 'password' in request.json:
id = check_user_credentials(request.json['email'], request.json['password'])
if id is not None:
session['id'] = id
session['loggedin'] = True
status['code'] = 0
status['message'] = 'Success'
else:
status['code'] = 4
status['message'] = 'Email and password combination did not match'
else:
status['code'] = 2
status['message'] = 'Missing paramter(s)'
httpcode = 400
return make_response(jsonify({ 'csrf_token': session['csrf'], 'status': status }), httpcode)
@require_csrf_token
@require_authentication
@app.route('/api/logout/', methods = ['POST'])
def api_user_logout():
session.destroy()
response = make_response(jsonify({ 'status': 'OK', 'message': 'User logged out successfully'}), 200)
return response
@app.route('/api/')
def api_root():
generate_csrf_token(session)
status = {'code': 0, 'message': 'Sucess'}
response = make_response(jsonify({'csrf_token': session['csrf'], 'status': status}), 200)
return response
| from flask import abort, request, jsonify, make_response, session
from datetime import datetime, timedelta
from api import app
from api.user import *
@require_csrf_token
@app.route('/api/signup/', methods = ['POST'])
def api_user_signup():
generate_csrf_token(session)
status = {}
httpcode = 200
if 'email' in request.json and 'password' in request.json:
if register_user(request.json['email'], request.json['password']):
status['code'] = 0
status['message'] = 'Success'
else:
status['code'] = 1
status['message'] = 'Could not register user, maybe user already exists?'
else:
status['code'] = 2
status['message'] = 'Missing paramter(s)'
httpcode = 400
return make_response(jsonify({ 'csrf_token': session['csrf'], 'status': status }), httpcode)
@require_csrf_token
@app.route('/api/login/', methods = ['POST'])
def api_user_login():
if 'email' in request.json and 'password' in request.json:
id = check_user_credentials(request.json['email'], request.json['password'])
if id is not None:
session = app.open_session(request)
session['id'] = id
session['loggedin'] = True
response = make_response(jsonify({ 'status': 'OK', 'message': 'User logged in successfully'}), 200)
app.save_session(session, response)
else:
response = make_response(jsonify({ 'status': 'FAIL', 'message': 'Email and password combination did not match'}), 200)
return response
return make_response(jsonify({ 'status': 'BAD REQUEST', 'message': 'Missing parameters'}), 400)
@require_csrf_token
@require_authentication
@app.route('/api/logout/', methods = ['POST'])
def api_user_logout():
session.destroy()
response = make_response(jsonify({ 'status': 'OK', 'message': 'User logged out successfully'}), 200)
return response
@app.route('/api/')
def api_root():
generate_csrf_token(session)
status = {'code': 0, 'message': 'Sucess'}
response = make_response(jsonify({'csrf_token': session['csrf'], 'status': status}), 200)
return response
| Python | 0 |
bfb4ba8cb863d80cdd558ebad25f630fef5dc190 | Stop to use the __future__ module. | oslo_middleware/debug.py | oslo_middleware/debug.py | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Debug middleware"""
import sys
import webob.dec
from oslo_middleware import base
class Debug(base.ConfigurableMiddleware):
"""Helper class that returns debug information.
Can be inserted into any WSGI application chain to get information about
the request and response.
"""
@webob.dec.wsgify
def __call__(self, req):
print(("*" * 40) + " REQUEST ENVIRON")
for key, value in req.environ.items():
print(key, "=", value)
print()
resp = req.get_response(self.application)
print(("*" * 40) + " RESPONSE HEADERS")
for (key, value) in resp.headers.items():
print(key, "=", value)
print()
resp.app_iter = self.print_generator(resp.app_iter)
return resp
@staticmethod
def print_generator(app_iter):
"""Prints the contents of a wrapper string iterator when iterated."""
print(("*" * 40) + " BODY")
for part in app_iter:
sys.stdout.write(part)
sys.stdout.flush()
yield part
print()
| # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Debug middleware"""
from __future__ import print_function
import sys
import webob.dec
from oslo_middleware import base
class Debug(base.ConfigurableMiddleware):
"""Helper class that returns debug information.
Can be inserted into any WSGI application chain to get information about
the request and response.
"""
@webob.dec.wsgify
def __call__(self, req):
print(("*" * 40) + " REQUEST ENVIRON")
for key, value in req.environ.items():
print(key, "=", value)
print()
resp = req.get_response(self.application)
print(("*" * 40) + " RESPONSE HEADERS")
for (key, value) in resp.headers.items():
print(key, "=", value)
print()
resp.app_iter = self.print_generator(resp.app_iter)
return resp
@staticmethod
def print_generator(app_iter):
"""Prints the contents of a wrapper string iterator when iterated."""
print(("*" * 40) + " BODY")
for part in app_iter:
sys.stdout.write(part)
sys.stdout.flush()
yield part
print()
| Python | 0.999929 |
2e8a2d2ac8b90a0806bea90c25d9b06ce8cc3a96 | check roi for each layer | dicom_tools/myroi2roi.py | dicom_tools/myroi2roi.py | import numpy as np
from skimage.measure import grid_points_in_poly
from dicom_tools.roiFileHandler import roiFileHandler
def myroi2roi(myrois, shape, verbose=False):
if verbose:
print("myroi2roi: called \n")
outroi = np.full(shape,False,dtype=bool)
if len(myrois) != len(outroi):
print("error: len rois = ",len(rois)," but len dicom=",len(outroi))
for myroi, layer in zip(myrois,outroi):
if not myroi is None:
layer = grid_points_in_poly(layer.shape, myroi['points'])
if verbose:
print("myroi2roi: returning \n")
return outroi
| import numpy as np
from skimage.measure import grid_points_in_poly
from dicom_tools.roiFileHandler import roiFileHandler
def myroi2roi(myrois, shape, verbose=False):
if verbose:
print("myroi2roi: called \n")
outroi = np.full(shape,False,dtype=bool)
if len(myrois) != len(outroi):
print("error: len rois = ",len(rois)," but len dicom=",len(outroi))
for myroi, layer in zip(myrois,outroi):
layer = grid_points_in_poly(layer.shape, myroi['points'])
if verbose:
print("myroi2roi: returning \n")
return outroi
| Python | 0 |
d671acb2c8a381fa49e98c50d967122738ebbd7b | Remove extra slash problem | app/comicbook.py | app/comicbook.py | import os
import sys
from natsort import natsorted
class comicbook(object):
filelist = []
def __init__(self, name, filename=None):
self.name = name
self.path = (name + "/").replace('//','/')
self.localpath = "res/" + name + "/"
self.filename = filename
self.generate_filelist()
if self.filename is None or not self.filename:
self.filename = self.filelist[0]
def generate_filelist(self):
x, self.dirlist, self.filelist = os.walk(self.localpath).next()
#Filter out system files
self.filelist = [ v for v in self.filelist if not v.startswith('.') ]
self.filelist = [ v for v in self.filelist if not v.startswith('thumbs.db') ]
self.filelist = [ v for v in self.filelist if not v.startswith('desktop.ini') ]
self.filelist = [ v for v in self.filelist if not v.endswith('.txt') ]
self.filelist = [ v for v in self.filelist if not v.startswith('README') ]
def thumbnail_path(self):
try:
return self.filelist[0]
except IndexError:
return None
def thumbnail_mimetype(self):
return 'image/jpeg'
def get_prev_image(self):
try:
idx = self.current_image() - 1
if idx >= 0:
return os.path.join('/', self.name, self.filelist[idx])
else:
return os.path.join('/', self.name)
except IndexError:
print "get_prev_image - IndexError"
return os.path.join('/', self.name)
def get_next_image(self):
try:
idx = self.current_image() + 1
if idx < len(self.filelist):
return os.path.join('/', self.name, self.filelist[idx])
else:
return os.path.join('/', self.name, '..')
return
except IndexError:
return os.path.join('/', self.name, '..')
def get_image(self):
try:
return os.path.join('/', self.name, self.filename, 'img')
except IndexError:
return None
def current_image(self):
return self.filelist.index(self.filename) | import os
import sys
from natsort import natsorted
class comicbook(object):
filelist = []
def __init__(self, name, filename=None):
self.name = name
self.path = name + "/"
self.localpath = "res/" + name + "/"
self.filename = filename
self.generate_filelist()
if self.filename is None or not self.filename:
self.filename = self.filelist[0]
self.path.replace('//','/')
def generate_filelist(self):
x, self.dirlist, self.filelist = os.walk(self.localpath).next()
#Filter out system files
self.filelist = [ v for v in self.filelist if not v.startswith('.') ]
self.filelist = [ v for v in self.filelist if not v.startswith('thumbs.db') ]
self.filelist = [ v for v in self.filelist if not v.startswith('desktop.ini') ]
self.filelist = [ v for v in self.filelist if not v.endswith('.txt') ]
self.filelist = [ v for v in self.filelist if not v.startswith('README') ]
def thumbnail_path(self):
try:
return self.filelist[0]
except IndexError:
return None
def thumbnail_mimetype(self):
return 'image/jpeg'
def get_prev_image(self):
try:
idx = self.current_image() - 1
if idx >= 0:
return os.path.join('/', self.name, self.filelist[idx])
else:
return os.path.join('/', self.name)
except IndexError:
print "get_prev_image - IndexError"
return os.path.join('/', self.name)
def get_next_image(self):
try:
idx = self.current_image() + 1
if idx < len(self.filelist):
return os.path.join('/', self.name, self.filelist[idx])
else:
return os.path.join('/', self.name, '..')
return
except IndexError:
return os.path.join('/', self.name, '..')
def get_image(self):
try:
return os.path.join('/', self.name, self.filename, 'img')
except IndexError:
return None
def current_image(self):
return self.filelist.index(self.filename) | Python | 0.000009 |
ba5edd102ddd53f2e95da8b673bf14bdd72dc012 | Add quotes around user-provided values | pw_cli/py/pw_cli/argument_types.py | pw_cli/py/pw_cli/argument_types.py | # Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Defines argument types for use with argparse."""
import argparse
import logging
from pathlib import Path
def directory(arg: str) -> Path:
path = Path(arg)
if path.is_dir():
return path.resolve()
raise argparse.ArgumentTypeError(f'"{path}" is not a directory')
def log_level(arg: str) -> int:
try:
return getattr(logging, arg.upper())
except AttributeError:
raise argparse.ArgumentTypeError(
f'"{arg.upper()}" is not a valid log level')
| # Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Defines argument types for use with argparse."""
import argparse
import logging
from pathlib import Path
def directory(arg: str) -> Path:
path = Path(arg)
if path.is_dir():
return path.resolve()
raise argparse.ArgumentTypeError(f'{path} is not a directory')
def log_level(arg: str) -> int:
try:
return getattr(logging, arg.upper())
except AttributeError:
raise argparse.ArgumentTypeError(
f'{arg.upper()} is not a valid log level')
| Python | 0.000001 |
cf36f9792886c6dd67b37c29af4a5d510b924902 | Use UTF-8 by default instead of locale encoding. | pybtex/io.py | pybtex/io.py | # Copyright (c) 2009, 2010, 2011, 2012 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Unicode-aware IO routines."""
from __future__ import absolute_import
import io
import sys
from os import path, environ
from pybtex.exceptions import PybtexError
from pybtex.kpathsea import kpsewhich
def get_default_encoding():
return 'UTF-8'
def get_stream_encoding(stream):
stream_encoding = getattr(stream, 'encoding', None)
return stream_encoding or get_default_encoding()
def _open_existing(opener, filename, mode, locate, **kwargs):
if not path.isfile(filename):
found = locate(filename)
if found:
filename = found
return opener(filename, mode, **kwargs)
def _open_or_create(opener, filename, mode, environ, **kwargs):
try:
return opener(filename, mode, **kwargs)
except EnvironmentError, error:
if 'TEXMFOUTPUT' in environ:
new_filename = path.join(environ['TEXMFOUTPUT'], filename)
try:
return opener(new_filename, mode, **kwargs)
except EnvironmentError:
pass
raise error
def _open(opener, filename, mode, **kwargs):
write_mode = 'w' in mode
try:
if write_mode:
return _open_or_create(opener, filename, mode, environ, **kwargs)
else:
return _open_existing(opener, filename, mode, locate=kpsewhich, **kwargs)
except EnvironmentError, error:
raise PybtexError("unable to open %s. %s" % (filename, error.strerror))
def open_raw(filename, mode='rb', encoding=None):
return _open(io.open, filename, mode)
def open_unicode(filename, mode='r', encoding=None):
if encoding is None:
encoding = get_default_encoding()
return _open(io.open, filename, mode, encoding=encoding)
def reader(stream, encoding=None, errors='strict'):
if encoding is None:
encoding = get_stream_encoding(stream)
return io.TextIOWrapper(stream, encoding=encoding, errors=errors)
stdout = sys.stdout
stderr = sys.stderr
| # Copyright (c) 2009, 2010, 2011, 2012 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Unicode-aware IO routines."""
from __future__ import absolute_import
import io
import sys
import locale
from os import path, environ
from pybtex.exceptions import PybtexError
from pybtex.kpathsea import kpsewhich
def get_default_encoding():
try:
locale_encoding = locale.getpreferredencoding()
except locale.Error:
locale_encoding = None
return locale_encoding or 'UTF-8'
def get_stream_encoding(stream):
stream_encoding = getattr(stream, 'encoding', None)
return stream_encoding or get_default_encoding()
def _open_existing(opener, filename, mode, locate, **kwargs):
if not path.isfile(filename):
found = locate(filename)
if found:
filename = found
return opener(filename, mode, **kwargs)
def _open_or_create(opener, filename, mode, environ, **kwargs):
try:
return opener(filename, mode, **kwargs)
except EnvironmentError, error:
if 'TEXMFOUTPUT' in environ:
new_filename = path.join(environ['TEXMFOUTPUT'], filename)
try:
return opener(new_filename, mode, **kwargs)
except EnvironmentError:
pass
raise error
def _open(opener, filename, mode, **kwargs):
write_mode = 'w' in mode
try:
if write_mode:
return _open_or_create(opener, filename, mode, environ, **kwargs)
else:
return _open_existing(opener, filename, mode, locate=kpsewhich, **kwargs)
except EnvironmentError, error:
raise PybtexError("unable to open %s. %s" % (filename, error.strerror))
def open_raw(filename, mode='rb', encoding=None):
return _open(io.open, filename, mode)
def open_unicode(filename, mode='r', encoding=None):
if encoding is None:
encoding = get_default_encoding()
return _open(io.open, filename, mode, encoding=encoding)
def reader(stream, encoding=None, errors='strict'):
if encoding is None:
encoding = get_stream_encoding(stream)
return io.TextIOWrapper(stream, encoding=encoding, errors=errors)
stdout = sys.stdout
stderr = sys.stderr
| Python | 0 |
4283aaf601482ee2512c642101f587ffe3515ef9 | raise if user doesn't exist in forgotten password form | authentification/forms.py | authentification/forms.py | from django import forms
from django.contrib.auth.models import User
class ForgottenPasswordForm(forms.Form):
username = forms.CharField(label="Identifiant")
email = forms.EmailField(label="Votre adresse e-mail")
def clean_username(self):
username = self.cleaned_data['username']
if not User.objects.filter(username=username).exists():
raise forms.ValidationError("Cet utilisateur n'existe pas")
return username
| from django import forms
class ForgottenPasswordForm(forms.Form):
username = forms.CharField(label="Identifiant")
email = forms.EmailField(label="Votre adresse e-mail")
| Python | 0.000001 |
f55c0bd8db7850668582bb7b47da4d0acafabc46 | Optimize imports | digitalmanifesto/urls.py | digitalmanifesto/urls.py | from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from . import views
urlpatterns = [
# Admin
url(r'^jet/', include('jet.urls', 'jet')), # Django JET URLS
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.IndexView.as_view(), name='index'),
# Simple template views
url(r'^about/$', TemplateView.as_view(template_name='about.html'), name='about'),
url(r'^contact/$', TemplateView.as_view(template_name='contact.html'), name='contact'),
url(r'^news/$', TemplateView.as_view(template_name='news.html'), name='news'),
url(r'^projects-we-like/$', TemplateView.as_view(template_name='projects_we_like.html'), name='projects'),
url(r'^resources/$', TemplateView.as_view(template_name='resources.html'), name='resources'),
url(r'^twitterbot/$', TemplateView.as_view(template_name='twitterbot.html'), name='twitterbot'),
url(r'^manifestos/', include('manifestos.urls', namespace='manifestos')),
url(r'^annotations/', include('annotations.urls', namespace='annotations')),
# Let's Encrypt challenge
url(r'^\.well-known/acme-challenge/(?P<key>.*)/', views.acme_challenge),
# allauth
url(r'^accounts/', include('allauth.urls')),
]
| from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from . import views
urlpatterns = [
# Admin
url(r'^jet/', include('jet.urls', 'jet')), # Django JET URLS
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.IndexView.as_view(), name='index'),
# Simple template views
url(r'^about/$', TemplateView.as_view(template_name='about.html'), name='about'),
url(r'^contact/$', TemplateView.as_view(template_name='contact.html'), name='contact'),
url(r'^news/$', TemplateView.as_view(template_name='news.html'), name='news'),
url(r'^projects-we-like/$', TemplateView.as_view(template_name='projects_we_like.html'), name='projects'),
url(r'^resources/$', TemplateView.as_view(template_name='resources.html'), name='resources'),
url(r'^twitterbot/$', TemplateView.as_view(template_name='twitterbot.html'), name='twitterbot'),
url(r'^manifestos/', include('manifestos.urls', namespace='manifestos')),
url(r'^annotations/', include('annotations.urls', namespace='annotations')),
# Let's Encrypt challenge
url(r'^\.well-known/acme-challenge/(?P<key>.*)/', views.acme_challenge),
# allauth
url(r'^accounts/', include('allauth.urls')),
]
| Python | 0.000002 |
a5f3ad5700aa766fec99a184bae1d732d0754491 | Support of HACluster added | src/reactive/murano_handlers.py | src/reactive/murano_handlers.py | # Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import charms_openstack.charm as charm
import charms.reactive as reactive
import charmhelpers.core.hookenv as hookenv
# This charm's library contains all of the handler code associated with
# sdn_charm
import charm.openstack.murano as murano # noqa
charm.use_defaults(
'charm.installed',
'amqp.connected',
'shared-db.connected',
'identity-service.connected',
'identity-service.available', # enables SSL support
'config.changed',
'update-status')
COMPLETE_INTERFACE_STATES = [
'shared-db.available',
'identity-service.available',
'amqp.available',
]
@reactive.when(*COMPLETE_INTERFACE_STATES)
def render_config(*args):
"""Render the configuration for charm when all the interfaces are
available.
"""
with charm.provide_charm_instance() as charm_class:
charm_class.render_with_interfaces(args)
charm_class.assess_status()
murano.render_novarc_config(args)
reactive.set_state('config.rendered')
# db_sync checks if sync has been done so rerunning is a noop
@reactive.when('config.rendered')
def init_db():
with charm.provide_charm_instance() as charm_class:
charm_class.db_sync()
@reactive.when_not('io-murano.imported')
@reactive.when(*COMPLETE_INTERFACE_STATES)
@reactive.when('config.rendered')
def import_io_murano(*args):
murano.import_io_murano()
reactive.set_state('io-murano.imported')
@reactive.when('ha.connected')
def cluster_connected(hacluster):
murano.configure_ha_resources(hacluster)
murano.assess_status()
@reactive.hook('upgrade-charm')
def upgrade_charm():
murano.install()
| # Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import charms_openstack.charm as charm
import charms.reactive as reactive
import charmhelpers.core.hookenv as hookenv
# This charm's library contains all of the handler code associated with
# sdn_charm
import charm.openstack.murano as murano # noqa
charm.use_defaults(
'charm.installed',
'amqp.connected',
'shared-db.connected',
'identity-service.connected',
'identity-service.available', # enables SSL support
'config.changed',
'update-status')
COMPLETE_INTERFACE_STATES = [
'shared-db.available',
'identity-service.available',
'amqp.available',
]
@reactive.when(*COMPLETE_INTERFACE_STATES)
def render_config(*args):
"""Render the configuration for charm when all the interfaces are
available.
"""
with charm.provide_charm_instance() as charm_class:
charm_class.render_with_interfaces(args)
charm_class.assess_status()
murano.render_novarc_config(args)
reactive.set_state('config.rendered')
# db_sync checks if sync has been done so rerunning is a noop
@reactive.when('config.rendered')
def init_db():
with charm.provide_charm_instance() as charm_class:
charm_class.db_sync()
@reactive.when_not('io-murano.imported')
@reactive.when(*COMPLETE_INTERFACE_STATES)
@reactive.when('config.rendered')
def import_io_murano(*args):
murano.import_io_murano()
reactive.set_state('io-murano.imported')
| Python | 0 |
837aea7b39662a8285df01522461c51ce0f91de5 | fix suppressions - don't overwrite config setting with super() - be explict in debug log what is going on with filters/suppression | nymms/reactor/handlers/Handler.py | nymms/reactor/handlers/Handler.py | import logging
logger = logging.getLogger(__name__)
from nymms.utils import load_object_from_string
class Handler(object):
def __init__(self, config=None):
self.config = config
self._filters = []
self._suppression_enabled = self.config.pop(
'suppression_enabled',
False)
logger.debug("%s suppression enabled is %s",
self.__class__.__name__,
self._suppression_enabled)
def _load_filters(self):
filters = self.config.get('filters', [])
if filters:
for filter_string in filters:
logging.debug("Adding Filter %s to Handler %s.", filter_string,
self.__class__.__name__)
f = load_object_from_string(filter_string)
self._filters.append(f)
else:
logger.debug("No filters configured for Handler %s.",
self.__class__.__name__)
def _filter(self, result, previous_state):
""" Runs the result & previous state through all the configured
filters. A filter should be a callable that accepts two arguments:
the result and the previous state. It should return either True or
False regarding whether the message should be allowed through the
handler.
"""
if not self._filters:
self._load_filters()
# Assume that no filters means just that - that the result is
# not to be filtered for the handler.
if not self._filters:
return True
results = {}
for f in self._filters:
try:
results[f.__name__] = f(result, previous_state)
except Exception as e:
logger.exception("Filter %s on Handler %s had an unhandled "
"exception. Ignoring: %s",
f.__name__, self.__class__.__name__, e)
continue
logger.debug("Handler %s filter results: %s", self.__class__.__name__,
results)
return all(results.values())
def _process(self, result, previous_state, suppressor_check_method):
"""First checks to see if the given event should be filtered and
then sees if it passes the suppressor (if enabled). If pass, then
call the subclass's process() method"""
if self._filter(result, previous_state):
if not self.suppression_enabled:
logger.debug("Handler %s filters returned true" +
" for %s", self.__class__.__name__, result.id)
return self.process(result, previous_state)
elif self.suppression_enabled and suppressor_check_method(result):
logger.debug("Handler %s filters & suppressor returned true" +
" for %s, reacting.",
self.__class__.__name__, result.id)
return self.process(result, previous_state)
else:
logger.debug("Handler %s suppressor returned false" +
" for %s, skipping.",
self.__class__.__name__, result.id)
else:
logger.debug("Handler %s filters returned false for %s, skipping.",
self.__class__.__name__, result.id)
def process(self, result, previous_state):
""" Meant to be overridden by subclasses - should handle the actual
process of reacting to a result.
"""
raise NotImplementedError
@property
def suppressions_enabled(self):
"""Are suppressions enabled for this handler?"""
return self._suppression_enabled
| import logging
logger = logging.getLogger(__name__)
from nymms.utils import load_object_from_string
class Handler(object):
def __init__(self, config=None):
self.config = config
self._filters = []
self._suppressions_enabled = self.config.pop('suppressions_enabled',
False)
def _load_filters(self):
filters = self.config.get('filters', [])
if filters:
for filter_string in filters:
logging.debug("Adding Filter %s to Handler %s.", filter_string,
self.__class__.__name__)
f = load_object_from_string(filter_string)
self._filters.append(f)
else:
logger.debug("No filters configured for Handler %s.",
self.__class__.__name__)
def _filter(self, result, previous_state):
""" Runs the result & previous state through all the configured
filters. A filter should be a callable that accepts two arguments:
the result and the previous state. It should return either True or
False regarding whether the message should be allowed through the
handler.
"""
if not self._filters:
self._load_filters()
# Assume that no filters means just that - that the result is
# not to be filtered for the handler.
if not self._filters:
return True
results = {}
for f in self._filters:
try:
results[f.__name__] = f(result, previous_state)
except Exception as e:
logger.exception("Filter %s on Handler %s had an unhandled "
"exception. Ignoring:",
f.__name__, self.__class__.__name__)
continue
logger.debug("Handler %s filter results: %s", self.__class__.__name__,
results)
return all(results.values())
def _process(self, result, previous_state, suppressor_check_method):
"""First checks to see if the given event should be filtered and
then sees if it passes the suppressor (if enabled). If pass, then
call the subclass's process() method"""
if self._filter(result, previous_state):
if self.suppressions_enabled and suppressor_check_method(result):
logger.debug("Handler %s filters & suppressors returned true" +
" for %s, reacting.", self.__class__.__name__, result.id)
return self.process(result, previous_state)
logger.debug("Handler %s filters returned false for %s, skipping.",
self.__class__.__name__, result.id)
def process(self, result, previous_state):
""" Meant to be overridden by subclasses - should handle the actual
process of reacting to a result.
"""
raise NotImplementedError
@property
def suppressions_enabled(self):
"""Are suppressions enabled for this handler?"""
return self._suppressions_enabled
| Python | 0.000001 |
f5d948c159a4d398a1347220a4fcd4315c725b04 | Fix issue handling Image as a paint source | pyrtist/pyrtist/lib2d/primitive.py | pyrtist/pyrtist/lib2d/primitive.py | __all__ = ('Primitive',)
from .core_types import Point
from .style import Stroke, Fill, StrokeStyle, Style
from .pattern import Pattern
from .path import Path
from .base import Taker, combination
from .cmd_stream import CmdStream, Cmd
from .window import Window
from .bbox import BBox
class Primitive(Taker):
def __init__(self, *args):
super(Primitive, self).__init__()
self.style = Style()
self.take(*args)
def build_path(self):
return []
@combination(Pattern, Primitive)
@combination(StrokeStyle, Primitive)
@combination(Style, Primitive)
def style_at_primitive(style, primitive):
primitive.style.take(style)
@combination(Primitive, Path)
def primitive_at_path(primitive, path):
path.cmd_stream.take(*primitive.build_path())
@combination(Primitive, CmdStream)
def primitive_at_cmd_stream(primitive, cmd_stream):
cmd_stream.take(Path(primitive), primitive.style)
@combination(Primitive, Window)
def primitive_at_window(primitive, window):
window.take(CmdStream(primitive))
@combination(Primitive, Stroke)
def primitive_at_stroke(primitive, stroke):
stroke.take(Path(primitive))
@combination(Primitive, Fill)
def primitive_at_fill(primitive, fill):
fill.take(Path(primitive))
@combination(Primitive, BBox)
def primitive_at_bbox(primitive, bbox):
bbox.take(Window(primitive))
| __all__ = ('Primitive',)
from .core_types import Point
from .style import Color, Stroke, Fill, StrokeStyle, Style
from .path import Path
from .base import Taker, combination
from .cmd_stream import CmdStream, Cmd
from .window import Window
from .bbox import BBox
class Primitive(Taker):
def __init__(self, *args):
super(Primitive, self).__init__()
self.style = Style()
self.take(*args)
def build_path(self):
return []
@combination(Color, Primitive)
@combination(StrokeStyle, Primitive)
@combination(Style, Primitive)
def style_at_primitive(style, primitive):
primitive.style.take(style)
@combination(Primitive, Path)
def primitive_at_path(primitive, path):
path.cmd_stream.take(*primitive.build_path())
@combination(Primitive, CmdStream)
def primitive_at_cmd_stream(primitive, cmd_stream):
cmd_stream.take(Path(primitive), primitive.style)
@combination(Primitive, Window)
def primitive_at_window(primitive, window):
window.take(CmdStream(primitive))
@combination(Primitive, Stroke)
def primitive_at_stroke(primitive, stroke):
stroke.take(Path(primitive))
@combination(Primitive, Fill)
def primitive_at_fill(primitive, fill):
fill.take(Path(primitive))
@combination(Primitive, BBox)
def primitive_at_bbox(primitive, bbox):
bbox.take(Window(primitive))
| Python | 0.000001 |
91064ed8d7c6b6ab7eb8bb9da94136ba34e8a2e5 | use length validator on description | abilian/sbe/apps/communities/forms.py | abilian/sbe/apps/communities/forms.py | import imghdr
from string import strip
import PIL
from flask import request
from flask.ext.babel import lazy_gettext as _l, gettext as _
from wtforms.fields import BooleanField, TextField, TextAreaField
from wtforms.validators import ValidationError, required
from abilian.web.forms import Form
from abilian.web.forms.fields import Select2Field, FileField
from abilian.web.forms.widgets import TextArea, ImageInput, BooleanWidget
from abilian.web.forms.validators import length
from .models import Community
class CommunityForm(Form):
name = TextField(label=_l(u"Name"), validators=[required()])
description = TextAreaField(
label=_l(u"Description"),
validators=[required(), length(max=500)],
widget=TextArea(resizeable="vertical"),)
image = FileField(label=_l('Image'), widget=ImageInput(width=65, height=65),
allow_delete=False)
type = Select2Field(label=_(u"Type"), validators=[required()],
filters=(strip,),
choices=[(_l(u'informative'), 'informative'),
(_l(u'participative'), 'participative')])
has_documents = BooleanField(label=_l(u"Has documents"), widget=BooleanWidget(on_off_mode=True))
has_wiki = BooleanField(label=_l(u"Has a wiki"), widget=BooleanWidget(on_off_mode=True))
has_forum = BooleanField(label=_l(u"Has a forum"), widget=BooleanWidget(on_off_mode=True))
is_crm_visible = BooleanField(label=_l(u'visible in CRM'), widget=BooleanWidget(on_off_mode=True))
def validate_name(self, field):
name = field.data = field.data.strip()
if name and field.object_data:
# form is bound to an existing object, name is not empty
if name != field.object_data:
# name changed: check for duplicates
if len(list(Community.query.filter(Community.name==name).values('id'))) > 0:
raise ValidationError(_(u"A community with this name already exists"))
def validate_description(self, field):
field.data = field.data.strip()
# FIXME: code duplicated from the user edit form (UserProfileForm).
# Needs to be refactored.
def validate_image(self, field):
data = request.files.get('image')
if not data:
return
filename = data.filename
valid = any(map(filename.lower().endswith, ('.png', '.jpg', '.jpeg')))
if not valid:
raise ValidationError(_(u'Only PNG or JPG image files are accepted'))
img_type = imghdr.what('ignored', data.read())
if not img_type in ('png', 'jpeg'):
raise ValidationError(_(u'Only PNG or JPG image files are accepted'))
data.stream.seek(0)
try:
# check this is actually an image file
im = PIL.Image.open(data.stream)
im.load()
except:
raise ValidationError(_(u'Could not decode image file'))
data.stream.seek(0)
field.data = data
| import imghdr
from string import strip
import PIL
from flask import request
from flask.ext.babel import lazy_gettext as _l, gettext as _
from wtforms.fields import BooleanField, TextField, TextAreaField
from wtforms.validators import ValidationError, required
from abilian.web.forms import Form
from abilian.web.forms.fields import Select2Field, FileField
from abilian.web.forms.widgets import TextArea, ImageInput, BooleanWidget
from .models import Community
class CommunityForm(Form):
name = TextField(label=_l(u"Name"), validators=[required()])
description = TextAreaField(label=_l(u"Description"), validators=[required()],
widget=TextArea(resizeable="vertical"))
image = FileField(label=_l('Image'), widget=ImageInput(width=65, height=65),
allow_delete=False)
type = Select2Field(label=_(u"Type"), validators=[required()],
filters=(strip,),
choices=[(_l(u'informative'), 'informative'),
(_l(u'participative'), 'participative')])
has_documents = BooleanField(label=_l(u"Has documents"), widget=BooleanWidget(on_off_mode=True))
has_wiki = BooleanField(label=_l(u"Has a wiki"), widget=BooleanWidget(on_off_mode=True))
has_forum = BooleanField(label=_l(u"Has a forum"), widget=BooleanWidget(on_off_mode=True))
is_crm_visible = BooleanField(label=_l(u'visible in CRM'), widget=BooleanWidget(on_off_mode=True))
def validate_name(self, field):
name = field.data = field.data.strip()
if name and field.object_data:
# form is bound to an existing object, name is not empty
if name != field.object_data:
# name changed: check for duplicates
if len(list(Community.query.filter(Community.name==name).values('id'))) > 0:
raise ValidationError(_(u"A community with this name already exists"))
def validate_description(self, field):
field.data = field.data.strip()
# FIXME: code duplicated from the user edit form (UserProfileForm).
# Needs to be refactored.
def validate_image(self, field):
data = request.files.get('image')
if not data:
return
filename = data.filename
valid = any(map(filename.lower().endswith, ('.png', '.jpg', '.jpeg')))
if not valid:
raise ValidationError(_(u'Only PNG or JPG image files are accepted'))
img_type = imghdr.what('ignored', data.read())
if not img_type in ('png', 'jpeg'):
raise ValidationError(_(u'Only PNG or JPG image files are accepted'))
data.stream.seek(0)
try:
# check this is actually an image file
im = PIL.Image.open(data.stream)
im.load()
except:
raise ValidationError(_(u'Could not decode image file'))
data.stream.seek(0)
field.data = data
| Python | 0.000002 |
848e12dde9685cf1c6e44178bb0f3eff9d4203be | Fix migrations | actistream/migrations/0001_initial.py | actistream/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-09-15 20:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Activity',
options={'verbose_name': 'activity', 'verbose_name_plural': 'activities'},
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('actor_id', models.PositiveIntegerField()),
('target_id', models.PositiveIntegerField()),
('action_object_id', models.PositiveIntegerField()),
('type', models.CharField(max_length=100, verbose_name='type')),
('flags', models.BigIntegerField(default=0)),
('extra_data', models.TextField(blank=True, verbose_name='additional data')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('action_object_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
('actor_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
('target_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='Notice',
options={'verbose_name': 'notice', 'verbose_name_plural': 'notices'},
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(verbose_name='created at')),
('read_at', models.DateTimeField(blank=True, null=True, verbose_name='read at')),
('archived_at', models.DateTimeField(blank=True, null=True, verbose_name='archived at')),
('activity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='actistream.Activity')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-09-15 20:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Activity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('actor_id', models.PositiveIntegerField()),
('target_id', models.PositiveIntegerField()),
('action_object_id', models.PositiveIntegerField()),
('type', models.CharField(max_length=100, verbose_name='type')),
('flags', models.BigIntegerField(default=0)),
('extra_data', models.TextField(blank=True, verbose_name='additional data')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('action_object_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
('actor_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
('target_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='Notice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(verbose_name='created at')),
('read_at', models.DateTimeField(blank=True, null=True, verbose_name='read at')),
('archived_at', models.DateTimeField(blank=True, null=True, verbose_name='archived at')),
('activity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='actistream.Activity')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
),
]
| Python | 0.000006 |
9783844b1597598fad833794b4b291fce49438d4 | Send alerts as one mail | app/hr/tasks.py | app/hr/tasks.py | from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from hr.utils import blacklist_values
from django.contrib.auth.models import User
from django.core.mail import send_mail
@task(ignore_result=True)
def blacklist_check():
log = blacklist_check.get_logger()
users = User.objects.filter(is_active=True)
alerts = 0
msg = ""
for u in users:
if u.groups.count() > 0:
# Has groups
val = blacklist_values(u)
if len(val) > 0:
alerts += 1
# Report possible issue
log.warning("Suspect User: %s, %s entries found: %s" % (u.username, len(val), val))
blstr = ""
for i in val:
blstr = "%s%s - %s - %s\n" % (blstr, i.get_type_display(), i.value, i.reason)
msg += "\n\n-----\n\n"
msg += "Suspect User found: %s\nGroups: %s\nBlacklist Items:\n\n%s" % (u.username, ", ".join(u.groups.all().values_list('name', flat=True)), blstr)
if alerts:
send_mail('Automated blacklist checker alerts', msg, 'blacklist@pleaseignore.com', ['abuse@pleaseignore.com'])
| from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from hr.utils import blacklist_values
from django.contrib.auth.models import User
from django.core.mail import send_mail
@task(ignore_result=True)
def blacklist_check():
log = blacklist_check.get_logger()
users = User.objects.filter(is_active=True)
for u in users:
if u.groups.count() > 0:
# Has groups
val = blacklist_values(u)
if len(val) > 0:
# Report possible issue
log.warning("Suspect User: %s, %s entries found: %s" % (u.username, len(val), val))
blstr = ""
for i in val:
blstr = "%s%s - %s - %s\n" % (blstr, i.get_type_display(), i.value, i.reason)
msg = "Suspect User found: %s\nGroups: %s\nBlacklist Items:\n\n%s" % (u.username, ", ".join(u.groups.all().values_list('name', flat=True)), blstr)
send_mail('Automated blacklist checker alert - %s' % u.username, msg, 'blacklist@pleaseignore.com', ['abuse@pleaseignore.com'])
| Python | 0 |
0dfd0ec2beb069d56d7b81911bb468199565672a | remove print | python/ccxtpro/base/fast_client.py | python/ccxtpro/base/fast_client.py | """A faster version of aiohttp's websocket client that uses select and other optimizations"""
import asyncio
import collections
from ccxt import NetworkError
from ccxtpro.base.aiohttp_client import AiohttpClient
class FastClient(AiohttpClient):
transport = None
def __init__(self, url, on_message_callback, on_error_callback, on_close_callback, config={}):
super(FastClient, self).__init__(url, on_message_callback, on_error_callback, on_close_callback, config)
# instead of using the deque in aiohttp we implement our own for speed
# https://github.com/aio-libs/aiohttp/blob/1d296d549050aa335ef542421b8b7dad788246d5/aiohttp/streams.py#L534
self.stack = collections.deque()
def receive_loop(self):
def handler():
if not self.stack:
return
message = self.stack.popleft()
self.handle_message(message)
self.asyncio_loop.call_soon(handler)
def feed_data(message, size):
if not self.stack:
self.asyncio_loop.call_soon(handler)
self.stack.append(message)
def feed_eof():
self.on_error(NetworkError(1006))
def wrapper(func):
def parse_frame(buf):
while len(self.stack) > 1:
self.handle_message(self.stack.popleft())
return func(buf)
return parse_frame
connection = self.connection._conn
if connection.closed:
# connection got terminated after the connection was made and before the receive loop ran
self.on_close(1006)
return
self.transport = connection.transport
ws_reader = connection.protocol._payload_parser
ws_reader.parse_frame = wrapper(ws_reader.parse_frame)
ws_reader.queue.feed_data = feed_data
ws_reader.queue.feed_eof = feed_eof
# return a future so super class won't complain
return asyncio.sleep(0)
def reset(self, error):
super(FastClient, self).reset(error)
self.stack.clear()
if self.transport:
self.transport.abort()
| """A faster version of aiohttp's websocket client that uses select and other optimizations"""
import asyncio
import collections
from ccxt import NetworkError
from ccxtpro.base.aiohttp_client import AiohttpClient
class FastClient(AiohttpClient):
transport = None
def __init__(self, url, on_message_callback, on_error_callback, on_close_callback, config={}):
super(FastClient, self).__init__(url, on_message_callback, on_error_callback, on_close_callback, config)
# instead of using the deque in aiohttp we implement our own for speed
# https://github.com/aio-libs/aiohttp/blob/1d296d549050aa335ef542421b8b7dad788246d5/aiohttp/streams.py#L534
self.stack = collections.deque()
def receive_loop(self):
def handler():
if not self.stack:
return
message = self.stack.popleft()
self.handle_message(message)
self.asyncio_loop.call_soon(handler)
def feed_data(message, size):
if not self.stack:
self.asyncio_loop.call_soon(handler)
self.stack.append(message)
def feed_eof():
self.on_error(NetworkError(1006))
def wrapper(func):
def parse_frame(buf):
while len(self.stack) > 1:
self.handle_message(self.stack.popleft())
return func(buf)
return parse_frame
connection = self.connection._conn
if connection.closed:
# connection got terminated after the connection was made and before the receive loop ran
self.on_close(1006)
return
self.transport = connection.transport
ws_reader = connection.protocol._payload_parser
ws_reader.parse_frame = wrapper(ws_reader.parse_frame)
ws_reader.queue.feed_data = feed_data
ws_reader.queue.feed_eof = feed_eof
# return a future so super class won't complain
return asyncio.sleep(0)
def reset(self, error):
super(FastClient, self).reset(error)
self.stack.clear()
if self.transport:
self.transport.abort()
def resolve(self, result, message_hash=None):
super(FastClient, self).resolve(result, message_hash)
print('resolved', message_hash)
| Python | 0.000793 |
d51adea3d19578da9165202696d80c44949c43f6 | remove debug level logging from i2tun.py | i2tun/i2tun.py | i2tun/i2tun.py | #!/usr/bin/env python3.4
from i2p.i2cp import client as i2cp
import pytun
import threading
import logging
import struct
import select
class IPV4Handler(i2cp.I2CPHandler):
def __init__(self, remote_dest, our_addr, their_addr, mtu):
self._them = remote_dest
self._iface = pytun.TunTapDevice()
self._iface.addr = our_addr
self._iface.dstaddr = their_addr
self._iface.mtu = mtu
self._iface.up()
def session_made(self, con):
print ('we are {}'.format(con.dest.base32()))
self.con = con
threading.Thread(target=self.mainloop, args=(con,)).start()
def mainloop(self, con):
while True:
print ('read')
buff = self._iface.read(self._iface.mtu)
print ('send')
self.con.send_dsa_dgram(self._them, buff)
def got_dgram(self, dest, data, srcport, dstport):
if dest.base32() == self._them:
self._iface.write(data)
def main():
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('--remote', required=True, type=str)
ap.add_argument('--our-addr', required=True, type=str)
ap.add_argument('--their-addr', required=True, type=str)
ap.add_argument('--mtu', default=3600 ,type=int)
ap.add_argument('--i2cp-host', default='127.0.0.1', type=str)
ap.add_argument('--i2cp-port', default=7654, type=int)
args = ap.parse_args()
handler = IPV4Handler(args.remote, args.our_addr, args.their_addr, args.mtu)
con = i2cp.Connection(handler, i2cp_host=args.i2cp_host, i2cp_port=args.i2cp_port)
con.open()
con.start()
if __name__ == '__main__':
main()
| #!/usr/bin/env python3.4
from i2p.i2cp import client as i2cp
import pytun
import threading
import logging
import struct
import select
class IPV4Handler(i2cp.I2CPHandler):
def __init__(self, remote_dest, our_addr, their_addr, mtu):
self._them = remote_dest
self._iface = pytun.TunTapDevice()
self._iface.addr = our_addr
self._iface.dstaddr = their_addr
self._iface.mtu = mtu
self._iface.up()
def session_made(self, con):
print ('we are {}'.format(con.dest.base32()))
self.con = con
threading.Thread(target=self.mainloop, args=(con,)).start()
def mainloop(self, con):
while True:
print ('read')
buff = self._iface.read(self._iface.mtu)
print ('send')
self.con.send_dgram(self._them, buff)
def got_dgram(self, dest, data, srcport, dstport):
if dest.base32() == self._them:
self._iface.write(data)
def main():
import argparse
import logging
logging.basicConfig(level=logging.DEBUG)
ap = argparse.ArgumentParser()
ap.add_argument('--remote', required=True, type=str)
ap.add_argument('--our-addr', required=True, type=str)
ap.add_argument('--their-addr', required=True, type=str)
ap.add_argument('--mtu', default=3600 ,type=int)
ap.add_argument('--i2cp-host', default='127.0.0.1', type=str)
ap.add_argument('--i2cp-port', default=7654, type=int)
args = ap.parse_args()
handler = IPV4Handler(args.remote, args.our_addr, args.their_addr, args.mtu)
con = i2cp.Connection(handler, i2cp_host=args.i2cp_host, i2cp_port=args.i2cp_port)
con.open()
con.start()
if __name__ == '__main__':
main()
| Python | 0.000001 |
abe4f0577baef3dbbceb06fc6d569d2bec69257e | Fix internal import | tensorflow_probability/python/internal/backend/jax/rewrite.py | tensorflow_probability/python/internal/backend/jax/rewrite.py | # Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for NP->JAX."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
# Dependency imports
from absl import app
def main(argv):
contents = open(argv[1]).read()
contents = contents.replace(
"tensorflow_probability.python.internal.backend.numpy",
"tensorflow_probability.python.internal.backend.jax")
contents = contents.replace(
"from tensorflow_probability.python.internal.backend import numpy",
"from tensorflow_probability.python.internal.backend import jax")
contents = contents.replace("scipy.linalg", "jax.scipy.linalg")
contents = contents.replace("scipy.special", "jax.scipy.special")
contents = contents.replace(
"MODE_JAX = False",
"MODE_JAX = True\n"
"from jax.config import config; config.update('jax_enable_x64', True)")
contents = contents.replace("\nimport numpy as np",
"\nimport numpy as onp\nimport jax.numpy as np")
contents = contents.replace("np.bool", "onp.bool")
contents = contents.replace("np.dtype", "onp.dtype")
contents = contents.replace("np.generic", "onp.generic")
contents = contents.replace("np.broadcast", "onp.broadcast")
contents = contents.replace("JAX_MODE = False", "JAX_MODE = True")
print(contents)
if __name__ == "__main__":
app.run(main)
| # Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for NP->JAX."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
from absl import app
def main(argv):
contents = open(argv[1]).read()
contents = contents.replace(
"tensorflow_probability.python.internal.backend.numpy",
"tensorflow_probability.python.internal.backend.jax")
contents = contents.replace(
"from tensorflow_probability.python.internal.backend import numpy",
"from tensorflow_probability.python.internal.backend import jax")
contents = contents.replace("scipy.linalg", "jax.scipy.linalg")
contents = contents.replace("scipy.special", "jax.scipy.special")
contents = contents.replace(
"MODE_JAX = False",
"MODE_JAX = True\n"
"from jax.config import config; config.update('jax_enable_x64', True)")
contents = contents.replace("\nimport numpy as np",
"\nimport numpy as onp\nimport jax.numpy as np")
contents = contents.replace("np.bool", "onp.bool")
contents = contents.replace("np.dtype", "onp.dtype")
contents = contents.replace("np.generic", "onp.generic")
contents = contents.replace("np.broadcast", "onp.broadcast")
contents = contents.replace("JAX_MODE = False", "JAX_MODE = True")
print(contents)
if __name__ == "__main__":
app.run(main)
| Python | 0.000012 |
695e171d1eca459075ad03adf0712f5b7427cac4 | Add get_or_404() to __all__ | flask_simon/__init__.py | flask_simon/__init__.py | from flask import abort
from pymongo import uri_parser
import simon.connection
__all__ = ('Simon', 'get_or_404')
class Simon(object):
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
if 'simon' not in app.extensions:
app.extensions['simon'] = {}
if 'MONGO_URI' in app.config:
parsed = uri_parser.parse_uri(app.config['MONGO_URI'])
if not parsed.get('database'):
raise ValueError('MONGO_URI does not contain a database name.')
app.config['MONGO_DBNAME'] = parsed['database']
app.config['MONGO_USERNAME'] = parsed['username']
app.config['MONGO_PASSWORD'] = parsed['password']
app.config['REPLICA_SET'] = parsed['options'].get('replica_set')
host = app.config['MONGO_URI']
name = app.config['MONGO_DBNAME']
username = app.config['MONGO_USERNAME']
password = app.config['MONGO_PASSWORD']
replica_set = app.config['REPLICA_SET']
simon.connection.connect(host_or_uri=host, name=name,
username=username, password=password,
replica_set=replica_set)
else:
host = app.config['HOST'] = 'localhost'
name = app.config['MONGO_DBNAME'] = app.name
simon.connection.connect(host=host, name=name)
def get_or_404(model, *qs, **fields):
try:
return model.get(*qs, **fields)
except (model.NoDocumentFound, model.MultipleDocumentsFound):
abort(404)
| __all__ = ('Simon',)
import simon.connection
from flask import abort
from pymongo import uri_parser
class Simon(object):
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
if 'simon' not in app.extensions:
app.extensions['simon'] = {}
if 'MONGO_URI' in app.config:
parsed = uri_parser.parse_uri(app.config['MONGO_URI'])
if not parsed.get('database'):
raise ValueError('MONGO_URI does not contain a database name.')
app.config['MONGO_DBNAME'] = parsed['database']
app.config['MONGO_USERNAME'] = parsed['username']
app.config['MONGO_PASSWORD'] = parsed['password']
app.config['REPLICA_SET'] = parsed['options'].get('replica_set')
host = app.config['MONGO_URI']
name = app.config['MONGO_DBNAME']
username = app.config['MONGO_USERNAME']
password = app.config['MONGO_PASSWORD']
replica_set = app.config['REPLICA_SET']
simon.connection.connect(host_or_uri=host, name=name,
username=username, password=password,
replica_set=replica_set)
else:
host = app.config['HOST'] = 'localhost'
name = app.config['MONGO_DBNAME'] = app.name
simon.connection.connect(host=host, name=name)
def get_or_404(model, *qs, **fields):
try:
return model.get(*qs, **fields)
except (model.NoDocumentFound, model.MultipleDocumentsFound):
abort(404)
| Python | 0 |
acdb6bbba1d6114f6ccf9dfc3307905fc88e17bb | Put the updated format into the Cryomagnetics device test. | tests/unit/test_devices/test_abstract_cryomagnetics_device.py | tests/unit/test_devices/test_abstract_cryomagnetics_device.py | """
Contains unit tests for :mod:`mr_freeze.devices.abstract_cryomagnetics_device`
"""
import unittest
from mr_freeze.devices.abstract_cryomagnetics_device import \
AbstractCryomagneticsDevice
class ConcreteCryomagneticsDevice(AbstractCryomagneticsDevice):
was_read_called = False
data_to_read = None
written_message = None
was_write_called = False
def __init__(self):
pass
@property
def terminator(self):
return self._terminator
@terminator.setter
def terminator(self, terminator):
self._terminator = terminator
def read(self, *args, **kwargs):
self.was_read_called = True
return self.data_to_read
def write(self, message):
self.written_message = message
self.was_write_called = True
def reset(self):
self.was_read_called = False
self.data_to_read = None
self.written_message = None
self.was_write_called = False
class TestAbstractCryomagneticsDevice(unittest.TestCase):
def setUp(self):
self.device = ConcreteCryomagneticsDevice()
def tearDown(self):
self.device.reset()
class TestQuery(TestAbstractCryomagneticsDevice):
command = "enter"
expected_response = "data"
data_to_read = "%s\r\n%s\r\n" % (
command, expected_response
)
def setUp(self):
TestAbstractCryomagneticsDevice.setUp(self)
self.device.data_to_read = self.data_to_read
def test_query(self):
self.assertEqual(
self.expected_response,
self.device.query(self.command)
)
self.assertTrue(
self.device.was_write_called
)
self.assertTrue(
self.device.was_read_called
)
class TestParseQuery(TestAbstractCryomagneticsDevice):
command = "Testing"
data_format = "%s\r\n%s\r\n"
def test_command_not_echoed_command(self):
bad_echo = "String1"
self.assertNotEqual(self.command, bad_echo)
data_to_return = self.data_format % (bad_echo, "Response")
with self.assertRaises(RuntimeError):
self.device.parse_query(self.command, data_to_return)
def test_command_bad_response(self):
data_to_read = "%s%s" % (self.command, "response")
with self.assertRaises(RuntimeError):
self.device.parse_query(self.command, data_to_read)
| """
Contains unit tests for :mod:`mr_freeze.devices.abstract_cryomagnetics_device`
"""
import unittest
from mr_freeze.devices.abstract_cryomagnetics_device import \
AbstractCryomagneticsDevice
class ConcreteCryomagneticsDevice(AbstractCryomagneticsDevice):
was_read_called = False
data_to_read = None
written_message = None
was_write_called = False
def __init__(self):
pass
@property
def terminator(self):
return self._terminator
@terminator.setter
def terminator(self, terminator):
self._terminator = terminator
def read(self, *args, **kwargs):
self.was_read_called = True
return self.data_to_read
def write(self, message):
self.written_message = message
self.was_write_called = True
def reset(self):
self.was_read_called = False
self.data_to_read = None
self.written_message = None
self.was_write_called = False
class TestAbstractCryomagneticsDevice(unittest.TestCase):
def setUp(self):
self.device = ConcreteCryomagneticsDevice()
def tearDown(self):
self.device.reset()
class TestQuery(TestAbstractCryomagneticsDevice):
command = "enter"
expected_response = "data"
data_to_read = "%s\r\n%s" % (
command, expected_response
)
def setUp(self):
TestAbstractCryomagneticsDevice.setUp(self)
self.device.data_to_read = self.data_to_read
def test_query(self):
self.assertEqual(
self.expected_response,
self.device.query(self.command)
)
self.assertTrue(
self.device.was_write_called
)
self.assertTrue(
self.device.was_read_called
)
class TestParseQuery(TestAbstractCryomagneticsDevice):
command = "Testing"
data_format = "%s\r\n%s"
def test_command_not_echoed_command(self):
bad_echo = "String1"
self.assertNotEqual(self.command, bad_echo)
data_to_return = self.data_format % (bad_echo, "Response")
with self.assertRaises(RuntimeError):
self.device.parse_query(self.command, data_to_return)
def test_command_bad_response(self):
data_to_read = "%s%s" % (self.command, "response")
with self.assertRaises(RuntimeError):
self.device.parse_query(self.command, data_to_read)
| Python | 0 |
24ee61ecf5767d10b2fb92acc5d0217ffbfb3834 | Update get_branches.py | Group8/get_branches.py | Group8/get_branches.py | ny branches we have
#print(branches) # this shows all branches in a list
#print(branches_posi)
from pyfbsdk import *
import math
'''
This file is to read all branches of both target and source skeleton
This should be using motion-builder
I used People.FBX as a testcase
'''
def get_banch(parents, children, index, branches):
parents.append(children.Name)
# if there is no children, append this branch to branches
if len(children.Children) == 0:
branches.append(parents)
# if there is a children, then go to the child
elif len(children.Children) == 1:
parents = get_banch(parents, children.Children[0], index+1, branches)
# if there are several leaves, then search each leaf
else:
for i in range(len(children.Children)):
new = []
new = get_banch(parents[:index+1], children.Children[i], index+1, branches)
return parents
def get_branches(root):
branches = []
if len(root.Children) > 0:
# you need to check len(root.Children)
for i in range(len(root.Children)): # this is to stop the loop
branch = []
branch.append(root.Name) # skeleton[0] -> root
# initialize the node and get its children
parents = branch[:len(branch)]
children = root.Children[i]
# start the loop to find all leaves
# the initial index may be wrong, you'd better check it.
branch = get_banch(parents, children, 1, branches)
return branches
def get_branches_posi(branches, file_name):
out = open(file_name,"w")
branches_posi = []
node = FBVector3d()
for b in branches:
bran_posi = []
for name in b:
n = FBFindModelByLabelName(name)
n.GetVector(node, FBModelTransformationType.kModelTranslation)
bran_posi.append(node)
out.write(repr(node[0]) + " ")
out.write(repr(node[1]) + " ")
out.write(repr(node[2]) + '\n')
out.write('------------------------------------\n')
branches_posi.append(bran_posi)
out.close()
return branches_posi
# Chose the node that has the highest betweeness
#root = FBFindModelByLabelName('PMD_Kristoff__summer_')
root = FBFindModelByLabelName('Bip01')
branches = get_branches(root)
branches_posi = get_branches_posi(branches, "1.txt")
print(len(branches)) # this tells you how many branches we have
#print(branches) # this shows all branches in a list
root2 = FBFindModelByLabelName('PMD_Kristoff__summer_')
branches2 = get_branches(root2)
branches_posi2 = get_branches_posi(branches2, "2.txt")
print(len(branches2))
| from pyfbsdk import *
import math
'''
This file is to read all branches of both target and source skeleton
This should be using motion-builder
I used People.FBX as a testcase
'''
def get_banch(parents, children, index, branches):
parents.append(children.Name)
# if there is no children, append this branch to branches
if len(children.Children) == 0:
branches.append(parents)
# if there is a children, then go to the child
elif len(children.Children) == 1:
parents = get_banch(parents, children.Children[0], index+1, branches)
# if there are several leaves, then search each leaf
else:
for i in range(len(children.Children)):
new = []
new = get_banch(parents[:index+1], children.Children[i], index+1, branches)
return parents
def get_branches(root):
branches = []
if len(root.Children) > 0:
# you need to check len(root.Children)
for i in range(len(root.Children)): # this is to stop the loop
branch = []
branch.append(root.Name) # skeleton[0] -> root
# initialize the node and get its children
parents = branch[:len(branch)]
children = root.Children[i]
# start the loop to find all leaves
# the initial index may be wrong, you'd better check it.
branch = get_banch(parents, children, 1, branches)
#print()
#print("\n\n\n\n")
return branches
def get_branches_posi(branches):
branches_posi = []
node = FBVector3d()
for b in branches:
bran_posi = []
for name in b:
n = FBFindModelByLabelName(name)
n.GetVector(node, FBModelTransformationType.kModelTranslation)
bran_posi.append(node)
branches_posi.append(bran_posi)
return branches_posi
# Chose the node that has the highest betweeness
root = FBFindModelByLabelName('Bip01')
branches = get_branches(root)
branches_posi = get_branches_posi(branches)
#print(len(branches)) # this tells you how many branches we have
#print(branches) # this shows all branches in a list
#print(branches_posi)
| Python | 0 |
c9df16f35af2cf51a4612eb76fab59819a32df64 | Handle TypeError in is_float | src/sentry/utils/__init__.py | src/sentry/utils/__init__.py | """
sentry.utils
~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.utils.encoding import force_unicode
import six
def to_unicode(value):
try:
value = six.text_type(force_unicode(value))
except (UnicodeEncodeError, UnicodeDecodeError):
value = '(Error decoding value)'
except Exception: # in some cases we get a different exception
try:
value = str(repr(type(value)))
except Exception:
value = '(Error decoding value)'
return value
def is_float(var):
try:
float(var)
except (TypeError, ValueError):
return False
return True
| """
sentry.utils
~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.utils.encoding import force_unicode
import six
def to_unicode(value):
try:
value = six.text_type(force_unicode(value))
except (UnicodeEncodeError, UnicodeDecodeError):
value = '(Error decoding value)'
except Exception: # in some cases we get a different exception
try:
value = str(repr(type(value)))
except Exception:
value = '(Error decoding value)'
return value
def is_float(var):
try:
float(var)
except ValueError:
return False
return True
| Python | 0.003037 |
1dff7ff24903f470bf1e1d325c6eb88590b9fa0f | Make generator to get last bot utterance | rasa/core/channels/twilio_voice.py | rasa/core/channels/twilio_voice.py | import inspect
from sanic import Blueprint, response
from sanic.request import Request
from sanic.response import HTTPResponse
from twilio.twiml.voice_response import VoiceResponse, Gather
from typing import Text, Callable, Awaitable, List
from rasa.shared.core.events import BotUttered
from rasa.core.channels.channel import (
InputChannel,
CollectingOutputChannel,
UserMessage,
)
class TwilioVoiceInput(InputChannel):
@classmethod
def name(cls) -> Text:
"""Name of your custom channel."""
return "twilio_voice"
def blueprint(
self, on_new_message: Callable[[UserMessage], Awaitable[None]]
) -> Blueprint:
twilio_voice_webhook = Blueprint(
"custom_webhook_{}".format(type(self).__name__),
inspect.getmodule(self).__name__,
)
@twilio_voice_webhook.route("/", methods=["GET"])
async def health(request: Request) -> HTTPResponse:
return response.json({"status": "ok"})
@twilio_voice_webhook.route("/webhook", methods=["POST"])
async def receive(request: Request):
sender_id = request.form.get("From") # method to get sender_id
text = request.form.get("SpeechResult") # method to fetch text
input_channel = self.name() # method to fetch input channel
call_status = request.form.get("CallStatus") # method to fetch call status
call_sid = request.form.get("CallSid") # Get the call identifier
collector = TwilioVoiceCollectingOutputChannel()
# Provide an initial greeting to answer the user's call.
if (text is None) & (call_status == "ringing"):
text = "hello"
# determine the response.
if text is not None:
await on_new_message(
UserMessage(
text,
collector,
sender_id,
input_channel=input_channel,
)
)
# Parse the text responses and build the Twilio VoiceResponse.
respond_segments = []
for message in collector.messages:
respond_segments.append(message["text"])
if "buttons" in message:
for button in message["buttons"]:
respond_segments.append(button["title"])
twilio_response = build_twilio_voice_response(respond_segments)
return response.text(str(twilio_response), content_type="text/xml")
# If the user doesn't respond to the previous message resend the last message.
elif text is None:
# Get last user utterance from tracker.
tracker = request.app.agent.tracker_store.retrieve(sender_id)
last_response = next((e for e in reversed(tracker.events) if isinstance(e, BotUttered)), None)
# If no previous utterance found say something generic.
if last_response is None:
last_response = "I didn't get that."
else:
last_response = last_response.text
twilio_response = build_twilio_voice_response([last_response])
return response.text(str(twilio_response), content_type="text/xml")
return twilio_voice_webhook
class TwilioVoiceCollectingOutputChannel(CollectingOutputChannel):
"""Output channel that collects send messages in a list
(doesn't send them anywhere, just collects them)."""
@classmethod
def name(cls) -> Text:
return "twilio_voice"
def build_twilio_voice_response(messages: List[Text]) -> VoiceResponse:
"""Builds the Twilio Voice Response object."""
vr = VoiceResponse()
gather = Gather(
input="speech",
action="/webhooks/twilio_voice/webhook",
actionOnEmptyResult=True,
speechTimeout="auto",
)
# Add pauses between messages.
# Add a listener to the last message to listen for user response.
for i, message in enumerate(messages):
if i + 1 == len(messages):
gather.say(message)
vr.append(gather)
else:
vr.say(message)
vr.pause(length=1)
return vr
| import inspect
from sanic import Blueprint, response
from sanic.request import Request
from sanic.response import HTTPResponse
from twilio.twiml.voice_response import VoiceResponse, Gather
from typing import Text, Callable, Awaitable, List
from rasa.core.channels.channel import (
InputChannel,
CollectingOutputChannel,
UserMessage,
)
class TwilioVoiceInput(InputChannel):
@classmethod
def name(cls) -> Text:
"""Name of your custom channel."""
return "twilio_voice"
def blueprint(
self, on_new_message: Callable[[UserMessage], Awaitable[None]]
) -> Blueprint:
twilio_voice_webhook = Blueprint(
"custom_webhook_{}".format(type(self).__name__),
inspect.getmodule(self).__name__,
)
@twilio_voice_webhook.route("/", methods=["GET"])
async def health(request: Request) -> HTTPResponse:
return response.json({"status": "ok"})
@twilio_voice_webhook.route("/webhook", methods=["POST"])
async def receive(request: Request):
sender_id = request.form.get("From") # method to get sender_id
text = request.form.get("SpeechResult") # method to fetch text
input_channel = self.name() # method to fetch input channel
call_status = request.form.get("CallStatus") # method to fetch call status
call_sid = request.form.get("CallSid") # Get the call identifier
collector = TwilioVoiceCollectingOutputChannel()
# Provide an initial greeting to answer the user's call.
if (text is None) & (call_status == "ringing"):
text = "hello"
# If the user doesn't respond to the previous message resend the last message.
elif text is None:
# Get last user utterance from tracker.
tracker = request.app.agent.tracker_store.retrieve(sender_id)
text = tracker.current_state()["latest_message"]["text"]
# determine the response.
if text is not None:
await on_new_message(
UserMessage(
text,
collector,
sender_id,
input_channel=input_channel,
)
)
# Parse the text responses and build the Twilio VoiceResponse.
respond_segments = []
for message in collector.messages:
respond_segments.append(message["text"])
if "buttons" in message:
for button in message["buttons"]:
respond_segments.append(button["title"])
twilio_response = build_twilio_voice_response(respond_segments)
return response.text(str(twilio_response), content_type="text/xml")
return twilio_voice_webhook
class TwilioVoiceCollectingOutputChannel(CollectingOutputChannel):
"""Output channel that collects send messages in a list
(doesn't send them anywhere, just collects them)."""
@classmethod
def name(cls) -> Text:
return "twilio_voice"
def build_twilio_voice_response(messages: List[Text]) -> VoiceResponse:
"""Builds the Twilio Voice Response object."""
vr = VoiceResponse()
gather = Gather(
input="speech",
action="/webhooks/twilio_voice/webhook",
actionOnEmptyResult=True,
speechTimeout="auto",
)
# Add pauses between messages.
# Add a listener to the last message to listen for user response.
for i, message in enumerate(messages):
if i + 1 == len(messages):
gather.say(message)
vr.append(gather)
else:
vr.say(message)
vr.pause(length=1)
return vr
| Python | 0.000011 |
7fb89e4dbe2cbed4ef37e13073d4fa3f2a650049 | Check for missing part thumbnails when the server first runs | InvenTree/part/apps.py | InvenTree/part/apps.py | from __future__ import unicode_literals
import os
from django.db.utils import OperationalError, ProgrammingError
from django.apps import AppConfig
from django.conf import settings
class PartConfig(AppConfig):
name = 'part'
def ready(self):
"""
This function is called whenever the Part app is loaded.
"""
self.generate_part_thumbnails()
def generate_part_thumbnails(self):
from .models import Part
print("Checking Part image thumbnails")
try:
for part in Part.objects.all():
if part.image:
url = part.image.thumbnail.name
#if url.startswith('/'):
# url = url[1:]
loc = os.path.join(settings.MEDIA_ROOT, url)
if not os.path.exists(loc):
print("InvenTree: Generating thumbnail for Part '{p}'".format(p=part.name))
part.image.render_variations(replace=False)
except (OperationalError, ProgrammingError):
print("Could not generate Part thumbnails")
| from __future__ import unicode_literals
from django.apps import AppConfig
class PartConfig(AppConfig):
name = 'part'
| Python | 0 |
7f5f10132334c1f6685497d3fff48c2c65617845 | Remove broken URL (#3623) | InvenTree/part/urls.py | InvenTree/part/urls.py | """URL lookup for Part app. Provides URL endpoints for:
- Display / Create / Edit / Delete PartCategory
- Display / Create / Edit / Delete Part
- Create / Edit / Delete PartAttachment
- Display / Create / Edit / Delete SupplierPart
"""
from django.urls import include, re_path
from . import views
part_detail_urls = [
re_path(r'^bom-download/?', views.BomDownload.as_view(), name='bom-download'),
re_path(r'^pricing/', views.PartPricing.as_view(), name='part-pricing'),
re_path(r'^bom-upload/?', views.BomUpload.as_view(), name='upload-bom'),
re_path(r'^qr_code/?', views.PartQRCode.as_view(), name='part-qr'),
# Normal thumbnail with form
re_path(r'^thumb-select/?', views.PartImageSelect.as_view(), name='part-image-select'),
# Any other URLs go to the part detail page
re_path(r'^.*$', views.PartDetail.as_view(), name='part-detail'),
]
category_urls = [
# Category detail views
re_path(r'(?P<pk>\d+)/', views.CategoryDetail.as_view(), name='category-detail'),
]
# URL list for part web interface
part_urls = [
# Upload a part
re_path(r'^import/', views.PartImport.as_view(), name='part-import'),
re_path(r'^import-api/', views.PartImportAjax.as_view(), name='api-part-import'),
# Download a BOM upload template
re_path(r'^bom_template/?', views.BomUploadTemplate.as_view(), name='bom-upload-template'),
# Individual part using pk
re_path(r'^(?P<pk>\d+)/', include(part_detail_urls)),
# Part category
re_path(r'^category/', include(category_urls)),
# Individual part using IPN as slug
re_path(r'^(?P<slug>[-\w]+)/', views.PartDetailFromIPN.as_view(), name='part-detail-from-ipn'),
# Top level part list (display top level parts and categories)
re_path(r'^.*$', views.PartIndex.as_view(), name='part-index'),
]
| """URL lookup for Part app. Provides URL endpoints for:
- Display / Create / Edit / Delete PartCategory
- Display / Create / Edit / Delete Part
- Create / Edit / Delete PartAttachment
- Display / Create / Edit / Delete SupplierPart
"""
from django.urls import include, re_path
from . import views
part_detail_urls = [
re_path(r'^bom-download/?', views.BomDownload.as_view(), name='bom-download'),
re_path(r'^pricing/', views.PartPricing.as_view(), name='part-pricing'),
re_path(r'^bom-upload/?', views.BomUpload.as_view(), name='upload-bom'),
re_path(r'^qr_code/?', views.PartQRCode.as_view(), name='part-qr'),
# Normal thumbnail with form
re_path(r'^thumb-select/?', views.PartImageSelect.as_view(), name='part-image-select'),
# Any other URLs go to the part detail page
re_path(r'^.*$', views.PartDetail.as_view(), name='part-detail'),
]
category_urls = [
# Top level subcategory display
re_path(r'^subcategory/', views.PartIndex.as_view(template_name='part/subcategory.html'), name='category-index-subcategory'),
# Category detail views
re_path(r'(?P<pk>\d+)/', views.CategoryDetail.as_view(), name='category-detail'),
]
# URL list for part web interface
part_urls = [
# Upload a part
re_path(r'^import/', views.PartImport.as_view(), name='part-import'),
re_path(r'^import-api/', views.PartImportAjax.as_view(), name='api-part-import'),
# Download a BOM upload template
re_path(r'^bom_template/?', views.BomUploadTemplate.as_view(), name='bom-upload-template'),
# Individual part using pk
re_path(r'^(?P<pk>\d+)/', include(part_detail_urls)),
# Part category
re_path(r'^category/', include(category_urls)),
# Individual part using IPN as slug
re_path(r'^(?P<slug>[-\w]+)/', views.PartDetailFromIPN.as_view(), name='part-detail-from-ipn'),
# Top level part list (display top level parts and categories)
re_path(r'^.*$', views.PartIndex.as_view(), name='part-index'),
]
| Python | 0 |
d0e31fdb5ec99e91f7b5f7da5b81fc7a391689df | Update django_facebook/admin.py | django_facebook/admin.py | django_facebook/admin.py | from django.contrib import admin
from django.conf import settings
from django.core.urlresolvers import reverse
from django_facebook import admin_actions
from django_facebook import models
class FacebookUserAdmin(admin.ModelAdmin):
list_display = ('user_id', 'name', 'facebook_id',)
search_fields = ('name',)
class FacebookLikeAdmin(admin.ModelAdmin):
list_display = ('user_id', 'name', 'category', 'facebook_id',)
search_fields = ('name',)
filter_fields = ('category',)
class FacebookProfileAdmin(admin.ModelAdmin):
list_display = ('image_', 'user_', 'facebook_name', 'facebook_id',)
raw_id_fields = ('user',)
search_fields = ('facebook_name', 'facebook_id',)
def image_(self, instance):
return """<span style="
background-image: url({0});
background-size: cover;
width: 21px;
height: 21px;
display: inline-block;
outline: 1px solid #DDD;
position: absolute;
margin-top: -3px;
"></span>""".format(
instance.image.url if (instance and instance.image) else ''
)
image_.allow_tags = True
def user_(self, instance):
admin_url = reverse('admin:auth_user_change', args=[instance.user.pk])
return '<a href="{0}">{1}</a>'.format(
admin_url,
instance.user
)
user_.allow_tags = True
def facebook_profile(open_graph_share):
'''
Nicely displayed version of the facebook user
with user id and image and link to facebook :)
'''
user = open_graph_share.user
profile = user.get_profile()
facebook_id = profile.facebook_id
facebook_url = 'http://www.facebook.com/%s/' % facebook_id
link = '<p><a href="%s"><img src="http://graph.facebook.com/%s/picture/?type=large" width="100px" style="float:left"/>%s</a><br/></p>' % (facebook_url, facebook_id, facebook_id)
return link
facebook_profile.allow_tags = True
facebook_profile.short_description = 'Profile'
class OpenGraphShareAdmin(admin.ModelAdmin):
raw_id_fields = ['user']
list_display = ['user', 'action_domain', facebook_profile,
'completed_at', 'error_message']
actions = [admin_actions.retry_open_graph_share,
admin_actions.retry_open_graph_share_for_user]
if settings.AUTH_PROFILE_MODULE == 'django_facebook.FacebookProfile':
admin.site.register(models.FacebookProfile, FacebookProfileAdmin)
admin.site.register(models.FacebookUser, FacebookUserAdmin)
admin.site.register(models.FacebookLike, FacebookLikeAdmin)
admin.site.register(models.OpenGraphShare, OpenGraphShareAdmin)
| from django.contrib import admin
from django.conf import settings
from django.core.urlresolvers import reverse
from django_facebook import admin_actions
from django_facebook import models
class FacebookUserAdmin(admin.ModelAdmin):
list_display = ('user_id', 'name', 'facebook_id',)
search_fields = ('name',)
class FacebookLikeAdmin(admin.ModelAdmin):
list_display = ('user_id', 'name', 'category', 'facebook_id',)
search_fields = ('name',)
filter_fields = ('category',)
class FacebookProfileAdmin(admin.ModelAdmin):
list_display = ('image_', 'user_', 'facebook_name', 'facebook_id',)
raw_id_fields = ('user',)
search_fields = ('facebook_name', 'facebook_id',)
def image_(self, instance):
return """<span style="
background-image: url({0});
background-size: cover;
width: 21px;
height: 21px;
display: inline-block;
outline: 1px solid #DDD;
position: absolute;
margin-top: -3px;
"></span>""".format(
instance.image.url
)
image_.allow_tags = True
def user_(self, instance):
admin_url = reverse('admin:auth_user_change', args=[instance.user.pk])
return '<a href="{0}">{1}</a>'.format(
admin_url,
instance.user
)
user_.allow_tags = True
def facebook_profile(open_graph_share):
'''
Nicely displayed version of the facebook user
with user id and image and link to facebook :)
'''
user = open_graph_share.user
profile = user.get_profile()
facebook_id = profile.facebook_id
facebook_url = 'http://www.facebook.com/%s/' % facebook_id
link = '<p><a href="%s"><img src="http://graph.facebook.com/%s/picture/?type=large" width="100px" style="float:left"/>%s</a><br/></p>' % (facebook_url, facebook_id, facebook_id)
return link
facebook_profile.allow_tags = True
facebook_profile.short_description = 'Profile'
class OpenGraphShareAdmin(admin.ModelAdmin):
raw_id_fields = ['user']
list_display = ['user', 'action_domain', facebook_profile,
'completed_at', 'error_message']
actions = [admin_actions.retry_open_graph_share,
admin_actions.retry_open_graph_share_for_user]
if settings.AUTH_PROFILE_MODULE == 'django_facebook.FacebookProfile':
admin.site.register(models.FacebookProfile, FacebookProfileAdmin)
admin.site.register(models.FacebookUser, FacebookUserAdmin)
admin.site.register(models.FacebookLike, FacebookLikeAdmin)
admin.site.register(models.OpenGraphShare, OpenGraphShareAdmin)
| Python | 0 |
4d1e3e548ee80d4a3ef42ad22506fcb8dd64ef05 | Make TestBackend compatible with Python 2 (Closes: #72) | django_slack/backends.py | django_slack/backends.py | import pprint
import logging
from six.moves import urllib
from django.http.request import QueryDict
from django.utils.module_loading import import_string
from .utils import Backend
from .app_settings import app_settings
logger = logging.getLogger(__name__)
class UrllibBackend(Backend):
def send(self, url, message_data, **kwargs):
qs = QueryDict(mutable=True)
qs.update(message_data)
r = urllib.request.urlopen(urllib.request.Request(
url,
qs.urlencode().encode('utf-8'),
))
result = r.read().decode('utf-8')
self.validate(r.headers['content-type'], result, message_data)
class RequestsBackend(Backend):
def __init__(self):
# Lazily import to avoid dependency
import requests
self.session = requests.Session()
def send(self, url, message_data, **kwargs):
r = self.session.post(url, data=message_data, verify=False)
self.validate(r.headers['Content-Type'], r.text, message_data)
class ConsoleBackend(Backend):
def send(self, url, message_data, **kwargs):
print("I: Slack message:")
pprint.pprint(message_data, indent=4)
print("-" * 79)
class LoggerBackend(Backend):
def send(self, url, message_data, **kwargs):
logger.info(pprint.pformat(message_data, indent=4))
class DisabledBackend(Backend):
def send(self, url, message_data, **kwargs):
pass
class CeleryBackend(Backend):
def __init__(self):
# Lazily import to avoid dependency
from .tasks import send
self._send = send
# Check we can import our specified backend up-front
import_string(app_settings.BACKEND_FOR_QUEUE)()
def send(self, *args, **kwargs):
# Send asynchronously via Celery
self._send.delay(*args, **kwargs)
class TestBackend(Backend):
"""
This backend is for testing.
Before a test, call `reset_messages`, and after a test, call
`retrieve_messages` for a list of all messages that have been sent during
the test.
"""
def __init__(self, *args, **kwargs):
super(TestBackend, self).__init__(*args, **kwargs)
self.reset_messages()
def send(self, url, message_data, **kwargs):
self.messages.append(message_data)
def reset_messages(self):
self.messages = []
def retrieve_messages(self):
messages = self.messages
self.reset_messages()
return messages
# For backwards-compatibility
Urllib2Backend = UrllibBackend
| import pprint
import logging
from six.moves import urllib
from django.http.request import QueryDict
from django.utils.module_loading import import_string
from .utils import Backend
from .app_settings import app_settings
logger = logging.getLogger(__name__)
class UrllibBackend(Backend):
def send(self, url, message_data, **kwargs):
qs = QueryDict(mutable=True)
qs.update(message_data)
r = urllib.request.urlopen(urllib.request.Request(
url,
qs.urlencode().encode('utf-8'),
))
result = r.read().decode('utf-8')
self.validate(r.headers['content-type'], result, message_data)
class RequestsBackend(Backend):
def __init__(self):
# Lazily import to avoid dependency
import requests
self.session = requests.Session()
def send(self, url, message_data, **kwargs):
r = self.session.post(url, data=message_data, verify=False)
self.validate(r.headers['Content-Type'], r.text, message_data)
class ConsoleBackend(Backend):
def send(self, url, message_data, **kwargs):
print("I: Slack message:")
pprint.pprint(message_data, indent=4)
print("-" * 79)
class LoggerBackend(Backend):
def send(self, url, message_data, **kwargs):
logger.info(pprint.pformat(message_data, indent=4))
class DisabledBackend(Backend):
def send(self, url, message_data, **kwargs):
pass
class CeleryBackend(Backend):
def __init__(self):
# Lazily import to avoid dependency
from .tasks import send
self._send = send
# Check we can import our specified backend up-front
import_string(app_settings.BACKEND_FOR_QUEUE)()
def send(self, *args, **kwargs):
# Send asynchronously via Celery
self._send.delay(*args, **kwargs)
class TestBackend(Backend):
"""
This backend is for testing.
Before a test, call `reset_messages`, and after a test, call
`retrieve_messages` for a list of all messages that have been sent during
the test.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.reset_messages()
def send(self, url, message_data, **kwargs):
self.messages.append(message_data)
def reset_messages(self):
self.messages = []
def retrieve_messages(self):
messages = self.messages
self.reset_messages()
return messages
# For backwards-compatibility
Urllib2Backend = UrllibBackend
| Python | 0 |
d06e5e51695d40b8248d5854454b7d291b76bafd | Fix a few first run issues. | observy/notifications/__init__.py | observy/notifications/__init__.py | #!/usr/bin/python
#
# Copyright 2016 Eldon Ahrold
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import glob
import importlib
import socket
from datetime import datetime as date
from notifications import *
__version__ = '0.1'
class NotificationManager(object):
''' Notification Manager class responsible for running
any defined notification class in the subdirectory.
'''
def __init__(self, errors):
super(NotificationManager, self).__init__()
self.errors = errors
def send(self):
for c in self.notificationClasses():
print "sending to %s" % c
notifier = c(self.errors)
notifier.send()
def notificationClasses(self):
paths = glob.glob('./notifications/*Notification.py')
classes =[]
for p in paths:
class_name = os.path.splitext(os.path.basename(p))[0]
NotificationClass = getattr(importlib.import_module(
'%s.%s' % (__name__,class_name)), class_name)
classes.append(NotificationClass)
return classes
@staticmethod
def webhooks_file():
return os.path.join(os.path.dirname(os.path.realpath(__file__)),'webhooks.conf.json')
@staticmethod
def register_webhook(name, webhook):
NotificationManager.modify_webhooks(name, webhook, True)
@staticmethod
def remove_webhook(name, webhook):
NotificationManager.modify_webhooks(name, webhook, False)
@staticmethod
def modify_webhooks(name, webhook, add):
webhook_file = NotificationManager.webhooks_file()
if os.path.isfile(webhook_file):
data = open(webhook_file, 'r').read()
all_webhooks = json.loads(data)
else:
all_webhooks = {}
registered_webhooks = set(all_webhooks.get(name, []))
if add:
registered_webhooks.add(webhook)
elif webhook in registered_webhooks:
registered_webhooks.remove(webhook)
all_webhooks[name] = list(registered_webhooks)
file = open(webhook_file, 'w+')
data = json.dumps(all_webhooks)
file.write(data)
file.close()
class Notifications(object):
"""Base class for service notifications"""
errors = None
def __init__(self, errors):
super(Notifications, self).__init__()
self.errors = errors
def webhooks(self, name):
#url webhooks
path = os.path.join()
data = open('webhooks.conf.json', 'r').read()
return json.dumps().get(name,[]);
def send(self):
"""Send Notification"""
raise('Subclass must implement')
def host_info(self):
hostname = socket.gethostname()
return {
"host": hostname,
"ip" : socket.gethostbyname(hostname),
}
def timestamp(self):
return date.now()
class HookableNotifications(Notifications):
"""Notification class that uses webhooks"""
_webhook_service_name = ''
def __init__(self, errors):
super(HookableNotifications, self).__init__(errors)
def _all_hooks(self):
data = open(NotificationManager.webhooks_file(), 'r').read()
return json.loads(data)
def webhooks(self):
return self._all_hooks().get(self._webhook_service_name, []);
| #!/usr/bin/python
#
# Copyright 2016 Eldon Ahrold
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import glob
import importlib
import socket
from datetime import datetime as date
from notifications import *
__version__ = '0.1'
class NotificationManager(object):
''' Notification Manager class responsible for running
any defined notification class in the subdirectory.
'''
def __init__(self, errors):
super(NotificationManager, self).__init__()
self.errors = errors
def send(self):
for c in self.notificationClasses():
print "sending to %s" % c
notifier = c(self.errors)
notifier.send()
def notificationClasses(self):
paths = glob.glob('./notifications/*Notification.py')
classes =[]
for p in paths:
class_name = os.path.splitext(os.path.basename(p))[0]
NotificationClass = getattr(importlib.import_module(
'%s.%s' % (__name__,class_name)), class_name)
classes.append(NotificationClass)
return classes
@staticmethod
def webhooks_file():
return os.path.join(os.path.dirname(os.path.realpath(__file__)),'webhooks.conf.json')
@staticmethod
def register_webhook(name, webhook):
NotificationManager.modify_webhooks(name, webhook, True)
@staticmethod
def remove_webhook(name, webhook):
NotificationManager.modify_webhooks(name, webhook, False)
@staticmethod
def modify_webhooks(name, webhook, add):
webhook_file = NotificationManager.webhooks_file()
data = open(webhook_file, 'r').read()
all_webhooks = json.loads(data)
registered_webhooks = set(all_webhooks.get(name, []))
if add:
registered_webhooks.add(webhook)
elif webhook in registered_webhooks:
registered_webhooks.remove(webhook)
all_webhooks[name] = list(registered_webhooks)
file = open(webhook_file, 'w')
data = json.dumps(all_webhooks)
file.write(data)
file.close()
class Notifications(object):
"""Base class for service notifications"""
errors = None
def __init__(self, errors):
super(Notifications, self).__init__()
self.errors = errors
def webhooks(self, name):
#url webhooks
path = os.path.join()
data = open('webhooks.conf.json', 'r').read()
return json.dumps().get(name,[]);
def send(self):
"""Send Notification"""
raise('Subclass must implement')
def host_info(self):
hostname = socket.gethostname()
return {
"host": hostname,
"ip" : socket.gethostbyname(hostname),
}
def timestamp(self):
return date.now()
class HookableNotifications(Notifications):
"""Notification class that uses webhooks"""
_webhook_service_name = ''
def __init__(self, errors):
super(HookableNotifications, self).__init__(errors)
def _all_hooks(self):
data = open(NotificationManager.webhooks_file(), 'r').read()
return json.loads(data)
def webhooks(self):
return self._all_hooks().get(self._webhook_service_name, []);
| Python | 0 |
dbf736ba66fe6b530bfe3d9d503caa2e24ee8f01 | Make /config more CORS-y | synapse/rest/media/v1/config_resource.py | synapse/rest/media/v1/config_resource.py | # -*- coding: utf-8 -*-
# Copyright 2018 Will Hunt <will@half-shot.uk>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from synapse.http.server import respond_with_json, wrap_json_request_handler, set_cors_headers
class MediaConfigResource(Resource):
isLeaf = True
def __init__(self, hs):
Resource.__init__(self)
config = hs.get_config()
self.clock = hs.get_clock()
self.auth = hs.get_auth()
self.limits_dict = {
"m.upload.size": config.max_upload_size,
}
def render_GET(self, request):
self._async_render_GET(request)
return NOT_DONE_YET
@wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
set_cors_headers(request)
yield self.auth.get_user_by_req(request)
respond_with_json(request, 200, self.limits_dict)
def render_OPTIONS(self, request):
set_cors_headers(request)
respond_with_json(request, 200, {}, send_cors=True)
return NOT_DONE_YET
| # -*- coding: utf-8 -*-
# Copyright 2018 Will Hunt <will@half-shot.uk>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from synapse.http.server import respond_with_json, wrap_json_request_handler
class MediaConfigResource(Resource):
isLeaf = True
def __init__(self, hs):
Resource.__init__(self)
config = hs.get_config()
self.clock = hs.get_clock()
self.auth = hs.get_auth()
self.limits_dict = {
"m.upload.size": config.max_upload_size,
}
def render_GET(self, request):
self._async_render_GET(request)
return NOT_DONE_YET
@wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
yield self.auth.get_user_by_req(request)
respond_with_json(request, 200, self.limits_dict)
def render_OPTIONS(self, request):
respond_with_json(request, 200, {}, send_cors=True)
return NOT_DONE_YET
| Python | 0 |
1a00800940b64fe33bbba22eb33da14df84de1a1 | Fix broken TPShim | nupic/research/TP_shim.py | nupic/research/TP_shim.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
A shim for the TP class that transparently implements TemporalMemory,
for use with OPF.
"""
import numpy
from nupic.research.temporal_memory import TemporalMemory
class TPShim(TemporalMemory):
"""
TP => Temporal Memory shim class.
"""
def __init__(self,
numberOfCols=500,
cellsPerColumn=10,
initialPerm=0.11,
connectedPerm=0.50,
minThreshold=8,
newSynapseCount=15,
permanenceInc=0.10,
permanenceDec=0.10,
permanenceMax=1.0,
globalDecay=0.10,
activationThreshold=12,
seed=42):
"""
Translate parameters and initialize member variables specific to `TP.py`.
"""
super(TPShim, self).__init__(
columnDimensions=(numberOfCols,),
cellsPerColumn=cellsPerColumn,
activationThreshold=activationThreshold,
initialPermanence=initialPerm,
connectedPermanence=connectedPerm,
minThreshold=minThreshold,
maxNewSynapseCount=newSynapseCount,
permanenceIncrement=permanenceInc,
permanenceDecrement=permanenceDec,
seed=seed)
self.infActiveState = {"t": None}
def compute(self, bottomUpInput, enableLearn, computeInfOutput=None):
"""
(From `TP.py`)
Handle one compute, possibly learning.
@param bottomUpInput The bottom-up input, typically from a spatial pooler
@param enableLearn If true, perform learning
@param computeInfOutput If None, default behavior is to disable the inference
output when enableLearn is on.
If true, compute the inference output
If false, do not compute the inference output
"""
super(TPShim, self).compute(set(bottomUpInput.nonzero()[0]),
learn=enableLearn)
numberOfCells = self.numberOfCells()
activeState = numpy.zeros(numberOfCells)
activeState[list(self.activeCells)] = 1
self.infActiveState["t"] = activeState
output = numpy.zeros(numberOfCells)
output[list(self.predictiveCells | self.activeCells)] = 1
return output
| # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
A shim for the TP class that transparently implements TemporalMemory,
for use with OPF.
"""
import numpy
from nupic.research.temporal_memory import TemporalMemory
class TPShim(TemporalMemory):
"""
TP => Temporal Memory shim class.
"""
def __init__(self,
numberOfCols=500,
cellsPerColumn=10,
initialPerm=0.11,
connectedPerm=0.50,
minThreshold=8,
newSynapseCount=15,
permanenceInc=0.10,
permanenceDec=0.10,
permanenceMax=1.0,
globalDecay=0.10,
activationThreshold=12,
seed=42):
"""
Translate parameters and initialize member variables specific to `TP.py`.
"""
super(TPShim, self).__init__(
columnDimensions=(numberOfCols,),
cellsPerColumn=cellsPerColumn,
activationThreshold=activationThreshold,
initialPermanence=initialPerm,
connectedPermanence=connectedPerm,
minThreshold=minThreshold,
maxNewSynapseCount=newSynapseCount,
permanenceIncrement=permanenceInc,
permanenceDecrement=permanenceDec,
seed=seed)
self.infActiveState = {"t": None}
def compute(self, bottomUpInput, enableLearn, computeInfOutput=None):
"""
(From `TP.py`)
Handle one compute, possibly learning.
@param bottomUpInput The bottom-up input, typically from a spatial pooler
@param enableLearn If true, perform learning
@param computeInfOutput If None, default behavior is to disable the inference
output when enableLearn is on.
If true, compute the inference output
If false, do not compute the inference output
"""
super(TPShim, self).compute(set(bottomUpInput.nonzero()[0]),
learn=enableLearn)
numberOfCells = self.connections.numberOfCells()
activeState = numpy.zeros(numberOfCells)
activeState[list(self.activeCells)] = 1
self.infActiveState["t"] = activeState
output = numpy.zeros(numberOfCells)
output[list(self.predictiveCells | self.activeCells)] = 1
return output
| Python | 0.000116 |
f54690eb9962489a387674985055e305b9b57aa9 | remove discription by message body | addons/project_mailgate/project_mailgate.py | addons/project_mailgate/project_mailgate.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
import tools
import binascii
class project_tasks(osv.osv):
_inherit = 'project.task'
def message_new(self, cr, uid, msg, custom_values=None, context=None):
res_id = super(project_tasks,self).message_new(cr, uid, msg, custom_values=custom_values, context=context)
subject = msg.get('subject')
msg_from = msg.get('from')
data = {
'name': subject,
'planned_hours': 0.0,
}
data.update(self.message_partner_by_email(cr, uid, msg_from))
self.write(cr, uid, [res_id], data, context)
return res_id
def message_update(self, cr, uid, ids, msg, data={}, default_act='pending'):
act = 'do_'+default_act
maps = {
'cost':'planned_hours',
}
for line in msg['body_text'].split('\n'):
line = line.strip()
res = tools.misc.command_re.match(line)
if res:
match = res.group(1).lower()
field = maps.get(match)
if field:
try:
data[field] = float(res.group(2).lower())
except (ValueError, TypeError):
pass
elif match.lower() == 'state' \
and res.group(2).lower() in ['cancel','close','draft','open','pending']:
act = 'do_%s' % res.group(2).lower()
self.write(cr, uid, ids, data, context=context)
getattr(self,act)(cr, uid, ids, context=context)
self.message_append_note(cr, uid, [res_id], msg, context=context)
return True
def message_thread_followers(self, cr, uid, ids, context=None):
followers = super(project_tasks,self).message_thread_followers(cr, uid, ids, context=context)
for task in self.browse(cr, uid, followers.keys(), context=context):
task_followers = set(followers[task.id])
task_followers.add(task.user_id.user_email)
followers[task.id] = filter(None, task_followers)
return followers
project_tasks()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
import tools
import binascii
class project_tasks(osv.osv):
_inherit = 'project.task'
def message_new(self, cr, uid, msg, custom_values=None, context=None):
res_id = super(project_tasks,self).message_new(cr, uid, msg, custom_values=custom_values, context=context)
subject = msg.get('subject')
body = msg.get('body_text')
msg_from = msg.get('from')
data = {
'name': subject,
'description': body,
'planned_hours': 0.0,
}
data.update(self.message_partner_by_email(cr, uid, msg_from))
self.write(cr, uid, [res_id], data, context)
return res_id
def message_update(self, cr, uid, ids, msg, data={}, default_act='pending'):
data.update({
'description': msg['body_text'],
})
act = 'do_'+default_act
maps = {
'cost':'planned_hours',
}
for line in msg['body_text'].split('\n'):
line = line.strip()
res = tools.misc.command_re.match(line)
if res:
match = res.group(1).lower()
field = maps.get(match)
if field:
try:
data[field] = float(res.group(2).lower())
except (ValueError, TypeError):
pass
elif match.lower() == 'state' \
and res.group(2).lower() in ['cancel','close','draft','open','pending']:
act = 'do_%s' % res.group(2).lower()
self.write(cr, uid, ids, data, context=context)
getattr(self,act)(cr, uid, ids, context=context)
self.message_append_dict(cr, uid, [res_id], msg, context=context)
return True
def message_thread_followers(self, cr, uid, ids, context=None):
followers = super(project_tasks,self).message_thread_followers(cr, uid, ids, context=context)
for task in self.browse(cr, uid, followers.keys(), context=context):
task_followers = set(followers[task.id])
task_followers.add(task.user_id.user_email)
followers[task.id] = filter(None, task_followers)
return followers
project_tasks()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Python | 0.000001 |
54e1cb0048ffd0024feae4e5dc0c1e047ca55328 | remove debug print | openaps/devices/device.py | openaps/devices/device.py | import json
from openaps.configurable import Configurable
class ExtraConfig (Configurable):
prefix = 'device'
pass
class Device (Configurable):
vendor = None
required = ['name', 'vendor']
optional = [ ]
prefix = 'device'
_uses = [ ]
def __init__ (self, name, vendor):
self.name = name
self.vendor = vendor
self.fields = dict(vendor=vendor.__name__)
self.extra = ExtraConfig(name)
def read (self, args=None, config=None):
if args:
self.name = args.name
if getattr(args, 'extra', None):
self.fields['extra'] = args.extra.format(name=self.name, **self.fields)
self.vendor.set_config(args, self.extra)
else:
self.vendor.set_config(args, self)
if config:
# self.vendor.read_config(config)
self.fields.update(dict(config.items(self.section_name( ))))
extra_ini = self.fields.get('extra', None)
if extra_ini:
extra = config.Read(name=extra_ini)
self.extra.fields.update(dict(extra.items(self.section_name( ))))
def get (self, k, *args):
return self.fields.get(k, self.extra.get(k, *args))
def format_url (self):
parts = ['{0:s}://{1:s}'.format(self.vendor.__name__.split('.').pop( ), self.name), ]
parts.append(self.vendor.display_device(self))
return ''.join(parts)
def register_uses (self, uses):
for u in uses.usages:
if u not in self._uses:
self._uses.append(u)
def store (self, config):
extra_ini = self.fields.get('extra', None)
if extra_ini:
extra = config.Read(name=extra_ini)
self.extra.store(extra)
extra.save( )
super(Device, self).store(config)
@classmethod
def FromConfig (klass, vendors, config):
devices = [ ]
for candidate in config.sections( ):
if candidate.startswith(klass.prefix):
name = json.loads(candidate.split(' ').pop( ))
vendor = vendors.lookup(config.get(candidate, 'vendor').split('.').pop( ), config)
device = klass(name, vendor)
device.read(config=config)
devices.append(device)
return devices
| import json
from openaps.configurable import Configurable
class ExtraConfig (Configurable):
prefix = 'device'
pass
class Device (Configurable):
vendor = None
required = ['name', 'vendor']
optional = [ ]
prefix = 'device'
_uses = [ ]
def __init__ (self, name, vendor):
self.name = name
self.vendor = vendor
self.fields = dict(vendor=vendor.__name__)
self.extra = ExtraConfig(name)
def read (self, args=None, config=None):
if args:
self.name = args.name
print "args", args
if getattr(args, 'extra', None):
self.fields['extra'] = args.extra.format(name=self.name, **self.fields)
self.vendor.set_config(args, self.extra)
else:
self.vendor.set_config(args, self)
if config:
# self.vendor.read_config(config)
self.fields.update(dict(config.items(self.section_name( ))))
extra_ini = self.fields.get('extra', None)
if extra_ini:
extra = config.Read(name=extra_ini)
self.extra.fields.update(dict(extra.items(self.section_name( ))))
def get (self, k, *args):
return self.fields.get(k, self.extra.get(k, *args))
def format_url (self):
parts = ['{0:s}://{1:s}'.format(self.vendor.__name__.split('.').pop( ), self.name), ]
parts.append(self.vendor.display_device(self))
return ''.join(parts)
def register_uses (self, uses):
for u in uses.usages:
if u not in self._uses:
self._uses.append(u)
def store (self, config):
extra_ini = self.fields.get('extra', None)
if extra_ini:
extra = config.Read(name=extra_ini)
self.extra.store(extra)
extra.save( )
super(Device, self).store(config)
@classmethod
def FromConfig (klass, vendors, config):
devices = [ ]
for candidate in config.sections( ):
if candidate.startswith(klass.prefix):
name = json.loads(candidate.split(' ').pop( ))
vendor = vendors.lookup(config.get(candidate, 'vendor').split('.').pop( ), config)
device = klass(name, vendor)
device.read(config=config)
devices.append(device)
return devices
| Python | 0.000008 |
294e8b120d507237f1129338c476939b20604f26 | Save release test metrics under a single column (#30215) | release/ray_release/reporter/db.py | release/ray_release/reporter/db.py | import time
import json
import boto3
from botocore.config import Config
from ray_release.reporter.reporter import Reporter
from ray_release.result import Result
from ray_release.config import Test
from ray_release.logger import logger
class DBReporter(Reporter):
def __init__(self):
self.firehose = boto3.client("firehose", config=Config(region_name="us-west-2"))
def report_result(self, test: Test, result: Result):
logger.info("Persisting result to the databricks delta lake...")
result_json = {
"_table": "release_test_result",
"report_timestamp_ms": int(time.time() * 1000),
"status": result.status or "",
"results": result.results or {},
"name": test.get("name", ""),
"group": test.get("group", ""),
"team": test.get("team", ""),
"frequency": test.get("frequency", ""),
"cluster_url": result.cluster_url or "",
"wheel_url": result.wheels_url or "",
"buildkite_url": result.buildkite_url or "",
"runtime": result.runtime or -1.0,
"stable": result.stable,
"return_code": result.return_code,
"smoke_test": result.smoke_test,
"prometheus_metrics": result.prometheus_metrics or {},
}
logger.debug(f"Result json: {json.dumps(result_json)}")
try:
self.firehose.put_record(
DeliveryStreamName="ray-ci-results",
Record={"Data": json.dumps(result_json)},
)
except Exception:
logger.exception("Failed to persist result to the databricks delta lake")
else:
logger.info("Result has been persisted to the databricks delta lake")
| import time
import json
import boto3
from botocore.config import Config
from ray_release.reporter.reporter import Reporter
from ray_release.result import Result
from ray_release.config import Test
from ray_release.logger import logger
class DBReporter(Reporter):
def __init__(self):
self.firehose = boto3.client("firehose", config=Config(region_name="us-west-2"))
def report_result(self, test: Test, result: Result):
logger.info("Persisting result to the databricks delta lake...")
result_json = {
"_table": "release_test_result",
"report_timestamp_ms": int(time.time() * 1000),
"status": result.status or "",
"results": result.results or {},
"name": test.get("name", ""),
"group": test.get("group", ""),
"team": test.get("team", ""),
"frequency": test.get("frequency", ""),
"cluster_url": result.cluster_url or "",
"wheel_url": result.wheels_url or "",
"buildkite_url": result.buildkite_url or "",
"runtime": result.runtime or -1.0,
"stable": result.stable,
"return_code": result.return_code,
"smoke_test": result.smoke_test,
}
result_json.update(result.prometheus_metrics)
logger.debug(f"Result json: {json.dumps(result_json)}")
try:
self.firehose.put_record(
DeliveryStreamName="ray-ci-results",
Record={"Data": json.dumps(result_json)},
)
except Exception:
logger.exception("Failed to persist result to the databricks delta lake")
else:
logger.info("Result has been persisted to the databricks delta lake")
| Python | 0 |
0a94b8a4756e9b46211567c430560a314c554a1d | add help for org command | parse.py | parse.py | import argparse
class Parser(argparse.ArgumentParser):
def populate(self):
self.add_argument('--output', choices=('xml', 'text', 'html'),
default='text')
subparsers = self.add_subparsers(title='Commands', metavar='',
dest='call')
self._add_org(subparsers)
self._add_orgs(subparsers)
def _add_org(self, subparsers):
self.org = subparsers.add_parser('org', help='HANDLE')
self.org.add_argument('handle', metavar='HANDLE', help='Org handle')
def _add_orgs(self, subparsers):
self.org = subparsers.add_parser('orgs', help='--handle HANDLE --name NAME --dba DBA')
self.org.add_argument('--handle', help='Org handle')
self.org.add_argument('--name', help='Org name')
self.org.add_argument('--dba', help='Org DBA')
def run(self):
return self.parse_args()
| import argparse
class Parser(argparse.ArgumentParser):
def populate(self):
self.add_argument('--output', choices=('xml', 'text', 'html'),
default='text')
subparsers = self.add_subparsers(title='Commands', metavar='',
dest='call')
self._add_org(subparsers)
self._add_orgs(subparsers)
def _add_org(self, subparsers):
self.org = subparsers.add_parser('org')
self.org.add_argument('handle', metavar='HANDLE', help='Org handle')
def _add_orgs(self, subparsers):
self.org = subparsers.add_parser('orgs', help='--handle HANDLE --name NAME --dba DBA')
self.org.add_argument('--handle', help='Org handle')
self.org.add_argument('--name', help='Org name')
self.org.add_argument('--dba', help='Org DBA')
def run(self):
return self.parse_args()
| Python | 0.000001 |
70477e0a8da15592f5f2197e8d1bffe57eece871 | Add back import of operations, which was lost during cleanup. | nuage_amp/nuage_amp.py | nuage_amp/nuage_amp.py | #!/usr/bin/python
"""
Usage:
nuage-amp sync [--once] [options]
nuage-amp audit-vports [options]
nuage-amp network-macro-from-url (create|delete) <url> <enterprise> [options]
nuage-amp vsdmanaged-tenant (create|delete) <name> [--force] [options]
nuage-amp vsdmanaged-tenant list
nuage-amp (-h | --help)
Options:
-h --help Show this screen
-v --version Show version
--log-file=<file> Log file location
--config-file=<file> Configuration file location [default: /etc/nuage-amp/nuage-amp.conf]
Sync Options:
--once Run the sync only once
Tenant Operations:
--force Forces tenant deletion. Will remove existing VMs and VSD objects(domains,subnets)
"""
"""
@author: Philippe Jeurissen
@copyright: Alcatel-Lucent 2014
@version: 0.0.2
"""
from utils.config import cfg, readconfig
from utils.log import logger, setlogpath, setloglevel
from docopt import docopt
from operations import *
import time
import sys
def getargs():
return docopt(__doc__, version="nuage-amp 0.1.2")
def main(args):
try:
readconfig(args['--config-file'])
except Exception, e:
logger.error("Error reading config file from location: {0:s}".format(args['--config-file']))
logger.error(str(e))
sys.exit(1)
if args['--log-file']:
try:
setlogpath(args['--log-file'], logconfig=cfg)
except Exception, e:
logger.error("Error setting log location: {0:s}".format(args['--log-file']))
logger.error(str(e))
sys.exit(1)
if cfg.has_option('logging', 'loglevel'):
try:
setloglevel(cfg.get('logging', 'loglevel'))
except Exception, e:
logger.error("Error setting logging level to {0:s}".format(cfg.get('logging', 'loglevel')))
logger.error(str(e))
if args['sync']:
if args['--once']:
sync.sync_subnets()
else:
while True:
sync.sync_subnets()
time.sleep(10)
elif args['audit-vports']:
audit_vport.audit_vports()
elif args['network-macro-from-url']:
if args['create']:
nw_macro.create(args['<url>'], args['<enterprise>'])
elif args['delete']:
nw_macro.delete(args['<url>'], args['<enterprise>'])
elif args['vsdmanaged-tenant']:
if args['create']:
tenant.create_vsd_managed_tenant(args['<name>'])
elif args['delete']:
tenant.delete_vsd_managed_tenant(args['<name>'], args['--force'])
elif args['list']:
tenant.list_vsd_managed_tenants()
if __name__ == "__main__":
main(getargs())
| #!/usr/bin/python
"""
Usage:
nuage-amp sync [--once] [options]
nuage-amp audit-vports [options]
nuage-amp network-macro-from-url (create|delete) <url> <enterprise> [options]
nuage-amp vsdmanaged-tenant (create|delete) <name> [--force] [options]
nuage-amp vsdmanaged-tenant list
nuage-amp (-h | --help)
Options:
-h --help Show this screen
-v --version Show version
--log-file=<file> Log file location
--config-file=<file> Configuration file location [default: /etc/nuage-amp/nuage-amp.conf]
Sync Options:
--once Run the sync only once
Tenant Operations:
--force Forces tenant deletion. Will remove existing VMs and VSD objects(domains,subnets)
"""
"""
@author: Philippe Jeurissen
@copyright: Alcatel-Lucent 2014
@version: 0.0.2
"""
from utils.config import cfg, readconfig
from utils.log import logger, setlogpath, setloglevel
from docopt import docopt
import time
import sys
def getargs():
return docopt(__doc__, version="nuage-amp 0.1.2")
def main(args):
try:
readconfig(args['--config-file'])
except Exception, e:
logger.error("Error reading config file from location: {0:s}".format(args['--config-file']))
logger.error(str(e))
sys.exit(1)
if args['--log-file']:
try:
setlogpath(args['--log-file'], logconfig=cfg)
except Exception, e:
logger.error("Error setting log location: {0:s}".format(args['--log-file']))
logger.error(str(e))
sys.exit(1)
if cfg.has_option('logging', 'loglevel'):
try:
setloglevel(cfg.get('logging', 'loglevel'))
except Exception, e:
logger.error("Error setting logging level to {0:s}".format(cfg.get('logging', 'loglevel')))
logger.error(str(e))
if args['sync']:
if args['--once']:
sync.sync_subnets()
else:
while True:
sync.sync_subnets()
time.sleep(10)
elif args['audit-vports']:
audit_vport.audit_vports()
elif args['network-macro-from-url']:
if args['create']:
nw_macro.create(args['<url>'], args['<enterprise>'])
elif args['delete']:
nw_macro.delete(args['<url>'], args['<enterprise>'])
elif args['vsdmanaged-tenant']:
if args['create']:
tenant.create_vsd_managed_tenant(args['<name>'])
elif args['delete']:
tenant.delete_vsd_managed_tenant(args['<name>'], args['--force'])
elif args['list']:
tenant.list_vsd_managed_tenants()
if __name__ == "__main__":
main(getargs())
| Python | 0 |
6b3363b1486bd92f5355023074db9a52e60b1b34 | Set AWS MQTT timeouts to 120 / 60. | src/scs_core/aws/client/mqtt_client.py | src/scs_core/aws/client/mqtt_client.py | """
Created on 6 Oct 2017
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
https://github.com/aws/aws-iot-device-sdk-python
https://stackoverflow.com/questions/20083858/how-to-extract-value-from-bound-method-in-python
"""
import AWSIoTPythonSDK.exception.AWSIoTExceptions as AWSIoTExceptions
import AWSIoTPythonSDK.MQTTLib as MQTTLib
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
from scs_core.data.json import JSONify
# --------------------------------------------------------------------------------------------------------------------
class MQTTClient(object):
"""
classdocs
"""
__KEEP_ALIVE_INTERVAL = 600 # recommended: 600 (sec)
__PORT = 8883
__QUEUE_SIZE = -1 # recommended: infinite
__QUEUE_DROP_BEHAVIOUR = MQTTLib.DROP_OLDEST # not required for infinite queue
__QUEUE_DRAINING_FREQUENCY = 1 # recommended: 2 (Hz)
__RECONN_BASE = 1 # recommended: 1 (sec)
__RECONN_MAX = 32 # recommended: 32 (sec)
__RECONN_STABLE = 20 # recommended: 20 (sec)
__DISCONNECT_TIMEOUT = 120 # recommended: 10 (sec) was 30
__OPERATION_TIMEOUT = 60 # recommended: 5 (sec) was 30
__PUB_QOS = 1
__SUB_QOS = 1
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, *subscribers):
"""
Constructor
"""
self.__client = None
self.__subscribers = subscribers
# ----------------------------------------------------------------------------------------------------------------
def connect(self, auth):
# client...
self.__client = AWSIoTMQTTClient(auth.client_id)
# configuration...
self.__client.configureEndpoint(auth.endpoint, self.__PORT)
self.__client.configureCredentials(auth.root_ca_file_path, auth.private_key_path, auth.certificate_path)
self.__client.configureAutoReconnectBackoffTime(self.__RECONN_BASE, self.__RECONN_MAX, self.__RECONN_STABLE)
self.__client.configureOfflinePublishQueueing(self.__QUEUE_SIZE)
self.__client.configureDrainingFrequency(self.__QUEUE_DRAINING_FREQUENCY)
self.__client.configureConnectDisconnectTimeout(self.__DISCONNECT_TIMEOUT)
self.__client.configureMQTTOperationTimeout(self.__OPERATION_TIMEOUT)
# subscriptions...
for subscriber in self.__subscribers:
self.__client.subscribe(subscriber.topic, self.__SUB_QOS, subscriber.handler)
# connect...
try:
self.__client.connect(self.__KEEP_ALIVE_INTERVAL)
return True
except AWSIoTExceptions.connectTimeoutException:
return False
def disconnect(self):
try:
self.__client.disconnect()
except AWSIoTExceptions.disconnectError:
pass
# ----------------------------------------------------------------------------------------------------------------
def publish(self, publication):
payload = JSONify.dumps(publication.payload)
try:
return self.__client.publish(publication.topic, payload, self.__PUB_QOS)
except AWSIoTExceptions.publishTimeoutException as ex:
raise TimeoutError(ex)
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
subscribers = '[' + ', '.join(str(subscriber) for subscriber in self.__subscribers) + ']'
return "MQTTClient:{subscribers:%s}" % subscribers
# --------------------------------------------------------------------------------------------------------------------
class MQTTSubscriber(object):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, topic, handler):
"""
Constructor
"""
self.__topic = topic
self.__handler = handler
# ----------------------------------------------------------------------------------------------------------------
@property
def topic(self):
return self.__topic
@property
def handler(self):
return self.__handler
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "MQTTSubscriber:{topic:%s, handler:%s}" % (self.topic, self.handler.__self__)
| """
Created on 6 Oct 2017
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
https://github.com/aws/aws-iot-device-sdk-python
https://stackoverflow.com/questions/20083858/how-to-extract-value-from-bound-method-in-python
"""
import AWSIoTPythonSDK.exception.AWSIoTExceptions as AWSIoTExceptions
import AWSIoTPythonSDK.MQTTLib as MQTTLib
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
from scs_core.data.json import JSONify
# --------------------------------------------------------------------------------------------------------------------
class MQTTClient(object):
"""
classdocs
"""
__KEEP_ALIVE_INTERVAL = 600 # recommended: 600 (sec)
__PORT = 8883
__QUEUE_SIZE = -1 # recommended: infinite
__QUEUE_DROP_BEHAVIOUR = MQTTLib.DROP_OLDEST # not required for infinite queue
__QUEUE_DRAINING_FREQUENCY = 1 # recommended: 2 (Hz)
__RECONN_BASE = 1 # recommended: 1 (sec)
__RECONN_MAX = 32 # recommended: 32 (sec)
__RECONN_STABLE = 20 # recommended: 20 (sec)
__DISCONNECT_TIMEOUT = 10 # recommended: 10 (sec) was 30
__OPERATION_TIMEOUT = 5 # recommended: 5 (sec) was 30
__PUB_QOS = 1
__SUB_QOS = 1
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, *subscribers):
"""
Constructor
"""
self.__client = None
self.__subscribers = subscribers
# ----------------------------------------------------------------------------------------------------------------
def connect(self, auth):
# client...
self.__client = AWSIoTMQTTClient(auth.client_id)
# configuration...
self.__client.configureEndpoint(auth.endpoint, self.__PORT)
self.__client.configureCredentials(auth.root_ca_file_path, auth.private_key_path, auth.certificate_path)
self.__client.configureAutoReconnectBackoffTime(self.__RECONN_BASE, self.__RECONN_MAX, self.__RECONN_STABLE)
self.__client.configureOfflinePublishQueueing(self.__QUEUE_SIZE)
self.__client.configureDrainingFrequency(self.__QUEUE_DRAINING_FREQUENCY)
self.__client.configureConnectDisconnectTimeout(self.__DISCONNECT_TIMEOUT)
self.__client.configureMQTTOperationTimeout(self.__OPERATION_TIMEOUT)
# subscriptions...
for subscriber in self.__subscribers:
self.__client.subscribe(subscriber.topic, self.__SUB_QOS, subscriber.handler)
# connect...
try:
self.__client.connect(self.__KEEP_ALIVE_INTERVAL)
return True
except AWSIoTExceptions.connectTimeoutException:
return False
def disconnect(self):
try:
self.__client.disconnect()
except AWSIoTExceptions.disconnectError:
pass
# ----------------------------------------------------------------------------------------------------------------
def publish(self, publication):
payload = JSONify.dumps(publication.payload)
try:
return self.__client.publish(publication.topic, payload, self.__PUB_QOS)
except AWSIoTExceptions.publishTimeoutException as ex:
raise TimeoutError(ex)
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
subscribers = '[' + ', '.join(str(subscriber) for subscriber in self.__subscribers) + ']'
return "MQTTClient:{subscribers:%s}" % subscribers
# --------------------------------------------------------------------------------------------------------------------
class MQTTSubscriber(object):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, topic, handler):
"""
Constructor
"""
self.__topic = topic
self.__handler = handler
# ----------------------------------------------------------------------------------------------------------------
@property
def topic(self):
return self.__topic
@property
def handler(self):
return self.__handler
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "MQTTSubscriber:{topic:%s, handler:%s}" % (self.topic, self.handler.__self__)
| Python | 0 |
400f127fb3264b5a4f403a67a89c25238ff192a4 | Fix missing import in fs.sshfs.error_tools | fs/sshfs/error_tools.py | fs/sshfs/error_tools.py | from __future__ import absolute_import
from __future__ import unicode_literals
import errno
import six
import sys
from .. import errors
class _ConvertSSHFSErrors(object):
"""Context manager to convert OSErrors in to FS Errors."""
FILE_ERRORS = {
64: errors.RemoteConnectionError, # ENONET
errno.ENOENT: errors.ResourceNotFound,
errno.EFAULT: errors.ResourceNotFound,
errno.ESRCH: errors.ResourceNotFound,
errno.ENOTEMPTY: errors.DirectoryNotEmpty,
errno.EEXIST: errors.FileExists,
183: errors.DirectoryExists,
#errno.ENOTDIR: errors.DirectoryExpected,
errno.ENOTDIR: errors.ResourceNotFound,
errno.EISDIR: errors.FileExpected,
errno.EINVAL: errors.FileExpected,
errno.ENOSPC: errors.InsufficientStorage,
errno.EPERM: errors.PermissionDenied,
errno.ENETDOWN: errors.RemoteConnectionError,
errno.ECONNRESET: errors.RemoteConnectionError,
errno.ENAMETOOLONG: errors.PathError,
errno.EOPNOTSUPP: errors.Unsupported,
errno.ENOSYS: errors.Unsupported,
}
#
DIR_ERRORS = FILE_ERRORS.copy()
DIR_ERRORS[errno.ENOTDIR] = errors.DirectoryExpected
DIR_ERRORS[errno.EEXIST] = errors.DirectoryExists
DIR_ERRORS[errno.EINVAL] = errors.DirectoryExpected
# if _WINDOWS_PLATFORM: # pragma: no cover
# DIR_ERRORS[13] = errors.DirectoryExpected
# DIR_ERRORS[267] = errors.DirectoryExpected
# FILE_ERRORS[13] = errors.FileExpected
def __init__(self, opname, path, directory=False):
self._opname = opname
self._path = path
self._directory = directory
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
ssh_errors = (
self.DIR_ERRORS
if self._directory
else self.FILE_ERRORS
)
if exc_type and isinstance(exc_value, EnvironmentError):
_errno = exc_value.errno
fserror = ssh_errors.get(_errno, errors.OperationFailed)
if _errno == errno.EACCES and sys.platform == "win32":
if getattr(exc_value, 'args', None) == 32: # pragma: no cover
fserror = errors.ResourceLocked
six.reraise(
fserror,
fserror(
self._path,
exc=exc_value
),
traceback
)
# Stops linter complaining about invalid class name
convert_sshfs_errors = _ConvertSSHFSErrors
| from __future__ import absolute_import
from __future__ import unicode_literals
import errno
import six
from .. import errors
class _ConvertSSHFSErrors(object):
"""Context manager to convert OSErrors in to FS Errors."""
FILE_ERRORS = {
64: errors.RemoteConnectionError, # ENONET
errno.ENOENT: errors.ResourceNotFound,
errno.EFAULT: errors.ResourceNotFound,
errno.ESRCH: errors.ResourceNotFound,
errno.ENOTEMPTY: errors.DirectoryNotEmpty,
errno.EEXIST: errors.FileExists,
183: errors.DirectoryExists,
#errno.ENOTDIR: errors.DirectoryExpected,
errno.ENOTDIR: errors.ResourceNotFound,
errno.EISDIR: errors.FileExpected,
errno.EINVAL: errors.FileExpected,
errno.ENOSPC: errors.InsufficientStorage,
errno.EPERM: errors.PermissionDenied,
errno.ENETDOWN: errors.RemoteConnectionError,
errno.ECONNRESET: errors.RemoteConnectionError,
errno.ENAMETOOLONG: errors.PathError,
errno.EOPNOTSUPP: errors.Unsupported,
errno.ENOSYS: errors.Unsupported,
}
#
DIR_ERRORS = FILE_ERRORS.copy()
DIR_ERRORS[errno.ENOTDIR] = errors.DirectoryExpected
DIR_ERRORS[errno.EEXIST] = errors.DirectoryExists
DIR_ERRORS[errno.EINVAL] = errors.DirectoryExpected
# if _WINDOWS_PLATFORM: # pragma: no cover
# DIR_ERRORS[13] = errors.DirectoryExpected
# DIR_ERRORS[267] = errors.DirectoryExpected
# FILE_ERRORS[13] = errors.FileExpected
def __init__(self, opname, path, directory=False):
self._opname = opname
self._path = path
self._directory = directory
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
ssh_errors = (
self.DIR_ERRORS
if self._directory
else self.FILE_ERRORS
)
if exc_type and isinstance(exc_value, EnvironmentError):
_errno = exc_value.errno
fserror = ssh_errors.get(_errno, errors.OperationFailed)
if _errno == errno.EACCES and sys.platform == "win32":
if getattr(exc_value, 'args', None) == 32: # pragma: no cover
fserror = errors.ResourceLocked
six.reraise(
fserror,
fserror(
self._path,
exc=exc_value
),
traceback
)
# Stops linter complaining about invalid class name
convert_sshfs_errors = _ConvertSSHFSErrors
| Python | 0.00021 |
65fb9244df69646721c8273afae22fe6248976f0 | optimise common.py | backend/service/common.py | backend/service/common.py | from service.base import BaseService
import config
### need to add rs
class CommonService(BaseService):
def __init__(self, db, rs):
super().__init__(db, rs)
CommonService.inst = self
def get_execute_type(self):
res ={ x['id']: x for x in (yield self.db.execute("SELECT * FROM execute_types order by id"))}
return res
def get_verdict_type(self):
res = { x['id']: x for x in (yield self.db.execute("SELECT * FROM map_verdict_string order by id"))}
return res
| from service.base import BaseService
import config
### need to add rs
class CommonService(BaseService):
def __init__(self, db, rs):
super().__init__(db, rs)
CommonService.inst = self
def get_execute_type(self):
res = (yield self.db.execute("SELECT * FROM execute_types order by id")).fetchall()
ret = {}
for x in res:
ret[x['id']] = x
return ret
def get_verdict_type(self):
res = (yield self.db.execute("SELECT * FROM map_verdict_string order by id")).fetchall()
ret = {}
for x in res:
ret[x['id']] = x
return ret
| Python | 0.022141 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.