commit
stringlengths 40
40
| subject
stringlengths 1
1.49k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| new_contents
stringlengths 1
29.8k
| old_contents
stringlengths 0
9.9k
| lang
stringclasses 3
values | proba
float64 0
1
|
|---|---|---|---|---|---|---|---|
db2f6f4c2a70875aade3741fb57d0bc1b109ce3c
|
Add regexp to create_user form logic
|
app/views/create_user.py
|
app/views/create_user.py
|
from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip()
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if not username or username == '' or not password or password == '':
flash('Please enter a username and password.')
else:
# Form was completed
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
return render_template('create_user.html')
|
Python
| 0
|
e4097fc77139abde6311886c2a7792d675e5f805
|
Update merge_intervals.py
|
array/merge_intervals.py
|
array/merge_intervals.py
|
"""
Given a collection of intervals, merge all overlapping intervals.
"""
class Interval:
"""
In mathematics, a (real) interval is a set of real
numbers with the property that any number that lies
between two numbers in the set is also included in the set.
"""
def __init__(self, start=0, end=0):
self.start = start
self.end = end
def __repr__(self):
return f"Interval ({self.start}, {self.end})"
def __iter__(self):
return iter(range(self.start, self.end))
def __getitem__(self, index):
if index < 0:
return self.end + index
return self.start + index
def __len__(self):
return self.end - self.start
def __contains__(self, item):
if self.start >= item >= self.end:
return True
return False
def __eq__(self, other):
if self.start == other.start and self.end == other.end:
return True
return False
def as_list(self):
""" Return interval as list. """
return list(self)
@staticmethod
def merge(intervals):
""" Merges two intervals into one. """
out = []
for i in sorted(intervals, key=lambda i: i.start):
if out and i.start <= out[-1].end:
out[-1].end = max(out[-1].end, i.end)
else:
out += i,
return out
@staticmethod
def print_intervals(intervals):
"""
Prints out the intervals.
"""
res = []
for i in intervals:
res.append(repr(i))
print("".join(res))
def merge_v2(intervals):
""" Merges intervals in the form of list. """
if intervals is None:
return None
intervals.sort(key=lambda i: i[0])
out = [intervals.pop(0)]
for i in intervals:
if out[-1][-1] >= i[0]:
out[-1][-1] = max(out[-1][-1], i[-1])
else:
out.append(i)
return out
import unittest
class TestMergeInterval(unittest.TestCase):
def test_merge(self):
interval_list = [[1, 3], [2, 6], [8, 10], [15, 18]]
intervals = [Interval(i[0], i[1]) for i in interval_list]
merged_intervals = Interval.merge(intervals)
self.assertEqual(
merged_intervals,
[Interval(1, 6), Interval(8, 10), Interval(15, 18)]
)
def test_merge_v2(self):
interval_list = [[1, 3], [2, 6], [8, 10], [15, 18]]
merged_intervals = merge_v2(interval_list)
self.assertEqual(
merged_intervals,
[[1, 6], [8, 10], [15, 18]]
)
if __name__ == "__main__":
unittest.main()
|
"""
Given a collection of intervals, merge all overlapping intervals.
"""
class Interval:
"""
In mathematics, a (real) interval is a set of real
numbers with the property that any number that lies
between two numbers in the set is also included in the set.
"""
def __init__(self, start=0, end=0):
self.start = start
self.end = end
def __repr__(self):
return f"Interval ({self.start}, {self.end})"
def __iter__(self):
return iter(range(self.start, self.end))
def __getitem__(self, index):
if index < 0:
return self.end + index
return self.start + index
def __len__(self):
return self.end - self.start
def __contains__(self, item):
if self.start >= item >= self.end:
return True
return False
def __eq__(self, other):
if self.start == other.start and self.end == other.end:
return True
return False
def as_list(self):
""" Return interval as list. """
return list(self)
@staticmethod
def merge(intervals):
""" Merges two intervals into one. """
out = []
for i in sorted(intervals, key=lambda i: i.start):
if out and i.start <= out[-1].end:
out[-1].end = max(out[-1].end, i.end)
else:
out += i,
return out
@staticmethod
def print_intervals(intervals):
"""
Prints out the intervals.
"""
res = []
for i in intervals:
res.append(repr(i))
print("".join(res))
@staticmethod
def merge_v2(intervals):
""" Merges intervals in the form of list. """
if intervals is None:
return None
intervals.sort(key=lambda i: i[0])
out = [intervals.pop(0)]
for i in intervals:
if out[-1][-1] >= i[0]:
out[-1][-1] = max(out[-1][-1], i[-1])
else:
out.append(i)
return out
import unittest
class TestMergeInterval(unittest.TestCase):
def test_merge(self):
interval_list = [[1, 3], [2, 6], [8, 10], [15, 18]]
intervals = [Interval(i[0], i[1]) for i in interval_list]
merged_intervals = Interval.merge(intervals)
self.assertEqual(
merged_intervals,
[Interval(1, 6), Interval(8, 10), Interval(15, 18)]
)
def test_merge_v2(self):
interval_list = [[1, 3], [2, 6], [8, 10], [15, 18]]
merged_intervals = Interval.merge_v2(interval_list)
self.assertEqual(
merged_intervals,
[[1, 6], [8, 10], [15, 18]]
)
if __name__ == "__main__":
unittest.main()
|
Python
| 0.000002
|
5e11d6766bea9098b89a8c5246518b4a09a163d5
|
Add some paramters to the generic REST API class: - api_root - timeout - api_version
|
atlassian/rest_client.py
|
atlassian/rest_client.py
|
import json
import logging
from urllib.parse import urlencode, urljoin
import requests
log = logging.getLogger("atlassian")
class AtlassianRestAPI:
default_headers={'Content-Type': 'application/json', 'Accept': 'application/json'}
def __init__(self, url, username, password, timeout=60, api_root='rest/api', api_version='latest'):
self.url = url
self.username = username
self.password = password
self.timeout = timeout
self.api_root = api_root
self.api_version = api_version
self._session = requests.Session()
if username and password:
self._session.auth = (username, password)
def log_curl_debug(self, method, path, data=None, headers={}, level=logging.DEBUG):
message = "curl --silent -X {method} -u '{username}':'{password}' -H {headers} {data} '{url}'".format(
method=method,
username=self.username,
password=self.password,
headers=' -H '.join(["'{0}: {1}'".format(key, value) for key, value in headers.items()]),
data='' if not data else "--data '{0}'".format(json.dumps(data)),
url='{0}{1}'.format(self.url, path))
log.log(level=level, msg=message)
def resource_url(self, resource):
return '/'.join([self.api_root, self.api_version, resource])
def request(self, method='GET', path='/', data=None, flags=None, params=None,
headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
self.log_curl_debug(method=method, path=path, headers=headers, data=data)
url = urljoin(self.url, path)
if params or flags:
url += '?'
if params:
url += urlencode(params or {})
if flags:
url += ('&' if params else '') + '&'.join(flags or [])
response = requests.request(
response = self._session.request(
method=method,
url=url,
headers=headers,
data=json.dumps(data),
auth=(self.username, self.password),
timeout=self.timeout
)
if response.status_code == 200:
log.debug('Received: {0}'.format(response.json()))
elif response.status_code == 204:
log.debug('Received "204 No Content" response')
else:
self.log_curl_debug(method=method, path=path, headers=headers, data=data, level=logging.DEBUG)
log.info(response.json())
response.raise_for_status()
return response
def get(self, path, data=None, flags=None, params=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
return self.request('GET', path=path, flags=flags, params=params, data=data, headers=headers).json()
def post(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
try:
return self.request('POST', path=path, data=data, headers=headers).json()
except ValueError:
log.debug('Received response with no content')
return None
def put(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
try:
return self.request('PUT', path=path, data=data, headers=headers).json()
except ValueError:
log.debug('Received response with no content')
return None
def delete(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
"""
Deletes resources at given paths.
:rtype: dict
:return: Empty dictionary to have consistent interface. Some of Atlassian rest resources don't return any content.
"""
self.request('DELETE', path=path, data=data, headers=headers)
|
import json
import logging
from urllib.parse import urlencode, urljoin
import requests
log = logging.getLogger("atlassian")
class AtlassianRestAPI:
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
self._session = requests.Session()
if username and password:
self._session.auth = (username, password)
def log_curl_debug(self, method, path, data=None, headers={}, level=logging.DEBUG):
message = "curl --silent -X {method} -u '{username}':'{password}' -H {headers} {data} '{url}'".format(
method=method,
username=self.username,
password=self.password,
headers=' -H '.join(["'{0}: {1}'".format(key, value) for key, value in headers.items()]),
data='' if not data else "--data '{0}'".format(json.dumps(data)),
url='{0}{1}'.format(self.url, path))
log.log(level=level, msg=message)
def resource_url(self, resource, version='latest'):
return '/'.join(['rest', 'api', version, resource])
def request(self, method='GET', path='/', data=None, flags=None, params=None,
headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
self.log_curl_debug(method=method, path=path, headers=headers, data=data)
url = urljoin(self.url, path)
if params or flags:
url += '?'
if params:
url += urlencode(params or {})
if flags:
url += ('&' if params else '') + '&'.join(flags or [])
response = requests.request(
response = self._session.request(
method=method,
url=url,
headers=headers,
data=json.dumps(data),
auth=(self.username, self.password),
timeout=60)
if response.status_code == 200:
log.debug('Received: {0}'.format(response.json()))
elif response.status_code == 204:
log.debug('Received "204 No Content" response')
else:
self.log_curl_debug(method=method, path=path, headers=headers, data=data, level=logging.DEBUG)
log.info(response.json())
response.raise_for_status()
return response
def get(self, path, data=None, flags=None, params=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
return self.request('GET', path=path, flags=flags, params=params, data=data, headers=headers).json()
def post(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
try:
return self.request('POST', path=path, data=data, headers=headers).json()
except ValueError:
log.debug('Received response with no content')
return None
def put(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
try:
return self.request('PUT', path=path, data=data, headers=headers).json()
except ValueError:
log.debug('Received response with no content')
return None
def delete(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
"""
Deletes resources at given paths.
:rtype: dict
:return: Empty dictionary to have consistent interface. Some of Atlassian rest resources don't return any content.
"""
self.request('DELETE', path=path, data=data, headers=headers)
|
Python
| 0.999987
|
8bcc09e4d3d0a14abd132e023bb4b4896aaac4f2
|
make imports Python 3 friendly
|
barak/absorb/__init__.py
|
barak/absorb/__init__.py
|
from .absorb import *
from .equiv_width import *
from .aod import *
|
from absorb import *
from equiv_width import *
from aod import *
|
Python
| 0.000012
|
4b6bffdb048aa44b42cb80a54fca9a204ede833f
|
Update version to 0.0.3
|
boto3facade/metadata.py
|
boto3facade/metadata.py
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'boto3facade'
project = "boto3facade"
project_no_spaces = project.replace(' ', '')
version = '0.0.3'
description = 'A simple facade for boto3'
authors = ['German Gomez-Herrero', 'Innovative Travel Ltd']
authors_string = ', '.join(authors)
emails = ['german@innovativetravel.eu']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://github.com/InnovativeTravel/boto3facade'
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'boto3facade'
project = "boto3facade"
project_no_spaces = project.replace(' ', '')
version = '0.0.2'
description = 'A simple facade for boto3'
authors = ['German Gomez-Herrero', 'Innovative Travel Ltd']
authors_string = ', '.join(authors)
emails = ['german@innovativetravel.eu']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://github.com/InnovativeTravel/boto3facade'
|
Python
| 0.000001
|
81df185279a8d46ca2e8ed9fbed4c3204522965e
|
Extend potential life of social media queue entries
|
bvspca/social/models.py
|
bvspca/social/models.py
|
import logging
from datetime import datetime, timedelta
from django.db import models
from wagtail.core.models import Page
logger = logging.getLogger('bvspca.social')
class SocialMediaPostable():
def social_media_ready_to_post(self):
raise NotImplemented()
def social_media_post_text(self):
raise NotImplemented()
def social_media_post_image(self):
raise NotImplemented()
class Meta:
abstract = True
class SocialMediaQueueManager(models.Manager):
def delete_old_entries(self):
"""
Delete all entries from queue older than 14 days
:return:
"""
count, counts_by_object_type = self.filter(date__lt=datetime.now() - timedelta(14)).delete()
if count > 0:
for object_type, object_count in counts_by_object_type.items():
logger.info('Deleted {} objects of type {}'.format(object_count, object_type))
def next_postable_entry(self):
"""
Get the next queued entry that is ready to post
:return:
"""
entries = self.order_by('+priority', '+date')
for entry in entries:
if entry.page.ready_to_post():
return entry
class SocialMediaQueue(models.Model):
"""
A queue of potential pages to post to social media
"""
PRIORITIES = ((1, 1), (2, 2), (3, 3), (4, 4), (5, 5))
date = models.DateTimeField(verbose_name='timestamp', auto_now_add=True)
priority = models.PositiveSmallIntegerField(choices=PRIORITIES)
page = models.OneToOneField(
Page,
on_delete=models.DO_NOTHING,
related_name='+',
)
objects = SocialMediaQueueManager()
class Meta:
pass
def ready(self):
return self.page.specific.social_media_ready_to_post()
def __str__(self):
return self.page.title
|
import logging
from datetime import datetime, timedelta
from django.db import models
from wagtail.core.models import Page
logger = logging.getLogger('bvspca.social')
class SocialMediaPostable():
def social_media_ready_to_post(self):
raise NotImplemented()
def social_media_post_text(self):
raise NotImplemented()
def social_media_post_image(self):
raise NotImplemented()
class Meta:
abstract = True
class SocialMediaQueueManager(models.Manager):
def delete_old_entries(self):
"""
Delete all entries from queue older than 7 days
:return:
"""
count, counts_by_object_type = self.filter(date__lt=datetime.now() - timedelta(7)).delete()
if count > 0:
for object_type, object_count in counts_by_object_type.items():
logger.info('Deleted {} objects of type {}'.format(object_count, object_type))
def next_postable_entry(self):
"""
Get the next queued entry that is ready to post
:return:
"""
entries = self.order_by('+priority', '+date')
for entry in entries:
if entry.page.ready_to_post():
return entry
class SocialMediaQueue(models.Model):
"""
A queue of potential pages to post to social media
"""
PRIORITIES = ((1, 1), (2, 2), (3, 3), (4, 4), (5, 5))
date = models.DateTimeField(verbose_name='timestamp', auto_now_add=True)
priority = models.PositiveSmallIntegerField(choices=PRIORITIES)
page = models.OneToOneField(
Page,
on_delete=models.DO_NOTHING,
related_name='+',
)
objects = SocialMediaQueueManager()
class Meta:
pass
def ready(self):
return self.page.specific.social_media_ready_to_post()
def __str__(self):
return self.page.title
|
Python
| 0.000003
|
77f4b5b1bc3c30fb454212d3c4d2aa62d8c06ca8
|
Update exportyaml.py
|
canmatrix/exportyaml.py
|
canmatrix/exportyaml.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from .canmatrix import *
import codecs
import yaml
from yaml.representer import SafeRepresenter
from builtins import *
import copy
#Copyright (c) 2013, Eduard Broecker
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
#WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
#PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
#DAMAGE.
#
# this script exports yaml-files from a canmatrix-object
# yaml-files are just object-dumps human readable.
# This export is complete, no information lost
representers = False
try:
yaml.add_representer(int, SafeRepresenter.represent_int)
yaml.add_representer(long, SafeRepresenter.represent_long)
yaml.add_representer(unicode, SafeRepresenter.represent_unicode)
yaml.add_representer(str, SafeRepresenter.represent_unicode)
yaml.add_representer(list, SafeRepresenter.represent_list)
representers = True
except:
representers = False
# some error with representers ... continue anyway
def exportYaml(db, filename, **options):
newdb = copy.deepcopy(db)
for i,frame in enumerate(newdb._fl._list):
for j,signal in enumerate(frame._signals):
if signal._is_little_endian == False:
signal._startbit = signal.getStartbit(bitNumbering = 1, startLittle = True)
newdb._fl._list[i]._signals[j]._startbit = signal._startbit
f = open(filename,"wb")
if representers:
f.write(unicode(yaml.dump(newdb)))
else:
f.write(yaml.dump(newdb))
|
#!/usr/bin/env python
from __future__ import absolute_import
from .canmatrix import *
import codecs
import yaml
from yaml.representer import SafeRepresenter
from builtins import *
import copy
#Copyright (c) 2013, Eduard Broecker
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
#WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
#PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
#DAMAGE.
#
# this script exports yaml-files from a canmatrix-object
# yaml-files are just object-dumps human readable.
# This export is complete, no information lost
representers = False
try:
yaml.add_representer(int, SafeRepresenter.represent_int)
yaml.add_representer(long, SafeRepresenter.represent_long)
yaml.add_representer(unicode, SafeRepresenter.represent_unicode)
yaml.add_representer(str, SafeRepresenter.represent_unicode)
yaml.add_representer(list, SafeRepresenter.represent_list)
representers = True
except:
representers = False
# some error with representers ... continue anyway
def exportYaml(db, filename, **options):
newdb = copy.deepcopy(db)
for i,frame in enumerate(newdb._fl._list):
for j,signal in enumerate(frame._signals):
if signal._is_little_endian == False:
signal._startbit = signal.getStartbit(bitNumbering = 1, startLittle = True)
newdb._fl._list[i]._signals[j]._startbit = signal._startbit
f = open(filename,"w")
if representers:
f.write(unicode(yaml.dump(newdb)))
else:
f.write(yaml.dump(newdb))
|
Python
| 0.000001
|
df679af352f156ad4846fbc53a8efc43814b897c
|
Update ce.transformer.utils
|
ce/transformer/utils.py
|
ce/transformer/utils.py
|
import itertools
from ce.expr import Expr
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity, \
BiOpTreeTransformer
from ce.analysis import expr_frontier
def closure(tree, depth=None):
return BiOpTreeTransformer(tree, depth=depth).closure()
def transform(tree,
reduction_methods=None, transform_methods=None, depth=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def reduce(tree):
try:
tree = Expr(tree)
except TypeError:
return {reduce(t) for t in tree}
s = set(transform(tree, BiOpTreeTransformer.reduction_methods))
if len(s) > 1:
s.remove(tree)
if len(s) == 1:
return s.pop()
raise Exception
def parsings(tree):
return transform(tree, None, [associativity])
def martel_closure(tree, depth=None):
t = BiOpTreeTransformer(tree, depth=depth)
t.transform_methods.remove(distribute_for_distributivity)
return t.closure()
class MartelExpr(Expr):
def traces(self, var_env=None, depth=None):
def subtraces(a):
try:
return MartelExpr(a).traces(depth)
except (ValueError, TypeError):
return {a}
stl = [subtraces(a) for a in self.args]
sts = set(Expr(self.op, args) for args in itertools.product(*stl))
logger.debug('Generating %s~=%d traces for tree: %s' %
('*'.join([str(len(s)) for s in stl]),
len(sts), str(self)))
cll = martel_closure(sts, depth=depth)
if var_env:
cll = expr_frontier(cll, var_env)
return cll
def __repr__(self):
return "MartelExpr(op='%s', a1=%s, a2=%s)" % \
(self.op, repr(self.a1), repr(self.a2))
def martel(tree, var_env=None, depth=2):
return reduce(MartelExpr(expand(tree)).traces(var_env, depth))
if __name__ == '__main__':
import ce.logger as logger
from ce.common import timeit
from ce.semantics import cast_error
from ce.analysis import analyse, frontier, Plot
logger.set_context(level=logger.levels.info)
Expr.__repr__ = Expr.__str__
logger.info('Expand', expand('(a + 3) * (a + 3)'))
logger.info('Parsings', parsings('a + b + c'))
logger.info('Reduction', reduce('a + 2 * 3 * 4 + 6 * b + 3'))
e = 'a * a * b * b + a * a * b + 2 * a * b + 3 * a + 4'
v = {
'a': cast_error('0.1', '0.2'),
'b': cast_error('100', '200'),
}
@timeit
def closure_frontier(e, v):
c = closure(e)
return c, expr_frontier(c, v)
complete, complete_front = closure_frontier(e, v)
martel_front = timeit(martel)(e, v)
logger.info('Closure', len(complete_front), complete_front)
logger.info('Martel', len(martel_front), martel_front)
p = Plot()
p.add(analyse(complete, v), legend='Complete')
p.add(analyse(martel_front, v), legend='Martel')
p.show()
|
import itertools
from ce.expr import Expr
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity, \
BiOpTreeTransformer
from ce.analysis import expr_frontier
def closure(tree, depth=None):
return BiOpTreeTransformer(tree, depth=depth).closure()
def transform(tree,
reduction_methods=None, transform_methods=None, depth=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def reduce(tree):
try:
tree = Expr(tree)
except TypeError:
return {reduce(t) for t in tree}
s = set(transform(tree, BiOpTreeTransformer.reduction_methods))
if len(s) > 1:
s.remove(tree)
if len(s) == 1:
return s.pop()
raise Exception
def parsings(tree):
return transform(tree, None, [associativity])
def martel_closure(tree, depth=None):
t = BiOpTreeTransformer(tree, depth=depth)
t.transform_methods.remove(distribute_for_distributivity)
return t.closure()
class MartelExpr(Expr):
def traces(self, var_env=None, depth=None):
def subtraces(a):
try:
return MartelExpr(a).traces(depth)
except (ValueError, TypeError):
return {a}
stl = [subtraces(a) for a in self.args]
sts = set(Expr(self.op, args) for args in itertools.product(*stl))
logger.debug('Generating %s~=%d traces for tree: %s' %
('*'.join([str(len(s)) for s in stl]),
len(sts), str(self)))
cll = martel_closure(sts, depth=depth)
if var_env:
cll = expr_frontier(cll, var_env)
return cll
def __repr__(self):
return "MartelExpr(op='%s', a1=%s, a2=%s)" % \
(self.op, repr(self.a1), repr(self.a2))
def martel(tree, var_env=None, depth=2):
return reduce(MartelExpr(expand(tree)).traces(var_env, depth))
if __name__ == '__main__':
from matplotlib import pyplot as plt
import ce.logger as logger
from ce.common import timeit
from ce.semantics import cast_error
from ce.analysis import analyse, frontier, zip_result
logger.set_context(level=logger.levels.info)
Expr.__repr__ = Expr.__str__
logger.info('Expand', expand('(a + 3) * (a + 3)'))
logger.info('Parsings', parsings('a + b + c'))
logger.info('Reduction', reduce('a + 2 * 3 * 4 + 6 * b + 3'))
e = 'a * a * b * b + a * a * b + 2 * a * b + 3 * a + 4'
v = {
'a': cast_error('0.1', '0.2'),
'b': cast_error('100', '200'),
}
def closure_frontier(e, v):
c = closure(e)
return c, expr_frontier(c, v)
complete, complete_front = timeit(closure_frontier)(e, v)
martel_front = timeit(martel)(e, v)
logger.info('Closure', len(complete_front), complete_front)
logger.info('Martel', len(martel_front), martel_front)
plt.scatter(*zip_result(analyse(complete, v)))
plt.plot(*zip_result(frontier(complete, v)))
plt.plot(*zip_result(analyse(martel_front, v)))
plt.show()
|
Python
| 0.000001
|
1a50aaf6be0f866046d88944607802a4e8661c61
|
Revert "Test jenkins failure"
|
ceam_tests/test_util.py
|
ceam_tests/test_util.py
|
# ~/ceam/tests/test_util.py
from unittest import TestCase
from datetime import timedelta
from unittest.mock import Mock
import numpy as np
import pandas as pd
from ceam.engine import SimulationModule
from ceam.util import from_yearly, to_yearly, rate_to_probability, probability_to_rate
class TestRateConversions(TestCase):
"""
Simple regression tests for rate functions
"""
def test_from_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.01
new_rate = from_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.0008356164383561645)
def test_to_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.0008356164383561645
new_rate = to_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.01)
def test_rate_to_probability(self):
rate = 0.001
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob, 0.00099950016662497809)
def test_probablity_to_rate(self):
prob = 0.00099950016662497809
rate = probability_to_rate(prob)
self.assertAlmostEqual(rate, 0.001)
def test_rate_to_probability_symmetry(self):
rate = 0.0001
for _ in range(100):
prob = rate_to_probability(rate)
self.assertAlmostEqual(rate, probability_to_rate(prob))
rate += (1-0.0001)/100.0
def test_rate_to_probablity_vectorizability(self):
rate = 0.001
rate = np.array([rate]*100)
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob[10], 0.00099950016662497809)
self.assertAlmostEqual(np.sum(rate), np.sum(probability_to_rate(prob)))
# End.
|
# ~/ceam/tests/test_util.py
from unittest import TestCase
from datetime import timedelta
from unittest.mock import Mock
import numpy as np
import pandas as pd
from ceam.engine import SimulationModule
from ceam.util import from_yearly, to_yearly, rate_to_probability, probability_to_rate
class TestRateConversions(TestCase):
"""
Simple regression tests for rate functions
"""
def test_from_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.01
new_rate = from_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.0008356164383561645)
def test_to_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.0008356164383561645
new_rate = to_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.01)
def test_rate_to_probability(self):
rate = 0.001
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob, 0.00099950016662497809)
def test_probablity_to_rate(self):
prob = 0.00099950016662497809
rate = probability_to_rate(prob)
self.assertAlmostEqual(rate, 0.001)
def test_rate_to_probability_symmetry(self):
rate = 0.0001
for _ in range(100):
prob = rate_to_probability(rate)
self.assertAlmostEqual(rate, probability_to_rate(prob))
rate += (1-0.0001)/100.0
def test_rate_to_probablity_vectorizability(self):
rate = 0.001
rate = np.array([rate]*100)
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob[10], 0.00099950016662497809)
self.assertAlmostEqual(np.sum(rate), np.sum(probability_to_rate(prob)))
def test_failure(self):
assert False
# End.
|
Python
| 0
|
c00c7e6099269c66b64a15c15318093eadbf3851
|
Fix excluded_extensions when ignore_hidden is False
|
checksumdir/__init__.py
|
checksumdir/__init__.py
|
"""
Function for deterministically creating a single hash for a directory of files,
taking into account only file contents and not filenames.
Usage:
from checksumdir import dirhash
dirhash('/path/to/directory', 'md5')
"""
import os
import hashlib
import re
import pkg_resources
__version__ = pkg_resources.require("checksumdir")[0].version
HASH_FUNCS = {
'md5': hashlib.md5,
'sha1': hashlib.sha1,
'sha256': hashlib.sha256,
'sha512': hashlib.sha512
}
def dirhash(dirname, hashfunc='md5', excluded_files=None, ignore_hidden=False,
followlinks=False, excluded_extensions=None):
hash_func = HASH_FUNCS.get(hashfunc)
if not hash_func:
raise NotImplementedError('{} not implemented.'.format(hashfunc))
if not excluded_files:
excluded_files = []
if not excluded_extensions:
excluded_extensions = []
if not os.path.isdir(dirname):
raise TypeError('{} is not a directory.'.format(dirname))
hashvalues = []
for root, dirs, files in os.walk(dirname, topdown=True, followlinks=followlinks):
if ignore_hidden:
if not re.search(r'/\.', root):
hashvalues.extend(
[_filehash(os.path.join(root, f),
hash_func) for f in files if not
f.startswith('.') and not re.search(r'/\.', f)
and f not in excluded_files
and f.split('.')[-1:][0] not in excluded_extensions
]
)
else:
hashvalues.extend(
[
_filehash(os.path.join(root, f), hash_func)
for f in files
if f not in excluded_files
and f.split('.')[-1:][0] not in excluded_extensions
]
)
return _reduce_hash(hashvalues, hash_func)
def _filehash(filepath, hashfunc):
hasher = hashfunc()
blocksize = 64 * 1024
with open(filepath, 'rb') as fp:
while True:
data = fp.read(blocksize)
if not data:
break
hasher.update(data)
return hasher.hexdigest()
def _reduce_hash(hashlist, hashfunc):
hasher = hashfunc()
for hashvalue in sorted(hashlist):
hasher.update(hashvalue.encode('utf-8'))
return hasher.hexdigest()
|
"""
Function for deterministically creating a single hash for a directory of files,
taking into account only file contents and not filenames.
Usage:
from checksumdir import dirhash
dirhash('/path/to/directory', 'md5')
"""
import os
import hashlib
import re
import pkg_resources
__version__ = pkg_resources.require("checksumdir")[0].version
HASH_FUNCS = {
'md5': hashlib.md5,
'sha1': hashlib.sha1,
'sha256': hashlib.sha256,
'sha512': hashlib.sha512
}
def dirhash(dirname, hashfunc='md5', excluded_files=None, ignore_hidden=False,
followlinks=False, excluded_extensions=None):
hash_func = HASH_FUNCS.get(hashfunc)
if not hash_func:
raise NotImplementedError('{} not implemented.'.format(hashfunc))
if not excluded_files:
excluded_files = []
if not excluded_extensions:
excluded_extensions = []
if not os.path.isdir(dirname):
raise TypeError('{} is not a directory.'.format(dirname))
hashvalues = []
for root, dirs, files in os.walk(dirname, topdown=True, followlinks=followlinks):
if ignore_hidden:
if not re.search(r'/\.', root):
hashvalues.extend(
[_filehash(os.path.join(root, f),
hash_func) for f in files if not
f.startswith('.') and not re.search(r'/\.', f)
and f not in excluded_files
and f.split('.')[-1:][0] not in excluded_extensions
]
)
else:
hashvalues.extend([_filehash(os.path.join(root, f),
hash_func) for f in files if f not in excluded_files])
return _reduce_hash(hashvalues, hash_func)
def _filehash(filepath, hashfunc):
hasher = hashfunc()
blocksize = 64 * 1024
with open(filepath, 'rb') as fp:
while True:
data = fp.read(blocksize)
if not data:
break
hasher.update(data)
return hasher.hexdigest()
def _reduce_hash(hashlist, hashfunc):
hasher = hashfunc()
for hashvalue in sorted(hashlist):
hasher.update(hashvalue.encode('utf-8'))
return hasher.hexdigest()
|
Python
| 0
|
f1dd824978ad8581113a088afe1d1bdf99a00802
|
Move to dev.
|
command_line/griddex.py
|
command_line/griddex.py
|
# LIBTBX_SET_DISPATCHER_NAME dev.dials.griddex
from __future__ import absolute_import, division, print_function
import libtbx.phil
import libtbx.load_env
help_message = '''
Cross reference indexing solutions.
Examples::
%s expts0.json refl0.json
''' % libtbx.env.dispatcher_name
phil_scope = libtbx.phil.parse("""
d_min = None
.type = float(value_min=0.0)
""")
def test_index(experiment, reflections):
from dials.algorithms.indexing import indexer
# map reflections to reciprocal space from image space
refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
reflections, experiment.detector, experiment.scan)
indexer.indexer_base.map_centroids_to_reciprocal_space(
refl, experiment.detector, experiment.beam, experiment.goniometer)
# now compute fractional indices - in Python rather than trying to push
# everything to C++ for the moment
from scitbx import matrix
ub = matrix.sqr(experiment.crystal.get_A())
rub = ub.inverse()
from dials.array_family import flex
hkl_real = flex.vec3_double(len(reflections))
for j, rlp in enumerate(reflections['rlp']):
hkl_real[j] = rub * rlp
hkl = hkl_real.iround()
ms = 0.0
for (_h, _k, _l), (_hr, _kr, _lr) in zip(hkl, hkl_real):
ms += (_hr - _h) ** 2 + (_kr - _k) ** 2 + (_lr - _l) ** 2
import math
return math.sqrt(ms / len(reflections))
def run(args):
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from dials.util.options import flatten_reflections
import libtbx.load_env
usage = "%s [options] datablock.json reflections.pickle" % (
libtbx.env.dispatcher_name)
parser = OptionParser(
usage=usage,
phil=phil_scope,
read_experiments=True,
read_reflections=True,
check_format=False,
epilog=help_message)
params, options = parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
reflections = flatten_reflections(params.input.reflections)
assert len(experiments) == len(reflections)
nn = len(experiments)
# FIXME check that all the crystals are in the primitive setting...
# now compute grid of reciprocal RMSD's
result = { }
for j, expt in enumerate(experiments):
for k, refl in enumerate(reflections):
result[j, k] = test_index(expt, refl)
# print matrix of results
print(' ' + ''.join(['%7d' % j for j in range(nn)]))
for k in range(nn):
record = ''.join([' %6.3f' % result[j, k] for j in range(nn)])
print('%8d' % k + record)
if __name__ == '__main__':
import sys
run(sys.argv[1:])
|
from __future__ import absolute_import, division, print_function
import libtbx.phil
import libtbx.load_env
help_message = '''
Cross reference indexing solutions.
Examples::
%s expts0.json refl0.json
''' % libtbx.env.dispatcher_name
phil_scope = libtbx.phil.parse("""
d_min = None
.type = float(value_min=0.0)
""")
def test_index(experiment, reflections):
from dials.algorithms.indexing import indexer
# map reflections to reciprocal space from image space
refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
reflections, experiment.detector, experiment.scan)
indexer.indexer_base.map_centroids_to_reciprocal_space(
refl, experiment.detector, experiment.beam, experiment.goniometer)
# now compute fractional indices - in Python rather than trying to push
# everything to C++ for the moment
from scitbx import matrix
ub = matrix.sqr(experiment.crystal.get_A())
rub = ub.inverse()
from dials.array_family import flex
hkl_real = flex.vec3_double(len(reflections))
for j, rlp in enumerate(reflections['rlp']):
hkl_real[j] = rub * rlp
hkl = hkl_real.iround()
ms = 0.0
for (_h, _k, _l), (_hr, _kr, _lr) in zip(hkl, hkl_real):
ms += (_hr - _h) ** 2 + (_kr - _k) ** 2 + (_lr - _l) ** 2
import math
return math.sqrt(ms / len(reflections))
def run(args):
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from dials.util.options import flatten_reflections
import libtbx.load_env
usage = "%s [options] datablock.json reflections.pickle" % (
libtbx.env.dispatcher_name)
parser = OptionParser(
usage=usage,
phil=phil_scope,
read_experiments=True,
read_reflections=True,
check_format=False,
epilog=help_message)
params, options = parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
reflections = flatten_reflections(params.input.reflections)
assert len(experiments) == len(reflections)
nn = len(experiments)
# FIXME check that all the crystals are in the primitive setting...
# now compute grid of reciprocal RMSD's
result = { }
for j, expt in enumerate(experiments):
for k, refl in enumerate(reflections):
result[j, k] = test_index(expt, refl)
# print matrix of results
print(' ' + ''.join(['%7d' % j for j in range(nn)]))
for k in range(nn):
record = ''.join([' %6.3f' % result[j, k] for j in range(nn)])
print('%8d' % k + record)
if __name__ == '__main__':
import sys
run(sys.argv[1:])
|
Python
| 0
|
9ebc7c3aee73f4a950d4975034f3c41417d59444
|
clean up unused imports
|
common/util/__init__.py
|
common/util/__init__.py
|
import sublime
from plistlib import readPlistFromBytes
syntax_file_map = {}
def move_cursor(view, line_no, char_no):
# Line numbers are one-based, rows are zero-based.
line_no -= 1
# Negative line index counts backwards from the last line.
if line_no < 0:
last_line, _ = view.rowcol(view.size())
line_no = last_line + line_no + 1
pt = view.text_point(line_no, char_no)
view.sel().clear()
view.sel().add(sublime.Region(pt))
view.show(pt)
def _region_within_regions(all_outer, inner):
for outer in all_outer:
if outer.begin() <= inner.begin() and outer.end() >= inner.end():
return True
return False
def get_lines_from_regions(view, regions, valid_ranges=None):
full_line_regions = (view.full_line(region) for region in regions)
valid_regions = ([region for region in full_line_regions if _region_within_regions(valid_ranges, region)]
if valid_ranges else
full_line_regions)
return [line for region in valid_regions for line in view.substr(region).split("\n")]
def determine_syntax_files():
syntax_files = sublime.find_resources("*.tmLanguage")
for syntax_file in syntax_files:
try:
# Use `sublime.load_resource`, in case Package is `*.sublime-package`.
resource = sublime.load_resource(syntax_file)
plist = readPlistFromBytes(bytearray(resource, encoding="utf-8"))
for extension in plist["fileTypes"]:
if extension not in syntax_file_map:
syntax_file_map[extension] = []
extension_list = syntax_file_map[extension]
extension_list.append(syntax_file)
except:
continue
def get_syntax_for_file(filename):
extension = get_file_extension(filename)
try:
# Return last syntax file applicable to this extension.
return syntax_file_map[extension][-1]
except KeyError:
pass
return "Packages/Text/Plain text.tmLanguage"
def get_file_extension(filename):
period_delimited_segments = filename.split(".")
return "" if len(period_delimited_segments) < 2 else period_delimited_segments[-1]
|
import itertools
import sublime
from plistlib import readPlistFromBytes
from .parse_diff import parse_diff
syntax_file_map = {}
def move_cursor(view, line_no, char_no):
# Line numbers are one-based, rows are zero-based.
line_no -= 1
# Negative line index counts backwards from the last line.
if line_no < 0:
last_line, _ = view.rowcol(view.size())
line_no = last_line + line_no + 1
pt = view.text_point(line_no, char_no)
view.sel().clear()
view.sel().add(sublime.Region(pt))
view.show(pt)
def _region_within_regions(all_outer, inner):
for outer in all_outer:
if outer.begin() <= inner.begin() and outer.end() >= inner.end():
return True
return False
def get_lines_from_regions(view, regions, valid_ranges=None):
full_line_regions = (view.full_line(region) for region in regions)
valid_regions = ([region for region in full_line_regions if _region_within_regions(valid_ranges, region)]
if valid_ranges else
full_line_regions)
return [line for region in valid_regions for line in view.substr(region).split("\n")]
def determine_syntax_files():
syntax_files = sublime.find_resources("*.tmLanguage")
for syntax_file in syntax_files:
try:
# Use `sublime.load_resource`, in case Package is `*.sublime-package`.
resource = sublime.load_resource(syntax_file)
plist = readPlistFromBytes(bytearray(resource, encoding="utf-8"))
for extension in plist["fileTypes"]:
if extension not in syntax_file_map:
syntax_file_map[extension] = []
extension_list = syntax_file_map[extension]
extension_list.append(syntax_file)
except:
continue
def get_syntax_for_file(filename):
extension = get_file_extension(filename)
try:
# Return last syntax file applicable to this extension.
return syntax_file_map[extension][-1]
except KeyError:
pass
return "Packages/Text/Plain text.tmLanguage"
def get_file_extension(filename):
period_delimited_segments = filename.split(".")
return "" if len(period_delimited_segments) < 2 else period_delimited_segments[-1]
|
Python
| 0.000001
|
8c3b20f8aa655a7f8fe1ae485e493aa4a5f24abd
|
Remove __getattr__ method from CompositeField
|
composite_field/l10n.py
|
composite_field/l10n.py
|
from copy import deepcopy
from django.conf import settings
from django.db.models.fields import Field, CharField, TextField, FloatField
from django.utils.functional import lazy
from django.utils import six
from . import CompositeField
LANGUAGES = map(lambda lang: lang[0], getattr(settings, 'LANGUAGES', ()))
class LocalizedField(CompositeField):
def __init__(self, field_class, *args, **kwargs):
self.languages = kwargs.pop('languages', LANGUAGES)
if not self.languages:
raise RuntimeError('Set LANGUAGES in your settings.py or pass a non empty "languages" argument before using LocalizedCharField')
super(LocalizedField, self).__init__()
self.verbose_name = kwargs.pop('verbose_name', None)
for language in self.languages:
self[language] = field_class(*args, **kwargs)
def contribute_to_class(self, cls, field_name):
if self.verbose_name is None:
self.verbose_name = field_name.replace('_', ' ')
for language in self:
# verbose_name must be lazy in order for the admin to show the
# translated verbose_names of the fields
self[language].verbose_name = lazy(lambda language: self.verbose_name + ' (' + language + ')', six.text_type)(language)
super(LocalizedField, self).contribute_to_class(cls, field_name)
def get_proxy(self, model):
return LocalizedField.Proxy(self, model)
class Proxy(CompositeField.Proxy):
def __bool__(self):
return bool(six.text_type(self))
def __unicode__(self):
from django.utils.translation import get_language
language = get_language()
translation = None
# 1. complete language code
translation = getattr(self, language, None)
if translation is not None:
return translation
# 2. base of language code
if '-' in language:
base_lang = language.split('-')[0]
translation = getattr(self, base_lang, None)
if translation is not None:
return translation
# 3. first available translation
for language in settings.LANGUAGES:
getattr(self, base_lang, None)
if translation is not None:
return translation
return None
class LocalizedCharField(LocalizedField):
def __init__(self, *args, **kwargs):
super(LocalizedCharField, self).__init__(CharField, *args, **kwargs)
class LocalizedTextField(LocalizedField):
def __init__(self, *args, **kwargs):
super(LocalizedTextField, self).__init__(TextField, *args, **kwargs)
|
from copy import deepcopy
from django.conf import settings
from django.db.models.fields import Field, CharField, TextField, FloatField
from django.utils.functional import lazy
from django.utils import six
from . import CompositeField
LANGUAGES = map(lambda lang: lang[0], getattr(settings, 'LANGUAGES', ()))
class LocalizedField(CompositeField):
def __init__(self, field_class, *args, **kwargs):
self.languages = kwargs.pop('languages', LANGUAGES)
if not self.languages:
raise RuntimeError('Set LANGUAGES in your settings.py or pass a non empty "languages" argument before using LocalizedCharField')
super(LocalizedField, self).__init__()
self.verbose_name = kwargs.pop('verbose_name', None)
for language in self.languages:
self[language] = field_class(*args, **kwargs)
def __getattr__(self, name):
# Proxy all other getattr calls to the first language field. This makes
# it possible to access subfield specific details like 'max_length',
# 'blank', etc. without duplication.
return getattr(self[self.languages[0]], name)
def contribute_to_class(self, cls, field_name):
if self.verbose_name is None:
self.verbose_name = field_name.replace('_', ' ')
for language in self:
# verbose_name must be lazy in order for the admin to show the
# translated verbose_names of the fields
self[language].verbose_name = lazy(lambda language: self.verbose_name + ' (' + language + ')', six.text_type)(language)
super(LocalizedField, self).contribute_to_class(cls, field_name)
def get_proxy(self, model):
return LocalizedField.Proxy(self, model)
class Proxy(CompositeField.Proxy):
def __bool__(self):
return bool(six.text_type(self))
def __unicode__(self):
from django.utils.translation import get_language
language = get_language()
translation = None
# 1. complete language code
translation = getattr(self, language, None)
if translation is not None:
return translation
# 2. base of language code
if '-' in language:
base_lang = language.split('-')[0]
translation = getattr(self, base_lang, None)
if translation is not None:
return translation
# 3. first available translation
for language in settings.LANGUAGES:
getattr(self, base_lang, None)
if translation is not None:
return translation
return None
class LocalizedCharField(LocalizedField):
def __init__(self, *args, **kwargs):
super(LocalizedCharField, self).__init__(CharField, *args, **kwargs)
class LocalizedTextField(LocalizedField):
def __init__(self, *args, **kwargs):
super(LocalizedTextField, self).__init__(TextField, *args, **kwargs)
|
Python
| 0
|
5caf134eedc4ace933da8c2f21aacc5f5b1224ef
|
bump version
|
confetti/__version__.py
|
confetti/__version__.py
|
__version__ = "2.2.1"
|
__version__ = "2.2.0"
|
Python
| 0
|
b48a29e1f940f6b9c0305dbcc15e98ea37057232
|
Update moscow.py
|
russian_metro/parser/providers/moscow.py
|
russian_metro/parser/providers/moscow.py
|
# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
from russian_metro.parser.base import BaseDataProvider
class DataProvider(BaseDataProvider):
metro_lines_src = u"http://ru.wikipedia.org/wiki/Модуль:MoscowMetro#ColorByNum"
metro_stations_src = u"http://ru.wikipedia.org/w/index.php?title=\
Список_станций_Московского_метрополитена"
def download_lines(self):
html = BeautifulSoup(requests.get(self.metro_lines_src).content)
table = html.find('table')
for i, row in enumerate(table.find_all('tr')):
if i == 0:
continue
number = 0
for j, cell in enumerate(row.find_all('td')):
value = cell.string
if j == 0:
if value and value.isdigit():
number = int(value)
elif j == 1:
title = value
elif j == 2:
color = value
if number > 0:
self.line_model.objects.get_or_create(
number=number,
defaults=dict(
title=title, color='#' + color
)
)
def download_stations(self):
html = BeautifulSoup(requests.get(self.metro_stations_src).content)
table = html.find('table', 'wikitable')
lines = self.line_model.get_all()
for i, row in enumerate(table.find_all('tr')):
if i == 0:
continue
for j, cell in enumerate(row.find_all('td')):
if j == 0:
line = 0
value = cell.find('span', 'sortkey').string
if value and value.isdigit():
line = int(value)
elif j == 1:
title = cell.find('span').string
try:
line_inst = lines[line]
except KeyError:
continue
self.station_model\
.objects\
.get_or_create(line=line_inst, title=title)
|
# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
from russian_metro.parser.base import BaseDataProvider
class DataProvider(BaseDataProvider):
metro_lines_src = u"http://ru.wikipedia.org/wiki/Модуль:MoscowMetro#ColorByNum"
metro_stations_src = u"http://ru.wikipedia.org/w/index.php?title=\
Список_станций_Московского_метрополитена"
def download_lines(self):
html = BeautifulSoup(requests.get(self.metro_lines_src).content)
table = html.find('table')
for i, row in enumerate(table.find_all('tr')):
if i == 0:
continue
number = 0
for j, cell in enumerate(row.find_all('td')):
value = cell.string
if j == 0:
if value and value.isdigit():
number = int(value)
elif j == 1:
title = value
elif j == 2:
color = value
if number > 0:
self.line_model.objects.get_or_create(
number=number,
defaults=dict(
title=title, color='#' + color
)
)
def download_stations(self):
html = BeautifulSoup(requests.get(self.metro_stations_src).content)
table = html.find('table', 'wikitable')
lines = self.line_model.get_all()
for i, row in enumerate(table.find_all('tr')):
if i == 0:
continue
for j, cell in enumerate(row.find_all('td')):
if j == 0:
line = 0
value = cell.find('span', 'sortkey').string
if value and value.isdigit():
line = int(value)
elif j == 1:
title = cell.find('span').string
try:
line_inst = lines[line]
except KeyError:
logger.warning(
u'MetroLine with number %d does not exist' % line
)
continue
self.station_model\
.objects\
.get_or_create(line=line_inst, title=title)
|
Python
| 0
|
3dae8f25cda4827397ab3812ea552ed27d37e757
|
Remove contraints on dotted names
|
base_vat_optional_vies/models/res_partner.py
|
base_vat_optional_vies/models/res_partner.py
|
# Copyright 2015 Tecnativa - Antonio Espinosa
# Copyright 2017 Tecnativa - David Vidal
# Copyright 2019 FactorLibre - Rodrigo Bonilla
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
vies_passed = fields.Boolean(
string="VIES validation", readonly=True)
@api.model
def simple_vat_check(self, country_code, vat_number):
res = super(ResPartner, self).simple_vat_check(
country_code, vat_number,
)
partner = self.env.context.get('vat_partner')
if partner and self.vies_passed:
# Can not be sure that this VAT is signed up in VIES
partner.update({'vies_passed': False})
return res
@api.model
def vies_vat_check(self, country_code, vat_number):
partner = self.env.context.get('vat_partner')
if partner:
# If there's an exception checking VIES, the upstream method will
# call simple_vat_check and thus the flag will be removed
partner.update({'vies_passed': True})
res = super(ResPartner, self).vies_vat_check(country_code, vat_number)
if not res:
return self.simple_vat_check(country_code, vat_number)
return res
@api.constrains('vat')
def check_vat(self):
for partner in self:
partner = partner.with_context(vat_partner=partner)
super(ResPartner, partner).check_vat()
|
# Copyright 2015 Tecnativa - Antonio Espinosa
# Copyright 2017 Tecnativa - David Vidal
# Copyright 2019 FactorLibre - Rodrigo Bonilla
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
vies_passed = fields.Boolean(
string="VIES validation", readonly=True)
@api.model
def simple_vat_check(self, country_code, vat_number):
res = super(ResPartner, self).simple_vat_check(
country_code, vat_number,
)
partner = self.env.context.get('vat_partner')
if partner and self.vies_passed:
# Can not be sure that this VAT is signed up in VIES
partner.update({'vies_passed': False})
return res
@api.model
def vies_vat_check(self, country_code, vat_number):
partner = self.env.context.get('vat_partner')
if partner:
# If there's an exception checking VIES, the upstream method will
# call simple_vat_check and thus the flag will be removed
partner.update({'vies_passed': True})
res = super(ResPartner, self).vies_vat_check(country_code, vat_number)
if not res:
return self.simple_vat_check(country_code, vat_number)
return res
@api.constrains('vat', 'commercial_partner.country_id')
def check_vat(self):
for partner in self:
partner = partner.with_context(vat_partner=partner)
super(ResPartner, partner).check_vat()
|
Python
| 0.000001
|
6ed3f5d97fe8f8967df5624f62e69ce2a58a9413
|
Add color to pytest tests on CI (#20723)
|
scripts/ci/images/ci_run_docker_tests.py
|
scripts/ci/images/ci_run_docker_tests.py
|
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import shlex
import subprocess
import sys
from pathlib import Path
from typing import List
AIRFLOW_SOURCE = Path(__file__).resolve().parent.parent.parent
BUILD_CACHE_DIR = AIRFLOW_SOURCE / ".build"
CBLUE = '\033[94m'
CEND = '\033[0m'
def get_parser():
parser = argparse.ArgumentParser(
prog="ci_run_docker_tests",
description="Running Docker tests using pytest",
epilog="Unknown arguments are passed unchanged to Pytest.",
)
parser.add_argument(
"--interactive",
"-i",
action='store_true',
help="Activates virtual environment ready to run tests and drops you in",
)
parser.add_argument("--initialize", action="store_true", help="Initialize virtual environment and exit")
parser.add_argument("pytestopts", nargs=argparse.REMAINDER, help="Tests to run")
return parser
def run_verbose(cmd: List[str], *, check=True, **kwargs):
print(f"{CBLUE}$ {' '.join(shlex.quote(c) for c in cmd)}{CEND}")
subprocess.run(cmd, check=check, **kwargs)
def create_virtualenv():
virtualenv_path = (
BUILD_CACHE_DIR / ".docker_venv" / f"host_python_{sys.version_info[0]}.{sys.version_info[1]}"
)
virtualenv_path.parent.mkdir(parents=True, exist_ok=True)
if not virtualenv_path.exists():
print("Creating virtualenv environment")
run_verbose([sys.executable, "-m", "venv", str(virtualenv_path)])
python_bin = virtualenv_path / "bin" / "python"
run_verbose([str(python_bin), "-m", "pip", "install", "pytest", "pytest-xdist", "requests"])
return python_bin
def main():
parser = get_parser()
args = parser.parse_args()
python_bin = create_virtualenv()
if args.initialize:
return
if args.interactive:
activate_bin = python_bin.parent / "activate"
bash_trampoline = f"source {shlex.quote(str(activate_bin))}"
print("To enter virtual environment, run:")
print(f" {bash_trampoline}")
return
extra_pytest_args = (
args.pytestopts[1:] if args.pytestopts and args.pytestopts[0] == "--" else args.pytestopts
)
if not extra_pytest_args:
raise SystemExit("You must select the tests to run.")
pytest_args = ("-n", "auto", "--color=yes")
run_verbose([str(python_bin), "-m", "pytest", *pytest_args, *extra_pytest_args])
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import shlex
import subprocess
import sys
from pathlib import Path
from typing import List
AIRFLOW_SOURCE = Path(__file__).resolve().parent.parent.parent
BUILD_CACHE_DIR = AIRFLOW_SOURCE / ".build"
CBLUE = '\033[94m'
CEND = '\033[0m'
def get_parser():
parser = argparse.ArgumentParser(
prog="ci_run_docker_tests",
description="Running Docker tests using pytest",
epilog="Unknown arguments are passed unchanged to Pytest.",
)
parser.add_argument(
"--interactive",
"-i",
action='store_true',
help="Activates virtual environment ready to run tests and drops you in",
)
parser.add_argument("--initialize", action="store_true", help="Initialize virtual environment and exit")
parser.add_argument("pytestopts", nargs=argparse.REMAINDER, help="Tests to run")
return parser
def run_verbose(cmd: List[str], *, check=True, **kwargs):
print(f"{CBLUE}$ {' '.join(shlex.quote(c) for c in cmd)}{CEND}")
subprocess.run(cmd, check=check, **kwargs)
def create_virtualenv():
virtualenv_path = (
BUILD_CACHE_DIR / ".docker_venv" / f"host_python_{sys.version_info[0]}.{sys.version_info[1]}"
)
virtualenv_path.parent.mkdir(parents=True, exist_ok=True)
if not virtualenv_path.exists():
print("Creating virtualenv environment")
run_verbose([sys.executable, "-m", "venv", str(virtualenv_path)])
python_bin = virtualenv_path / "bin" / "python"
run_verbose([str(python_bin), "-m", "pip", "install", "pytest", "pytest-xdist", "requests"])
return python_bin
def main():
parser = get_parser()
args = parser.parse_args()
python_bin = create_virtualenv()
if args.initialize:
return
if args.interactive:
activate_bin = python_bin.parent / "activate"
bash_trampoline = f"source {shlex.quote(str(activate_bin))}"
print("To enter virtual environment, run:")
print(f" {bash_trampoline}")
return
extra_pytest_args = (
args.pytestopts[1:] if args.pytestopts and args.pytestopts[0] == "--" else args.pytestopts
)
if not extra_pytest_args:
raise SystemExit("You must select the tests to run.")
pytest_args = (
"-n",
"auto",
)
run_verbose([str(python_bin), "-m", "pytest", *pytest_args, *extra_pytest_args])
if __name__ == "__main__":
main()
|
Python
| 0
|
b5b31136ff716b423d78d307e107df4b8d8cfedc
|
Add images field on article model abstract, is many to many
|
opps/core/models/article.py
|
opps/core/models/article.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from opps.core.models.published import Published
from opps.core.models.date import Date
from opps.core.models.channel import Channel
from opps.core.models.image import Image
from tagging.models import Tag
from tagging.fields import TagField
class Article(Published, Date):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"URL"), max_length=150, unique=True,
db_index=True)
short_title = models.CharField(_(u"Short title"), max_length=140,
blank=False, null=True)
headline = models.TextField(_(u"Headline"), blank=True)
channel = models.ForeignKey(Channel, verbose_name=_(u"Channel"))
content = models.TextField(_(u"Content"))
images = models.ManyToManyField(Image, through='ArticleImage',
related_name='article_images')
tags = TagField(null=True, verbose_name=_(u"Tags"))
class Meta:
abstract = True
def __unicode__(self):
return "{0}/{1}".format(self.site.name, self.slug)
class Post(Article):
credit = models.CharField(_("Credit"), blank=True, max_length=255)
class Meta:
app_label = 'core'
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from opps.core.models.published import Published
from opps.core.models.date import Date
from opps.core.models.channel import Channel
from tagging.models import Tag
from tagging.fields import TagField
class Article(Published, Date):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"URL"), max_length=150, unique=True,
db_index=True)
short_title = models.CharField(_(u"Short title"), max_length=140,
blank=False, null=True)
headline = models.TextField(_(u"Headline"), blank=True)
channel = models.ForeignKey(Channel, verbose_name=_(u"Channel"))
content = models.TextField(_(u"Content"))
tags = TagField(null=True, verbose_name=_(u"Tags"))
class Meta:
abstract = True
def __unicode__(self):
return "{0}/{1}".format(self.site.name, self.slug)
class Post(Article):
credit = models.CharField(_("Credit"), blank=True, max_length=255)
class Meta:
app_label = 'core'
|
Python
| 0.000059
|
5cf84d646796bf5d2f96c67b12a21dc557532c4f
|
move recv_threads checking loop to run()
|
orchard/cli/socketclient.py
|
orchard/cli/socketclient.py
|
# Adapted from https://github.com/benthor/remotty/blob/master/socketclient.py
from select import select
import sys
import tty
import fcntl
import os
import termios
import threading
import time
import errno
import logging
log = logging.getLogger(__name__)
class SocketClient:
def __init__(self,
socket_in=None,
socket_out=None,
socket_err=None,
raw=True,
):
self.socket_in = socket_in
self.socket_out = socket_out
self.socket_err = socket_err
self.raw = raw
self.stdin_fileno = sys.stdin.fileno()
self.recv_threads = []
def __enter__(self):
self.create()
return self
def __exit__(self, type, value, trace):
self.destroy()
def create(self):
if os.isatty(sys.stdin.fileno()):
self.settings = termios.tcgetattr(sys.stdin.fileno())
else:
self.settings = None
if self.socket_in is not None:
self.set_blocking(sys.stdin, False)
self.set_blocking(sys.stdout, True)
self.set_blocking(sys.stderr, True)
if self.raw:
tty.setraw(sys.stdin.fileno())
def set_blocking(self, file, blocking):
fd = file.fileno()
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = (flags & ~os.O_NONBLOCK) if blocking else (flags | os.O_NONBLOCK)
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def run(self):
if self.socket_in is not None:
self.start_send_thread(self.socket_in, sys.stdin)
if self.socket_out is not None:
self.start_recv_thread(self.socket_out, sys.stdout)
if self.socket_err is not None:
self.start_recv_thread(self.socket_err, sys.stderr)
while any(t.is_alive() for t in self.recv_threads):
time.sleep(1)
def start_send_thread(self, *args):
thread = threading.Thread(target=self.send_ws, args=args)
thread.daemon = True
thread.start()
def start_recv_thread(self, *args):
thread = threading.Thread(target=self.recv_ws, args=args)
thread.daemon = True
thread.start()
self.recv_threads.append(thread)
def recv_ws(self, socket, stream):
try:
while True:
chunk = socket.recv()
if chunk:
stream.write(chunk)
stream.flush()
else:
break
except Exception, e:
log.debug(e)
def send_ws(self, socket, stream):
while True:
r, w, e = select([stream.fileno()], [], [])
if r:
chunk = stream.read(1)
if chunk == '':
socket.send_close()
break
else:
try:
socket.send(chunk)
except Exception, e:
if hasattr(e, 'errno') and e.errno == errno.EPIPE:
break
else:
raise e
def destroy(self):
if self.settings is not None:
termios.tcsetattr(self.stdin_fileno, termios.TCSADRAIN, self.settings)
sys.stdout.flush()
if __name__ == '__main__':
import websocket
if len(sys.argv) != 2:
sys.stderr.write("Usage: python socketclient.py WEBSOCKET_URL\n")
exit(1)
url = sys.argv[1]
socket = websocket.create_connection(url)
print "connected\r"
with SocketClient(socket, interactive=True) as client:
client.run()
|
# Adapted from https://github.com/benthor/remotty/blob/master/socketclient.py
from select import select
import sys
import tty
import fcntl
import os
import termios
import threading
import time
import errno
import logging
log = logging.getLogger(__name__)
class SocketClient:
def __init__(self,
socket_in=None,
socket_out=None,
socket_err=None,
raw=True,
):
self.socket_in = socket_in
self.socket_out = socket_out
self.socket_err = socket_err
self.raw = raw
self.stdin_fileno = sys.stdin.fileno()
self.recv_threads = []
def __enter__(self):
self.create()
return self
def __exit__(self, type, value, trace):
self.destroy()
def create(self):
if os.isatty(sys.stdin.fileno()):
self.settings = termios.tcgetattr(sys.stdin.fileno())
else:
self.settings = None
if self.socket_in is not None:
self.set_blocking(sys.stdin, False)
self.set_blocking(sys.stdout, True)
self.set_blocking(sys.stderr, True)
if self.raw:
tty.setraw(sys.stdin.fileno())
def set_blocking(self, file, blocking):
fd = file.fileno()
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = (flags & ~os.O_NONBLOCK) if blocking else (flags | os.O_NONBLOCK)
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def run(self):
if self.socket_in is not None:
self.start_send_thread(self.socket_in, sys.stdin)
if self.socket_out is not None:
self.start_recv_thread(self.socket_out, sys.stdout)
if self.socket_err is not None:
self.start_recv_thread(self.socket_err, sys.stderr)
self.alive_check()
def start_send_thread(self, *args):
thread = threading.Thread(target=self.send_ws, args=args)
thread.daemon = True
thread.start()
def start_recv_thread(self, *args):
thread = threading.Thread(target=self.recv_ws, args=args)
thread.daemon = True
thread.start()
self.recv_threads.append(thread)
def recv_ws(self, socket, stream):
try:
while True:
chunk = socket.recv()
if chunk:
stream.write(chunk)
stream.flush()
else:
break
except Exception, e:
log.debug(e)
def send_ws(self, socket, stream):
while True:
r, w, e = select([stream.fileno()], [], [])
if r:
chunk = stream.read(1)
if chunk == '':
socket.send_close()
break
else:
try:
socket.send(chunk)
except Exception, e:
if hasattr(e, 'errno') and e.errno == errno.EPIPE:
break
else:
raise e
def alive_check(self):
while True:
time.sleep(1)
if not any(t.is_alive() for t in self.recv_threads):
break
def destroy(self):
if self.settings is not None:
termios.tcsetattr(self.stdin_fileno, termios.TCSADRAIN, self.settings)
sys.stdout.flush()
if __name__ == '__main__':
import websocket
if len(sys.argv) != 2:
sys.stderr.write("Usage: python socketclient.py WEBSOCKET_URL\n")
exit(1)
url = sys.argv[1]
socket = websocket.create_connection(url)
print "connected\r"
with SocketClient(socket, interactive=True) as client:
client.run()
|
Python
| 0
|
6aa92f13673ec49a67b5f9e2970c7751a852c19b
|
Fix typos in test_handler.py (#1953)
|
opentelemetry-sdk/tests/logs/test_handler.py
|
opentelemetry-sdk/tests/logs/test_handler.py
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from unittest.mock import Mock
from opentelemetry.sdk import trace
from opentelemetry.sdk.logs import LogEmitter, OTLPHandler
from opentelemetry.sdk.logs.severity import SeverityNumber
from opentelemetry.trace import INVALID_SPAN_CONTEXT
def get_logger(level=logging.NOTSET, log_emitter=None):
logger = logging.getLogger(__name__)
handler = OTLPHandler(level=level, log_emitter=log_emitter)
logger.addHandler(handler)
return logger
class TestOTLPHandler(unittest.TestCase):
def test_handler_default_log_level(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
# Make sure debug messages are ignored by default
logger.debug("Debug message")
self.assertEqual(emitter_mock.emit.call_count, 0)
# Assert emit gets called for warning message
logger.warning("Warning message")
self.assertEqual(emitter_mock.emit.call_count, 1)
def test_handler_custom_log_level(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(level=logging.ERROR, log_emitter=emitter_mock)
logger.warning("Warning message test custom log level")
# Make sure any log with level < ERROR is ignored
self.assertEqual(emitter_mock.emit.call_count, 0)
logger.error("Mumbai, we have a major problem")
logger.critical("No Time For Caution")
self.assertEqual(emitter_mock.emit.call_count, 2)
def test_log_record_no_span_context(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
# Assert emit gets called for warning message
logger.warning("Warning message")
args, _ = emitter_mock.emit.call_args_list[0]
log_record = args[0]
self.assertIsNotNone(log_record)
self.assertEqual(log_record.trace_id, INVALID_SPAN_CONTEXT.trace_id)
self.assertEqual(log_record.span_id, INVALID_SPAN_CONTEXT.span_id)
self.assertEqual(
log_record.trace_flags, INVALID_SPAN_CONTEXT.trace_flags
)
def test_log_record_trace_correlation(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
tracer = trace.TracerProvider().get_tracer(__name__)
with tracer.start_as_current_span("test") as span:
logger.critical("Critical message within span")
args, _ = emitter_mock.emit.call_args_list[0]
log_record = args[0]
self.assertEqual(log_record.body, "Critical message within span")
self.assertEqual(log_record.severity_text, "CRITICAL")
self.assertEqual(log_record.severity_number, SeverityNumber.FATAL)
span_context = span.get_span_context()
self.assertEqual(log_record.trace_id, span_context.trace_id)
self.assertEqual(log_record.span_id, span_context.span_id)
self.assertEqual(log_record.trace_flags, span_context.trace_flags)
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from unittest.mock import Mock
from opentelemetry.sdk import trace
from opentelemetry.sdk.logs import LogEmitter, OTLPHandler
from opentelemetry.sdk.logs.severity import SeverityNumber
from opentelemetry.trace import INVALID_SPAN_CONTEXT
def get_logger(level=logging.NOTSET, log_emitter=None):
logger = logging.getLogger(__name__)
handler = OTLPHandler(level=level, log_emitter=log_emitter)
logger.addHandler(handler)
return logger
class TestOTLPHandler(unittest.TestCase):
def test_handler_default_log_level(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
# Make sure debug messages are ignored by default
logger.debug("Debug message")
self.assertEqual(emitter_mock.emit.call_count, 0)
# Assert emit gets called for warning message
logger.warning("Wanrning message")
self.assertEqual(emitter_mock.emit.call_count, 1)
def test_handler_custom_log_level(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(level=logging.ERROR, log_emitter=emitter_mock)
logger.warning("Warning message test custom log level")
# Make sure any log with level < ERROR is ignored
self.assertEqual(emitter_mock.emit.call_count, 0)
logger.error("Mumbai, we have a major problem")
logger.critical("No Time For Caution")
self.assertEqual(emitter_mock.emit.call_count, 2)
def test_log_record_no_span_context(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
# Assert emit gets called for warning message
logger.warning("Wanrning message")
args, _ = emitter_mock.emit.call_args_list[0]
log_record = args[0]
self.assertIsNotNone(log_record)
self.assertEqual(log_record.trace_id, INVALID_SPAN_CONTEXT.trace_id)
self.assertEqual(log_record.span_id, INVALID_SPAN_CONTEXT.span_id)
self.assertEqual(
log_record.trace_flags, INVALID_SPAN_CONTEXT.trace_flags
)
def test_log_record_trace_correlation(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
tracer = trace.TracerProvider().get_tracer(__name__)
with tracer.start_as_current_span("test") as span:
logger.critical("Critical message within span")
args, _ = emitter_mock.emit.call_args_list[0]
log_record = args[0]
self.assertEqual(log_record.body, "Critical message within span")
self.assertEqual(log_record.severity_text, "CRITICAL")
self.assertEqual(log_record.severity_number, SeverityNumber.FATAL)
span_context = span.get_span_context()
self.assertEqual(log_record.trace_id, span_context.trace_id)
self.assertEqual(log_record.span_id, span_context.span_id)
self.assertEqual(log_record.trace_flags, span_context.trace_flags)
|
Python
| 0.0006
|
a25141dca6ce6f8ead88c43fa7f5726afb2a9dba
|
Fix currency dialog to match model changes
|
cbpos/mod/currency/views/dialogs/currency.py
|
cbpos/mod/currency/views/dialogs/currency.py
|
from PySide import QtGui
import cbpos
logger = cbpos.get_logger(__name__)
from cbpos.mod.currency.models import Currency
from cbpos.mod.currency.views import CurrenciesPage
class CurrencyDialog(QtGui.QWidget):
def __init__(self):
super(CurrencyDialog, self).__init__()
message = cbpos.tr.currency._("Set up the currencies you will be using. You will be able to change them later also.")
self.message = QtGui.QLabel(message)
self.form = CurrenciesPage()
buttonBox = QtGui.QDialogButtonBox()
self.doneBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Close)
self.doneBtn.pressed.connect(self.onDoneButton)
layout = QtGui.QVBoxLayout()
layout.setSpacing(10)
layout.addWidget(self.message)
layout.addWidget(self.form)
layout.addWidget(buttonBox)
self.setLayout(layout)
def onDoneButton(self):
session = cbpos.database.session()
currency = session.query(Currency).first()
if currency is None:
QtGui.QMessageBox.warning(self, cbpos.tr.currency._("No currency"),
cbpos.tr.currency._("You have to sest up at least one currency"),
QtGui.QMessageBox.Ok)
return
cbpos.config["mod.currency", "default"] = unicode(currency.id)
self.close()
cbpos.ui.show_default()
|
from PySide import QtGui
from cbpos.mod.currency.models.currency import Currency
import cbpos
class CurrencyDialog(QtGui.QWidget):
def __init__(self):
super(CurrencyDialog, self).__init__()
self.name = QtGui.QLineEdit()
self.symbol = QtGui.QLineEdit()
self.value = QtGui.QSpinBox()
self.value.setMinimum(0)
self.value.setSingleStep(1)
self.decimalPlaces = QtGui.QSpinBox()
self.decimalPlaces.setRange(0, 10)
self.decimalPlaces.setSingleStep(1)
self.digitGrouping = QtGui.QCheckBox()
buttonBox = QtGui.QDialogButtonBox()
self.okBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Ok)
self.okBtn.pressed.connect(self.onOkButton)
self.cancelBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Cancel)
self.cancelBtn.pressed.connect(self.onCancelButton)
rows = [["Name", self.name],
["Symbol", self.symbol],
["Value", self.value],
["Decimal Places", self.decimalPlaces],
["Digit Grouping", self.digitGrouping],
[buttonBox]]
form = QtGui.QFormLayout()
form.setSpacing(10)
[form.addRow(*row) for row in rows]
self.setLayout(form)
def onOkButton(self):
currency = Currency(name=self.name.text(),
symbol=self.symbol.text(),
value=self.value.text(),
decimal_places=self.decimalPlaces.value(),
digit_grouping=self.digitGrouping.isChecked()
)
session = cbpos.database.session()
session.add(currency)
session.commit()
cbpos.config["mod.currency", "default"] = unicode(currency.id)
self.close()
cbpos.ui.show_default()
def onCancelButton(self):
self.close()
|
Python
| 0.000001
|
0dacb5382e3099d0b9faa65e207c3be407747eeb
|
Use .array
|
chainerrl/optimizers/nonbias_weight_decay.py
|
chainerrl/optimizers/nonbias_weight_decay.py
|
# This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from chainer import cuda
class NonbiasWeightDecay(object):
"""Weight decay only for non-bias parameters.
This hook can be used just like chainer.optimizer_hooks.WeightDecay except
that this hook does not apply weight decay to bias parameters.
This hook assumes that all the bias parameters have the name of "b". Any
parameter whose name is "b" is considered as a bias and excluded from
weight decay.
"""
name = 'NonbiasWeightDecay'
call_for_each_param = True
timing = 'pre'
def __init__(self, rate):
self.rate = rate
def __call__(self, rule, param):
if param.name == 'b':
return
p, g = param.array, param.grad
if p is None or g is None:
return
with cuda.get_device_from_array(p) as dev:
if int(dev) == -1:
g += self.rate * p
else:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
kernel(p, self.rate, g)
|
# This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from chainer import cuda
class NonbiasWeightDecay(object):
"""Weight decay only for non-bias parameters.
This hook can be used just like chainer.optimizer_hooks.WeightDecay except
that this hook does not apply weight decay to bias parameters.
This hook assumes that all the bias parameters have the name of "b". Any
parameter whose name is "b" is considered as a bias and excluded from
weight decay.
"""
name = 'NonbiasWeightDecay'
call_for_each_param = True
timing = 'pre'
def __init__(self, rate):
self.rate = rate
def __call__(self, rule, param):
if param.name == 'b':
return
p, g = param.data, param.grad
if p is None or g is None:
return
with cuda.get_device_from_array(p) as dev:
if int(dev) == -1:
g += self.rate * p
else:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
kernel(p, self.rate, g)
|
Python
| 0
|
1006ac44b8ef9654976c1b57ccf20387877db1cb
|
Update results/title/forms100.py
|
results/title/forms100.py
|
results/title/forms100.py
|
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import os.path
from laboratory.settings import FONTS_FOLDER
from directions.models import Issledovaniya
from reportlab.platypus import Paragraph, Table, TableStyle, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm
from reportlab.lib.enums import TA_CENTER
def form_01(iss: Issledovaniya):
pdfmetrics.registerFont(TTFont('PTAstraSerifBold', os.path.join(FONTS_FOLDER, 'PTAstraSerif-Bold.ttf')))
styleSheet = getSampleStyleSheet()
style = styleSheet["Normal"]
style.fontName = "PTAstraSerifBold"
style.fontSize = 12
style.leading = 8
style.spaceAfter = 0 * mm
style.alignment = TA_CENTER
hospital = iss.doc_confirmation.hospital
hospital_short_title = hospital.safe_short_title
hospital_address = hospital.safe_address
hospital_ogrn = hospital.safe_ogrn
data = [
[Paragraph("Министерство здравоохранения Российской Федерации", style)],
[Paragraph(hospital_short_title, style)],
[Paragraph(hospital_address, style)],
[Paragraph(f"Код ОГРН {hospital_ogrn}", style)],
[Spacer(1, 1 * mm)],
[Paragraph("<u>ВЫПИСКА ИЗ АМБУЛАТОРНОЙ КАРТЫ</u>", style)],
]
t = Table(data, colWidths=180 * mm)
t.setStyle(
TableStyle(
[
('ALIGN', (0, 0), (-1, -1), 'CENTER'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]
)
)
return t
|
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import os.path
from laboratory.settings import FONTS_FOLDER
from directions.models import Issledovaniya
from reportlab.platypus import Paragraph, Table, TableStyle, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm
from reportlab.lib.enums import TA_CENTER
def form_01(iss: Issledovaniya):
pdfmetrics.registerFont(TTFont('PTAstraSerifBold', os.path.join(FONTS_FOLDER, 'PTAstraSerif-Bold.ttf')))
styleSheet = getSampleStyleSheet()
style = styleSheet["Normal"]
style.fontName = "PTAstraSerifBold"
style.fontSize = 12
style.leading = 8
style.spaceAfter = 0 * mm
style.alignment = TA_CENTER
hospital = iss.doc_confirmation.hospital
hospital_short_title = hospital.safe_short_title
hospital_address = hospital.safe_address
hospital_ogrn = hospital.safe_ogrn
data = [
[Paragraph("Министерство здравоохранения Российской Федерации", style)],
[Paragraph(hospital_short_title, style)],
[Paragraph(hospital_address, style)],
[Paragraph(f"Код ОГРН {hospital_ogrn}", style)],
[Spacer(1, 1 * mm)],
[Paragraph("<u>ВЫПИСКА ИЗ АМБУЛАТОРНОЙ КАРТЫ</u>", style)],
]
t = Table(data, colWidths= 180 * mm)
t.setStyle(
TableStyle(
[
('ALIGN', (0, 0), (-1, -1), 'CENTER'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]
)
)
return t
|
Python
| 0
|
4ea9a0bc8b7ef47dd98c155b3f35440fe88b564a
|
Fix output order, add uri attribute
|
resync/capability_list.py
|
resync/capability_list.py
|
"""ResourceSync Capability List object
An Capability List is a set of capabilitys with some metadata for
each capability. The Capability List object may also contain metadata
and links like other lists.
"""
import collections
from resource import Resource
from resource_set import ResourceSet
from list_base import ListBase
from sitemap import Sitemap
class CapabilitySet(ResourceSet):
"""Class for storage of resources in a Capability List
Extends the ResourceSet to add checks to ensure that there are
never two entries for the same resource, and that values are
returned in the canonical order.
"""
def __init__(self):
self.order = [ 'resourcelist', 'resourcedump',
'changelist', 'changedump',
'resourcelist-archive', 'resourcedump-archive',
'changelist-archive', 'changedump-archive' ]
def __iter__(self):
"""Iterator over all the resources in capability order
Deals with the case of unknown capabilities or duplicate entries
by using uri order for duplicates and adding any unknown ones
at the end
"""
self._iter_next_list = []
# look through all resources and build capability to uri index
uris = {}
for uri in self.keys():
cap = self[uri].capability
if (cap not in uris):
uris[cap]=[]
uris[cap].append(uri)
# build list or uris in defined order for iterator
for cap in self.order:
if (cap in uris):
for uri in sorted(uris[cap]):
self._iter_next_list.append(uri)
del uris[cap]
# add any left over capabilities we don't know about in alphabetical order
for cap in uris:
for uri in sorted(uris[cap]):
self._iter_next_list.append(uri)
self._iter_next_list.reverse()
return(iter(self._iter_next, None))
def _iter_next(self):
if (len(self._iter_next_list)>0):
return(self[self._iter_next_list.pop()])
else:
return(None)
class CapabilityList(ListBase):
"""Class representing a Capability List
An Capability List will admit only one resource with any given
URI. The iterator over resources is expected to return them in
canonical order of capability names as defined in main specification
section 7 and archives specification section 6.
"""
def __init__(self, resources=None, md=None, ln=None, uri=None):
if (resources is None):
resources = CapabilitySet()
super(CapabilityList, self).__init__(resources=resources, md=md, ln=ln, uri=uri)
self.capability_name='capabilitylist'
self.capability_md='capabilitylist'
def add(self, resource, replace=False):
"""Add a resource or an iterable collection of resources
Will throw a ValueError if the resource (ie. same uri) already
exists in the capability_list, unless replace=True.
"""
if isinstance(resource, collections.Iterable):
for r in resource:
self.resources.add(r,replace)
else:
self.resources.add(resource,replace)
def add_capability(self,capability=None,uri=None,name=None):
"""Specific add function for capabilities
Takes either:
- a capability object (derived from ListBase) as the first argument
from which the capability name is extracted, and the URI if given
- or a plain name string
and
- the URI of the capability
"""
if (capability is not None):
name = capability.capability_md
if (capability.uri is not None):
uri=capability.uri
self.add( Resource(uri=uri,capability=name) )
def has_capability(self,name=None):
"""True if the Capability List includes the named capability"""
return( self.capability(name) is not None )
def capability(self,name=None):
"""Return information about the requested capability from this list
Will return None if there is no information about the requested capability
"""
for r in self.resources:
if (r.capability == name):
return(r)
return(None)
|
"""ResourceSync Capability List object
An Capability List is a set of capabilitys with some metadata for
each capability. The Capability List object may also contain metadata
and links like other lists.
"""
import collections
from resource import Resource
from resource_set import ResourceSet
from list_base import ListBase
from sitemap import Sitemap
class CapabilitySet(ResourceSet):
"""Class for storage of resources in a Capability List
Extends the ResourceSet to add checks to ensure that there are
never two entries for the same resource, and that values are
returned in the canonical order.
"""
def __init__(self):
self.order = [ 'resourcelist', 'resourcedump',
'changelist', 'changedump',
'resourcelist-archive', 'resourcedump-archive',
'changelist-archive', 'changedump-archive' ]
def __iter__(self):
"""Iterator over all the resources in capability order
Deals with the case of unknown capabilities or duplicate entries
by using uri order for duplicates and adding any unknown ones
at the end
"""
self._iter_next_list = []
# look through all resources and build capability to uri index
uris = {}
for uri in self.keys():
cap = self[uri].capability
if (cap not in uris):
uris[cap]=[]
uris[cap].append(uri)
# now build list or uris in order for iterator
for cap in uris:
for uri in sorted(uris[cap]):
self._iter_next_list.append(uri)
self._iter_next_list.reverse()
return(iter(self._iter_next, None))
def _iter_next(self):
if (len(self._iter_next_list)>0):
return(self[self._iter_next_list.pop()])
else:
return(None)
class CapabilityList(ListBase):
"""Class representing a Capability List
An Capability List will admit only one resource with any given
URI. The iterator over resources is expected to return them in
canonical order of capability names as defined in main specification
section 9.2 and archives specification section 6.
"""
def __init__(self, resources=None, md=None, ln=None):
if (resources is None):
resources = CapabilitySet()
super(CapabilityList, self).__init__(resources=resources, md=md, ln=ln)
self.capability_name='capabilitylist'
self.capability_md='capabilitylist'
self.md['from']=None #usually don't want a from date
def add(self, resource, replace=False):
"""Add a resource or an iterable collection of resources
Will throw a ValueError if the resource (ie. same uri) already
exists in the capability_list, unless replace=True.
"""
if isinstance(resource, collections.Iterable):
for r in resource:
self.resources.add(r,replace)
else:
self.resources.add(resource,replace)
def add_capability(self,capability=None,uri=None,name=None):
"""Specific add function for capabilities
Takes either:
- a capability object (derived from ListBase) as the first argument
from which the capability name is extracted
- or a plain name string
and
- the URI of the capability
"""
if (capability is not None):
name = capability.capability_md
self.add( Resource(uri=uri,capability=name) )
def has_capability(self,name=None):
"""True if the Capability List includes the named capability"""
return( self.capability(name) is not None )
def capability(self,name=None):
"""Return information about the requested capability from this list
Will return None if there is no information about the requested capability
"""
for r in self.resources:
if (r.capability == name):
return(r)
return(None)
|
Python
| 0.000022
|
f51369999441cb85ed730488e943580d707e8856
|
use relative imports in parser/__init__.py
|
rflint/parser/__init__.py
|
rflint/parser/__init__.py
|
from .parser import (SuiteFolder, ResourceFile, SuiteFile, RobotFactory,
Testcase, Keyword, Row, Statement, TestcaseTable, KeywordTable)
from .tables import DefaultTable, SettingTable, UnknownTable, VariableTable, MetadataTable, RobotTable
|
from parser import ResourceFile, SuiteFile, RobotFileFactory, Testcase, Keyword, Row, Statement
from tables import DefaultTable, SettingTable, UnknownTable, VariableTable, MetadataTable, RobotTable
|
Python
| 0.000015
|
650dae4ce3bd154dba442cf0476651e0e949b092
|
add default
|
controller/management/commands/2xmp.py
|
controller/management/commands/2xmp.py
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from progressbar import ProgressBar, SimpleProgress
import os, sys, subprocess
from django.conf import settings
from emma.core.metadata import Metadata
class Command(BaseCommand):
"""
Migrates keywords from the IPTC framework to the
XMP framework.
Also removes iptc keywords. When -keywords is requested,
by exiftool, only -xmp:keywords will be returned.
"""
exiftool_args_path = os.path.join(settings.APP_ROOT,
'project/script/exiftool/args/iptckw2xmpkw.arg')
option_list = BaseCommand.option_list + (
make_option('-r', '--for-real',
action='store_true',
dest='action',
default=False,
help='Do the action.'),
make_option('-p', '--path',
dest='path',
default=settings.APP_CONTENT_ROOT,
help='Enter path'),
)
def handle(self, *args, **options):
action = options.get('action', False)
path = options.get('path', settings.APP_CONTENT_ROOT)
if not action:
print 'this is a dry run, only the subprocess command will be printed.'
errors = []
for root, dirs, files in os.walk(path):
for f in files:
p = os.path.join(root, f)
arg = self.exiftool_args_path
# enter keyowrds migration script, and remove iptc keywords
cmd = ['exiftool', '-overwrite_original_in_place', '-@', arg, p]
if action:
r = subprocess.call(cmd)
# the call should return 0, if not write to errors list.
if r:
errors.append(p)
print 'error for file %s' % p
else:
print 'successfully migrated keywords for %s' % p
else:
print cmd
if errors:
print errors
return 'process complete, but with errors'
else:
return 'complete'
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from progressbar import ProgressBar, SimpleProgress
import os, sys, subprocess
from django.conf import settings
from emma.core.metadata import Metadata
class Command(BaseCommand):
"""
Migrates keywords from the IPTC framework to the
XMP framework.
Also removes iptc keywords. When -keywords is requested,
by exiftool, only -xmp:keywords will be returned.
"""
exiftool_args_path = os.path.join(settings.APP_ROOT,
'project/script/exiftool/args/iptckw2xmpkw.arg')
option_list = BaseCommand.option_list + (
make_option('-r', '--for-real',
action='store_true',
dest='action',
default=False,
help='Do the action.'),
make_option('-p', '--path',
dest='path',
help='Enter path'),
)
def handle(self, *args, **options):
action = options.get('action', False)
path = options.get('path', settings.APP_CONTENT_ROOT)
if not action:
print 'this is a dry run, only the subprocess command will be printed.'
errors = []
for root, dirs, files in os.walk(path):
for f in files:
p = os.path.join(root, f)
arg = self.exiftool_args_path
# enter keyowrds migration script, and remove iptc keywords
cmd = ['exiftool', '-overwrite_original_in_place', '-@', arg, p]
if action:
r = subprocess.call(cmd)
# the call should return 0, if not write to errors list.
if r:
errors.append(p)
print 'error for file %s' % p
else:
print 'successfully migrated keywords for %s' % p
else:
print cmd
if errors:
print errors
return 'process complete, but with errors'
else:
return 'complete'
|
Python
| 0.000002
|
780f28cd91f92fea0dddee2b62bc659d244a8270
|
Change create sample code to select indexes by eval set
|
create_sample.py
|
create_sample.py
|
# importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.1)
i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
s = round(131209 * 0.1)
j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
# create a sample of test orders
s = round(75000 * 0.1)
k = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="test"].index), s))
orders_df.loc[k,:].to_csv("Data/orders_test_sample.csv", index = False)
# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False)
# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False)
|
# importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.1)
i = sorted(random.sample(range(1,3214874), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
s = round(131209 * 0.1)
j = sorted(random.sample(range(1,131209), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
# create a sample of test orders
s = round(75000 * 0.1)
k = sorted(random.sample(range(1,75000), s))
orders_df.loc[k,:].to_csv("Data/orders_test_sample.csv", index = False)
# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False)
# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False)
|
Python
| 0
|
fee30c4017da4d41a9487d961ba543d2d1e20e85
|
Add explicit Note join relationship on NoteContent model. (also remove extraneous comments on old date format)
|
tuhi_flask/models.py
|
tuhi_flask/models.py
|
# Copyright 2015 icasdri
#
# This file is part of tuhi-flask.
#
# tuhi-flask is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tuhi-flask is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with tuhi-flask. If not, see <http://www.gnu.org/licenses/>.
from flask import current_app as app
from sqlalchemy import Column, Integer, String, CHAR, Text, Boolean, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from werkzeug.security import generate_password_hash, check_password_hash
from tuhi_flask.database import Base
class User(Base):
__tablename__ = 'users'
user_id = Column(Integer, primary_key=True)
username = Column(String, unique=True, index=True)
password_hash = Column(String)
def __init__(self, username, password):
self.username = username
self.set_password(password)
def set_password(self, password):
self.password_hash = generate_password_hash(password,
method=app.config['PASSWORD_HASH_METHOD'],
salt_length=app.config['PASSWORD_SALT_LENGTH'])
def check_password(self, password):
return check_password_hash(self.password_hash, password)
class Note(Base):
__tablename__ = 'notes'
note_id = Column(CHAR(36), primary_key=True)
user_id = Column(Integer, ForeignKey('users.user_id'), index=True)
title = Column(String)
deleted = Column(Boolean, default=False)
date_modified = Column(Integer, index=True) # Seconds from epoch
class NoteContent(Base):
__tablename__ = 'note_contents'
note_content_id = Column(CHAR(36), primary_key=True)
note_id = Column(CHAR(36), ForeignKey('notes.note_id'), index=True)
data = Column(Text)
date_created = Column(Integer, index=True) # Seconds from epoch
note = relationship("Note")
|
# Copyright 2015 icasdri
#
# This file is part of tuhi-flask.
#
# tuhi-flask is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tuhi-flask is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with tuhi-flask. If not, see <http://www.gnu.org/licenses/>.
from flask import current_app as app
from sqlalchemy import Column, Integer, String, CHAR, Text, Boolean, DateTime, ForeignKey
from werkzeug.security import generate_password_hash, check_password_hash
from tuhi_flask.database import Base
class User(Base):
__tablename__ = 'users'
user_id = Column(Integer, primary_key=True)
username = Column(String, unique=True, index=True)
password_hash = Column(String)
def __init__(self, username, password):
self.username = username
self.set_password(password)
def set_password(self, password):
self.password_hash = generate_password_hash(password,
method=app.config['PASSWORD_HASH_METHOD'],
salt_length=app.config['PASSWORD_SALT_LENGTH'])
def check_password(self, password):
return check_password_hash(self.password_hash, password)
class Note(Base):
__tablename__ = 'notes'
note_id = Column(CHAR(36), primary_key=True)
user_id = Column(Integer, ForeignKey('users.user_id'), index=True)
title = Column(String)
deleted = Column(Boolean, default=False)
date_modified = Column(Integer, index=True) # Seconds from epoch
# date_modified = Column(DateTime) # May need to use Integer from epoch here
class NoteContent(Base):
__tablename__ = 'note_contents'
note_content_id = Column(CHAR(36), primary_key=True)
note_id = Column(CHAR(36), ForeignKey('notes.note_id'), index=True)
data = Column(Text)
date_created = Column(Integer, index=True) # Seconds from epoch
# date_created = Column(DateTime) # May need to use Integer from epoch here
|
Python
| 0
|
ee76ae4f41be17a0f6a482273e99783df8212004
|
Reconfigure key repeat (should change to be configurable)
|
riker/worker/utils.py
|
riker/worker/utils.py
|
from logging import getLogger
import tempfile
from threading import Thread
import lirc
from django.conf import settings
from systemstate.models import RemoteButton
from systemstate.utils import push_button
LOGGER = getLogger(__name__)
LIRCRC_TEMPLATE = '''
begin
prog = {lirc_name}
button = {key_name}
config = {key_name}
repeat = 2
delay = 3
end
'''
class LircListener(Thread):
def __init__(self, lirc_name):
self.lirc_name = lirc_name
self.lircrc_filename = create_lircrc_tempfile()
super(LircListener, self).__init__()
def run(self):
lirc.init(self.lirc_name, self.lircrc_filename)
listen(self.lirc_name, self.lircrc_filename)
def listen(lirc_name, lircrc_filename, callback=None):
lirc.init(lirc_name, lircrc_filename)
callback = callback or push_button
while True:
for key_code in lirc.nextcode():
LOGGER.warning(key_code)
callback(key_code)
def create_lircrc_tempfile(lirc_name):
buttons = RemoteButton.objects.all().values_list('lirc_code', flat=True)
with tempfile.NamedTemporaryFile(delete=False) as lircrc_file:
lircrc_file.write(generate_lircrc(lirc_name, buttons).encode('ascii'))
return lircrc_file.name
def generate_lircrc(name, buttons):
return '\n'.join(
LIRCRC_TEMPLATE.format(
lirc_name=name,
key_name=button,
) for button in buttons
)
|
from logging import getLogger
import tempfile
from threading import Thread
import lirc
from django.conf import settings
from systemstate.models import RemoteButton
from systemstate.utils import push_button
LOGGER = getLogger(__name__)
LIRCRC_TEMPLATE = '''
begin
prog = {lirc_name}
button = {key_name}
config = {key_name}
end
'''
class LircListener(Thread):
def __init__(self, lirc_name):
self.lirc_name = lirc_name
self.lircrc_filename = create_lircrc_tempfile()
super(LircListener, self).__init__()
def run(self):
lirc.init(self.lirc_name, self.lircrc_filename)
listen(self.lirc_name, self.lircrc_filename)
def listen(lirc_name, lircrc_filename, callback=None):
lirc.init(lirc_name, lircrc_filename)
callback = callback or push_button
while True:
for key_code in lirc.nextcode():
LOGGER.warning(key_code)
callback(key_code)
def create_lircrc_tempfile(lirc_name):
buttons = RemoteButton.objects.all().values_list('lirc_code', flat=True)
with tempfile.NamedTemporaryFile(delete=False) as lircrc_file:
lircrc_file.write(generate_lircrc(lirc_name, buttons).encode('ascii'))
return lircrc_file.name
def generate_lircrc(name, buttons):
return '\n'.join(
LIRCRC_TEMPLATE.format(
lirc_name=name,
key_name=button,
) for button in buttons
)
|
Python
| 0
|
0827911184bf43a6dd50712444d3f9385a64eb31
|
support combining bigrams
|
constraintWriterTool.py
|
constraintWriterTool.py
|
#!/usr/bin/env python
from autosuggest import *
import os, sys
from sys import argv, exit
def printUsage():
print("Usage: constraintWriterTool action [options]\nActions:\n\tsuggest\t\tbigramfile word\n\tsuggestPfx\tbigramfile word prefix\n\tinWhitelist\tbigramfile word\n\tinBlacklist\tbigramfile word\n\tcompile\t\tcorpus bigramfile\n\tcompileMulti\tbigramfile corpus [corpus_2 ... corpus_n]\n\tcombine\t\tbigramfile_out [bigramfile_in ... ]\n")
exit(1)
if len(argv)<4:
printUsage()
world={}
if argv[1] in ["suggest", "suggestPfx", "inWhitelist", "inBlacklist"]:
def inBlacklist(world, word):
return checkWhiteList(world, word, True)
def pfx(world, word):
return bigramSuggestPfx(world, word, argv[4])
funcs={"suggest":bigramSuggest, "inWhitelist":checkWhiteList, "inBlacklist":inBlacklist, "suggestPfx":pfx}
world=loadBigrams(argv[2])
print(funcs[argv[1]](world, argv[3]))
exit(0)
elif argv[1]=="compile":
with open(argv[2], 'r') as f:
saveBigrams(corpus2bigrams(f.read()), argv[3])
elif argv[1]=="compileMulti":
corpora=[]
for fname in argv[3:]:
with open(fname, 'r') as f:
corpora.append(f.read())
saveBigrams(corpus2bigrams("\n".join(corpora)), argv[2])
elif argv[1]=="combine":
bigrams={}
for fname in argv[3:]:
world=loadBigrams(fname)
for w1 in world.keys():
if not (w1 in bigrams):
bigrams[w1]={}
for w2 in world[w1].keys():
if not w2 in bigrams[w1]:
bigrams[w1][w2]=0
bigrams[w1][w2]+=world[w1][w2]
saveBigrams(bigrams, argv[2])
|
#!/usr/bin/env python
from autosuggest import *
import os, sys
from sys import argv, exit
def printUsage():
print("Usage: constraintWriterTool action [options]\nActions:\n\tsuggest\t\tbigramfile word\n\tsuggestPfx\t\tbigramfile word prefix\n\tinWhitelist\tbigramfile word\n\tinBlacklist\tbigramfile word\n\tcompile\t\tcorpus bigramfile\n\tcompileMulti\tbigramfile corpus [corpus_2 ... corpus_n]\n")
exit(1)
if len(argv)<4:
printUsage()
world={}
if argv[1] in ["suggest", "suggestPfx", "inWhitelist", "inBlacklist"]:
def inBlacklist(world, word):
return checkWhiteList(world, word, True)
def pfx(world, word):
return bigramSuggestPfx(world, word, argv[4])
funcs={"suggest":bigramSuggest, "inWhitelist":checkWhiteList, "inBlacklist":inBlacklist, "suggestPfx":pfx}
world=loadBigrams(argv[2])
print(funcs[argv[1]](world, argv[3]))
exit(0)
elif argv[1]=="compile":
with open(argv[2], 'r') as f:
saveBigrams(corpus2bigrams(f.read()), argv[3])
elif argv[1]=="compileMulti":
corpora=[]
for fname in argv[3:]:
with open(fname, 'r') as f:
corpora.append(f.read())
saveBigrams(corpus2bigrams("\n".join(corpora)), argv[2])
|
Python
| 0
|
23939ace63c12391dc07a3419a55ca573ee5dd73
|
Update debug output and remove unnecessary assignment
|
righteous/api/server_template.py
|
righteous/api/server_template.py
|
import re
from urllib import urlencode
from logging import getLogger
import omnijson as json
from .. import config
from .base import _request, debug
log = getLogger(__name__)
def list_server_templates():
"""
Lists ServerTemplates
:return: list of dicts of server information with the following keys:
::
[u'description', u'is_head_version', u'created_at', u'updated_at',
u'href', u'version', u'nickname']
"""
response = _request('/server_templates.js')
return json.loads(response.content)
def _extract_template_id(template_href):
"""
Returns the template id from an href
:param template_href: String representing the server template
href
:return: String of the template_id or None
"""
result = re.match(config.account_url + config.settings.account_id +
'/ec2_server_templates/(\d+)',
template_href)
if result:
return result.groups()[0]
return None
def server_template_info(template_href):
"""
Details ServerTemplate information
:param template_href: String representing the server template
href
:return: dict of server template information, with the following keys:
::
[u'description', u'is_head_version', u'created_at', u'updated_at',
u'href', u'version', u'nickname']
"""
response = _request('/server_templates/%s.js' %
_extract_template_id(template_href))
template = json.loads(response.content)
if template:
return template
else:
return None
def create_server_template(nickname, description, multi_cloud_image_href):
"""
Create a new ServerTemplate
Returns a tuple of operation status, href of the created, started server
:param nickname: String of the template nickname
:param description: String describing the ServerTemplate
:param multi_cloud_image_href: String of the template image href
:return: tuple of operation success and new server template href
"""
location = None
create_data = {
'server_template[nickname]': nickname,
'server_template[description]': description,
'server_template[multi_cloud_image_href]': multi_cloud_image_href,
}
response = _request('/server_templates', method='POST',
body=urlencode(create_data))
success = response.status_code == 201
if success:
location = response.headers.get('location')
debug('Created server template %s: %s (%s:%s)' % (nickname, location,
response.status_code, response.content))
# TODO: error responses
return success, location
def delete_server_template(server_template_href):
"""
Deletes a ServerTemplate
:param server_template_href: String of the ServerTemplate to delete
:return: `requests.Response`
"""
return _request('/server_templates/%s.js' %
_extract_template_id(server_template_href),
method='DELETE').status_code == 200
|
import re
from urllib import urlencode
from logging import getLogger
import omnijson as json
from .. import config
from .base import _request, debug
log = getLogger(__name__)
def list_server_templates():
"""
Lists ServerTemplates
:return: list of dicts of server information with the following keys:
::
[u'description', u'is_head_version', u'created_at', u'updated_at',
u'href', u'version', u'nickname']
"""
response = _request('/server_templates.js')
return json.loads(response.content)
def _extract_template_id(template_href):
"""
Returns the template id from an href
:param template_href: String representing the server template
href
:return: String of the template_id or None
"""
result = re.match(config.account_url + config.settings.account_id +
'/ec2_server_templates/(\d+)',
template_href)
if result:
return result.groups()[0]
return None
def server_template_info(template_href):
"""
Details ServerTemplate information
:param template_href: String representing the server template
href
:return: dict of server template information, with the following keys:
::
[u'description', u'is_head_version', u'created_at', u'updated_at',
u'href', u'version', u'nickname']
"""
response = _request('/server_templates/%s.js' %
_extract_template_id(template_href))
template = json.loads(response.content)
if template:
return template
else:
return None
def create_server_template(nickname, description, multi_cloud_image_href):
"""
Create a new ServerTemplate
Returns a tuple of operation status, href of the created, started server
:param nickname: String of the template nickname
:param description: String describing the ServerTemplate
:param multi_cloud_image_href: String of the template image href
:return: tuple of operation success and new server template href
"""
location = None
success = False
create_data = {
'server_template[nickname]': nickname,
'server_template[description]': description,
'server_template[multi_cloud_image_href]': multi_cloud_image_href,
}
response = _request('/server_templates', method='POST',
body=urlencode(create_data))
success = response.status_code == 201
if success:
location = response.headers.get('location')
debug('Created ServerTemplate %s: %s (%s:%s)' % (nickname, location,
response.status_code, response.content))
# TODO: error responses
return success, location
def delete_server_template(server_template_href):
"""
Deletes a ServerTemplate
:param server_template_href: String of the ServerTemplate to delete
:return: `requests.Response`
"""
return _request('/server_templates/%s.js' %
_extract_template_id(server_template_href),
method='DELETE').status_code == 200
|
Python
| 0
|
1f19fa52e40db1f28d620aa8bf75745e814c0f81
|
Remove unused import
|
cogs/fun.py
|
cogs/fun.py
|
import discord
from discord.ext import commands
from utils.messages import ColoredEmbed
class Fun:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def xkcd(self, ctx):
"""See the latest XKCD comic."""
async with self.bot.session.get('https://xkcd.com/info.0.json') as r:
if r.status == 200:
json = await r.json()
embed = ColoredEmbed(title=json['title'],
description=json['alt'])
embed.set_image(url=json['img'])
await ctx.send(embed=embed)
@commands.command()
async def lenny(self, ctx):
"""( ͡° ͜ʖ ͡°)"""
await ctx.send('( ͡° ͜ʖ ͡°)')
def setup(bot):
bot.add_cog(Fun(bot))
|
import random
import discord
from discord.ext import commands
from utils.messages import ColoredEmbed
class Fun:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def xkcd(self, ctx):
"""See the latest XKCD comic."""
async with self.bot.session.get('https://xkcd.com/info.0.json') as r:
if r.status == 200:
json = await r.json()
embed = ColoredEmbed(title=json['title'],
description=json['alt'])
embed.set_image(url=json['img'])
await ctx.send(embed=embed)
@commands.command()
async def lenny(self, ctx):
"""( ͡° ͜ʖ ͡°)"""
await ctx.send('( ͡° ͜ʖ ͡°)')
def setup(bot):
bot.add_cog(Fun(bot))
|
Python
| 0.000001
|
14eaff694912320296412f2e4ca51072c5dddf49
|
add unit_testing_only decorator
|
corehq/apps/userreports/dbaccessors.py
|
corehq/apps/userreports/dbaccessors.py
|
from corehq.apps.domain.dbaccessors import get_docs_in_domain_by_class
from corehq.apps.domain.models import Domain
from corehq.util.test_utils import unit_testing_only
def get_number_of_report_configs_by_data_source(domain, data_source_id):
"""
Return the number of report configurations that use the given data source.
"""
from corehq.apps.userreports.models import ReportConfiguration
return ReportConfiguration.view(
'userreports/report_configs_by_data_source',
reduce=True,
key=[domain, data_source_id]
).one()['value']
@unit_testing_only
def get_all_report_configs():
all_domains = Domain.get_all()
for domain in all_domains:
for report_config in get_report_configs_for_domain(domain.name):
yield report_config
def get_report_configs_for_domain(domain):
from corehq.apps.userreports.models import ReportConfiguration
return sorted(
get_docs_in_domain_by_class(domain, ReportConfiguration),
key=lambda report: report.title,
)
|
from django.conf import settings
from dimagi.utils.couch.database import iter_docs
from corehq.apps.domain.dbaccessors import get_docs_in_domain_by_class
from corehq.apps.domain.models import Domain
def get_number_of_report_configs_by_data_source(domain, data_source_id):
"""
Return the number of report configurations that use the given data source.
"""
from corehq.apps.userreports.models import ReportConfiguration
return ReportConfiguration.view(
'userreports/report_configs_by_data_source',
reduce=True,
key=[domain, data_source_id]
).one()['value']
def get_all_report_configs():
assert settings.UNIT_TESTING
all_domains = Domain.get_all()
for domain in all_domains:
for report_config in get_report_configs_for_domain(domain.name):
yield report_config
def get_report_configs_for_domain(domain):
from corehq.apps.userreports.models import ReportConfiguration
return sorted(
get_docs_in_domain_by_class(domain, ReportConfiguration),
key=lambda report: report.title,
)
|
Python
| 0
|
e595d823e303a6db0a9c7e24f6a9d1644615009c
|
Bump version of CaptchaService.py
|
module/plugins/internal/CaptchaService.py
|
module/plugins/internal/CaptchaService.py
|
# -*- coding: utf-8 -*-
from module.plugins.internal.Captcha import Captcha
class CaptchaService(Captcha):
__name__ = "CaptchaService"
__type__ = "captcha"
__version__ = "0.35"
__status__ = "stable"
__description__ = """Base anti-captcha service plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def init(self):
self.key = None #: Last key detected
#@TODO: Recheck in 0.4.10
def retrieve_key(self, data):
if self.detect_key(data):
return self.key
else:
self.fail(_("%s key not found") % self.__name__)
def retrieve_data(self):
return self.pyfile.plugin.data or self.pyfile.plugin.last_html or ""
def detect_key(self, data=None):
raise NotImplementedError
def challenge(self, key=None, data=None):
raise NotImplementedError
def result(self, server, challenge):
raise NotImplementedError
|
# -*- coding: utf-8 -*-
from module.plugins.internal.Captcha import Captcha
class CaptchaService(Captcha):
__name__ = "CaptchaService"
__type__ = "captcha"
__version__ = "0.34"
__status__ = "stable"
__description__ = """Base anti-captcha service plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def init(self):
self.key = None #: Last key detected
#@TODO: Recheck in 0.4.10
def retrieve_key(self, data):
if self.detect_key(data):
return self.key
else:
self.fail(_("%s key not found") % self.__name__)
def retrieve_data(self):
return self.pyfile.plugin.data or self.pyfile.plugin.last_html or ""
def detect_key(self, data=None):
raise NotImplementedError
def challenge(self, key=None, data=None):
raise NotImplementedError
def result(self, server, challenge):
raise NotImplementedError
|
Python
| 0.003249
|
5efc40cd9be0c212f142d7469a9bf6f44da0827a
|
add story support in client with -s boolean operator
|
instapy_cli/__main__.py
|
instapy_cli/__main__.py
|
import sys
from platform import python_version
from instapy_cli.cli import InstapyCli as client
from optparse import OptionParser
import pkg_resources # part of setuptools
version = pkg_resources.require('instapy_cli')[0].version
def main(args=None):
print('instapy-cli ' + version + ' | python ' + python_version())
parser = OptionParser(usage="usage: %prog [options]")
parser.add_option('-u', dest='username', help='username')
parser.add_option('-p', dest='password', help='password')
parser.add_option('-f', dest='file', help='file path or url')
parser.add_option('-t', dest='caption', help='caption text')
parser.add_option('-s', dest='story', action='store_true', help='publish to story')
# parser.add_option('-h', dest='help', help='help')
(options, args) = parser.parse_args(args)
if args is None or (
not options.username and
not options.password and
not options.file and
not (options.caption or options.story)
):
print('[USE] instapy -u USR -p PSW -f FILE/LINK -t \'TEXT CAPTION\'')
print('\nFor other reference go to >> https://github.com/b3nab/instapy-cli')
return
if not options.username:
parser.error('Username is required')
password = options.password
if not options.password:
import getpass
password = getpass.getpass()
if not options.file:
parser.error('File path or url link is required to create a media to upload')
story = options.story
if not story:
story = False
with client(options.username, password) as cli:
text = options.caption or ''
cli.upload(options.file, text, story)
if __name__ == '__main__':
main()
|
import sys
from platform import python_version
from instapy_cli.cli import InstapyCli as client
from optparse import OptionParser
import pkg_resources # part of setuptools
version = pkg_resources.require('instapy_cli')[0].version
'''
TODO:
- use instapy_cli.media to download image link and use it for upload and configure_photo
- rewrite main to support file and links for media
'''
def main(args=None):
welcome_msg = 'instapy-cli'
print('instapy ' + version + ' | python ' + python_version())
# cli = client()
# cli.loop(args)
parser = OptionParser(usage="usage: %prog [options]")
parser.add_option('-u', dest='username', help='username')
parser.add_option('-p', dest='password', help='password')
parser.add_option('-f', dest='file', help='file path or url')
parser.add_option('-t', dest='caption', help='caption text')
# parser.add_option('-h', dest='help', help='help')
(options, args) = parser.parse_args(args)
if args is None or (
not options.username and
not options.password and
not options.file and
not options.caption
):
print('[USE] instapy -u USR -p PSW -f FILE/LINK -t \'TEXT CAPTION\'')
print('\nFor other reference go to >> https://github.com/b3nab/instapy-cli')
return
if not options.username:
parser.error('Username is required')
password = options.password
if not options.password:
import getpass
password = getpass.getpass()
if not options.file:
parser.error('File path or url link is required to create a media to upload')
with client(options.username, password) as cli:
text = options.caption or ''
cli.upload(options.file, text)
if __name__ == '__main__':
main()
|
Python
| 0
|
6341f351aab0ff510fcf1d9ce135be680763a971
|
Fix comments in backtracking/coloring.py (#4857)
|
backtracking/coloring.py
|
backtracking/coloring.py
|
"""
Graph Coloring also called "m coloring problem"
consists of coloring a given graph with at most m colors
such that no adjacent vertices are assigned the same color
Wikipedia: https://en.wikipedia.org/wiki/Graph_coloring
"""
def valid_coloring(
neighbours: list[int], colored_vertices: list[int], color: int
) -> bool:
"""
For each neighbour check if the coloring constraint is satisfied
If any of the neighbours fail the constraint return False
If all neighbours validate the constraint return True
>>> neighbours = [0,1,0,1,0]
>>> colored_vertices = [0, 2, 1, 2, 0]
>>> color = 1
>>> valid_coloring(neighbours, colored_vertices, color)
True
>>> color = 2
>>> valid_coloring(neighbours, colored_vertices, color)
False
"""
# Does any neighbour not satisfy the constraints
return not any(
neighbour == 1 and colored_vertices[i] == color
for i, neighbour in enumerate(neighbours)
)
def util_color(
graph: list[list[int]], max_colors: int, colored_vertices: list[int], index: int
) -> bool:
"""
Pseudo-Code
Base Case:
1. Check if coloring is complete
1.1 If complete return True (meaning that we successfully colored the graph)
Recursive Step:
2. Iterates over each color:
Check if the current coloring is valid:
2.1. Color given vertex
2.2. Do recursive call, check if this coloring leads to a solution
2.4. if current coloring leads to a solution return
2.5. Uncolor given vertex
>>> graph = [[0, 1, 0, 0, 0],
... [1, 0, 1, 0, 1],
... [0, 1, 0, 1, 0],
... [0, 1, 1, 0, 0],
... [0, 1, 0, 0, 0]]
>>> max_colors = 3
>>> colored_vertices = [0, 1, 0, 0, 0]
>>> index = 3
>>> util_color(graph, max_colors, colored_vertices, index)
True
>>> max_colors = 2
>>> util_color(graph, max_colors, colored_vertices, index)
False
"""
# Base Case
if index == len(graph):
return True
# Recursive Step
for i in range(max_colors):
if valid_coloring(graph[index], colored_vertices, i):
# Color current vertex
colored_vertices[index] = i
# Validate coloring
if util_color(graph, max_colors, colored_vertices, index + 1):
return True
# Backtrack
colored_vertices[index] = -1
return False
def color(graph: list[list[int]], max_colors: int) -> list[int]:
"""
Wrapper function to call subroutine called util_color
which will either return True or False.
If True is returned colored_vertices list is filled with correct colorings
>>> graph = [[0, 1, 0, 0, 0],
... [1, 0, 1, 0, 1],
... [0, 1, 0, 1, 0],
... [0, 1, 1, 0, 0],
... [0, 1, 0, 0, 0]]
>>> max_colors = 3
>>> color(graph, max_colors)
[0, 1, 0, 2, 0]
>>> max_colors = 2
>>> color(graph, max_colors)
[]
"""
colored_vertices = [-1] * len(graph)
if util_color(graph, max_colors, colored_vertices, 0):
return colored_vertices
return []
|
"""
Graph Coloring also called "m coloring problem"
consists of coloring given graph with at most m colors
such that no adjacent vertices are assigned same color
Wikipedia: https://en.wikipedia.org/wiki/Graph_coloring
"""
def valid_coloring(
neighbours: list[int], colored_vertices: list[int], color: int
) -> bool:
"""
For each neighbour check if coloring constraint is satisfied
If any of the neighbours fail the constraint return False
If all neighbours validate constraint return True
>>> neighbours = [0,1,0,1,0]
>>> colored_vertices = [0, 2, 1, 2, 0]
>>> color = 1
>>> valid_coloring(neighbours, colored_vertices, color)
True
>>> color = 2
>>> valid_coloring(neighbours, colored_vertices, color)
False
"""
# Does any neighbour not satisfy the constraints
return not any(
neighbour == 1 and colored_vertices[i] == color
for i, neighbour in enumerate(neighbours)
)
def util_color(
graph: list[list[int]], max_colors: int, colored_vertices: list[int], index: int
) -> bool:
"""
Pseudo-Code
Base Case:
1. Check if coloring is complete
1.1 If complete return True (meaning that we successfully colored graph)
Recursive Step:
2. Itterates over each color:
Check if current coloring is valid:
2.1. Color given vertex
2.2. Do recursive call check if this coloring leads to solving problem
2.4. if current coloring leads to solution return
2.5. Uncolor given vertex
>>> graph = [[0, 1, 0, 0, 0],
... [1, 0, 1, 0, 1],
... [0, 1, 0, 1, 0],
... [0, 1, 1, 0, 0],
... [0, 1, 0, 0, 0]]
>>> max_colors = 3
>>> colored_vertices = [0, 1, 0, 0, 0]
>>> index = 3
>>> util_color(graph, max_colors, colored_vertices, index)
True
>>> max_colors = 2
>>> util_color(graph, max_colors, colored_vertices, index)
False
"""
# Base Case
if index == len(graph):
return True
# Recursive Step
for i in range(max_colors):
if valid_coloring(graph[index], colored_vertices, i):
# Color current vertex
colored_vertices[index] = i
# Validate coloring
if util_color(graph, max_colors, colored_vertices, index + 1):
return True
# Backtrack
colored_vertices[index] = -1
return False
def color(graph: list[list[int]], max_colors: int) -> list[int]:
"""
Wrapper function to call subroutine called util_color
which will either return True or False.
If True is returned colored_vertices list is filled with correct colorings
>>> graph = [[0, 1, 0, 0, 0],
... [1, 0, 1, 0, 1],
... [0, 1, 0, 1, 0],
... [0, 1, 1, 0, 0],
... [0, 1, 0, 0, 0]]
>>> max_colors = 3
>>> color(graph, max_colors)
[0, 1, 0, 2, 0]
>>> max_colors = 2
>>> color(graph, max_colors)
[]
"""
colored_vertices = [-1] * len(graph)
if util_color(graph, max_colors, colored_vertices, 0):
return colored_vertices
return []
|
Python
| 0
|
86c9a2191e412d7701940e5aa64279ca000235b3
|
Add ListsRanking, which is identical to ProbabilisticRanking, and PairwisePreferenceRanking as well #57
|
interleaving/ranking.py
|
interleaving/ranking.py
|
from collections import defaultdict
class BalancedRanking(list):
'''
A list of document IDs generated by an interleaving method
including two rankers A and B
'''
__slots__ = ['a', 'b']
def __hash__(self):
return hash((tuple(self), tuple(self.a), tuple(self.b)))
def dumpd(self):
return {
'a': self.a,
'b': self.b,
'ranking_list': self,
}
class CreditRanking(list):
'''
A list of document IDs generated by an interleaving method
including credits
Args:
num_rankers: number of rankers
contents: initial list of document IDs (optional)
'''
__slots__ = ['credits']
def __init__(self, num_rankers, contents=[]):
'''
Initialize self.credits
num_rankers: number of rankers
contents: initial list of document IDs (optional)
'''
self += contents
self.credits = {}
for i in range(num_rankers):
self.credits[i] = defaultdict(float)
def __hash__(self):
l = []
for k, v in self.credits.items():
ll = []
for kk, vv in v.items():
ll.append((kk, vv))
l.append((k, frozenset(ll)))
return hash((tuple(self), frozenset(l)))
def dumpd(self):
return {
'credits': self.credits,
'ranking_list': self,
}
class ListsRanking(list):
'''
A list of document IDs generated by an interleaving method,
including original rankings
Args:
lists: list of original document ID lists
contents: initial list of document IDs (optional)
'''
__slots__ = ['lists']
def __init__(self, lists, contents=[]):
'''
Initialize self.teams
lists: list of original document ID lists
contents: initial list of document IDs (optional)
'''
self += contents
self.lists = lists
def __hash__(self):
l = []
for v in self.lists:
l.append(tuple(v))
return hash((tuple(self), tuple(l)))
def dumpd(self):
return {
'ranking_list': self,
'lists': self.lists,
}
class ProbabilisticRanking(ListsRanking):
pass
class TeamRanking(list):
'''
A list of document IDs generated by an interleaving method,
including teams
Args:
team_indices: indices for self.teams
contents: initial list of document IDs (optional)
'''
__slots__ = ['teams']
def __init__(self, team_indices, contents=[]):
'''
Initialize self.teams
team_indices: indices for self.teams
contents: initial list of document IDs (optional)
'''
self += contents
self.teams = {i: set() for i in team_indices}
def __hash__(self):
'''
TeamRanking can be a key by which
rankings with the same document ID list
and the same team assignment are the same
'''
l = []
for k, v in self.teams.items():
l.append((k, frozenset(v)))
return hash((tuple(self), frozenset(l)))
def dumpd(self):
team_dict = {}
for tid, s in self.teams.items():
team_dict[tid] = sorted(list(s))
return {
'ranking_list': self,
'teams': team_dict,
}
class PairwisePreferenceRanking(ListsRanking):
pass
|
from collections import defaultdict
class BalancedRanking(list):
'''
A list of document IDs generated by an interleaving method
including two rankers A and B
'''
__slots__ = ['a', 'b']
def __hash__(self):
return hash((tuple(self), tuple(self.a), tuple(self.b)))
def dumpd(self):
return {
'a': self.a,
'b': self.b,
'ranking_list': self,
}
class CreditRanking(list):
'''
A list of document IDs generated by an interleaving method
including credits
Args:
num_rankers: number of rankers
contents: initial list of document IDs (optional)
'''
__slots__ = ['credits']
def __init__(self, num_rankers, contents=[]):
'''
Initialize self.credits
num_rankers: number of rankers
contents: initial list of document IDs (optional)
'''
self += contents
self.credits = {}
for i in range(num_rankers):
self.credits[i] = defaultdict(float)
def __hash__(self):
l = []
for k, v in self.credits.items():
ll = []
for kk, vv in v.items():
ll.append((kk, vv))
l.append((k, frozenset(ll)))
return hash((tuple(self), frozenset(l)))
def dumpd(self):
return {
'credits': self.credits,
'ranking_list': self,
}
class ProbabilisticRanking(list):
'''
A list of document IDs generated by an interleaving method,
including original rankings
Args:
lists: list of original document ID lists
contents: initial list of document IDs (optional)
'''
__slots__ = ['lists']
def __init__(self, lists, contents=[]):
'''
Initialize self.teams
lists: list of original document ID lists
contents: initial list of document IDs (optional)
'''
self += contents
self.lists = lists
def __hash__(self):
l = []
for v in self.lists:
l.append(tuple(v))
return hash((tuple(self), tuple(l)))
def dumpd(self):
return {
'ranking_list': self,
'lists': self.lists,
}
class TeamRanking(list):
'''
A list of document IDs generated by an interleaving method,
including teams
Args:
team_indices: indices for self.teams
contents: initial list of document IDs (optional)
'''
__slots__ = ['teams']
def __init__(self, team_indices, contents=[]):
'''
Initialize self.teams
team_indices: indices for self.teams
contents: initial list of document IDs (optional)
'''
self += contents
self.teams = {i: set() for i in team_indices}
def __hash__(self):
'''
TeamRanking can be a key by which
rankings with the same document ID list
and the same team assignment are the same
'''
l = []
for k, v in self.teams.items():
l.append((k, frozenset(v)))
return hash((tuple(self), frozenset(l)))
def dumpd(self):
team_dict = {}
for tid, s in self.teams.items():
team_dict[tid] = sorted(list(s))
return {
'ranking_list': self,
'teams': team_dict,
}
|
Python
| 0
|
ca295aff7a051c5a8b5272a47f4af32378db3185
|
Update PID_wrap.py
|
control/PID/PID_wrap.py
|
control/PID/PID_wrap.py
|
import sensors.SensorClass as SensorClass
import PID_controller
import numpy as np
import Accel_to_Pos as posfinder
class PID(object):
def __init__(self):
self.pos = posfinder(self)
self.data = SensorClass.Data_file.State()
self.controller_x = PID_controller.PID_Controller(self)
self.controller_y = PID_controller.PID_Controller(self)
self.controller_z = PID_controller.PID_Controller(self)
self.controller_phi = PID_controller.PID_Controller(self)
self.controller_mu = PID_controller.PID_Controller(self)
self.controller_theta = PID_controller.PID_Controller(self)
def engage_PID(self):
updated_x = self.update_x()
updated_y = self.update_y()
updated_z = self.update_z()
updated_phi = self.update_phi()
updated_mu = self.update_mu()
updated_theta = self.update_theta()
return np.array([(updated_x), (updated_y), (updated_z), (updated_phi), (updated_mu), (updated_theta)])
def update_x(self):
self.controller_x.update(self.data.get_state())
return self.controller_x.getOutput()
def update_y(self):
self.controller_y.update(self.data.get_state())
return self.controller_y.getOutput()
def update_z(self):
self.controller_z.update(self.data.get_state())
return self.controller_z.getOutput()
def update_phi(self):
self.controller_phi.update(self.data.get_state('Roll'))
return self.controller_phi.getOutput()
def update_mu(self):
self.controller_mu.update(self.data.get_state('Pitch'))
return self.controller_mu.getOutput()
def update_theta(self):
self.controller_theta.update(self.data.get_state('Heading'))
return self.controller_theta.getOutput()
def set_destination(self, delta_x, delta_y, delta_z, delta_phi, delta_mu, delta_theta):
"""
Depended on Sonar
curr_x = self.data.get_state('')
curr_y = self.data.get_state('')
curr_z = self.data.get_state('')
"""
curr_x = self.pos.integration(self.data.get_state('Acceleration-X'))
curr_y = self.pos.integration(self.data.get_state('Acceleration-Y'))
curr_z = self.pos.integration(self.data.get_state('Acceleration-Z'))
curr_phi = self.data.get_state('Roll')
curr_mu = self.data.get_state('Pitch')
curr_theta = self.data.get_state('Heading')
self.dest_x = curr_x + delta_x
self.dest_y = curr_y + delta_y
self.dest_z = curr_z + delta_z
self.dest_phi = curr_phi + delta_phi
self.dest_mu = curr_mu + delta_mu
self.dest_theta = curr_theta + delta_theta
self.controller_x.set_setpoint(self.dest_x)
self.controller_y.set_setpoint(self.dest_y)
self.controller_z.set_setpoint(self.dest_z)
self.controller_phi.set_setpoint(self.dest_phi)
self.controller_mu.set_setpoint(self.dest_mu)
self.controller_theta.set_setpoint(self.dest_theta)
if __name__ == '__main__':
test = PID()
while True:
print(test.engage())
|
import sensors.SensorClass as SensorClass
import PID_controller
import numpy as np
class PID(object):
def __init__(self):
self.data = SensorClass.Data_file.State()
self.controller_x = PID_controller.PID_Controller(self)
self.controller_y = PID_controller.PID_Controller(self)
self.controller_z = PID_controller.PID_Controller(self)
self.controller_phi = PID_controller.PID_Controller(self)
self.controller_mu = PID_controller.PID_Controller(self)
self.controller_theta = PID_controller.PID_Controller(self)
def engage_PID(self):
updated_x = self.update_x()
updated_y = self.update_y()
updated_z = self.update_z()
updated_phi = self.update_phi()
updated_mu = self.update_mu()
updated_theta = self.update_theta()
return np.array([(updated_x), (updated_y), (updated_z), (updated_phi), (updated_mu), (updated_theta)])
def update_x(self):
self.controller_x.update(self.data.get_state())
return self.controller_x.getOutput()
def update_y(self):
self.controller_y.update(self.data.get_state())
return self.controller_y.getOutput()
def update_z(self):
self.controller_z.update(self.data.get_state())
return self.controller_z.getOutput()
def update_phi(self):
self.controller_phi.update(self.data.get_state('Roll'))
return self.controller_phi.getOutput()
def update_mu(self):
self.controller_mu.update(self.data.get_state('Pitch'))
return self.controller_mu.getOutput()
def update_theta(self):
self.controller_theta.update(self.data.get_state('Heading'))
return self.controller_theta.getOutput()
def set_destination(self, delta_x, delta_y, delta_z, delta_phi, delta_mu, delta_theta):
curr_x = self.data.get_state('')
curr_y = self.data.get_state('')
curr_z = self.data.get_state('')
curr_phi = self.data.get_state('Roll')
curr_mu = self.data.get_state('Pitch')
curr_theta = self.data.get_state('Heading')
self.dest_x = curr_x + delta_x
self.dest_y = curr_y + delta_y
self.dest_z = curr_z + delta_z
self.dest_phi = curr_phi + delta_phi
self.dest_mu = curr_mu + delta_mu
self.dest_theta = curr_theta + delta_theta
self.controller_x.set_setpoint(self.dest_x)
self.controller_y.set_setpoint(self.dest_y)
self.controller_z.set_setpoint(self.dest_z)
self.controller_phi.set_setpoint(self.dest_phi)
self.controller_mu.set_setpoint(self.dest_mu)
self.controller_theta.set_setpoint(self.dest_theta)
if __name__ == '__main__':
test = PID()
while True:
print(test.engage())
|
Python
| 0
|
f26c2059ff6e2a595097ef7a03efe149f9e253eb
|
Add default images for podcasts if necessary
|
iterator.py
|
iterator.py
|
import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
|
import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close()
|
Python
| 0.000001
|
6d52a6a1447ae854e22bc6317b694cb3bb317c12
|
Fix import paths
|
curiosity/bot.py
|
curiosity/bot.py
|
import traceback
import logbook
from ruamel import yaml
from curious.commands.bot import CommandsBot
from curious.commands.context import Context
from curious.commands.exc import CheckFailureError, MissingArgumentError, ConversionFailedError
from curious.dataclasses.status import Game, Status
from curious.dataclasses.message import Message
from curious.event import EventContext
from curious.ext.paginator import ReactionsPaginator
class Curiosity(CommandsBot):
def __init__(self):
try:
with open("config.yml") as f:
self.config = yaml.safe_load(f)
except FileNotFoundError as e:
print("You need to make a config.yml.")
raise SystemExit(1) from e
token = self.config["bot_token"]
super().__init__(token, command_prefix="c!")
self.logger = logbook.Logger("curiosity")
async def on_command_error(self, ctx: Context, exc: Exception):
if isinstance(exc, (CheckFailureError, MissingArgumentError, ConversionFailedError)):
await ctx.channel.send(":x: {}".format(str(exc)))
else:
fmtted = traceback.format_exception(None, exc.__cause__, exc.__cause__.__traceback__)
final = "```{}```".format(''.join(fmtted))
if len(final) < 1900:
await ctx.channel.send(final)
else:
items = ["```{}```".format(i) for i in traceback.format_exception(None, exc.__cause__,
exc.__cause__.__traceback__)]
p = ReactionsPaginator(channel=ctx.channel, content=items, respond_to=ctx.message.author.user)
await p.paginate()
async def on_connect(self, ctx):
self.logger.info("Connected to Discord on shard {0}, "
"logged in as {1.name}#{1.discriminator}.".format(ctx.shard_id, self.user))
self.logger.info("I am owned by {0.name}#{0.discriminator}.".format(self.application_info.owner))
self.logger.info("Invite URL: {}".format(self.invite_url))
await self.change_status(game=Game(name="curiosity loading..."), status=Status.DND, shard_id=ctx.shard_id)
async def on_ready(self, ctx):
await self.change_status(game=Game(
name="[shard {}/{}] curio is the future!".format(ctx.shard_id + 1, self.shard_count)
), status=Status.ONLINE, shard_id=ctx.shard_id)
if ctx.shard_id != 0:
return
plugins = self.config.get("plugins", [])
for plugin in plugins:
try:
await self.load_plugins_from(plugin)
except:
self.logger.exception("Failed to load {}!".format(plugin))
else:
self.logger.info("Loaded plugin {}.".format(plugin))
async def on_message_create(self, ctx: EventContext, message: Message):
self.logger.info("Recieved message: {message.content} "
"from {message.author.name} ({message.author.user.name}){bot}"
.format(message=message, bot=" [BOT]" if message.author.user.bot else ""))
self.logger.info(" On channel: #{message.channel.name}".format(message=message))
if message.guild:
self.logger.info(" On guild: {message.guild.name} ({message.guild.id})".format(message=message))
|
import traceback
import logbook
from ruamel import yaml
from curious.commands.bot import CommandsBot
from curious.commands.context import Context
from curious.commands.exc import CheckFailureError, MissingArgumentError, ConversionFailedError
from curious.dataclasses import Game, Status, Message
from curious.event import EventContext
from curious.ext.paginator import ReactionsPaginator
class Curiosity(CommandsBot):
def __init__(self):
try:
with open("config.yml") as f:
self.config = yaml.safe_load(f)
except FileNotFoundError as e:
print("You need to make a config.yml.")
raise SystemExit(1) from e
token = self.config["bot_token"]
super().__init__(token, command_prefix="c!")
self.logger = logbook.Logger("curiosity")
async def on_command_error(self, ctx: Context, exc: Exception):
if isinstance(exc, (CheckFailureError, MissingArgumentError, ConversionFailedError)):
await ctx.channel.send(":x: {}".format(str(exc)))
else:
fmtted = traceback.format_exception(None, exc.__cause__, exc.__cause__.__traceback__)
final = "```{}```".format(''.join(fmtted))
if len(final) < 1900:
await ctx.channel.send(final)
else:
items = ["```{}```".format(i) for i in traceback.format_exception(None, exc.__cause__,
exc.__cause__.__traceback__)]
p = ReactionsPaginator(channel=ctx.channel, content=items, respond_to=ctx.message.author.user)
await p.paginate()
async def on_connect(self, ctx):
self.logger.info("Connected to Discord on shard {0}, "
"logged in as {1.name}#{1.discriminator}.".format(ctx.shard_id, self.user))
self.logger.info("I am owned by {0.name}#{0.discriminator}.".format(self.application_info.owner))
self.logger.info("Invite URL: {}".format(self.invite_url))
await self.change_status(game=Game(name="curiosity loading..."), status=Status.DND, shard_id=ctx.shard_id)
async def on_ready(self, ctx):
await self.change_status(game=Game(
name="[shard {}/{}] curio is the future!".format(ctx.shard_id + 1, self.shard_count)
), status=Status.ONLINE, shard_id=ctx.shard_id)
if ctx.shard_id != 0:
return
plugins = self.config.get("plugins", [])
for plugin in plugins:
try:
await self.load_plugins_from(plugin)
except:
self.logger.exception("Failed to load {}!".format(plugin))
else:
self.logger.info("Loaded plugin {}.".format(plugin))
async def on_message_create(self, ctx: EventContext, message: Message):
self.logger.info("Recieved message: {message.content} "
"from {message.author.name} ({message.author.user.name}){bot}"
.format(message=message, bot=" [BOT]" if message.author.user.bot else ""))
self.logger.info(" On channel: #{message.channel.name}".format(message=message))
if message.guild:
self.logger.info(" On guild: {message.guild.name} ({message.guild.id})".format(message=message))
|
Python
| 0.00008
|
a72e0a6068614b740ade7586ec316db7b9611b46
|
Make JBrowse work in DEBUG mode without nginx.
|
genome_designer/urls.py
|
genome_designer/urls.py
|
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic import RedirectView
import settings
urlpatterns = patterns('',
url(r'^$', 'genome_designer.main.views.home_view'),
# Project-specific views
url(r'^projects$',
'genome_designer.main.views.project_list_view'),
url(r'^projects/create$',
'genome_designer.main.views.project_create_view'),
url(r'^projects/([\w-]+)$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/delete$',
'genome_designer.main.views.project_delete'),
# Tab base views.
url(r'^projects/([\w-]+)/data$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/align$',
'genome_designer.main.views.tab_root_align'),
url(r'^projects/([\w-]+)/analyze$',
'genome_designer.main.views.tab_root_analyze'),
# Reference genomes
url(r'^projects/([\w-]+)/refgenomes$',
'genome_designer.main.views.reference_genome_list_view'),
url(r'^projects/([\w-]+)/refgenomes/([\w-]+)$',
'genome_designer.main.views.reference_genome_view'),
# Alignments
url(r'^projects/([\w-]+)/alignments$',
'genome_designer.main.views.alignment_list_view'),
url(r'^projects/([\w-]+)/alignments/create$',
'genome_designer.main.views.alignment_create_view'),
url(r'^projects/([\w-]+)/alignments/([\w-]+)$',
'genome_designer.main.views.alignment_view'),
url(r'^projects/([\w-]+)/alignments/([\w-]+)/samplealign/([\w-]+)/error$',
'genome_designer.main.views.sample_alignment_error_view'),
# Variant sets
url(r'^projects/([\w-]+)/sets$',
'genome_designer.main.views.variant_set_list_view'),
url(r'^projects/([\w-]+)/sets/([\w-]+)$',
'genome_designer.main.views.variant_set_view'),
# Samples
url(r'^projects/([\w-]+)/samples$',
'genome_designer.main.views.sample_list_view'),
# Variants
url(r'^projects/([\w-]+)/refgenomes/([\w-]+)/variants/([\w-]+)$',
'genome_designer.main.views.single_variant_view'),
# Genes
url(r'^projects/([\w-]+)/genes$',
'genome_designer.main.views.gene_list_view'),
# GO terms
url(r'^projects/([\w-]+)/goterms$',
'genome_designer.main.views.goterm_list_view'),
############################################################################
# Templates
############################################################################
url(r'^templates/sample_list_targets_template.tsv$',
'genome_designer.main.views.sample_list_targets_template'),
url(r'^templates/variant_set_upload_template.vcf$',
'genome_designer.main.views.variant_set_upload_template'),
############################################################################
# Auth
############################################################################
# django-registration defaults (further delgates to django.contrib.auth.url)
(r'^accounts/', include('registration.backends.simple.urls')),
# The default behavior of registration is redirect to 'users/<username>'.
# For now let's catch this request here and just redirect to '/'.
(r'^users/', RedirectView.as_view(url='/')),
############################################################################
# XHR Actions
############################################################################
url(r'^_/sets/exportcsv$',
'genome_designer.main.xhr_handlers.export_variant_set_as_csv'),
url(r'^_/variants$',
'genome_designer.main.xhr_handlers.get_variant_list'),
url(r'^_/variants/modify_set_membership$',
'genome_designer.main.xhr_handlers.modify_variant_in_set_membership'),
)
if settings.DEBUG:
from django.conf.urls.static import static
urlpatterns += static('jbrowse', document_root=settings.JBROWSE_ROOT)
|
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic import RedirectView
urlpatterns = patterns('',
url(r'^$', 'genome_designer.main.views.home_view'),
# Project-specific views
url(r'^projects$',
'genome_designer.main.views.project_list_view'),
url(r'^projects/create$',
'genome_designer.main.views.project_create_view'),
url(r'^projects/([\w-]+)$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/delete$',
'genome_designer.main.views.project_delete'),
# Tab base views.
url(r'^projects/([\w-]+)/data$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/align$',
'genome_designer.main.views.tab_root_align'),
url(r'^projects/([\w-]+)/analyze$',
'genome_designer.main.views.tab_root_analyze'),
# Reference genomes
url(r'^projects/([\w-]+)/refgenomes$',
'genome_designer.main.views.reference_genome_list_view'),
url(r'^projects/([\w-]+)/refgenomes/([\w-]+)$',
'genome_designer.main.views.reference_genome_view'),
# Alignments
url(r'^projects/([\w-]+)/alignments$',
'genome_designer.main.views.alignment_list_view'),
url(r'^projects/([\w-]+)/alignments/create$',
'genome_designer.main.views.alignment_create_view'),
url(r'^projects/([\w-]+)/alignments/([\w-]+)$',
'genome_designer.main.views.alignment_view'),
url(r'^projects/([\w-]+)/alignments/([\w-]+)/samplealign/([\w-]+)/error$',
'genome_designer.main.views.sample_alignment_error_view'),
# Variant sets
url(r'^projects/([\w-]+)/sets$',
'genome_designer.main.views.variant_set_list_view'),
url(r'^projects/([\w-]+)/sets/([\w-]+)$',
'genome_designer.main.views.variant_set_view'),
# Samples
url(r'^projects/([\w-]+)/samples$',
'genome_designer.main.views.sample_list_view'),
# Variants
url(r'^projects/([\w-]+)/refgenomes/([\w-]+)/variants/([\w-]+)$',
'genome_designer.main.views.single_variant_view'),
# Genes
url(r'^projects/([\w-]+)/genes$',
'genome_designer.main.views.gene_list_view'),
# GO terms
url(r'^projects/([\w-]+)/goterms$',
'genome_designer.main.views.goterm_list_view'),
############################################################################
# Templates
############################################################################
url(r'^templates/sample_list_targets_template.tsv$',
'genome_designer.main.views.sample_list_targets_template'),
url(r'^templates/variant_set_upload_template.vcf$',
'genome_designer.main.views.variant_set_upload_template'),
############################################################################
# Auth
############################################################################
# django-registration defaults (further delgates to django.contrib.auth.url)
(r'^accounts/', include('registration.backends.simple.urls')),
# The default behavior of registration is redirect to 'users/<username>'.
# For now let's catch this request here and just redirect to '/'.
(r'^users/', RedirectView.as_view(url='/')),
############################################################################
# XHR Actions
############################################################################
url(r'^_/sets/exportcsv$',
'genome_designer.main.xhr_handlers.export_variant_set_as_csv'),
url(r'^_/variants$',
'genome_designer.main.xhr_handlers.get_variant_list'),
url(r'^_/variants/modify_set_membership$',
'genome_designer.main.xhr_handlers.modify_variant_in_set_membership'),
)
|
Python
| 0.000001
|
e0f3e68435b406e3bad9b7f7e459b724ea832e9e
|
Disable summernote editor test from Travis
|
shuup_tests/browser/admin/test_editor.py
|
shuup_tests/browser/admin/test_editor.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2018, Shuup Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from django.core.urlresolvers import reverse
from django.utils.translation import activate
from shuup import configuration
from shuup.testing import factories
from shuup.testing.browser_utils import (
click_element, move_to_element, wait_until_appeared,
wait_until_condition
)
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.djangodb
@pytest.mark.skipif(os.environ.get("SHUUP_TESTS_TRAVIS", "0") == "1", reason="Disable when run through tox.")
def test_summernote_editor_picture(browser, admin_user, live_server, settings):
activate("en")
factories.get_default_shop()
factories.get_default_product_type()
factories.get_default_sales_unit()
factories.get_default_tax_class()
filer_image = factories.get_random_filer_image()
configuration.set(None, "shuup_product_tour_complete", True)
initialize_admin_browser_test(browser, live_server, settings)
browser.driver.set_window_size(1920, 1080)
url = reverse("shuup_admin:shop_product.new")
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, condition=lambda x: x.is_text_present("New shop product"))
img_icon_selector = "#id_base-description__en-editor-wrap i[class='note-icon-picture']"
move_to_element(browser, img_icon_selector)
click_element(browser, img_icon_selector)
wait_until_condition(browser, lambda b: len(b.windows) == 2)
# change to the media browser window
browser.windows.current = browser.windows[1]
# click to select the picture
wait_until_appeared(browser, "a.file-preview")
click_element(browser, "a.file-preview")
# back to the main window
wait_until_condition(browser, lambda b: len(b.windows) == 1)
browser.windows.current = browser.windows[0]
# make sure the image was added to the editor
wait_until_appeared(
browser,
"#id_base-description__en-editor-wrap .note-editable img[src='%s']" % filer_image.url, timeout=20)
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2018, Shuup Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from django.core.urlresolvers import reverse
from django.utils.translation import activate
from shuup import configuration
from shuup.testing import factories
from shuup.testing.browser_utils import (
click_element, move_to_element, wait_until_appeared,
wait_until_condition
)
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.djangodb
def test_summernote_editor_picture(browser, admin_user, live_server, settings):
activate("en")
factories.get_default_shop()
factories.get_default_product_type()
factories.get_default_sales_unit()
factories.get_default_tax_class()
filer_image = factories.get_random_filer_image()
configuration.set(None, "shuup_product_tour_complete", True)
initialize_admin_browser_test(browser, live_server, settings)
browser.driver.set_window_size(1920, 1080)
url = reverse("shuup_admin:shop_product.new")
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, condition=lambda x: x.is_text_present("New shop product"))
img_icon_selector = "#id_base-description__en-editor-wrap i[class='note-icon-picture']"
move_to_element(browser, img_icon_selector)
click_element(browser, img_icon_selector)
wait_until_condition(browser, lambda b: len(b.windows) == 2)
# change to the media browser window
browser.windows.current = browser.windows[1]
# click to select the picture
wait_until_appeared(browser, "a.file-preview")
click_element(browser, "a.file-preview")
# back to the main window
wait_until_condition(browser, lambda b: len(b.windows) == 1)
browser.windows.current = browser.windows[0]
# make sure the image was added to the editor
wait_until_appeared(
browser,
"#id_base-description__en-editor-wrap .note-editable img[src='%s']" % filer_image.url, timeout=20)
|
Python
| 0
|
6f7c11c13793cbba7904cfd2a27ab3eb59ab9302
|
Update ispyb/sp/xtalimaging.py: proper multi-line docstring
|
ispyb/sp/xtalimaging.py
|
ispyb/sp/xtalimaging.py
|
from __future__ import absolute_import, division, print_function
from ispyb.interface.dataarea import DataArea
class XtalImaging(DataArea):
"""provides methods for accessing crystal imaging tables."""
def upsert_sample_image(
self,
id=None,
sample_id=None,
inspection_id=None,
microns_per_pixel_x=None,
microns_per_pixel_y=None,
image_full_path=None,
comments=None,
):
"""Store new or update existing sample image.
:param image_full_path: The full path to the sample image
:return: The sample_image_id.
"""
return self.get_connection().call_sp_write(
procname="upsert_sample_image",
args=[
id,
sample_id,
inspection_id,
microns_per_pixel_x,
microns_per_pixel_y,
image_full_path,
comments,
],
)
def upsert_sample_image_auto_score(
self, image_full_path, schema_name, score_class, probability
):
"""Store new or update existing automatic score for a sample image.
:param image_full_path: The full path to the sample image
:param schema_name: The name of the scoring schema, e.g. MARCO
:param score_class: A string that describes the thing we're scoring, e.g. crystal, clear, precipitant, other
:param probability: A float indicating the probability that the image contains the score_class
"""
self.get_connection().call_sp_write(
procname="upsert_sample_image_auto_score",
args=[image_full_path, schema_name, score_class, probability],
)
def insert_subsample_for_image_full_path(
self,
image_full_path,
source,
position1x,
position1y,
position2x=None,
position2y=None,
):
"""Store new subsample for a given sample image.
Either specify a point (by providing position1x and position1y)
or a ROI box (by additionally providing position2x and position2y).
Position coordinates are given in pixels from the top-left corner
of the image.
:param image_full_path: The full path to the sample image
:type image_full_path: str
:param source: manual or auto
:type source: str
:param position1x: x component of position1
:type position1x: int
:param position1y: y component of position1
:type position1y: int
:param position2x: x component of position2 which is the lower right
corner of a ROI box
:type position2x: int
:param position2y: y component of position2 which is the lower right
corner of a ROI box
:type position2y: int
:return: The subsample_id.
"""
id = None
return self.get_connection().call_sp_write(
procname="insert_subsample_for_image_full_path",
args=[
id,
image_full_path,
source,
position1x,
position1y,
position2x,
position2y,
],
)
def retrieve_container_for_barcode(self, barcode):
"""Retrieve info about the container indetified by the give barcode."""
return self.get_connection().call_sp_retrieve(
procname="retrieve_container_for_barcode", args=[barcode]
)
def retrieve_container_for_inspection_id(self, inspection_id):
"""Retrieve info about the container identified by container inspection ID"""
return self.get_connection().call_sp_retrieve(
procname="retrieve_container_for_inspection_id", args=[inspection_id]
)
def retrieve_sample_for_container_id_and_location(self, container_id, location):
"""Retrieve info about the sample identified by the given container ID and its location."""
return self.get_connection().call_sp_retrieve(
procname="retrieve_sample_for_container_id_and_location",
args=[container_id, location],
)
|
from __future__ import absolute_import, division, print_function
from ispyb.interface.dataarea import DataArea
class XtalImaging(DataArea):
"""provides methods for accessing crystal imaging tables."""
def upsert_sample_image(
self,
id=None,
sample_id=None,
inspection_id=None,
microns_per_pixel_x=None,
microns_per_pixel_y=None,
image_full_path=None,
comments=None,
):
"""Store new or update existing sample image.
:param image_full_path: The full path to the sample image
:return: The sample_image_id.
"""
return self.get_connection().call_sp_write(
procname="upsert_sample_image",
args=[
id,
sample_id,
inspection_id,
microns_per_pixel_x,
microns_per_pixel_y,
image_full_path,
comments,
],
)
def upsert_sample_image_auto_score(
self, image_full_path, schema_name, score_class, probability
):
"""Store new or update existing automatic score for a sample image.
:param image_full_path: The full path to the sample image
:param schema_name: The name of the scoring schema, e.g. MARCO
:param score_class: A string that describes the thing we're scoring, e.g. crystal, clear, precipitant, other
:param probability: A float indicating the probability that the image contains the score_class
"""
self.get_connection().call_sp_write(
procname="upsert_sample_image_auto_score",
args=[image_full_path, schema_name, score_class, probability],
)
def insert_subsample_for_image_full_path(
self,
image_full_path,
source,
position1x,
position1y,
position2x=None,
position2y=None,
):
"""Store new subsample for a given sample image. Either specify a point
(by providing position1x and position1y) or a ROI box (by additionally
providing position2x and position2y). Position coordinates are given in
pixels from the top-left corner of the image.
:param image_full_path: The full path to the sample image
:type image_full_path: str
:param source: manual or auto
:type source: str
:param position1x: x component of position1
:type position1x: int
:param position1y: y component of position1
:type position1y: int
:param position2x: x component of position2 which is the lower right
corner of a ROI box
:type position2x: int
:param position2y: y component of position2 which is the lower right
corner of a ROI box
:type position2y: int
:return: The subsample_id.
"""
id = None
return self.get_connection().call_sp_write(
procname="insert_subsample_for_image_full_path",
args=[
id,
image_full_path,
source,
position1x,
position1y,
position2x,
position2y,
],
)
def retrieve_container_for_barcode(self, barcode):
"""Retrieve info about the container indetified by the give barcode."""
return self.get_connection().call_sp_retrieve(
procname="retrieve_container_for_barcode", args=[barcode]
)
def retrieve_container_for_inspection_id(self, inspection_id):
"""Retrieve info about the container identified by container inspection ID"""
return self.get_connection().call_sp_retrieve(
procname="retrieve_container_for_inspection_id", args=[inspection_id]
)
def retrieve_sample_for_container_id_and_location(self, container_id, location):
"""Retrieve info about the sample identified by the given container ID and its location."""
return self.get_connection().call_sp_retrieve(
procname="retrieve_sample_for_container_id_and_location",
args=[container_id, location],
)
|
Python
| 0
|
b1d1df9a5368a50f82a8bab6a4be023a51ef603f
|
Update PickList.py
|
Cogs/PickList.py
|
Cogs/PickList.py
|
import asyncio
import discord
from discord.ext import commands
def setup(bot):
# This module isn't actually a cog
return
class Picker:
def __init__(self, **kwargs):
self.list = kwargs.get("list", [])
self.title = kwargs.get("title", None)
self.timeout = kwargs.get("timeout", 60)
self.ctx = kwargs.get("ctx", None)
self.message = kwargs.get("message", None) # message to edit
self.max = 10 # Don't set programmatically - as we don't want this overridden
self.reactions = [ "🛑" ]
async def _add_reactions(self, message, react_list):
for r in react_list:
await message.add_reaction(r)
async def pick(self):
# This actually brings up the pick list and handles the nonsense
# Returns a tuple of (return_code, message)
# The return code is -1 for cancel, -2 for timeout, -3 for error, 0+ is index
# Let's check our prerequisites first
if self.ctx == None or not len(self.list) or len(self.list) > self.max:
return (-3, None)
msg = ""
if self.title:
msg += self.title + "\n"
msg += "```\n"
# Show our list items
current = 0
# current_reactions = [self.reactions[0]]
current_reactions = []
for item in self.list:
current += 1
current_reactions.append("{}\N{COMBINING ENCLOSING KEYCAP}".format(current))
msg += "{}. {}\n".format(current, item)
msg += "```"
# Add the stop reaction
current_reactions.append(self.reactions[0])
if self.message:
message = self.message
await message.edit(content=msg)
else:
message = await self.ctx.send(msg)
# Add our reactions
await self._add_reactions(message, current_reactions)
# Now we would wait...
def check(reaction, user):
return user == self.ctx.author and str(reaction.emoji) in current_reactions
try:
reaction, user = await self.ctx.bot.wait_for('reaction_add', timeout=self.timeout, check=check)
except:
# Didn't get a reaction
await message.clear_reactions()
return (-2, message)
await message.clear_reactions()
# Get the adjusted index
ind = current_reactions.index(str(reaction.emoji))
if ind == len(current_reactions)-1:
ind = -1
return (ind, message)
|
import asyncio
import discord
from discord.ext import commands
def setup(bot):
# This module isn't actually a cog
return
class Picker:
def __init__(self, **kwargs):
self.list = kwargs.get("list", [])
self.title = kwargs.get("title", None)
self.timeout = kwargs.get("timeout", 60)
self.ctx = kwargs.get("ctx", None)
self.message = kwargs.get("message", None) # message to edit
self.max = 10 # Don't set programmatically - as we don't want this overridden
self.reactions = [ "🛑" ]
async def _add_reactions(self, message, react_list):
for r in react_list:
await message.add_reaction(r)
async def pick(self):
# This actually brings up the pick list and handles the nonsense
# Returns a tuple of (return_code, message)
# The return code is -1 for cancel, -2 for timeout, -3 for error, 0+ is index
# Let's check our prerequisites first
if self.ctx == None or not len(self.list) or len(self.list) > self.max:
return (-3, None)
msg = ""
if self.title:
msg += self.title + "\n"
msg += "```\n"
# Show our list items
current = 0
# current_reactions = [self.reactions[0]]
current_reactions = []
for item in self.list:
current += 1
current_reactions.append("{}\N{COMBINING ENCLOSING KEYCAP}".format(current))
msg += "{}. {}\n".format(current, item)
msg += "```"
# Add the stop reaction
current_reactions.append(self.reactions[0])
if self.message:
message = self.message
await message.edit(content=msg)
else:
message = await self.ctx.send(msg)
# Add our reactions
await self._add_reactions(message, current_reactions)
# Now we would wait...
def check(reaction, user):
return user == self.ctx.author and str(reaction.emoji) in current_reactions
try:
reaction, user = await self.ctx.bot.wait_for('reaction_add', timeout=self.timeout, check=check)
except:
# Didn't get a reaction
await message.clear_reactions()
return (-2, message)
await message.clear_reactions()
# Get the adjusted index
ind = current_reactions.index(str(reaction.emoji))+1
if ind == len(current_reactions):
ind = -1
return (ind, message)
|
Python
| 0
|
eda2f6905a3275623525c4179358e55e472b4fd7
|
Fix bug in urls.py following the sample_list template being renamed.
|
genome_designer/urls.py
|
genome_designer/urls.py
|
from django.conf.urls.defaults import include
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('',
url(r'^$', 'genome_designer.main.views.home_view'),
# Project-specific views
url(r'^projects$',
'genome_designer.main.views.project_list_view'),
url(r'^projects/([\w-]+)$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/refgenomes$',
'genome_designer.main.views.reference_genome_list_view'),
url(r'^projects/([\w-]+)/alignments$',
'genome_designer.main.views.alignment_list_view'),
url(r'^projects/([\w-]+)/sets$',
'genome_designer.main.views.variant_set_list_view'),
url(r'^projects/([\w-]+)/samples$',
'genome_designer.main.views.sample_list_view'),
url(r'^projects/([\w-]+)/variants$',
'genome_designer.main.views.variant_list_view'),
url(r'^projects/([\w-]+)/genes$',
'genome_designer.main.views.gene_list_view'),
url(r'^projects/([\w-]+)/goterms$',
'genome_designer.main.views.goterm_list_view'),
############################################################################
# Templates
############################################################################
url(r'^templates/sample_list_targets_template.tsv$',
'genome_designer.main.views.sample_list_targets_template'),
############################################################################
# Auth
############################################################################
# django-registration defaults (further delgates to django.contrib.auth.url)
(r'^accounts/', include('registration.backends.simple.urls')),
)
|
from django.conf.urls.defaults import include
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('',
url(r'^$', 'genome_designer.main.views.home_view'),
# Project-specific views
url(r'^projects$',
'genome_designer.main.views.project_list_view'),
url(r'^projects/([\w-]+)$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/refgenomes$',
'genome_designer.main.views.reference_genome_list_view'),
url(r'^projects/([\w-]+)/alignments$',
'genome_designer.main.views.alignment_list_view'),
url(r'^projects/([\w-]+)/sets$',
'genome_designer.main.views.variant_set_list_view'),
url(r'^projects/([\w-]+)/samples$',
'genome_designer.main.views.sample_list_view'),
url(r'^projects/([\w-]+)/variants$',
'genome_designer.main.views.variant_list_view'),
url(r'^projects/([\w-]+)/genes$',
'genome_designer.main.views.gene_list_view'),
url(r'^projects/([\w-]+)/goterms$',
'genome_designer.main.views.goterm_list_view'),
############################################################################
# Templates
url(r'^templates/sample_list_targets_template.tsv$',
'genome_designer.main.views.sample_list_upload_template'),
############################################################################
############################################################################
# Auth
############################################################################
# django-registration defaults (further delgates to django.contrib.auth.url)
(r'^accounts/', include('registration.backends.simple.urls')),
)
|
Python
| 0
|
63702a236d6c882b747cb19b566122a4a8ddfa3b
|
Change Indicator.add_menu arguments to allow passing CheckMenuItem status
|
jackselect/indicator.py
|
jackselect/indicator.py
|
"""A convenience class for a GTK 3 system tray indicator."""
from pkg_resources import resource_filename
import gi
gi.require_version('Gtk', '3.0') # noqa
from gi.repository import Gtk
from gi.repository.GdkPixbuf import Pixbuf
class Indicator:
"""This class defines a standard GTK3 system tray indicator.
Class Indicator can be easily reused in any other project.
"""
def __init__(self, icon, title=None):
"""Create indicator icon and add menu.
Args:
icon (str): path to initial icon that will be shown on system panel
"""
self._icon_cache = {}
self.icon = Gtk.StatusIcon.new_from_pixbuf(self._get_icon(icon))
self.menu = Gtk.Menu()
self.icon.connect('activate', self.on_popup_menu_open)
self.icon.connect('popup-menu', self.on_popup_menu_open)
if title:
self.icon.set_title(title)
def _get_icon(self, icon):
"""Return icon from package as GdkPixbuf.Pixbuf.
Extracts the image from package to a file, stores it in the icon cache
if it's not in there yet and returns it. Otherwise just returns the
image stored in the cache.
"""
if icon not in self._icon_cache:
filename = resource_filename(__name__, "images/%s" % icon)
self._icon_cache[icon] = Pixbuf.new_from_file(filename)
return self._icon_cache[icon]
def set_icon(self, icon):
"""Set new icon in system tray.
Args:
icon (str): path to file with new icon
"""
self.icon.set_from_pixbuf(self._get_icon(icon))
def set_tooltip(self, callback):
self.icon.set_has_tooltip(True)
self.icon.connect("query-tooltip", callback)
def clear_menu(self):
"""Clear all entries from the main menu."""
self.menu = Gtk.Menu()
def add_menu_item(self, command=None, title=None, icon=None, enabled=True, is_check=False,
active=False, menu=None, data=None):
"""Add mouse right click menu item.
Args:
command (callable): function that will be called after left mouse
click on title
title (str): label that will be shown in menu
icon (str): name of icon stored in application package
active (bool): whether the menu entry can be activated (default: True)
data (obj): arbitrary data to associate with the menu entry
"""
if icon:
m_item = Gtk.ImageMenuItem(title)
image = Gtk.Image.new_from_pixbuf(self._get_icon(icon))
m_item.set_image(image)
elif is_check:
m_item = Gtk.CheckMenuItem(title)
m_item.set_active(active)
else:
m_item = Gtk.MenuItem(title)
if command:
m_item.connect('toggled' if is_check else 'activate', command)
m_item.set_sensitive(enabled)
m_item.data = data
if menu:
menu.append(m_item)
else:
self.menu.append(m_item)
return m_item
def add_submenu(self, title):
"""Add a sub menu popup menu."""
submenu = Gtk.Menu()
m_item = Gtk.MenuItem(title)
m_item.set_submenu(submenu)
self.menu.append(m_item)
return submenu
def add_separator(self):
"""Add separator between labels in the popup menu."""
m_item = Gtk.SeparatorMenuItem()
self.menu.append(m_item)
def on_popup_menu_open(self, widget=None, button=None, *args):
"""Some action requested opening the popup menu."""
self.menu.popup(None, None, Gtk.StatusIcon.position_menu,
widget or self.icon, button or 1,
Gtk.get_current_event_time())
def on_popup_menu_close(self, widget=None, button=None, *args):
"""Some action requested closing the popup menu."""
self.menu.popdown()
|
"""A convenience class for a GTK 3 system tray indicator."""
from pkg_resources import resource_filename
import gi
gi.require_version('Gtk', '3.0') # noqa
from gi.repository import Gtk
from gi.repository.GdkPixbuf import Pixbuf
class Indicator:
"""This class defines a standard GTK3 system tray indicator.
Class Indicator can be easily reused in any other project.
"""
def __init__(self, icon, title=None):
"""Create indicator icon and add menu.
Args:
icon (str): path to initial icon that will be shown on system panel
"""
self._icon_cache = {}
self.icon = Gtk.StatusIcon.new_from_pixbuf(self._get_icon(icon))
self.menu = Gtk.Menu()
self.icon.connect('activate', self.on_popup_menu_open)
self.icon.connect('popup-menu', self.on_popup_menu_open)
if title:
self.icon.set_title(title)
def _get_icon(self, icon):
"""Return icon from package as GdkPixbuf.Pixbuf.
Extracts the image from package to a file, stores it in the icon cache
if it's not in there yet and returns it. Otherwise just returns the
image stored in the cache.
"""
if icon not in self._icon_cache:
filename = resource_filename(__name__, "images/%s" % icon)
self._icon_cache[icon] = Pixbuf.new_from_file(filename)
return self._icon_cache[icon]
def set_icon(self, icon):
"""Set new icon in system tray.
Args:
icon (str): path to file with new icon
"""
self.icon.set_from_pixbuf(self._get_icon(icon))
def set_tooltip(self, callback):
self.icon.set_has_tooltip(True)
self.icon.connect("query-tooltip", callback)
def clear_menu(self):
"""Clear all entries from the main menu."""
self.menu = Gtk.Menu()
def add_menu_item(self, command=None, title=None, icon=None, active=True, is_check=False,
menu=None, data=None):
"""Add mouse right click menu item.
Args:
command (callable): function that will be called after left mouse
click on title
title (str): label that will be shown in menu
icon (str): name of icon stored in application package
active (bool): whether the menu entry can be activated (default: True)
data (obj): arbitrary data to associate with the menu entry
"""
if icon:
m_item = Gtk.ImageMenuItem(title)
image = Gtk.Image.new_from_pixbuf(self._get_icon(icon))
m_item.set_image(image)
elif is_check:
m_item = Gtk.CheckMenuItem(title)
else:
m_item = Gtk.MenuItem(title)
if command:
m_item.connect('toggled' if is_check else 'activate', command)
m_item.set_sensitive(active)
m_item.data = data
if menu:
menu.append(m_item)
else:
self.menu.append(m_item)
return m_item
def add_submenu(self, title):
"""Add a sub menu popup menu."""
submenu = Gtk.Menu()
m_item = Gtk.MenuItem(title)
m_item.set_submenu(submenu)
self.menu.append(m_item)
return submenu
def add_separator(self):
"""Add separator between labels in the popup menu."""
m_item = Gtk.SeparatorMenuItem()
self.menu.append(m_item)
def on_popup_menu_open(self, widget=None, button=None, *args):
"""Some action requested opening the popup menu."""
self.menu.popup(None, None, Gtk.StatusIcon.position_menu,
widget or self.icon, button or 1,
Gtk.get_current_event_time())
def on_popup_menu_close(self, widget=None, button=None, *args):
"""Some action requested closing the popup menu."""
self.menu.popdown()
|
Python
| 0
|
50bd1ce1118ddb52a54f679fc9faee4bc3110458
|
Allow the --force command line argument to accept one or more stage names'
|
rubra/cmdline_args.py
|
rubra/cmdline_args.py
|
# Process the unix command line of the pipeline.
import argparse
from version import rubra_version
def get_cmdline_args():
return parser.parse_args()
parser = argparse.ArgumentParser(
description='A bioinformatics pipeline system.')
parser.add_argument(
'--pipeline',
metavar='PIPELINE_FILE',
type=str,
help='Your Ruffus pipeline stages (a Python module)')
parser.add_argument(
'--config',
metavar='CONFIG_FILE',
type=str,
nargs='+',
required=True,
help='One or more configuration files (Python modules)')
parser.add_argument(
'--verbose',
type=int,
choices=(0, 1, 2),
required=False,
default=1,
help='Output verbosity level: 0 = quiet; 1 = normal; \
2 = chatty (default is 1)')
parser.add_argument(
'--style',
type=str,
choices=('print', 'run', 'flowchart', 'touchfiles'),
required=False,
default='print',
help='Pipeline behaviour: print; run; touchfiles; flowchart (default is print)')
parser.add_argument(
'--force',
metavar='TASKNAME',
type=str,
required=False,
default=[],
nargs='+',
help='tasks which are forced to be out of date regardless of timestamps')
parser.add_argument(
'--end',
metavar='TASKNAME',
type=str,
required=False,
help='end points (tasks) for the pipeline')
parser.add_argument(
'--rebuild',
type=str,
choices=('fromstart', 'fromend'),
required=False,
default='fromstart',
help='rebuild outputs by working back from end tasks or forwards \
from start tasks (default is fromstart)')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + rubra_version)
|
# Process the unix command line of the pipeline.
import argparse
from version import rubra_version
def get_cmdline_args():
return parser.parse_args()
parser = argparse.ArgumentParser(
description='A bioinformatics pipeline system.')
parser.add_argument(
'--pipeline',
metavar='PIPELINE_FILE',
type=str,
help='Your Ruffus pipeline stages (a Python module)')
parser.add_argument(
'--config',
metavar='CONFIG_FILE',
type=str,
nargs='+',
required=True,
help='One or more configuration files (Python modules)')
parser.add_argument(
'--verbose',
type=int,
choices=(0, 1, 2),
required=False,
default=1,
help='Output verbosity level: 0 = quiet; 1 = normal; \
2 = chatty (default is 1)')
parser.add_argument(
'--style',
type=str,
choices=('print', 'run', 'flowchart', 'touchfiles'),
required=False,
default='print',
help='Pipeline behaviour: print; run; touchfiles; flowchart (default is print)')
parser.add_argument(
'--force',
metavar='TASKNAME',
type=str,
required=False,
default=[],
help='tasks which are forced to be out of date regardless of timestamps')
parser.add_argument(
'--end',
metavar='TASKNAME',
type=str,
required=False,
help='end points (tasks) for the pipeline')
parser.add_argument(
'--rebuild',
type=str,
choices=('fromstart', 'fromend'),
required=False,
default='fromstart',
help='rebuild outputs by working back from end tasks or forwards \
from start tasks (default is fromstart)')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + rubra_version)
|
Python
| 0
|
4009e01004ecd9b8f3d759842181b65a3893f73a
|
fix `TypeError: the JSON object must be str, bytes or bytearray, not NoneType`
|
simple_settings/dynamic_settings/base.py
|
simple_settings/dynamic_settings/base.py
|
# -*- coding: utf-8 -*-
import re
from copy import deepcopy
import jsonpickle
class BaseReader(object):
"""
Base class for dynamic readers
"""
_default_conf = {}
def __init__(self, conf):
self.conf = deepcopy(self._default_conf)
self.conf.update(conf)
self.key_pattern = self.conf.get('pattern')
self.auto_casting = self.conf.get('auto_casting')
self.key_prefix = self.conf.get('prefix')
def get(self, key):
if not self._is_valid_key(key):
return
result = self._get(self._qualified_key(key))
if self.auto_casting and (result is not None):
result = jsonpickle.decode(result)
return result
def set(self, key, value):
if not self._is_valid_key(key):
return
if self.auto_casting:
value = jsonpickle.encode(value)
self._set(self._qualified_key(key), value)
def _is_valid_key(self, key):
if not self.key_pattern:
return True
return bool(re.match(self.key_pattern, key))
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key)
|
# -*- coding: utf-8 -*-
import re
from copy import deepcopy
import jsonpickle
class BaseReader(object):
"""
Base class for dynamic readers
"""
_default_conf = {}
def __init__(self, conf):
self.conf = deepcopy(self._default_conf)
self.conf.update(conf)
self.key_pattern = self.conf.get('pattern')
self.auto_casting = self.conf.get('auto_casting')
self.key_prefix = self.conf.get('prefix')
def get(self, key):
if not self._is_valid_key(key):
return
result = self._get(self._qualified_key(key))
if self.auto_casting:
result = jsonpickle.decode(result)
return result
def set(self, key, value):
if not self._is_valid_key(key):
return
if self.auto_casting:
value = jsonpickle.encode(value)
self._set(self._qualified_key(key), value)
def _is_valid_key(self, key):
if not self.key_pattern:
return True
return bool(re.match(self.key_pattern, key))
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key)
|
Python
| 0.000001
|
a6cb3bfeb5f7201a0e702024257df1f874a3bb70
|
Bump version 15.
|
terroroftinytown/client/__init__.py
|
terroroftinytown/client/__init__.py
|
VERSION = 15 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
|
VERSION = 14 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
|
Python
| 0
|
3f70ead379b7f586313d01d5ab617fd5368f8ce3
|
Print traceback if startup fails
|
cthulhubot/management/commands/restart_masters.py
|
cthulhubot/management/commands/restart_masters.py
|
from traceback import print_exc
from django.core.management.base import BaseCommand
from cthulhubot.models import Buildmaster
class Command(BaseCommand):
help = 'Restart all Buildmaster processes'
args = ""
def handle(self, *fixture_labels, **options):
verbosity = int(options.get('verbosity', 1))
commit = int(options.get('commit', 1))
if verbosity > 1:
print 'Restarting buildmasters...'
for b in Buildmaster.objects.all():
if verbosity > 1:
print 'Handling buildmaster %s for project %s' % (str(b.id), str(b.project.name))
try:
b.stop()
except:
print 'Failed to stop master'
print_exc()
try:
b.start()
except:
print 'Failed to start master'
print_exc()
|
from django.core.management.base import BaseCommand
from cthulhubot.models import Buildmaster
class Command(BaseCommand):
help = 'Restart all Buildmaster processes'
args = ""
def handle(self, *fixture_labels, **options):
verbosity = int(options.get('verbosity', 1))
commit = int(options.get('commit', 1))
if verbosity > 1:
print 'Restarting buildmasters...'
for b in Buildmaster.objects.all():
if verbosity > 1:
print 'Handling buildmaster %s for project %s' % (str(b.id), str(b.project.name))
try:
b.stop()
except:
print 'Failed to stop master'
try:
b.start()
except:
print 'Failed to start master'
|
Python
| 0.000008
|
8095347cc2b4b1d10ae9e37223d98c2dd44b7164
|
fix reversing after commit
|
nm_payment/drivers/bbs/payment_session.py
|
nm_payment/drivers/bbs/payment_session.py
|
import concurrent.futures
from threading import Lock
import logging
log = logging.getLogger('nm_payment')
from nm_payment.base import PaymentSession
from nm_payment.exceptions import (
SessionCompletedError, SessionCancelledError, CancelFailedError,
)
from .session import BBSSession
RUNNING = 'RUNNING'
CANCELLING = 'CANCELLING'
REVERSING = 'REVERSING'
FINISHED = 'FINISHED'
BROKEN = 'BROKEN'
class BBSPaymentSession(BBSSession, PaymentSession):
def __init__(
self, connection, amount, *, before_commit=None,
on_print=None, on_display=None):
super(BBSPaymentSession, self).__init__(connection)
self._future = concurrent.futures.Future()
self._lock = Lock()
self._state = RUNNING
self._commit_callback = before_commit
self._print_callback = on_print
self._display_callback = on_display
self._connection.request_transfer_amount(amount).result()
def _start_reversal(self):
try:
self._state = REVERSING
self._connection.request_reversal().result()
except Exception as e:
# XXX This is really really bad
raise CancelFailedError() from e
def _on_local_mode_running(self, result, **kwargs):
if result == 'success':
commit = True
self._state == CANCELLING
if self._commit_callback is not None:
# TODO can't decide on commit callback api
try:
commit = self._commit_callback(result)
except Exception:
commit = False
if commit:
self._state = FINISHED
self._future.set_result(None)
else:
self._start_reversal()
else:
# TODO interpret errors from ITU
self._state = FINISHED
self._future.set_exception(SessionCancelledError("itu error"))
def _on_local_mode_cancelling(self, result, **kwargs):
if result == 'success':
self._start_reversal()
else:
self._state = FINISHED
self._future.set_exception(SessionCancelledError())
def _on_local_mode_reversing(self, result, **kwargs):
if result == 'success':
self._state = FINISHED
self._future.set_exception(SessionCancelledError())
else:
# XXX
self._state = BROKEN
def on_req_local_mode(self, *args, **kwargs):
"""
.. note:: Internal use only
"""
with self._lock:
if self._state == RUNNING:
return self._on_local_mode_running(*args, **kwargs)
elif self._state == CANCELLING:
return self._on_local_mode_cancelling(*args, **kwargs)
elif self._state == REVERSING:
return self._on_local_mode_reversing(*args, **kwargs)
else:
raise Exception("invalid state")
def on_display_text(self, text):
if self._display_callback is not None:
self._display_callback(text)
def on_print_text(self, commands):
if self._print_callback is not None:
self._print_callback(commands)
def on_reset_timer(self, timeout):
pass
def cancel(self):
"""
:raises SessionCompletedError:
If session has already finished
"""
with self._lock:
if self._state == RUNNING:
self._state = CANCELLING
# non-blocking, don't wait for result
self._connection.request_cancel()
# block until session finishes
try:
self.result()
except SessionCancelledError:
# this is what we want
return
else:
raise CancelFailedError()
def result(self, timeout=None):
try:
return self._future.result(timeout=timeout)
except concurrent.futures.CancelledError as e:
raise SessionCancelledError() from e
def add_done_callback(self, fn):
return self._future.add_done_callback(fn)
def unbind(self):
try:
self.cancel()
except SessionCompletedError:
pass
|
import concurrent.futures
from threading import Lock
import logging
log = logging.getLogger('nm_payment')
from nm_payment.base import PaymentSession
from nm_payment.exceptions import (
SessionCompletedError, SessionCancelledError, CancelFailedError,
)
from .session import BBSSession
RUNNING = 'RUNNING'
CANCELLING = 'CANCELLING'
REVERSING = 'REVERSING'
FINISHED = 'FINISHED'
BROKEN = 'BROKEN'
class BBSPaymentSession(BBSSession, PaymentSession):
def __init__(
self, connection, amount, *, before_commit=None,
on_print=None, on_display=None):
super(BBSPaymentSession, self).__init__(connection)
self._future = concurrent.futures.Future()
self._lock = Lock()
self._state = RUNNING
self._commit_callback = before_commit
self._print_callback = on_print
self._display_callback = on_display
self._connection.request_transfer_amount(amount).result()
def _start_reversal(self):
try:
self._state = REVERSING
self._connection.request_reversal().result()
except Exception as e:
# XXX This is really really bad
raise CancelFailedError() from e
def _on_local_mode_running(self, result, **kwargs):
if result == 'success':
reverse = self._state == CANCELLING
if self._commit_callback is not None:
# TODO can't decide on commit callback api
try:
reverse = not self._commit_callback(result)
except Exception:
reverse = True
if reverse:
self._start_reversal()
else:
self._state = FINISHED
self._future.set_result(None)
else:
# TODO interpret errors from ITU
self._state = FINISHED
self._future.set_exception(SessionCancelledError("itu error"))
def _on_local_mode_cancelling(self, result, **kwargs):
if result == 'success':
self._start_reversal()
else:
self._state = FINISHED
self._future.set_exception(SessionCancelledError())
def _on_local_mode_reversing(self, result, **kwargs):
if result == 'success':
self._state = FINISHED
self._future.set_exception(SessionCancelledError())
else:
# XXX
self._state = BROKEN
def on_req_local_mode(self, *args, **kwargs):
"""
.. note:: Internal use only
"""
with self._lock:
if self._state == RUNNING:
return self._on_local_mode_running(*args, **kwargs)
elif self._state == CANCELLING:
return self._on_local_mode_cancelling(*args, **kwargs)
elif self._state == REVERSING:
return self._on_local_mode_reversing(*args, **kwargs)
else:
raise Exception("invalid state")
def on_display_text(self, text):
if self._display_callback is not None:
self._display_callback(text)
def on_print_text(self, commands):
if self._print_callback is not None:
self._print_callback(commands)
def on_reset_timer(self, timeout):
pass
def cancel(self):
"""
:raises SessionCompletedError:
If session has already finished
"""
with self._lock:
if self._state == RUNNING:
self._state = CANCELLING
# non-blocking, don't wait for result
self._connection.request_cancel()
# block until session finishes
try:
self.result()
except SessionCancelledError:
# this is what we want
return
else:
raise CancelFailedError()
def result(self, timeout=None):
try:
return self._future.result(timeout=timeout)
except concurrent.futures.CancelledError as e:
raise SessionCancelledError() from e
def add_done_callback(self, fn):
return self._future.add_done_callback(fn)
def unbind(self):
try:
self.cancel()
except SessionCompletedError:
pass
|
Python
| 0.000002
|
4541b5edc808d77f53305eafca418d3be6715e8d
|
Cut 0.17.3
|
invocations/_version.py
|
invocations/_version.py
|
__version_info__ = (0, 17, 3)
__version__ = '.'.join(map(str, __version_info__))
|
__version_info__ = (0, 17, 2)
__version__ = '.'.join(map(str, __version_info__))
|
Python
| 0.000001
|
8d70bad3968cb11c929beafcef44b023822b886f
|
make interval adjustable in poll_request, and also remove check_response call duplication
|
stacktester/common/http.py
|
stacktester/common/http.py
|
from stacktester import exceptions
import httplib2
import os
import time
class Client(object):
USER_AGENT = 'python-nova_test_client'
def __init__(self, host='localhost', port=80, base_url=''):
#TODO: join these more robustly
self.base_url = "http://%s:%s/%s" % (host, port, base_url)
def poll_request(self, method, url, check_response, **kwargs):
timeout = kwargs.pop('timeout', 180)
interval = kwargs.pop('interval', 2)
# Start timestamp
start_ts = int(time.time())
while True:
resp, body = self.request(method, url, **kwargs)
if (check_response(resp, body)):
break
if (int(time.time()) - start_ts >= (timeout * 1000)):
raise exceptions.TimeoutException
time.sleep(interval)
def request(self, method, url, **kwargs):
self.http_obj = httplib2.Http()
params = {}
params['headers'] = {'User-Agent': self.USER_AGENT}
params['headers'].update(kwargs.get('headers', {}))
if 'Content-Type' not in params.get('headers',{}):
params['headers']['Content-Type'] = 'application/json'
if 'body' in kwargs:
params['body'] = kwargs.get('body')
req_url = "%s/%s" % (self.base_url, url)
resp, body = self.http_obj.request(req_url, method, **params)
return resp, body
|
from stacktester import exceptions
import httplib2
import os
import time
class Client(object):
USER_AGENT = 'python-nova_test_client'
def __init__(self, host='localhost', port=80, base_url=''):
#TODO: join these more robustly
self.base_url = "http://%s:%s/%s" % (host, port, base_url)
def poll_request(self, method, url, check_response, **kwargs):
timeout = kwargs.pop('timeout', 180)
# Start timestamp
start_ts = int(time.time())
resp, body = self.request(method, url, **kwargs)
while (not check_response(resp, body)):
if (int(time.time()) - start_ts >= (timeout * 1000)):
raise exceptions.TimeoutException
time.sleep(2)
resp, body = self.request(method, url, **kwargs)
def request(self, method, url, **kwargs):
self.http_obj = httplib2.Http()
params = {}
params['headers'] = {'User-Agent': self.USER_AGENT}
params['headers'].update(kwargs.get('headers', {}))
if 'Content-Type' not in kwargs.get('headers',{}):
params['headers']['Content-Type'] = 'application/json'
if 'body' in kwargs:
params['body'] = kwargs.get('body')
req_url = "%s/%s" % (self.base_url, url)
resp, body = self.http_obj.request(req_url, method, **params)
return resp, body
|
Python
| 0
|
a8fe56cd60296607f879dea86432532a5b40824a
|
Add a main method
|
dame/__init__.py
|
dame/__init__.py
|
from .dame import *
def main():
dame.main()
|
Python
| 0.998985
|
|
c005ce217f77aa185ad8916475463f2040a3dc67
|
clean up yaml generator.
|
iridium/core/trapper.py
|
iridium/core/trapper.py
|
from functools import wraps
from .logger import glob_logger
from iridium.config import config
from .exceptions import FunctionException
import yaml
from inspect import signature
def tracer(func):
"""
tracer will decorate a given function which allow users to step through
a function call on error.
:param func: Function which is to be wrapped.
:return: decorated function.
"""
import pdb
@wraps(func)
def wrapper(*args, **kwargs):
glob_logger.information("calling: {0} with these args: {1}".format(func.__name__, str(signature(func))))
try:
return func(*args, **kwargs)
except FunctionException as fne:
print('We catch a function: {0:s} with a value of: {1:s} doing something bad'.format(fne.func_name,
fne.value))
pdb.set_trace()
return wrapper
def trap(func):
"""
trap will return the name of the function and its arguments
as well as its return values.
:param func: function for which to decorate.
"""
@wraps(func)
def wrapper(*args, **kwargs):
collector(func.__name__, str(signature(func)), *args, **kwargs)
return func(*args, **kwargs)
return wrapper
def collector(fn_name, fn_args, *fn_args_val, **fn_kwargs_val):
"""
collector will format the return information from the
decorator 'trap' and place it into a simple yaml file.
:param fn_name:
:param fn_args:
:param fn_kwargs:
:return file creation status and new file.
"""
fh = open(config.iridium_function_calls['function_log'], mode='a')
fname = fh.name
if fh.mode != 'a':
raise "Please make sure %s is writable." % fname
fn_output = yaml.dump({'Function Attributes': {'Function Name': fn_name,
'Function Arguments': fn_args,
'Function Argument Values': str(fn_args_val),
'Function Keyword values': str(fn_kwargs_val)}},
indent=4, default_flow_style=False, explicit_start=True)
status = fh.write(fn_output)
if status > 0:
ret_val = "Data written to %s" % fname
fh.close()
else:
ret_val = "Please check %s data was not saved." % fname
fh.close()
return ret_val
|
from functools import wraps
from .logger import glob_logger
from iridium.config import config
from .exceptions import FunctionException
import yaml
from inspect import signature
def tracer(func):
"""
tracer will decorate a given function which allow users to step through
a function call on error.
:param func: Function which is to be wrapped.
:return: decorated function.
"""
import pdb
@wraps(func)
def wrapper(*args, **kwargs):
glob_logger.information("calling: {0} with these args: {1}".format(func.__name__, str(signature(func))))
try:
return func(*args, **kwargs)
except FunctionException as fne:
print('We catch a function: {0:s} with a value of: {1:s} doing something bad'.format(fne.func_name,
fne.value))
pdb.set_trace()
return wrapper
def trap(func):
"""
trap will return the name of the function and its arguments
as well as its return values.
:param func: function for which to decorate.
"""
@wraps(func)
def wrapper(*args, **kwargs):
collector(func.__name__, str(signature(func)), *args, **kwargs)
return func(*args, **kwargs)
return wrapper
def collector(fn_name, fn_args, *fn_args_val, **fn_kwargs_val):
"""
collector will format the return information from the
decorator 'trap' and place it into a simple yaml file.
:param fn_name:
:param fn_args:
:param fn_kwargs:
:return file creation status and new file.
"""
fh = open(config.iridium_function_calls['function_log'], mode='a')
fname = fh.name
if fh.mode != 'a':
raise "Please make sure %s is writable." % fname
fn_output = yaml.dump({'Function Attributes': {'Function Name': fn_name,
'Function Arguments': fn_args,
'Function Argument Values': str(fn_args_val),
'Function Keyword values': str(fn_kwargs_val)}},
indent=4, default_flow_style=False, explicit_start=True)
status = fh.write(fn_output)
if status > 0:
ret_val = "Data written to %s" % fname
fh.close()
else:
ret_val = "Please check %s data was not saved." % fname
fh.close()
return ret_val
|
Python
| 0
|
e6d0e3c2b01a28a3235d1de292f99328c77e6584
|
print usage
|
dnsquery.py
|
dnsquery.py
|
import sys
import csv
import time
import logging
from common import threadpool
import dns.resolver
import dns.message
import dns.rdataclass
import dns.rdatatype
import dns.query
import dns.exception
class DNSQueryTask(threadpool.Task):
def do(self):
qname, qtype, qcount, bdnsip = "", "", 0, ""
for arg in self.kargs:
if arg == "qtype":
qtype = self.kargs[arg]
elif arg == "qname":
qname = self.kargs[arg]
elif arg == "qcount":
qcount = int(self.kargs[arg])
elif arg == "bdnsip":
bdnsip = self.kargs[arg]
if (qname == "") or (qtype == "") or (qcount == 0) or (bdnsip == ""):
logging.error("Incorrect task!")
return False
resolver = dns.resolver.Resolver(configure=False)
resolver.nameservers = [bdnsip]
for i in range(qcount):
try:
time_start = time.perf_counter()
answer = resolver.query(qname, qtype)
time_performance = time.perf_counter() - time_start
for rr in answer:
if time_performance > 0:
logging.info("%02d %s %s %15s - performace = %3.3f sec", i, qname, qtype, rr, time_performance)
time_performance = 0
else:
logging.info(" %s %s %15s", qname, qtype, rr)
except dns.exception.DNSException:
time_performance = time.perf_counter() - time_start
logging.warning("Exception - performance = %3.3f sec", time_performance)
except Exception as ex:
print(ex)
return True
QUERY_FILE = "QueryFile"
DNS_IP = "DNSIP"
ARGUMENT_LIST = [
[QUERY_FILE, "-f", "<query_list_file>"],
[DNS_IP, "-s", "<DNS server IP>"]
]
def PrintUsage():
print("\npython dnsquery.py\n")
for argItem in ARGUMENT_LIST:
print(" ", argItem[1], argItem[2])
def GetArguments(argv):
arguments = dict()
idx, argc = 0, len(argv)
while idx < argc:
for argItem in ARGUMENT_LIST:
if (argv[idx] == argItem[1]) and (idx < argc - 1):
idx = idx + 1
arguments[argItem[0]] = argv[idx]
idx = idx + 1
if (QUERY_FILE not in arguments) or (DNS_IP not in arguments):
PrintUsage()
exit(0)
else:
return arguments
if __name__ == '__main__':
arguments = GetArguments(sys.argv)
logging.basicConfig(level=logging.DEBUG, format="%(asctime)s-%(thread)06d-%(levelname)s: %(message)s", datefmt="%Y%m%d-%H%M%S")
logging.info("dnsquery started...")
csvfile = open(arguments[QUERY_FILE])
reader = csv.reader(csvfile)
thdpool = threadpool.ThreadPool(20, 40)
thdpool.start_pool()
try:
for row in reader:
qtask = DNSQueryTask(qtype = row[0], qname = row[1], qcount = int(row[2]), bdnsip = arguments[DNS_IP])
thdpool.add_task(qtask)
except csv.Error as ex:
print(ex.args)
thdpool.wait_completion()
thdpool.stop_pool()
logging.info("dnsquery complete...")
|
import sys
import csv
import time
import logging
from common import threadpool
import dns.resolver
import dns.message
import dns.rdataclass
import dns.rdatatype
import dns.query
import dns.exception
class DNSQueryTask(threadpool.Task):
def do(self):
qname, qtype, qcount, bdnsip = "", "", 0, ""
for arg in self.kargs:
if arg == "qtype":
qtype = self.kargs[arg]
elif arg == "qname":
qname = self.kargs[arg]
elif arg == "qcount":
qcount = int(self.kargs[arg])
elif arg == "bdnsip":
bdnsip = self.kargs[arg]
if (qname == "") or (qtype == "") or (qcount == 0) or (bdnsip == ""):
logging.error("Incorrect task!")
return False
resolver = dns.resolver.Resolver(configure=False)
resolver.nameservers = [bdnsip]
for i in range(qcount):
try:
time_start = time.perf_counter()
answer = resolver.query(qname, qtype)
time_performance = time.perf_counter() - time_start
for rr in answer:
if time_performance > 0:
logging.info("%02d %s %s %15s - performace = %3.3f sec", i, qname, qtype, rr, time_performance)
time_performance = 0
else:
logging.info(" %s %s %15s", qname, qtype, rr)
except dns.exception.DNSException:
time_performance = time.perf_counter() - time_start
logging.warning("Exception - performance = %3.3f sec", time_performance)
except Exception as ex:
print(ex)
return True
QUERY_FILE = "QFile"
DNS_IP = "DNSIP"
ARGUMENT_LIST = [
[QUERY_FILE, "-f", "<query_list_file>"],
[DNS_IP, "-s", "<DNS server IP>"]
]
def PrintUsage():
print("python dnsquery.py -f <query_list.csv> -s <backend_dns_ip>")
def GetArguments(argv):
arguments = dict()
idx, argc = 0, len(argv)
while idx < argc:
for argItem in ARGUMENT_LIST:
if (argv[idx] == argItem[1]) and (idx < argc - 1):
idx = idx + 1
arguments[argItem[0]] = argv[idx]
idx = idx + 1
if (arguments[QUERY_FILE] == ""):
PrintUsage()
exit(0)
else:
return arguments
if __name__ == '__main__':
arguments = GetArguments(sys.argv)
logging.basicConfig(level=logging.DEBUG, format="%(asctime)s-%(thread)06d-%(levelname)s: %(message)s", datefmt="%Y%m%d-%H%M%S")
logging.info("dnsquery started...")
csvfile = open(arguments[QUERY_FILE])
reader = csv.reader(csvfile)
thdpool = threadpool.ThreadPool(20, 40)
thdpool.start_pool()
try:
for row in reader:
qtask = DNSQueryTask(qtype = row[0], qname = row[1], qcount = int(row[2]), bdnsip = arguments[DNS_IP])
thdpool.add_task(qtask)
except csv.Error as ex:
print(ex.args)
thdpool.wait_completion()
thdpool.stop_pool()
logging.info("dnsquery complete...")
|
Python
| 0.000003
|
083499cc0bb2ad443bbebb45d0e75bd0bc2df8b7
|
allow ssh key of any size
|
fig_leaf.py
|
fig_leaf.py
|
"""
Fig Leaf: Encrypt and decrypt data with ssh keys!
2017 maryx
Usage:
1. Run `pip install pycrypto`
2. To encrypt, run `python fig_leaf.py <path to file location> <path to output location> <path to public key>`
3. To decrypt, run `python fig_leaf.py <path to encrypted file location> <path to output location> <path to private key> --decrypt`
"""
import pickle
import argparse
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto import Random
from Crypto.PublicKey import RSA
def encrypt(data, public_key):
"""
Returns RSA-encrypted symmetric key concatenated with symmetrically-encrypted data.
"""
# Symmetrically encrypt data
initialization_vector = Random.new().read(AES.block_size)
symmetric_key = Random.get_random_bytes(AES.key_size[2])
cipher = AES.new(symmetric_key, AES.MODE_CFB, initialization_vector)
encrypted_data = initialization_vector + cipher.encrypt(data)
# RSA-encrypt symmetric key
public_key = RSA.importKey(public_key)
rsa_cipher = PKCS1_OAEP.new(public_key)
encrypted_symmetric_key = rsa_cipher.encrypt(symmetric_key)
return [encrypted_symmetric_key, encrypted_data]
def decrypt(encrypted_data, private_key):
"""
Given RSA-encrypted symmetric key and symmetrically-encrypted data, returns original data.
"""
encrypted_symmetric_key = encrypted_data[0]
symmetrically_encrypted_data = encrypted_data[1]
# Decrypt RSA-encrypted symmetric key
private_key = RSA.importKey(private_key)
rsa_cipher = PKCS1_OAEP.new(private_key)
symmetric_key = rsa_cipher.decrypt(encrypted_symmetric_key)
# Decrypt symmetrically-encrypted data
initialization_vector = Random.new().read(AES.block_size)
aes_cipher = AES.new(symmetric_key, AES.MODE_CFB, initialization_vector)
decrypted_data = aes_cipher.decrypt(symmetrically_encrypted_data)
decrypted_data = decrypted_data[16:] # first 16 are extraneous
return decrypted_data
def command_line_arg_parser():
"""
Command line argument parser. Encrypts by default. Decrypts when --decrypt flag is passed in.
"""
parser = argparse.ArgumentParser(description='Parses input args')
parser.add_argument('input_file', type=str,
help='Path to input file location')
parser.add_argument('output_file', type=str, default='./output_data',
help='Path to output file location')
parser.add_argument('key_file', type=str,
help='Path to public or private key file')
parser.add_argument('--decrypt', dest='decrypt', action='store_true',
help='Private key file (for decryption)')
return parser
def main():
parser = command_line_arg_parser()
args = parser.parse_args()
input_file_location = args.input_file
output_file_location = args.output_file
with open(args.key_file, 'rb') as f:
key = f.read()
# decrypting
if args.decrypt:
with open(input_file_location, 'rb') as f:
encrypted_data = pickle.load(f)
decrypted_data = decrypt(encrypted_data, key)
with open(output_file_location, 'wb') as f:
f.write(decrypted_data)
print('Decrypted data to %s' % output_file_location)
# encrypting
else:
with open(input_file_location, 'rb') as f:
data = f.read()
encrypted_data = encrypt(data, key)
with open(output_file_location, 'wb') as f:
pickle.dump(encrypted_data, f)
print('Encrypted data to %s' % output_file_location)
if __name__ == '__main__':
main()
|
"""
Fig Leaf: Encrypt and decrypt data with ssh keys!
2017 maryx
Usage:
1. Run `pip install pycrypto`
2. To encrypt, run `python fig_leaf.py <path to file location> <path to output location> <path to public key>`
3. To decrypt, run `python fig_leaf.py <path to encrypted file location> <path to output location> <path to private key> --decrypt`
"""
import argparse
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto import Random
from Crypto.PublicKey import RSA
def encrypt(data, public_key):
"""
Returns RSA-encrypted symmetric key concatenated with symmetrically-encrypted data.
"""
# Symmetrically encrypt data
initialization_vector = Random.new().read(AES.block_size)
symmetric_key = Random.get_random_bytes(AES.key_size[2])
cipher = AES.new(symmetric_key, AES.MODE_CFB, initialization_vector)
encrypted_data = initialization_vector + cipher.encrypt(data)
# RSA-encrypt symmetric key
public_key = RSA.importKey(public_key)
rsa_cipher = PKCS1_OAEP.new(public_key)
encrypted_symmetric_key = rsa_cipher.encrypt(symmetric_key)
return encrypted_symmetric_key + encrypted_data
def decrypt(encrypted_data, private_key):
"""
Given RSA-encrypted symmetric key and symmetrically-encrypted data, returns original data.
"""
encrypted_symmetric_key = encrypted_data[0:512]
symmetrically_encrypted_data = encrypted_data[512:]
# Decrypt RSA-encrypted symmetric key
private_key = RSA.importKey(private_key)
rsa_cipher = PKCS1_OAEP.new(private_key)
symmetric_key = rsa_cipher.decrypt(encrypted_symmetric_key)
# Decrypt symmetrically-encrypted data
initialization_vector = Random.new().read(AES.block_size)
aes_cipher = AES.new(symmetric_key, AES.MODE_CFB, initialization_vector)
decrypted_data = aes_cipher.decrypt(symmetrically_encrypted_data)
decrypted_data = decrypted_data[16:] # first 16 are extraneous
return decrypted_data
def command_line_arg_parser():
"""
Command line argument parser. Encrypts by default. Decrypts when --decrypt flag is passed in.
"""
parser = argparse.ArgumentParser(description='Parses input args')
parser.add_argument('input_file', type=str,
help='Path to input file location')
parser.add_argument('output_file', type=str, default='./output_data',
help='Path to output file location')
parser.add_argument('key_file', type=str,
help='Path to public or private key file')
parser.add_argument('--decrypt', dest='decrypt', action='store_true',
help='Private key file (for decryption)')
return parser
def main():
parser = command_line_arg_parser()
args = parser.parse_args()
input_file_location = args.input_file
output_file_location = args.output_file
with open(args.key_file, 'rb') as f:
key = f.read()
# decrypting
if args.decrypt:
with open(input_file_location, 'rb') as f:
encrypted_data = f.read()
decrypted_data = decrypt(encrypted_data, key)
with open(output_file_location, 'wb') as f:
f.write(decrypted_data)
print('Decrypted data to %s' % output_file_location)
# encrypting
else:
with open(input_file_location, 'rb') as f:
data = f.read()
encrypted_data = encrypt(data, key)
with open(output_file_location, 'wb') as f:
f.write(encrypted_data)
print('Encrypted data to %s' % output_file_location)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
1cbe7b335405e6294fcbca792914932f7226ac9b
|
Fix entities API
|
openfisca_web_api/controllers/entities.py
|
openfisca_web_api/controllers/entities.py
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Entities controller"""
import collections
from .. import contexts, conv, model, wsgihelpers
@wsgihelpers.wsgify
def api1_entities(req):
def build_entity_data(entity_class):
entity_data = {
'isPersonsEntity': entity_class.is_persons_entity,
'label': entity_class.label,
'nameKey': entity_class.name_key,
}
if hasattr(entity_class, 'roles_key'):
entity_data.update({
'maxCardinalityByRoleKey': entity_class.max_cardinality_by_role_key,
'roles': entity_class.roles_key,
'labelByRoleKey': entity_class.label_by_role_key,
})
return entity_data
ctx = contexts.Ctx(req)
headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx)
assert req.method == 'GET', req.method
params = req.GET
inputs = dict(
context = params.get('context'),
)
data, errors = conv.pipe(
conv.struct(
dict(
context = conv.noop, # For asynchronous calls
),
default = 'drop',
),
)(inputs, state = ctx)
if errors is not None:
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = '1.0',
context = inputs.get('context'),
error = collections.OrderedDict(sorted(dict(
code = 400, # Bad Request
errors = [conv.jsonify_value(errors)],
message = ctx._(u'Bad parameters in request'),
).iteritems())),
method = req.script_name,
params = inputs,
url = req.url.decode('utf-8'),
).iteritems())),
headers = headers,
)
entities_class = model.tax_benefit_system.entity_class_by_key_plural.itervalues()
entities = collections.OrderedDict(sorted({
entity_class.key_plural: build_entity_data(entity_class)
for entity_class in entities_class
}.iteritems()))
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = '1.0',
context = data['context'],
entities = entities,
method = req.script_name,
params = inputs,
).iteritems())),
headers = headers,
)
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Entities controller"""
import collections
from .. import contexts, model, wsgihelpers
@wsgihelpers.wsgify
def api1_entities(req):
def build_entity_data(entity_class):
entity_data = {
'isPersonsEntity': entity_class.is_persons_entity,
'label': entity_class.label,
'nameKey': entity_class.name_key,
}
if hasattr(entity_class, 'roles_key'):
entity_data.update({
'maxCardinalityByRoleKey': entity_class.max_cardinality_by_role_key,
'roles': entity_class.roles_key,
'labelByRoleKey': entity_class.label_by_role_key,
})
return entity_data
ctx = contexts.Ctx(req)
headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx)
assert req.method == 'GET', req.method
entities_class = model.tax_benefit_system.entity_class_by_key_plural.itervalues()
data = collections.OrderedDict(sorted({
entity_class.key_plural: build_entity_data(entity_class)
for entity_class in entities_class
}.iteritems()))
return wsgihelpers.respond_json(ctx, data, headers = headers)
|
Python
| 0.000035
|
f5e65b648d632f2e75dffe7943ed3e7105b21d7f
|
Remove GCS patch fixed upstream in te upstream library
|
core/polyaxon/fs/gcs.py
|
core/polyaxon/fs/gcs.py
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gcsfs import GCSFileSystem as BaseGCSFileSystem
from polyaxon.connections.gcp.base import get_gc_credentials, get_project_id
class GCSFileSystem(BaseGCSFileSystem):
retries = 3
def get_fs(
context_path: str = None,
asynchronous: bool = False,
use_listings_cache: bool = False,
**kwargs
):
return GCSFileSystem(
project=get_project_id(context_path=context_path, **kwargs),
token=get_gc_credentials(context_path=context_path, **kwargs),
asynchronous=asynchronous,
use_listings_cache=use_listings_cache,
)
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import aiofiles
from gcsfs import GCSFileSystem as BaseGCSFileSystem
from gcsfs.checkers import get_consistency_checker
from gcsfs.retry import retry_request, validate_response
from polyaxon.connections.gcp.base import get_gc_credentials, get_project_id
class GCSFileSystem(BaseGCSFileSystem):
retries = 3
@retry_request(retries=retries)
async def _get_file_request(self, rpath, lpath, *args, headers=None, **kwargs):
consistency = kwargs.pop("consistency", self.consistency)
async with self.session.get(
url=rpath,
params=self._get_params(kwargs),
headers=self._get_headers(headers),
timeout=self.requests_timeout,
) as r:
r.raise_for_status()
checker = get_consistency_checker(consistency)
os.makedirs(os.path.dirname(lpath), exist_ok=True)
async with aiofiles.open(lpath, "wb") as f2:
while True:
data = await r.content.read(4096 * 32)
if not data:
break
await f2.write(data)
checker.update(data)
# validate http request
validate_response(r.status, data, rpath)
checker.validate_http_response(r) # validate file consistency
return r.status, r.headers, r.request_info, data
async def _get_file(self, rpath, lpath, callback=None, **kwargs):
# TODO: Remove when https://github.com/dask/gcsfs/issues/433 is fixed
if await self._isdir(rpath):
return
await super()._get_file(rpath, lpath, callback=callback, **kwargs)
def get_fs(
context_path: str = None,
asynchronous: bool = False,
use_listings_cache: bool = False,
**kwargs
):
return GCSFileSystem(
project=get_project_id(context_path=context_path, **kwargs),
token=get_gc_credentials(context_path=context_path, **kwargs),
asynchronous=asynchronous,
use_listings_cache=use_listings_cache,
)
|
Python
| 0
|
479b414fd3d93bcf6dfabe61494c8e958ea60f08
|
Rewrite of basic methods for food sort class.
|
ucrfood/food_sort.py
|
ucrfood/food_sort.py
|
from urllib.parse import urlparse, parse_qs, quote
from typing import TypeVar, Generic
from datetime import datetime
from requests import get
from hashlib import md5
class FoodSort:
url_types = TypeVar('url_types', str, list)
def __init__(self, urls: Generic[url_types], check_data: bool):
self.__menu_objects = []
self.__serialized_menus = []
if isinstance(urls, str):
self.__urls = [{'url': urls, 'content': None}]
elif isinstance(urls, list):
self.__urls = [{'url': i, 'content': None} for i in urls]
else:
raise TypeError('Url is not an instance or list or str.')
@staticmethod
def __pull_page(url: str) -> str:
"""Gets the page from the given url and returns the page content.
:param url: url to get page content from.
:return: page content.
"""
return get(url).content
@staticmethod
def __get_page_sum(page_content: str) -> str:
"""Given a string containing the relevant page content, return the md5sum of said page.
This is helpful for not parsing the page again when a copy exists in the database.
:param page_content: string representing the page content.
:return: md5sum of page_content.
"""
m = md5()
# Update the md5 parser with the content of the page and return the hex digest.
m.update(page_content)
return m.hexdigest()
@staticmethod
def __get_parameters(url: str, parameter: str, index: int) -> str:
"""Gets a specific parameter from the given url.
:param url: url to parse.
:param parameter: parameter to get from url.
:param index: index in resulting list from getting parse_qs dict.
:return: parameter set value.
"""
return parse_qs(urlparse(url).query).get(parameter)[index]
def __create_single_menu_serial(self, url_entry: dict) -> dict:
"""Creates base dictionary with menus, location date, time data, url, and page sum.
:param url_entry: dict containing page url and content.
:return: dictionary with data shown below.
"""
# Declare dictionary.
serial = dict()
# Create empty list with menus.
serial['menus'] = []
# Create sub duct with location name and number.
serial['location'] = {}
serial['location']['name'] = self.__get_parameters(url_entry.get('url'), 'locationname', 0)
serial['location']['num'] = self.__get_parameters(url_entry.get('url'), 'locationnum', 0)
# Create sub dict with generation, update time and menu date.
serial['time_info'] = {}
serial['time_info']['gen'] = str(datetime.now())
serial['time_info']['update'] = None
serial['time_info']['menu_date'] = self.__get_parameters(url_entry.get('url'),
'dtdate',
0).replace('/', '-')
# Source url and page sum.
serial['url'] = quote(url_entry.get('url'), safe='')
serial['sum'] = self.__get_page_sum(url_entry.get('content'))
return serial
|
# Imports:
from datetime import datetime
from urllib.parse import urlparse, parse_qs, quote
from bs4 import BeautifulSoup
import requests
import uuid
import re
class FoodSort(object):
"""
Description: grabs dining hall menu web page and restructures it to an object format.
Methods:
- _daily_menu_tree : grabs the web page, generated the tree, and returns the menu sections.
- sort_data : parses the tree and returns an object sorted by menu_section -> dining hall food section -> menu item.
- _add_base_data : adds useful data to object and sets up structure of object.
"""
def __init__(self, url: str, check_data: bool = True):
# Initial class variables.
self.url = url
self.daily_menu = None
self.tree_data = {}
# Check if function should be run. If it should, check validity of passed URL:
if check_data:
self._check_url_format()
else:
pass
# Generates header info for daily_menu object:
self._add_base_data()
# Automatically start sorting data when the constructor is called.
self.sort_data()
def _check_url_format(self):
"""
Function checks to make sure url has correct query strings.
"""
# List of url parameters to check.
url_parameters = ['dtdate', 'locationnum', 'locationname']
# Parse url for query strings.
parsed_url = parse_qs(urlparse(self.url).query)
# Check that all correct query strings are in passed url.
for parameter in url_parameters:
if parameter in parsed_url:
continue
else:
raise Exception('URL does not contain proper query strings.')
def _add_base_data(self) -> object:
"""
Generates useful information for JSON object.
- data : object containing parsed data from sort_data method.
- location_data :
- location_name : name of dining hall location.
- location_num : dining hall location number.
- generated_time : time the object was originally created.
- update_time : in case menu changes and object needs to be updated.
- source_url : encoded URL from which data was extracted.
- menu_date : date for menu.
"""
# Create data lists/objects:
self.tree_data['data'] = []
self.tree_data['location_data'] = {}
# Add data as described above:
self.tree_data['location_data']['location_name'] = parse_qs(urlparse(self.url).query).get('locationname')[0]
self.tree_data['location_data']['location_num'] = parse_qs(urlparse(self.url).query).get('locationnum')[0]
self.tree_data['generated_time'] = str(datetime.now())
self.tree_data['update_time'] = None
self.tree_data['uuid'] = str(uuid.uuid4())
self.tree_data['source_url'] = quote(self.url, safe='')
self.tree_data['menu_date'] = parse_qs(urlparse(self.url).query).get('dtdate')[0].replace('/', '-')
def _daily_menu_tree(self) -> object:
# Grabs web page and returns the tree.
page = requests.get(self.url)
html_tree = BeautifulSoup(page.content, 'html.parser')
self.daily_menu = html_tree.find_all('td', attrs={'width': '30%'})
def sort_data(self) -> dict:
# Grab page and generate tree.
self._daily_menu_tree()
for dining_section in self.daily_menu:
# Resulting object-based data structure.
food_dict = {}
# Filters html_tree for section names and menu items. List comprehension gets the text from each
# element in the list.
unordered_items = [el.get_text() for el in dining_section.find_all(re.compile('a[name="Recipe_Desc"]'))]
"""
Main loop:
Filters menu items from dining hall food sections. Assigns menu items to respective
dining hall food sections.
"""
for idx, item in enumerate(unordered_items):
if item[:2] == '--':
sub_menu_items = []
while True:
count = 1
try:
if not unordered_items[idx + count]:
# Skip empty elements.
count += 1
continue
elif unordered_items[idx + count][:2] != '--':
# Remove duplicate whitespaces & strip extraneous & most special characters:
sub_menu_items.append(re.sub(' +', ' ',
re.sub('[^a-zA-Z0-9-() *.]', '',
unordered_items[idx + count])))
# Delete item from master list.
del unordered_items[idx + count]
count += 1
else:
break
except IndexError:
# When the end of the list is reached, stop the while loop.
break
# Menu item to dining hall food section assignment.
food_dict[item[3:-3]] = sub_menu_items
# Set tree data to food_dict.
self.tree_data['data'].append({'type': dining_section.find('div', class_='shortmenumeals').get_text(),
'content': food_dict})
|
Python
| 0.000001
|
cb92a3cf67557fbd4a629601490a74bdb2119935
|
add print_list method to dijkstra
|
dijkstra.py
|
dijkstra.py
|
# -*- coding: utf-8 -*-
class Dijkstra:
def __init__(self, adj, start):
self.adj = adj
self.s = start
self.dists = [0 for x in range(len(adj))]
# Liefert minimales Element > 0
def minweight(self, verts):
return min([x for x in verts if x>0])
# Baut liste der Entfernungen von s ausgehend auf
def dist_list(self):
i = s
for v in adj[i]:
if v>0:
self.dists[adj[s].index(v)] = v
# Ausgabe der kürzesten Wege von Knoten s zu alle anderen Knoten
def print_list(self):
print("Distance from Node "+ str(adj[self.s]) + " to:" )
for node in range(len(self.adj)):
print("\t\tNode "+str(adj[node])+ ": " + str(self.dists[node]))
|
# -*- coding: utf-8 -*-
class Dijkstra:
def __init__(self, adj, start):
self.adj = adj
self.s = start
self.dists = [0 for x in range(len(adj))]
# Liefert minimales Element > 0
def minweight(self, verts):
return min([x for x in verts if x>0])
# Baut liste der Entfernungen von s ausgehend auf
def dist_list(self):
i = s
for v in adj[i]:
if v>0:
self.dists[adj[s].index(v)] = v
|
Python
| 0.000001
|
ed42fa81e1029633f6b6f426c437df0c55262922
|
Fix LabHubApp.
|
jupyterlab/labhubapp.py
|
jupyterlab/labhubapp.py
|
import os
import warnings
from traitlets import default
from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
@default("default_url")
def _default_url(self):
"""when using jupyter-labhub, jupyterlab is default ui"""
return "/lab"
def init_webapp(self, *args, **kwargs):
warnings.warn(
"SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + \
"c.Spawner.default_url = '/lab' in jupyterhub_config.py", DeprecationWarning
)
super().init_webapp(*args, **kwargs)
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
|
import os
from traitlets import default
from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
@default("default_url")
def _default_url(self):
"""when using jupyter-labhub, jupyterlab is default ui"""
return "/lab"
def init_webapp(self, *args, **kwargs):
warnings.warn(
"SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + "c.Spawner.default_url = '/lab' in jupyterhub_config.py", DeprecationWarning
)
super().init_webapp(*args, **kwargs)
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
|
Python
| 0
|
f4c1093616d08bd4abcb5ddc030b59d863dcec05
|
Change netapi to use processmanager
|
salt/client/netapi.py
|
salt/client/netapi.py
|
# encoding: utf-8
'''
The main entry point for salt-api
'''
# Import python libs
import logging
import multiprocessing
import signal
import os
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
|
# encoding: utf-8
'''
The main entry point for salt-api
'''
# Import python libs
import logging
import multiprocessing
import signal
import os
# Import salt-api libs
import salt.loader
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
# pid -> {fun: foo, Process: object}
self.pid_map = {}
self.netapi = salt.loader.netapi(self.opts)
def add_process(self, fun):
'''
Start a netapi child process of "fun"
'''
p = multiprocessing.Process(target=self.netapi[fun])
p.start()
logger.info("Started '{0}' api module with pid {1}".format(fun, p.pid))
self.pid_map[p.pid] = {'fun': fun,
'Process': p}
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
self.add_process(fun)
# make sure to kill the subprocesses if the parent is killed
signal.signal(signal.SIGTERM, self.kill_children)
while True:
pid, exit_status = os.wait()
if pid not in self.pid_map:
logger.info(('Process of pid {0} died, not a known netapi'
' process, will not restart').format(pid))
continue
logger.info(('Process {0} ({1}) died with exit status {2},'
' restarting...').format(self.pid_map[pid]['fun'],
pid,
exit_status))
self.pid_map[pid]['Process'].join(1)
self.add_process(self.pid_map[pid]['fun'])
del self.pid_map[pid]
def kill_children(self, *args):
'''
Kill all of the children
'''
for pid, p_map in self.pid_map.items():
p_map['Process'].terminate()
p_map['Process'].join()
del self.pid_map[pid]
|
Python
| 0
|
72b3d998a14388be98c73556df1cd20859a71573
|
remove invalid data
|
signal_receive.py
|
signal_receive.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2013 KuoE0 <kuoe0.tw@gmail.com>
#
# Distributed under terms of the MIT license.
"""
"""
import tornado.httpserver
import tornado.ioloop
import tornado.web
import serial
import signal
import sys
import json
tornado_port = 8888
# create serial object
serial_port = sys.argv[1]
serial_baudrate = int(sys.argv[2])
ser = serial.Serial(serial_port, serial_baudrate, timeout=1)
# global variable
number_of_signal = 1000
serial_pending = list()
signals = [[0] * 6] * number_of_signal
signal_type = ['x-acc', 'y-acc', 'z-acc', 'x-gyro', 'y-gyro', 'z-gyro']
# SIGINT handler to close serial connection
def handler_SIGINT(signum, frame):
global ser
print "Signal {0} happened!".format(signum)
print "Serial connection closed..."
ser.close()
signal.signal(signal.SIGINT, handler_SIGINT)
# receive signal with a non-blocking way
def recieve_signal():
data = ""
try:
if ser.inWaiting() != 0:
data = ser.readline()
print data
except Exception as e:
error_msg = "Error reading from {0}".format(serial_port)
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(e).__name__, e.args)
print error_msg, message
if len(data):
parse_pending(data)
# parse out the signal value
def parse_pending(signal_string):
global signals
try:
# split by ',' and get first element
values = [int(x) for x in signal_string.split(',')]
except:
values = None
# push signal into list
if values and len(values) == 6:
signals.append(values)
# tornado web handler
class query_signal_handler(tornado.web.RequestHandler):
def get(self, url='/'):
print 'get'
# get the name of callback parameter
callback_func = self.get_argument('callback')
self.handle_request(callback_func)
# return signals
def handle_request(self, callback):
global signals
global number_of_signal
# retrieve signal needed
ret_signals = signals[:number_of_signal]
# transpose the list
ret_signals = zip(*ret_signals)
# create list of dict
ret = list()
for i in xrange(6):
ret.append({ 'data': [p for p in enumerate(ret_signals[i])], 'label': signal_type[i] })
# convert to JSON format
ret = json.dumps({'data': ret})
# convert to JSONP format
ret = '{0}({1})'.format(callback, ret)
# set content type
self.set_header("Content-Type", "application/json")
# write data
self.write(ret)
# remove first element to realtime plot
signals.pop(0)
application = tornado.web.Application([(r"/", query_signal_handler),])
if __name__ == "__main__":
#tell tornado to run checkSerial every 50 ms
serial_loop = tornado.ioloop.PeriodicCallback(recieve_signal, 10)
serial_loop.start()
application.listen(tornado_port)
print "Starting server on port number {0}...".format(tornado_port)
print "Open at http://localhost:{0}/".format(tornado_port)
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
print 'Server closed...'
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2013 KuoE0 <kuoe0.tw@gmail.com>
#
# Distributed under terms of the MIT license.
"""
"""
import tornado.httpserver
import tornado.ioloop
import tornado.web
import serial
import signal
import sys
import json
tornado_port = 8888
# create serial object
serial_port = sys.argv[1]
serial_baudrate = int(sys.argv[2])
ser = serial.Serial(serial_port, serial_baudrate, timeout=1)
# global variable
number_of_signal = 1000
serial_pending = list()
signals = [[0] * 6] * number_of_signal
signal_type = ['x-acc', 'y-acc', 'z-acc', 'x-gyro', 'y-gyro', 'z-gyro']
# SIGINT handler to close serial connection
def handler_SIGINT(signum, frame):
global ser
print "Signal {0} happened!".format(signum)
print "Serial connection closed..."
ser.close()
signal.signal(signal.SIGINT, handler_SIGINT)
# receive signal with a non-blocking way
def recieve_signal():
try:
if ser.inWaiting() != 0:
data = ser.readline()
print data
except Exception as e:
print "Error reading from {0}".format(serial_port)
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(e).__name__, e.args)
print message
if len(data):
parse_pending(data)
# parse out the signal value
def parse_pending(signal_string):
global signals
# split by ',' and get first element
values = [int(x) for x in signal_string.split(',')]
# push signal into list
if len(values) == 6:
signals.append(values)
# tornado web handler
class query_signal_handler(tornado.web.RequestHandler):
def get(self, url='/'):
print 'get'
# get the name of callback parameter
callback_func = self.get_argument('callback')
self.handle_request(callback_func)
# return signals
def handle_request(self, callback):
global signals
global number_of_signal
# retrieve signal needed
ret_signals = signals[:number_of_signal]
# transpose the list
ret_signals = zip(*ret_signals)
# create list of dict
ret = list()
for i in xrange(6):
ret.append({ 'data': [p for p in enumerate(ret_signals[i])], 'label': signal_type[i] })
# convert to JSON format
ret = json.dumps({'data': ret})
# convert to JSONP format
ret = '{0}({1})'.format(callback, ret)
# set content type
self.set_header("Content-Type", "application/json")
# write data
self.write(ret)
# remove first element to realtime plot
signals.pop(0)
application = tornado.web.Application([(r"/", query_signal_handler),])
if __name__ == "__main__":
#tell tornado to run checkSerial every 50 ms
serial_loop = tornado.ioloop.PeriodicCallback(recieve_signal, 10)
serial_loop.start()
application.listen(tornado_port)
print "Starting server on port number {0}...".format(tornado_port)
print "Open at http://localhost:{0}/".format(tornado_port)
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
print 'Server closed...'
|
Python
| 0.001233
|
32e1ff21562c451ab790c0af077b3760855f1e6b
|
use slider component for bmi gui
|
ExPy/ExPy/module19.py
|
ExPy/ExPy/module19.py
|
""" BMI Calculator """
from tkinter import HORIZONTAL
import rx
import tkcomponents
def calculate_bmi(weight, height):
""" Given weight (pounds), height (inches)
Return BMI
"""
return (weight / (height * height)) * 703.
def bmi_recommendation(bmi):
"""Given a BMI, return a recommendation"""
if bmi < 18.5:
return 'You are underweight. You should see a doctor.'
elif bmi < 25:
return 'You are within the ideal weight range.'
return 'You are overweight. You should see a doctor.'
def prompt_float(prompt):
""" Given a specified prompt, return a float """
while True:
try:
return float(input(prompt))
except ValueError:
print('Enter a valid number')
def ex19():
""" Prompt for weight and height
Print BMI and BMI range
"""
weight = prompt_float('Enter weight in pounds(lbs): ')
height = prompt_float('Enter height in inches: ')
bmi = calculate_bmi(weight, height)
recommendation = bmi_recommendation(bmi)
print('Your BMI is {}'.format(bmi))
print(recommendation)
def ex19gui():
""" GUI version of BMI """
root = tkcomponents.create('BMI')
options = {
'Imperial': ('pounds(lbs)', 'inches'),
'Metric': ('kilograms(kg)', 'centimeters(cm)')
}
systems = tkcomponents.radio_stream(root, [(x, x) for x in options], 0, default='Imperial')
weight_labels = systems.map(lambda x: 'Enter weight in {}'.format(options[x][0]))
weights = tkcomponents.scale_stream(root, weight_labels, 1, from_=1, to=500, orient=HORIZONTAL, default=160)
height_labels = systems.map(lambda x: 'Enter height in {}'.format(options[x][1]))
heights = tkcomponents.scale_stream(root, height_labels, 2, from_=1, to=500, orient=HORIZONTAL, default=68)
def callback(system, weight, height):
"""Given a system, a weight, and a height
Calculate BMI"""
weight = float(weight)
height = float(height)
if system == 'Imperial':
return calculate_bmi(weight, height)
weight_lbs = weight * 2.20462
height_in = height * 0.393701
return calculate_bmi(weight_lbs, height_in)
#pylint: disable=E1101
bmis = rx.Observable.combine_latest(systems, weights, heights, callback)
tkcomponents.output_label(root, bmis.map('Your BMI is {}'.format), 3)
tkcomponents.output_label(root, bmis.map(bmi_recommendation), 4)
root.mainloop()
if __name__ == '__main__':
ex19gui()
|
""" BMI Calculator """
import rx
import tkcomponents
def calculate_bmi(weight, height):
""" Given weight (pounds), height (inches)
Return BMI
"""
return (weight / (height * height)) * 703.
def bmi_recommendation(bmi):
"""Given a BMI, return a recommendation"""
if bmi < 18.5:
return 'You are underweight. You should see a doctor.'
elif bmi < 25:
return 'You are within the ideal weight range.'
return 'You are overweight. You should see a doctor.'
def prompt_float(prompt):
""" Given a specified prompt, return a float """
while True:
try:
return float(input(prompt))
except ValueError:
print('Enter a valid number')
def ex19():
""" Prompt for weight and height
Print BMI and BMI range
"""
weight = prompt_float('Enter weight in pounds(lbs): ')
height = prompt_float('Enter height in inches: ')
bmi = calculate_bmi(weight, height)
recommendation = bmi_recommendation(bmi)
print('Your BMI is {}'.format(bmi))
print(recommendation)
def ex19gui():
""" GUI version of BMI """
root = tkcomponents.create('BMI')
options = {
'Imperial': ('pounds(lbs)', 'inches'),
'Metric': ('kilograms(kg)', 'centimeters(cm)')
}
systems = tkcomponents.radio_stream(root, [(x, x) for x in options], 0, default='Imperial')
weight_labels = systems.map(lambda x: 'Enter weight in {}'.format(options[x][0]))
weights = tkcomponents.input_stream(root, weight_labels, 1)
height_labels = systems.map(lambda x: 'Enter height in {}'.format(options[x][1]))
heights = tkcomponents.input_stream(root, height_labels, 2)
def callback(system, weight, height):
"""Given a system, a weight, and a height
Calculate BMI"""
try:
weight = float(weight)
except ValueError:
return (False, 'Enter a valid weight')
try:
height = float(height)
except ValueError:
return (False, 'Enter a valid height')
if system == 'Imperial':
return (True, calculate_bmi(weight, height))
weight_lbs = weight * 2.20462
height_in = height * 0.393701
return (True, calculate_bmi(weight_lbs, height_in))
#pylint: disable=E1101
bmis = rx.Observable.combine_latest(systems, weights, heights, callback)
def bmi_output(bmi):
""" If the BMI could successfully be converted then display it
Otherwise display the error
"""
okay, value = bmi
if okay:
return 'Your BMI is {}'.format(value)
return value
tkcomponents.output_label(root, bmis.map(bmi_output), 3)
def bmi_recommendation_output(bmi):
""" If the BMI could successfully be converted then display
the recommendation
Otherwise display the error
"""
okay, value = bmi
if okay:
return bmi_recommendation(value)
return ''
tkcomponents.output_label(root, bmis.map(bmi_recommendation_output), 4)
root.mainloop()
if __name__ == '__main__':
ex19gui()
|
Python
| 0
|
89f8d0ebe01e188b5a043dfbf891cf3a3bca0504
|
Clarify that event is sent up to the master
|
salt/modules/event.py
|
salt/modules/event.py
|
'''
Fire events on the minion, events can be fired up to the master
'''
# Import salt libs
import salt.crypt
import salt.utils.event
import salt.payload
def fire_master(data, tag):
'''
Fire an event off up to the master server
CLI Example::
salt '*' event.fire_master 'stuff to be in the event' 'tag'
'''
load = {'id': __opts__['id'],
'tag': tag,
'data': data,
'cmd': '_minion_event'}
auth = salt.crypt.SAuth(__opts__)
sreq = salt.payload.SREQ(__opts__['master_uri'])
try:
sreq.send('aes', auth.crypticle.dumps(load))
except Exception:
pass
return True
def fire(data, tag):
'''
Fire an event on the local minion event bus
CLI Example::
salt '*' event.fire 'stuff to be in the event' 'tag'
'''
return salt.utils.event.MinionEvent(**__opts__).fire_event(data, tag)
|
'''
Fire events on the minion, events can be fired up to the master
'''
# Import salt libs
import salt.crypt
import salt.utils.event
import salt.payload
def fire_master(data, tag):
'''
Fire an event off on the master server
CLI Example::
salt '*' event.fire_master 'stuff to be in the event' 'tag'
'''
load = {'id': __opts__['id'],
'tag': tag,
'data': data,
'cmd': '_minion_event'}
auth = salt.crypt.SAuth(__opts__)
sreq = salt.payload.SREQ(__opts__['master_uri'])
try:
sreq.send('aes', auth.crypticle.dumps(load))
except Exception:
pass
return True
def fire(data, tag):
'''
Fire an event on the local minion event bus
CLI Example::
salt '*' event.fire 'stuff to be in the event' 'tag'
'''
return salt.utils.event.MinionEvent(**__opts__).fire_event(data, tag)
|
Python
| 0.001957
|
e6d327a5249e14765999357a391d97c4fd2cd8b8
|
Test for sitemap.xml
|
skeleton/tests.py
|
skeleton/tests.py
|
from django.core import management
from django.test import TestCase as BaseTestCase
from django.test.client import Client as BaseClient, FakePayload, \
RequestFactory
from django.core.urlresolvers import reverse
from post.models import Post
from foundry.models import Member
class TestCase(BaseTestCase):
@classmethod
def setUpClass(cls):
cls.request = RequestFactory()
cls.client = BaseClient()
# Post-syncdb steps
management.call_command('load_photosizes', interactive=False)
management.call_command('loaddata', 'skeleton/fixtures/sites.json', interactive=False)
# Editor
cls.editor, dc = Member.objects.get_or_create(
username='editor',
email='editor@test.com'
)
cls.editor.set_password("password")
cls.editor.save()
# Post
post, dc = Post.objects.get_or_create(
title='Post 1', content='<b>aaa</b>',
owner=cls.editor, state='published',
)
post.sites = [1]
post.save()
def test_common_urls(self):
"""High-level test to confirm common set of URLs render"""
urls = (
(reverse('join'), 200),
(reverse('login'), 200),
(reverse('logout'), 302),
(reverse('password_reset'), 200),
(reverse('terms-and-conditions'), 200),
('/post/post-1/', 200),
('/sitemap.xml', 200),
)
for url, code in urls:
print "Checking path %s" % url
response = self.client.get(url)
self.assertEqual(response.status_code, code)
|
from django.core import management
from django.test import TestCase as BaseTestCase
from django.test.client import Client as BaseClient, FakePayload, \
RequestFactory
from django.core.urlresolvers import reverse
from post.models import Post
from foundry.models import Member
class TestCase(BaseTestCase):
@classmethod
def setUpClass(cls):
cls.request = RequestFactory()
cls.client = BaseClient()
# Post-syncdb steps
management.call_command('load_photosizes', interactive=False)
management.call_command('loaddata', 'skeleton/fixtures/sites.json', interactive=False)
# Editor
cls.editor, dc = Member.objects.get_or_create(
username='editor',
email='editor@test.com'
)
cls.editor.set_password("password")
cls.editor.save()
# Post
post, dc = Post.objects.get_or_create(
title='Post 1', content='<b>aaa</b>',
owner=cls.editor, state='published',
)
post.sites = [1]
post.save()
def test_common_urls(self):
"""High-level test to confirm common set of URLs render"""
urls = (
(reverse('join'), 200),
(reverse('login'), 200),
(reverse('logout'), 302),
(reverse('password_reset'), 200),
(reverse('terms-and-conditions'), 200),
('/post/post-1/', 200),
)
for url, code in urls:
print "Checking path %s" % url
response = self.client.get(url)
self.assertEqual(response.status_code, code)
|
Python
| 0.000001
|
5f962415d401b3c37825d6e3a0560de47ce9ec3d
|
remove unused code
|
controller/lib/jubamgr/controller/main.py
|
controller/lib/jubamgr/controller/main.py
|
# -*- coding: utf-8 -*-
import threading
import msgpackrpc
from jubavisor.client import Jubavisor
from jubavisor.types import ServerArgv
from .config import JubaManagerConfig
from .zk import get_zk, cancel_if_down
class JubaManagerController():
@classmethod
def main(cls, args):
myself = args.pop(0)
# TODO externalize config field name
with open('config.json') as f:
cfg = JubaManagerConfig.from_json(f.read())
# TODO assert length of args
subcmd = args[0]
if subcmd == 'start':
process_type = args[1]
target_id = args[2]
cls.start(cfg, process_type, target_id)
elif subcmd == 'stop':
process_type = args[1]
target_id = args[2]
cls.stop(cfg, process_type, target_id)
elif subcmd == 'save':
target_id = args[1]
cls.local_model(cfg, target_id, 'save')
elif subcmd == 'load':
target_id = args[1]
cls.local_model(cfg, target_id, 'load')
elif subcmd == 'status':
# TODO implement
print "Not implemented yet: {0}".format(subcmd)
else:
print "Unknown subcmd: {0}".format(subcmd)
@classmethod
def start(cls, cfg, process_type, target_id):
server = cfg.lookup(process_type, target_id)
visor = cfg.lookup('visor', server._visor)
cluster = cfg.lookup('cluster', server._cluster)
client = Jubavisor(visor._host, visor._port, 'juba' + cluster._type + '/' + cluster._id, 10)
argv = ServerArgv(server._port, "", "", 10, 10, 10, 2, 'juba' + cluster._type, cluster._type, cfg._global_zookeeper,
cluster._id, "", "", "", "", 16, 512, "linear_mixer", False)
client.start(1, argv)
@classmethod
def stop(cls, cfg, process_type, target_id):
server = cfg.lookup(process_type, target_id)
visor = cfg.lookup('visor', server._visor)
cluster = cfg.lookup('cluster', server._cluster)
client = Jubavisor(visor._host, visor._port, 'juba' + cluster._type + '/' + cluster._id, 10)
client.stop(1)
@classmethod
def local_model(cls, cfg, target_id, method):
cluster = cfg.lookup('cluster', target_id)
servers = []
if cluster is None:
server = cfg.lookup('server', target_id)
if server is None:
print "No such cluster or server matching the ID"
return
servers.append(server)
cluster = cfg.lookup('cluster', server._cluster)
else:
servers = filter(lambda x: x._cluster == cluster._id, cfg.get_all('server'))
threads = []
zk = get_zk()
for s in servers:
host = cfg.lookup('visor', s._visor)._host
client = msgpackrpc.Client(msgpackrpc.Address(host, s._port), 0)
cancel_if_down(client, zk, host, s._port, cluster._type, cluster._id)
client.call(method, cluster._id, 'jubamgr',)
zk.stop()
|
# -*- coding: utf-8 -*-
import threading
import msgpackrpc
from jubavisor.client import Jubavisor
from jubavisor.types import ServerArgv
from .config import JubaManagerConfig
from .zk import get_zk, cancel_if_down
class JubaManagerController():
@classmethod
def main(cls, args):
myself = args.pop(0)
# TODO externalize config field name
with open('config.json') as f:
cfg = JubaManagerConfig.from_json(f.read())
# TODO assert length of args
subcmd = args[0]
if subcmd == 'start':
process_type = args[1]
target_id = args[2]
cls.start(cfg, process_type, target_id)
elif subcmd == 'stop':
process_type = args[1]
target_id = args[2]
cls.stop(cfg, process_type, target_id)
elif subcmd == 'save':
target_id = args[1]
cls.local_model(cfg, target_id, 'save')
elif subcmd == 'load':
target_id = args[1]
cls.local_model(cfg, target_id, 'load')
elif subcmd == 'status':
# TODO implement
print "Not implemented yet: {0}".format(subcmd)
else:
print "Unknown subcmd: {0}".format(subcmd)
@classmethod
def start(cls, cfg, process_type, target_id):
server = cfg.lookup(process_type, target_id)
visor = cfg.lookup('visor', server._visor)
cluster = cfg.lookup('cluster', server._cluster)
client = Jubavisor(visor._host, visor._port, 'juba' + cluster._type + '/' + cluster._id, 10)
argv = ServerArgv(server._port, "", "", 10, 10, 10, 2, 'juba' + cluster._type, cluster._type, cfg._global_zookeeper,
cluster._id, "", "", "", "", 16, 512, "linear_mixer", False)
client.start(1, argv)
@classmethod
def stop(cls, cfg, process_type, target_id):
server = cfg.lookup(process_type, target_id)
visor = cfg.lookup('visor', server._visor)
cluster = cfg.lookup('cluster', server._cluster)
client = Jubavisor(visor._host, visor._port, 'juba' + cluster._type + '/' + cluster._id, 10)
client.stop(1)
@classmethod
def local_model(cls, cfg, target_id, method):
cluster = cfg.lookup('cluster', target_id)
servers = []
if cluster is None:
server = cfg.lookup('server', target_id)
if server is None:
print "No such cluster or server matching the ID"
return
servers.append(server)
cluster = cfg.lookup('cluster', server._cluster)
else:
servers = filter(lambda x: x._cluster == cluster._id, cfg.get_all('server'))
threads = []
zk = get_zk()
for s in servers:
host = cfg.lookup('visor', s._visor)._host
client = msgpackrpc.Client(msgpackrpc.Address(host, s._port), 0)
cancel_if_down(client, zk, host, s._port, cluster._type, cluster._id)
client.call(method, cluster._id, 'jubamgr',)
#future = client.call_async(method, cluster._id, 'jubamgr',)
#future.get()
zk.stop()
|
Python
| 0.000017
|
eb624f48a259e948f4bc4c33370fe971b19ea19b
|
Update alert() fn tests according to new signature
|
tests/alerts/geomodel/test_alert.py
|
tests/alerts/geomodel/test_alert.py
|
from datetime import datetime, timedelta
from mozdef_util.utilities.toUTC import toUTC
from alerts.geomodel.alert import alert
import alerts.geomodel.locality as locality
class TestAlert:
'''Unit tests for alert generation.
'''
def test_do_not_alert_when_travel_possible(self):
state = locality.State('locality', 'testuser', [
locality.Locality(
sourceipaddress='1.2.3.123',
city='Toronto',
country='CA',
lastaction=toUTC(datetime.now()) - timedelta(minutes=5),
latitude=43.6529,
longitude=-79.3849,
radius=50),
locality.Locality(
sourceipaddress='123.3.2.1',
city='San Francisco',
country='US',
lastaction=toUTC(datetime.now()) - timedelta(hours=10),
latitude=37.773972,
longitude=-122.431297,
radius=50)
])
alert_produced = alert(state.username, state.localities)
assert alert_produced is None
def test_do_alert_when_travel_impossible(self):
state = locality.State('locality', 'testuser', [
locality.Locality(
sourceipaddress='1.2.3.123',
city='Toronto',
country='CA',
lastaction=toUTC(datetime.now()) - timedelta(minutes=5),
latitude=43.6529,
longitude=-79.3849,
radius=50),
locality.Locality(
sourceipaddress='123.3.2.1',
city='San Francisco',
country='US',
lastaction=toUTC(datetime.now()) - timedelta(hours=1),
latitude=37.773972,
longitude=-122.431297,
radius=50)
])
alert_produced = alert(state.username, state.localities)
assert alert_produced is not None
assert alert_produced.username == 'testuser'
assert alert_produced.sourceipaddress == '1.2.3.123'
assert alert_produced.origin.city == 'Toronto'
|
from datetime import datetime, timedelta
from mozdef_util.utilities.toUTC import toUTC
from alerts.geomodel.alert import alert
import alerts.geomodel.locality as locality
class TestAlert:
'''Unit tests for alert generation.
'''
def test_do_not_alert_when_travel_possible(self):
state = locality.State('locality', 'testuser', [
locality.Locality(
sourceipaddress='1.2.3.123',
city='Toronto',
country='CA',
lastaction=toUTC(datetime.now()) - timedelta(minutes=5),
latitude=43.6529,
longitude=-79.3849,
radius=50),
locality.Locality(
sourceipaddress='123.3.2.1',
city='San Francisco',
country='US',
lastaction=toUTC(datetime.now()) - timedelta(hours=10),
latitude=37.773972,
longitude=-122.431297,
radius=50)
])
alert_produced = alert(state)
assert alert_produced is None
def test_do_alert_when_travel_impossible(self):
state = locality.State('locality', 'testuser', [
locality.Locality(
sourceipaddress='1.2.3.123',
city='Toronto',
country='CA',
lastaction=toUTC(datetime.now()) - timedelta(minutes=5),
latitude=43.6529,
longitude=-79.3849,
radius=50),
locality.Locality(
sourceipaddress='123.3.2.1',
city='San Francisco',
country='US',
lastaction=toUTC(datetime.now()) - timedelta(hours=1),
latitude=37.773972,
longitude=-122.431297,
radius=50)
])
alert_produced = alert(state)
assert alert_produced is not None
assert alert_produced.username == 'testuser'
assert alert_produced.sourceipaddress == '1.2.3.123'
assert alert_produced.origin.city == 'Toronto'
|
Python
| 0
|
6312f162eb37ac3e57a18ea80fe201ab08a6ded1
|
Add Docstring entry for the filter feature of dir_items()
|
convenience/file_convenience/dir_items.py
|
convenience/file_convenience/dir_items.py
|
import os
# ==============================================================================
# DIR ITEMS
# ==============================================================================
def dir_items(d, opt="all", rel=True, root="", filter=""):
"""
Takes a directory path (as a string). And returns the paths/names of the
items that are contained in that directory.
ALL ITEMS, DIRECTORIES, OR JUST FILES
----------------------------
Depending on the options you use, you can return:
- a list of ALL the items (opt="all")
- just the subdirectories (opt="dirs")
- only files (opt="files")
- directories and files as a tuple of two separate lists (opt="grouped")
RELATIVE OR ABSOLUTE PATHS
----------------------------
The items can be returned as:
- just the filenames of the items (rel=True)
- the absolute path to the items (rel=False)
- the path to the items relative to any directory whatsoever in the entire
system filestructure (rel=True, root="some/dir/")
STRING FILTERS
----------------------------
You can also specify to only return items where the filename contains some
specified text, eg, only return items containing ".jpg"
NOTE
----------------------------
The items are not returned in any particular order.
:param d: (str)
The full path to the directory you want to search in.
:param opt: (str or None) {default = "all"}
The option to use:
"dirs" : return just the subdirectories
"files": return just the files
"all" : return all items
"grouped" : returns a tuple of two lists. ([directories], [files])
:param rel: (optional)(boolean)
if True, then it returns the listed items as directories relative to
the d directory.
IF False, then it returns the FULL paths.
:param root: (Optional)(str)
A directory path that we want to use as the root for relative paths.
If left blank, then it uses the directory set in d as the root directory.
:param filter: (string)
Used to filter for items that contain this string in their name.
"""
# ==========================================================================
# TODO: Expand the filter, so you can filter using regex, file extensions,
# or mime types
# --------------------------------------------------------------------------
# Setup
# --------------------------------------------------------------------------
fList = [] # file List
dList = [] # Directory List
d = os.path.abspath(d) # desired directory as an absolute path
# --------------------------------------------------------------------------
# Set the ralative/absolute path to append to the output list of items
# --------------------------------------------------------------------------
if rel:
root = root.strip()
if root == "":
root = d
outpath = os.path.relpath(d, root)
else:
outpath = d
# if the root path is d, then remove the "." from path.
if outpath == ".":
outpath = ""
# --------------------------------------------------------------------------
# Sort each item in the directory into either a directory or a file
# --------------------------------------------------------------------------
for item in os.listdir(d):
# If item doesnt satisfy our filter condition then skip to the next item
if filter not in item:
continue
full_item_path = os.path.join(d, item) # Full path to the item
out_item_path = os.path.join(outpath, item) # Path used in output list
if os.path.isfile(full_item_path):
fList.append(out_item_path)
elif os.path.isdir(full_item_path):
dList.append(out_item_path)
else:
print "WARNING: directoryItems found an item that is neither a \n"\
" file, nor a directory"
# --------------------------------------------------------------------------
# Return the item lists
# --------------------------------------------------------------------------
if (opt is None) or (opt.lower() in ["none", "", "all"]):
return dList + fList
elif opt.lower() in ["file", "files", "f"]:
return fList
elif opt.lower() in ["dir", "dirs", "d", "folder", "folders"]:
return dList
elif opt.lower() in ["grouped", "group", "g"]:
return (dList, fList)
else:
msg = "\n dir_items(): the only valid values for the `opt` argument" \
"\n are 'all', 'dirs', 'files', and 'grouped'"
raise ValueError(msg)
|
import os
# ==============================================================================
# DIR ITEMS
# ==============================================================================
def dir_items(d, opt="all", rel=True, root="", filter=""):
"""
Takes a directory path (as a string). And returns the paths/names of the
items that are contained in that directory.
ALL ITEMS, DIRECTORIES, OR JUST FILES
----------------------------
Depending on the options you use, you can return:
- a list of ALL the items (opt="all")
- just the subdirectories (opt="dirs")
- only files (opt="files")
- directories and files as a tuple of two separate lists (opt="grouped")
RELATIVE OR ABSOLUTE PATHS
----------------------------
The items can be returned as:
- just the filenames of the items (rel=True)
- the absolute path to the items (rel=False)
- the path to the items relative to any directory whatsoever in the entire
system filestructure (rel=True, root="some/dir/")
STRING FILTERS
----------------------------
You can also specify to only return items where the filename contains some
specified text, eg, only return items containing ".jpg"
NOTE
----------------------------
The items are not returned in any particular order.
:param d: (str)
The full path to the directory you want to search in.
:param opt: (str or None) {default = "all"}
The option to use:
"dirs" : return just the subdirectories
"files": return just the files
"all" : return all items
"grouped" : returns a tuple of two lists. ([directories], [files])
:param rel: (optional)(boolean)
if True, then it returns the listed items as directories relative to
the d directory.
IF False, then it returns the FULL paths.
:param root: (Optional)(str)
A directory path that we want to use as the root for relative paths.
If left blank, then it uses the directory set in d as the root directory.
"""
# ==========================================================================
# TODO: create a filter, so you can filter for certain types of files, or
# directories, using something like regex, or file extensions, or
# mime types
# --------------------------------------------------------------------------
# Setup
# --------------------------------------------------------------------------
fList = [] # file List
dList = [] # Directory List
d = os.path.abspath(d) # desired directory as an absolute path
# --------------------------------------------------------------------------
# Set the ralative/absolute path to append to the output list of items
# --------------------------------------------------------------------------
if rel:
root = root.strip()
if root == "":
root = d
outpath = os.path.relpath(d, root)
else:
outpath = d
# if the root path is d, then remove the "." from path.
if outpath == ".":
outpath = ""
# --------------------------------------------------------------------------
# Sort each item in the directory into either a directory or a file
# --------------------------------------------------------------------------
for item in os.listdir(d):
# If item doesnt satisfy our filter condition then skip to the next item
if filter not in item:
continue
full_item_path = os.path.join(d, item) # Full path to the item
out_item_path = os.path.join(outpath, item) # Path used in output list
if os.path.isfile(full_item_path):
fList.append(out_item_path)
elif os.path.isdir(full_item_path):
dList.append(out_item_path)
else:
print "WARNING: directoryItems found an item that is neither a \n"\
" file, nor a directory"
# --------------------------------------------------------------------------
# Return the item lists
# --------------------------------------------------------------------------
if (opt is None) or (opt.lower() in ["none", "", "all"]):
return dList + fList
elif opt.lower() in ["file", "files", "f"]:
return fList
elif opt.lower() in ["dir", "dirs", "d", "folder", "folders"]:
return dList
elif opt.lower() in ["grouped", "group", "g"]:
return (dList, fList)
else:
msg = "\n dir_items(): the only valid values for the `opt` argument" \
"\n are 'all', 'dirs', 'files', and 'grouped'"
raise ValueError(msg)
|
Python
| 0
|
2eb3a682706a5ff0255474230ec32fd0ac96c727
|
update mac build script
|
sansview/setup_mac.py
|
sansview/setup_mac.py
|
"""
This is a setup.py script partly generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
import periodictable.xsf
import DataLoader.readers
from distutils.sysconfig import get_python_lib
import os
DATA_FILES = []
RESOURCES_FILES = []
#Periodictable data file
DATA_FILES = periodictable.data_files()
#invariant and calculator help doc
import sans.perspectives.calculator as calculator
DATA_FILES += calculator.data_files()
import sans.perspectives.invariant as invariant
DATA_FILES += invariant.data_files()
#CANSAxml reader data files
RESOURCES_FILES.append(os.path.join(DataLoader.readers.get_data_path(),'defaults.xml'))
# Locate libxml2 library
lib_locs = ['/usr/local/lib', '/usr/lib']
libxml_path = None
for item in lib_locs:
libxml_path_test = '%s/libxml2.dylib' % item
if os.path.isfile(libxml_path_test):
libxml_path = libxml_path_test
if libxml_path == None:
raise RuntimeError, "Could not find libxml2 on the system"
APP = ['sansview.py']
DATA_FILES += ['images','test','plugins','media']
OPTIONS = {'argv_emulation': True,
'packages': ['lxml','periodictable'],
'iconfile': 'images/ball.icns',
'frameworks':[libxml_path],
'resources': RESOURCES_FILES
}
setup(
app=APP,
data_files=DATA_FILES,
include_package_data= True,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
|
"""
This is a setup.py script partly generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
import periodictable.xsf
import DataLoader.readers
from distutils.sysconfig import get_python_lib
import os
DATA_FILES = []
RESOURCES_FILES = []
#Periodictable data file
DATA_FILES = periodictable.data_files()
#invariant and calculator help doc
import sans.perspectives.calculator as calculator
DATA_FILES += calculator.data_files()
import sans.perspectives.invariant as invariant
DATA_FILES += invariant.data_files()
#CANSAxml reader data files
RESOURCES_FILES.append(os.path.join(DataLoader.readers.get_data_path(),'defaults.xml'))
# Locate libxml2 library
lib_locs = ['/usr/local/lib', '/usr/lib']
libxml_path = None
for item in lib_locs:
libxml_path = '%s/libxml2.dylib' % item
if os.path.isfile(libxml_path):
break
if libxml_path == None:
raise RuntimeError, "Could not find libxml2 on the system"
APP = ['sansview.py']
DATA_FILES += ['images','test','plugins','media']
OPTIONS = {'argv_emulation': True,
'packages': ['lxml','periodictable'],
'iconfile': 'images/ball.icns',
'frameworks':[libxml_path],
'resources': RESOURCES_FILES
}
setup(
app=APP,
data_files=DATA_FILES,
include_package_data= True,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
|
Python
| 0
|
b6c8e38b96293bacd7739411200cd85f47f1efca
|
handle division by zero
|
prophet/analyze.py
|
prophet/analyze.py
|
from prophet.utils.formatters import dict_to_table
import math
import numpy as np
class Analyzer(object):
def __repr__(self):
return self.name
class Volatility(Analyzer):
name = 'volatility'
def run(self, backtest, **kwargs):
return backtest.get_daily_returns().std()
class Sharpe(Analyzer):
name = 'sharpe'
def run(self, data, config, **kwargs):
avg_daily_returns = data['average_return']
volatility = data['volatility']
risk_free_rate = config.get('RISK_FREE_RATE', 0)
trading_days = config.get('YEARLY_TRADING_DAYS', 252)
if volatility == 0:
return 0
return ((avg_daily_returns - risk_free_rate) / volatility
* math.sqrt(trading_days))
class Sortino(Analyzer):
name = 'sortino'
def run(self, backtest, data, config, **kwargs):
avg_daily_returns = data['average_return']
negative_returns = backtest.get_daily_returns()[backtest.get_daily_returns() < 0]
volatility_negative_returns = negative_returns.std()
risk_free_rate = config.get('RISK_FREE_RATE', 0)
trading_days = config.get('YEARLY_TRADING_DAYS', 252)
if volatility_negative_returns == 0:
return 0
return ((avg_daily_returns - risk_free_rate) / volatility_negative_returns
* math.sqrt(trading_days))
class AverageReturn(Analyzer):
name = 'average_return'
def run(self, backtest, **kwargs):
return backtest.get_daily_returns().mean()
class CumulativeReturn(Analyzer):
name = "cumulative_return"
def run(self, backtest, **kwargs):
return backtest.normalize0()[-1]
class MaximumDrawdown(Analyzer):
name = "maximum_drawdown"
def run(self, backtest, **kwargs):
dd_end = np.argmax(np.maximum.accumulate(backtest) - backtest)
dd_start = np.argmax(backtest[:dd_end])
if backtest[dd_start] == 0:
return 0
return 1-backtest[dd_end]/backtest[dd_start]
class Analysis(dict):
def __repr__(self):
""" Represents Analysis object as a text table. """
return dict_to_table(self)
default_analyzers = [Volatility(), AverageReturn(),
Sharpe(), CumulativeReturn(), MaximumDrawdown(), Sortino()]
|
from prophet.utils.formatters import dict_to_table
import math
import numpy as np
class Analyzer(object):
def __repr__(self):
return self.name
class Volatility(Analyzer):
name = 'volatility'
def run(self, backtest, **kwargs):
return backtest.get_daily_returns().std()
class Sharpe(Analyzer):
name = 'sharpe'
def run(self, data, config, **kwargs):
avg_daily_returns = data['average_return']
volatility = data['volatility']
risk_free_rate = config.get('RISK_FREE_RATE', 0)
trading_days = config.get('YEARLY_TRADING_DAYS', 252)
return ((avg_daily_returns - risk_free_rate) / volatility
* math.sqrt(trading_days))
class Sortino(Analyzer):
name = 'sortino'
def run(self, backtest, data, config, **kwargs):
avg_daily_returns = data['average_return']
negative_returns = backtest.get_daily_returns()[backtest.get_daily_returns() < 0]
volatility_negative_returns = negative_returns.std()
risk_free_rate = config.get('RISK_FREE_RATE', 0)
trading_days = config.get('YEARLY_TRADING_DAYS', 252)
return ((avg_daily_returns - risk_free_rate) / volatility_negative_returns
* math.sqrt(trading_days))
class AverageReturn(Analyzer):
name = 'average_return'
def run(self, backtest, **kwargs):
return backtest.get_daily_returns().mean()
class CumulativeReturn(Analyzer):
name = "cumulative_return"
def run(self, backtest, **kwargs):
return backtest.normalize0()[-1]
class MaximumDrawdown(Analyzer):
name = "maximum_drawdown"
def run(self, backtest, **kwargs):
dd_end = np.argmax(np.maximum.accumulate(backtest) - backtest)
dd_start = np.argmax(backtest[:dd_end])
return 1-backtest[dd_end]/backtest[dd_start]
class Analysis(dict):
def __repr__(self):
""" Represents Analysis object as a text table. """
return dict_to_table(self)
default_analyzers = [Volatility(), AverageReturn(),
Sharpe(), CumulativeReturn(), MaximumDrawdown(), Sortino()]
|
Python
| 0.002079
|
6ddc32c10124d10db53f9017044be65a00a35a33
|
Add newline to end of __init__.py.
|
propka/__init__.py
|
propka/__init__.py
|
"""PROPKA 3.1
See https://github.com/jensengroup/propka-3.1 for more information.
Please cite these PROPKA references in publications:
* Sondergaard, Chresten R., Mats HM Olsson, Michal Rostkowski, and Jan H. Jensen.
"Improved Treatment of Ligands and Coupling Effects in Empirical Calculation and
Rationalization of pKa Values." Journal of Chemical Theory and Computation 7,
no. 7 (2011): 2284-2295.
* Olsson, Mats HM, Chresten R. Sondergaard, Michal Rostkowski, and Jan H. Jensen.
"PROPKA3: consistent treatment of internal and surface residues in empirical pKa
predictions." Journal of Chemical Theory and Computation 7, no. 2 (2011): 525-537.
"""
__all__ = ["atom", "bonds", "calculations", "conformation_container",
"coupled_groups", "determinant", "determinants", "group",
"hybrid36", "iterative", "lib", "ligand_pka_values", "ligand",
"molecular_container", "output", "parameters", "pdb", "protonate",
"run", "vector_algebra", "version"]
|
"""PROPKA 3.1
See https://github.com/jensengroup/propka-3.1 for more information.
Please cite these PROPKA references in publications:
* Sondergaard, Chresten R., Mats HM Olsson, Michal Rostkowski, and Jan H. Jensen.
"Improved Treatment of Ligands and Coupling Effects in Empirical Calculation and
Rationalization of pKa Values." Journal of Chemical Theory and Computation 7,
no. 7 (2011): 2284-2295.
* Olsson, Mats HM, Chresten R. Sondergaard, Michal Rostkowski, and Jan H. Jensen.
"PROPKA3: consistent treatment of internal and surface residues in empirical pKa
predictions." Journal of Chemical Theory and Computation 7, no. 2 (2011): 525-537.
"""
__all__ = ["atom", "bonds", "calculations", "conformation_container",
"coupled_groups", "determinant", "determinants", "group",
"hybrid36", "iterative", "lib", "ligand_pka_values", "ligand",
"molecular_container", "output", "parameters", "pdb", "protonate",
"run", "vector_algebra", "version"]
|
Python
| 0.000005
|
0560f65eb6740281c77c9b016bd9a44f486be6ae
|
Use dense matrices instead of sparse where that makes sense
|
openprescribing/matrixstore/matrix_ops.py
|
openprescribing/matrixstore/matrix_ops.py
|
from __future__ import division
import numpy
import scipy.sparse
# Above a certain level of density it becomes more efficient to use a normal
# dense matrix instead of a sparse one. This threshold was determined through a
# not particularly scientific process of trial and error. Due to the
# compression we apply there's very little difference in storage requirements
# between sparse and dense matrices, but the difference comes in the time take
# to perform operations (e.g summing) using these matrices and that's harder to
# measure. At some point we can profile and optimise the performance of the
# MatrixStore, but for now it's fast enough.
DENSITY_THRESHOLD = 0.5
def sparse_matrix(shape, integer=False):
"""
Create a new sparse matrix (either integer or floating point) in a form
suitable for populating with data
"""
dtype = numpy.int_ if integer else numpy.float_
return scipy.sparse.lil_matrix(shape, dtype=dtype)
def finalise_matrix(matrix):
"""
Return a copy of a sparse matrix in a form suitable for storage
"""
if get_density(matrix) < DENSITY_THRESHOLD:
matrix = matrix.tocsc()
matrix.sort_indices()
else:
matrix = matrix.toarray()
if is_integer(matrix):
matrix = convert_to_smallest_int_type(matrix)
return matrix
def is_integer(matrix):
"""
Return whether or not the matrix has integer type
"""
return numpy.issubdtype(matrix.dtype, numpy.integer)
def get_density(matrix):
"""
Return the density of a sparse matrix
"""
return matrix.getnnz() / (matrix.shape[0] * matrix.shape[1])
def convert_to_smallest_int_type(matrix):
"""
Convert a matrix to use the smallest integer type capable of representing
all the values currently stored in it
"""
target_type = smallest_int_type_for_range(matrix.min(), matrix.max())
if target_type != matrix.dtype:
matrix = matrix.astype(target_type, copy=False)
return matrix
def smallest_int_type_for_range(minimum, maximum):
"""
Return smallest numpy integer type capable of representing all values in
the supplied range
"""
signed = minimum < 0
abs_max = max(maximum, abs(minimum))
if signed:
if abs_max < 1 << 7:
return numpy.int8
elif abs_max < 1 << 15:
return numpy.int16
elif abs_max < 1 << 31:
return numpy.int32
else:
if abs_max < 1 << 8:
return numpy.uint8
elif abs_max < 1 << 16:
return numpy.uint16
elif abs_max < 1 << 32:
return numpy.uint32
# Return default integer type (other than in the exceptional case that the
# value is too big to store in a signed 64-bit int)
if not signed and abs_max > 1 << 63:
return numpy.uint64
else:
return numpy.int64
|
import numpy
import scipy.sparse
def sparse_matrix(shape, integer=False):
"""
Create a new sparse matrix (either integer or floating point) in a form
suitable for populating with data
"""
dtype = numpy.int_ if integer else numpy.float_
return scipy.sparse.lil_matrix(shape, dtype=dtype)
def finalise_matrix(matrix):
"""
Return a copy of a sparse matrix in a form suitable for storage
"""
matrix = matrix.tocsc()
matrix.sort_indices()
if is_integer(matrix):
matrix = convert_to_smallest_int_type(matrix)
return matrix
def is_integer(matrix):
"""
Return whether or not the matrix has integer type
"""
return numpy.issubdtype(matrix.dtype, numpy.integer)
def convert_to_smallest_int_type(matrix):
"""
Convert a matrix to use the smallest integer type capable of representing
all the values currently stored in it
"""
target_type = smallest_int_type_for_range(matrix.min(), matrix.max())
if target_type != matrix.dtype:
matrix = matrix.astype(target_type, copy=False)
return matrix
def smallest_int_type_for_range(minimum, maximum):
"""
Return smallest numpy integer type capable of representing all values in
the supplied range
"""
signed = minimum < 0
abs_max = max(maximum, abs(minimum))
if signed:
if abs_max < 1 << 7:
return numpy.int8
elif abs_max < 1 << 15:
return numpy.int16
elif abs_max < 1 << 31:
return numpy.int32
else:
if abs_max < 1 << 8:
return numpy.uint8
elif abs_max < 1 << 16:
return numpy.uint16
elif abs_max < 1 << 32:
return numpy.uint32
# Return default integer type (other than in the exceptional case that the
# value is too big to store in a signed 64-bit int)
if not signed and abs_max > 1 << 63:
return numpy.uint64
else:
return numpy.int64
|
Python
| 0.000011
|
34f6eef42401590bc1809af68e15992f63736027
|
Add sensor class for gyroscope uncalibrated
|
plyer/platforms/android/gyroscope.py
|
plyer/platforms/android/gyroscope.py
|
'''
Android Gyroscope
---------------------
'''
from plyer.facades import Gyroscope
from jnius import PythonJavaClass, java_method, autoclass, cast
from plyer.platforms.android import activity
Context = autoclass('android.content.Context')
Sensor = autoclass('android.hardware.Sensor')
SensorManager = autoclass('android.hardware.SensorManager')
class GyroscopeSensorListener(PythonJavaClass):
__javainterfaces__ = ['android/hardware/SensorEventListener']
def __init__(self):
super(GyroscopeSensorListener, self).__init__()
self.SensorManager = cast('android.hardware.SensorManager',
activity.getSystemService(Context.SENSOR_SERVICE))
self.sensor = self.SensorManager.getDefaultSensor(
Sensor.TYPE_GYROSCOPE)
self.values = [None, None, None]
def enable(self):
self.SensorManager.registerListener(self, self.sensor,
SensorManager.SENSOR_DELAY_NORMAL)
def disable(self):
self.SensorManager.unregisterListener(self, self.sensor)
@java_method('(Landroid/hardware/SensorEvent;)V')
def onSensorChanged(self, event):
self.values = event.values[:3]
@java_method('(Landroid/hardware/Sensor;I)V')
def onAccuracyChanged(self, sensor, accuracy):
# Maybe, do something in future?
pass
class GyroUncalibratedSensorListener(PythonJavaClass):
__javainterfaces__ = ['android/hardware/SensorEventListener']
def __init__(self):
super(GyroUncalibratedSensorListener, self).__init__()
service = activity.getSystemService(Context.SENSOR_SERVICE)
self.SensorManager = cast('android.hardware.SensorManager', service)
self.sensor = self.SensorManager.getDefaultSensor(
Sensor.TYPE_GYROSCOPE_UNCALIBRATED)
self.values = [None, None, None, None, None, None]
def enable(self):
self.SensorManager.registerListener(self, self.sensor,
SensorManager.SENSOR_DELAY_NORMAL)
def disable(self):
self.SensorManager.unregisterListener(self, self.sensor)
@java_method('(Landroid/hardware/SensorEvent;)V')
def onSensorChanged(self, event):
self.values = event.values[:6]
@java_method('(Landroid/hardware/Sensor;I)V')
def onAccuracyChanged(self, sensor, accuracy):
pass
class AndroidGyroscope(Gyroscope):
def __init__(self):
super(AndroidGyroscope, self).__init__()
self.bState = False
def _enable(self):
if (not self.bState):
self.listenerg = GyroscopeSensorListener()
self.listenergu = GyroUncalibratedSensorListener()
self.listenerg.enable()
self.listenergu.enable()
self.bState = True
def _disable(self):
if (self.bState):
self.bState = False
self.listenerg.disable()
self.listenergu.disable()
del self.listenerg
del self.listenergu
def _get_rotation(self):
if (self.bState):
return tuple(self.listenerg.values)
else:
return (None, None, None)
def _get_rotation_uncalib(self):
if (self.bState):
return tuple(self.listenergu.values)
else:
return (None, None, None, None, None, None)
def __del__(self):
if(self.bState):
self._disable()
super(self.__class__, self).__del__()
def instance():
return AndroidGyroscope()
|
'''
Android Gyroscope
---------------------
'''
from plyer.facades import Gyroscope
from jnius import PythonJavaClass, java_method, autoclass, cast
from plyer.platforms.android import activity
Context = autoclass('android.content.Context')
Sensor = autoclass('android.hardware.Sensor')
SensorManager = autoclass('android.hardware.SensorManager')
class GyroscopeSensorListener(PythonJavaClass):
__javainterfaces__ = ['android/hardware/SensorEventListener']
def __init__(self):
super(GyroscopeSensorListener, self).__init__()
self.SensorManager = cast('android.hardware.SensorManager',
activity.getSystemService(Context.SENSOR_SERVICE))
self.sensor = self.SensorManager.getDefaultSensor(
Sensor.TYPE_GYROSCOPE)
self.values = [None, None, None]
def enable(self):
self.SensorManager.registerListener(self, self.sensor,
SensorManager.SENSOR_DELAY_NORMAL)
def disable(self):
self.SensorManager.unregisterListener(self, self.sensor)
@java_method('(Landroid/hardware/SensorEvent;)V')
def onSensorChanged(self, event):
self.values = event.values[:3]
@java_method('(Landroid/hardware/Sensor;I)V')
def onAccuracyChanged(self, sensor, accuracy):
# Maybe, do something in future?
pass
class AndroidGyroscope(Gyroscope):
def __init__(self):
super(AndroidGyroscope, self).__init__()
self.bState = False
def _enable(self):
if (not self.bState):
self.listener = GyroscopeSensorListener()
self.listener.enable()
self.bState = True
def _disable(self):
if (self.bState):
self.bState = False
self.listener.disable()
del self.listener
def _get_orientation(self):
if (self.bState):
return tuple(self.listener.values)
else:
return (None, None, None)
def __del__(self):
if(self.bState):
self._disable()
super(self.__class__, self).__del__()
def instance():
return AndroidGyroscope()
|
Python
| 0.000001
|
1185595ea55f4b4be2f227a37b33d8142bcf92c1
|
bump version to 6.0.0.dev0
|
stellar_sdk/__version__.py
|
stellar_sdk/__version__.py
|
"""
_____ _______ ______ _ _ _____ _____ _____ _ __
/ ____|__ __| ____| | | | /\ | __ \ / ____| __ \| |/ /
| (___ | | | |__ | | | | / \ | |__) |____| (___ | | | | ' /
\___ \ | | | __| | | | | / /\ \ | _ /______\___ \| | | | <
____) | | | | |____| |____| |____ / ____ \| | \ \ ____) | |__| | . \
|_____/ |_| |______|______|______/_/ \_\_| \_\ |_____/|_____/|_|\_\
"""
__title__ = "stellar-sdk"
__description__ = "The Python Stellar SDK library provides APIs to build transactions and connect to Horizon."
__url__ = "https://github.com/StellarCN/py-stellar-base"
__issues__ = f"{__url__}/issues"
__version__ = "6.0.0.dev0"
__author__ = "Eno, overcat"
__author_email__ = "appweb.cn@gmail.com, 4catcode@gmail.com"
__license__ = "Apache License 2.0"
|
"""
_____ _______ ______ _ _ _____ _____ _____ _ __
/ ____|__ __| ____| | | | /\ | __ \ / ____| __ \| |/ /
| (___ | | | |__ | | | | / \ | |__) |____| (___ | | | | ' /
\___ \ | | | __| | | | | / /\ \ | _ /______\___ \| | | | <
____) | | | | |____| |____| |____ / ____ \| | \ \ ____) | |__| | . \
|_____/ |_| |______|______|______/_/ \_\_| \_\ |_____/|_____/|_|\_\
"""
__title__ = "stellar-sdk"
__description__ = "The Python Stellar SDK library provides APIs to build transactions and connect to Horizon."
__url__ = "https://github.com/StellarCN/py-stellar-base"
__issues__ = f"{__url__}/issues"
__version__ = "5.0.0"
__author__ = "Eno, overcat"
__author_email__ = "appweb.cn@gmail.com, 4catcode@gmail.com"
__license__ = "Apache License 2.0"
|
Python
| 0.000004
|
e589f4347a730abc64f43b3b427ac556643c361f
|
Use proper mesh
|
graphs/hydrogen_pfem.py
|
graphs/hydrogen_pfem.py
|
R_x = {
0: [5, 7, 10, 16, 18, 22, 28, 34],
}
R_y = {
(1, 0): [0.17076672795968373, 0.043356138894337204, 0.041045532914633254, 0.0058943597765469535, 0.00018759868059159412, 1.7891829137695048e-06, 7.0804719309869313e-09, 6.5346731359383625e-09],
(2, 0): [0.044544127593562716, 0.01096517172704424, 0.0089970596630603861, 0.0029545088700716898, 0.00014843931056320037, 1.4554012751771817e-06, 9.5016940149239559e-10, 1.3958578737316429e-10],
(3, 0): [0.015033623095632616, 0.009520817576687414, 0.0069807964180385249, 0.0016484608980817175, 0.00023362526206750084, 1.1715220633229384e-05, 3.2697808772796932e-06, 4.7940380232952551e-07],
}
|
R_x = {
0: [5, 7, 13, 15, 21, 25, 29],
}
R_y = {
(1, 0): [0.1843068472879863, 0.06235603500209852, 0.011111956105256449, 0.00050366115986938409, 7.1562463805907583e-06, 4.0298526238213839e-08, 3.9956294661802616e-08],
(2, 0): [0.046105675637867799, 0.013246017078074462, 0.005542485710768652, 0.00067997064168938415, 4.6063134426982399e-05, 1.0777808132356181e-06, 4.2930948795927293e-09],
(3, 0): [0.01713673973745012, 0.014641367192083615, 0.0026518937870781203, 0.00028054793547133139, 0.00010971626504485688, 2.7061872750686056e-06, 1.0121065385088057e-07],
}
|
Python
| 0.000032
|
9b3fee4f413c02c0b69465fe935b9ea48206191d
|
Add context to excerpts
|
conjure/controllers/jujumodel.py
|
conjure/controllers/jujumodel.py
|
from conjure.ui.views.jujumodel import (NewModelView, ExistingModelView)
from conjure.controllers.deploy import DeployController
from conjure.controllers.jujumodels.maas import MaasJujuModelController
from conjure.controllers.jujumodels.openstack import OpenStackJujuModelController # noqa
from conjure.controllers.jujumodels.local import LocalJujuModelController
class JujuModelController:
def __init__(self, common, jujumodels=None):
self.common = common
self.config = self.common['config']
self.jujumodels = jujumodels
if self.jujumodels is None:
self.excerpt = (
"A Juju environment is required to deploy the solution. "
"Since no existing environments were found please "
"select the model you wish to use. This would be the "
"equivalent of running `juju bootstrap -e <model>`.\n\n"
"For more information type `{cmd}` at your "
"command prompt.".format(cmd='juju help bootstrap'))
self.view = NewModelView(self.common,
self.render_model_view)
else:
self.excerpt = (
"It looks like there are existing Juju Models, please select "
"the model you wish to deploy to. This would be the "
"equivalent of running `juju list-models`.\n\n"
"For more information type `{cmd}` at your "
"command prompt.".format(cmd='juju help controllers'))
self.view = ExistingModelView(self.common,
self.jujumodels,
self.deploy)
def deploy(self, model):
""" An existing Juju model was found load the deploy controller
to start installation
Arguments:
model: Juju model to deploy to
"""
self.common['juju'].switch(model)
DeployController(self.common, model).render()
def render_model_view(self, model):
""" No juju model found, render the selected models view
for a new installation.
Arguments:
modmel: name of juju model to use
"""
model = model.lower()
if model == "maas":
MaasJujuModelController(self.common).render()
elif model == "openstack":
OpenStackJujuModelController(self.common).render()
elif model == "local":
LocalJujuModelController(self.common).render()
def render(self):
self.common['ui'].set_header(
title="Select a Juju Model",
excerpt=self.excerpt
)
self.common['ui'].set_body(self.view)
|
from conjure.ui.views.jujumodel import (NewModelView, ExistingModelView)
from conjure.controllers.deploy import DeployController
from conjure.controllers.jujumodels.maas import MaasJujuModelController
from conjure.controllers.jujumodels.openstack import OpenStackJujuModelController # noqa
from conjure.controllers.jujumodels.local import LocalJujuModelController
class JujuModelController:
def __init__(self, common, jujumodels=None):
self.common = common
self.config = self.common['config']
self.jujumodels = jujumodels
if self.jujumodels is None:
self.excerpt = (
"A Juju environment is required to deploy the solution. "
"Since no existing environments were found please "
"select the model you wish to use.")
self.view = NewModelView(self.common,
self.render_model_view)
else:
self.excerpt = (
"It looks like there are existing Juju Models, please select "
"the model you wish to deploy to.")
self.view = ExistingModelView(self.common,
self.jujumodels,
self.deploy)
def deploy(self, model):
""" An existing Juju model was found load the deploy controller
to start installation
Arguments:
model: Juju model to deploy to
"""
self.common['juju'].switch(model)
model_info = self.common['juju'].client.Client(request="ModelInfo")
if model_info['ProviderType'] in self.config['juju-models']:
model = self.config['juju-models'][model_info['ProviderType']]
DeployController(self.common, model).render()
else:
raise Exception("Unknown Provider Type found: {}".format(
model_info['ProviderType']
))
def render_model_view(self, model):
""" No juju model found, render the selected models view
for a new installation.
Arguments:
modmel: name of juju model to use
"""
model = model.lower()
if model == "maas":
MaasJujuModelController(self.common).render()
elif model == "openstack":
OpenStackJujuModelController(self.common).render()
elif model == "local":
LocalJujuModelController(self.common).render()
def render(self):
self.common['ui'].set_header(
title="Select a Juju Model",
excerpt=self.excerpt
)
self.common['ui'].set_body(self.view)
|
Python
| 0.999997
|
5ad7dfbea0b85fca283025e09a0dc33b2fbe97a6
|
switch to the cdn domain
|
crate_project/settings/production/base.py
|
crate_project/settings/production/base.py
|
from ..base import *
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"filters": {
"require_debug_false": {
"()": "django.utils.log.RequireDebugFalse",
},
},
"formatters": {
"simple": {
"format": "%(levelname)s %(message)s"
},
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "simple"
},
"mail_admins": {
"level": "ERROR",
"filters": ["require_debug_false"],
"class": "django.utils.log.AdminEmailHandler",
},
"sentry": {
"level": "ERROR",
"class": "raven.contrib.django.handlers.SentryHandler",
},
},
"loggers": {
"": {
"handlers": ["console", "sentry"],
"propagate": True,
"level": "INFO",
},
"django.request": {
"handlers": ["mail_admins"],
"level": "ERROR",
"propagate": True,
},
"sentry.errors": {
"level": "DEBUG",
"handlers": ["console"],
"propagate": False,
},
}
}
SITE_ID = 3
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
SERVER_EMAIL = "server@crate.io"
DEFAULT_FROM_EMAIL = "support@crate.io"
CONTACT_EMAIL = "support@crate.io"
# MIDDLEWARE_CLASSES += ["privatebeta.middleware.PrivateBetaMiddleware"]
PACKAGE_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
PACKAGE_FILE_STORAGE_OPTIONS = {
"bucket": "crate-production",
"custom_domain": "packages.crate-cdn.com",
}
DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
# STATICFILES_STORAGE = "storages.backends.s3boto.S3BotoStorage"
AWS_STORAGE_BUCKET_NAME = "crate-media-production"
AWS_S3_CUSTOM_DOMAIN = "media.crate-cdn.com"
# PRIVATE_BETA_ALLOWED_URLS = [
# "/account/login/",
# "/account/signup/",
# "/account/confirm_email/",
# ]
# PRIVATE_BETA_ALLOWED_HOSTS = [
# "simple.crate.io",
# ]
INTERCOM_APP_ID = "79qt2qu3"
SIMPLE_API_URL = "http://simple.crate.io/"
|
from ..base import *
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"filters": {
"require_debug_false": {
"()": "django.utils.log.RequireDebugFalse",
},
},
"formatters": {
"simple": {
"format": "%(levelname)s %(message)s"
},
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "simple"
},
"mail_admins": {
"level": "ERROR",
"filters": ["require_debug_false"],
"class": "django.utils.log.AdminEmailHandler",
},
"sentry": {
"level": "ERROR",
"class": "raven.contrib.django.handlers.SentryHandler",
},
},
"loggers": {
"": {
"handlers": ["console", "sentry"],
"propagate": True,
"level": "INFO",
},
"django.request": {
"handlers": ["mail_admins"],
"level": "ERROR",
"propagate": True,
},
"sentry.errors": {
"level": "DEBUG",
"handlers": ["console"],
"propagate": False,
},
}
}
SITE_ID = 3
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
SERVER_EMAIL = "server@crate.io"
DEFAULT_FROM_EMAIL = "support@crate.io"
CONTACT_EMAIL = "support@crate.io"
# MIDDLEWARE_CLASSES += ["privatebeta.middleware.PrivateBetaMiddleware"]
PACKAGE_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
PACKAGE_FILE_STORAGE_OPTIONS = {
"bucket": "crate-production",
"custom_domain": "packages.crate.io",
}
DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
# STATICFILES_STORAGE = "storages.backends.s3boto.S3BotoStorage"
AWS_STORAGE_BUCKET_NAME = "crate-media-production"
AWS_S3_CUSTOM_DOMAIN = "media.crate.io"
# PRIVATE_BETA_ALLOWED_URLS = [
# "/account/login/",
# "/account/signup/",
# "/account/confirm_email/",
# ]
# PRIVATE_BETA_ALLOWED_HOSTS = [
# "simple.crate.io",
# ]
INTERCOM_APP_ID = "79qt2qu3"
SIMPLE_API_URL = "http://simple.crate.io/"
|
Python
| 0.000001
|
59833d6a9ef216ae78c4116e6fae8441f1ba5d9c
|
remove redundants
|
tests/chainer_tests/functions_tests/activation_tests/test_sigmoid.py
|
tests/chainer_tests/functions_tests/activation_tests/test_sigmoid.py
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
def _sigmoid(x):
half = x.dtype.type(0.5)
return numpy.tanh(x * half) * half + half
@testing.parameterize(*testing.product({
'shape': [(3, 2), ()],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
'contiguous': [None, 'C'],
}))
@testing.fix_random()
@testing.inject_backend_tests(
None,
# CPU tests
[
{},
{'use_ideep': True},
]
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
'cuda_device': [0, 1],
})
# ChainerX tests
+ testing.product({
'use_chainerx': [True],
'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'],
})
)
class TestSigmoid(testing.FunctionTestCase):
def setUp(self):
if self.dtype == numpy.float16:
self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3}
self.check_backward_options = {'atol': 1e-2, 'rtol': 5e-2}
self.check_double_backward_options = {'atol': 1e-2, 'rtol': 5e-2}
def generate_inputs(self):
x = numpy.random.uniform(-.5, .5, self.shape).astype(self.dtype)
return x,
def forward_expected(self, inputs):
x, = inputs
y = _sigmoid(x)
if numpy.isscalar(y):
y = numpy.asarray(y)
return y,
def forward(self, inputs, device):
x, = inputs
return functions.sigmoid(x),
@testing.parameterize(*testing.product({
'use_cudnn': ['always', 'auto', 'never'],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@attr.cudnn
class TestSigmoidCudnnCall(unittest.TestCase):
def setUp(self):
self.x = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
self.gy = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
with chainer.using_config('use_cudnn', self.use_cudnn):
self.expect = chainer.should_use_cudnn('==always')
def forward(self):
x = chainer.Variable(self.x)
return functions.sigmoid(x)
def test_call_cudnn_forward(self):
default_func = cuda.cupy.cudnn.activation_forward
with chainer.using_config('use_cudnn', self.use_cudnn):
with testing.patch('cupy.cudnn.activation_forward') as func:
func.side_effect = default_func
self.forward()
self.assertEqual(func.called, self.expect)
def test_call_cudnn_backward(self):
with chainer.using_config('use_cudnn', self.use_cudnn):
y = self.forward()
y.grad = self.gy
default_func = cuda.cupy.cudnn.activation_backward
with testing.patch('cupy.cudnn.activation_backward') as func:
func.side_effect = default_func
y.backward()
self.assertEqual(func.called, self.expect)
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
def _sigmoid(x):
half = x.dtype.type(0.5)
return numpy.tanh(x * half) * half + half
@testing.parameterize(*testing.product({
'shape': [(3, 2), ()],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
'contiguous': [None, 'C'],
}))
@testing.fix_random()
@testing.inject_backend_tests(
None,
# CPU tests
[
{},
{'use_ideep': True},
]
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
'cuda_device': [0, 1],
})
# ChainerX tests
+ testing.product({
'use_chainerx': [True],
'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'],
})
)
class TestSigmoid(testing.FunctionTestCase):
def setUp(self):
if self.dtype == numpy.float16:
self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3}
self.check_backward_options = {'atol': 1e-2, 'rtol': 5e-2}
self.check_double_backward_options = {'atol': 1e-2, 'rtol': 5e-2}
def generate_inputs(self):
x = numpy.random.uniform(-.5, .5, self.shape).astype(self.dtype)
return x,
def forward_expected(self, inputs):
x, = inputs
y = _sigmoid(x.copy()).astype(self.dtype)
if numpy.isscalar(y):
y = numpy.asarray(y)
return y,
def forward(self, inputs, device):
x, = inputs
return functions.sigmoid(x),
@testing.parameterize(*testing.product({
'use_cudnn': ['always', 'auto', 'never'],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@attr.cudnn
class TestSigmoidCudnnCall(unittest.TestCase):
def setUp(self):
self.x = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
self.gy = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
with chainer.using_config('use_cudnn', self.use_cudnn):
self.expect = chainer.should_use_cudnn('==always')
def forward(self):
x = chainer.Variable(self.x)
return functions.sigmoid(x)
def test_call_cudnn_forward(self):
default_func = cuda.cupy.cudnn.activation_forward
with chainer.using_config('use_cudnn', self.use_cudnn):
with testing.patch('cupy.cudnn.activation_forward') as func:
func.side_effect = default_func
self.forward()
self.assertEqual(func.called, self.expect)
def test_call_cudnn_backward(self):
with chainer.using_config('use_cudnn', self.use_cudnn):
y = self.forward()
y.grad = self.gy
default_func = cuda.cupy.cudnn.activation_backward
with testing.patch('cupy.cudnn.activation_backward') as func:
func.side_effect = default_func
y.backward()
self.assertEqual(func.called, self.expect)
testing.run_module(__name__, __file__)
|
Python
| 0.999999
|
bbb9d7e65fdff189562d1e2a5cdf4a302e562f3e
|
Use parameterized test
|
tests/chainer_tests/functions_tests/math_tests/test_trigonometric.py
|
tests/chainer_tests/functions_tests/math_tests/test_trigonometric.py
|
import unittest
import numpy
import chainer
from chainer import cuda
import chainer.functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.parameterize(*testing.product({
'shape': [(3, 2), ()],
}))
class UnaryFunctionsTest(unittest.TestCase):
def make_data(self):
raise NotImplementedError
def setUp(self):
self.x = numpy.random.uniform(.5, 1, self.shape).astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
def check_forward(self, op, op_np, x_data):
x = chainer.Variable(x_data)
y = op(x)
testing.assert_allclose(
op_np(self.x), y.data, atol=1e-7, rtol=1e-7)
def check_forward_cpu(self, op, op_np):
self.check_forward(op, op_np, self.x)
def check_forward_gpu(self, op, op_np):
self.check_forward(op, op_np, cuda.to_gpu(self.x))
@condition.retry(3)
def test_cos_forward_cpu(self):
self.check_forward_cpu(F.cos, numpy.cos)
@condition.retry(3)
def test_sin_forward_cpu(self):
self.check_forward_cpu(F.sin, numpy.sin)
@condition.retry(3)
def test_tan_forward_cpu(self):
self.check_forward_cpu(F.tan, numpy.tan)
@attr.gpu
@condition.retry(3)
def test_cos_forward_gpu(self):
self.check_forward_gpu(F.cos, numpy.cos)
@attr.gpu
@condition.retry(3)
def test_sin_forward_gpu(self):
self.check_forward_gpu(F.sin, numpy.sin)
@attr.gpu
@condition.retry(3)
def test_tan_forward_gpu(self):
self.check_forward_gpu(F.tan, numpy.tan)
def check_backward(self, op, x_data, y_grad):
gradient_check.check_backward(op, x_data, y_grad)
def check_backward_cpu(self, op):
self.check_backward(op, self.x, self.gy)
def check_backward_gpu(self, op):
self.check_backward(op, cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
@condition.retry(3)
def test_cos_backward_cpu(self):
self.check_backward_cpu(F.cos)
@condition.retry(3)
def test_sin_backward_cpu(self):
self.check_backward_cpu(F.sin)
@condition.retry(3)
def test_tan_backward_cpu(self):
self.check_backward_cpu(F.tan)
@attr.gpu
@condition.retry(3)
def test_cos_backward_gpu(self):
self.check_backward_gpu(F.cos)
@attr.gpu
@condition.retry(3)
def test_sin_backward_gpu(self):
self.check_backward_gpu(F.sin)
@attr.gpu
@condition.retry(3)
def test_tan_backward_gpu(self):
self.check_backward_gpu(F.tan)
def test_sin(self):
self.assertEqual(F.Sin().label, 'sin')
def test_cos(self):
self.assertEqual(F.Cos().label, 'cos')
def test_tan(self):
self.assertEqual(F.Tan().label, 'tan')
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer
from chainer import cuda
import chainer.functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
class UnaryFunctionsTestBase(object):
def make_data(self):
raise NotImplementedError
def setUp(self):
self.x, self.gy = self.make_data()
def check_forward(self, op, op_np, x_data):
x = chainer.Variable(x_data)
y = op(x)
testing.assert_allclose(
op_np(self.x), y.data, atol=1e-7, rtol=1e-7)
def check_forward_cpu(self, op, op_np):
self.check_forward(op, op_np, self.x)
def check_forward_gpu(self, op, op_np):
self.check_forward(op, op_np, cuda.to_gpu(self.x))
@condition.retry(3)
def test_cos_forward_cpu(self):
self.check_forward_cpu(F.cos, numpy.cos)
@condition.retry(3)
def test_sin_forward_cpu(self):
self.check_forward_cpu(F.sin, numpy.sin)
@condition.retry(3)
def test_tan_forward_cpu(self):
self.check_forward_cpu(F.tan, numpy.tan)
@attr.gpu
@condition.retry(3)
def test_cos_forward_gpu(self):
self.check_forward_gpu(F.cos, numpy.cos)
@attr.gpu
@condition.retry(3)
def test_sin_forward_gpu(self):
self.check_forward_gpu(F.sin, numpy.sin)
@attr.gpu
@condition.retry(3)
def test_tan_forward_gpu(self):
self.check_forward_gpu(F.tan, numpy.tan)
def check_backward(self, op, x_data, y_grad):
gradient_check.check_backward(op, x_data, y_grad)
def check_backward_cpu(self, op):
self.check_backward(op, self.x, self.gy)
def check_backward_gpu(self, op):
self.check_backward(op, cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
@condition.retry(3)
def test_cos_backward_cpu(self):
self.check_backward_cpu(F.cos)
@condition.retry(3)
def test_sin_backward_cpu(self):
self.check_backward_cpu(F.sin)
@condition.retry(3)
def test_tan_backward_cpu(self):
self.check_backward_cpu(F.tan)
@attr.gpu
@condition.retry(3)
def test_cos_backward_gpu(self):
self.check_backward_gpu(F.cos)
@attr.gpu
@condition.retry(3)
def test_sin_backward_gpu(self):
self.check_backward_gpu(F.sin)
@attr.gpu
@condition.retry(3)
def test_tan_backward_gpu(self):
self.check_backward_gpu(F.tan)
def test_sin(self):
self.assertEqual(F.Sin().label, 'sin')
def test_cos(self):
self.assertEqual(F.Cos().label, 'cos')
def test_tan(self):
self.assertEqual(F.Tan().label, 'tan')
class TestUnaryFunctionsSimple(UnaryFunctionsTestBase, unittest.TestCase):
def make_data(self):
x = numpy.random.uniform(.5, 1, (3, 2)).astype(numpy.float32)
gy = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
return x, gy
class TestUnaryFunctionsZeroDimension(UnaryFunctionsTestBase,
unittest.TestCase):
def make_data(self):
x = numpy.random.uniform(.5, 1, ()).astype(numpy.float32)
gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32)
return x, gy
testing.run_module(__name__, __file__)
|
Python
| 0.000001
|
a985f0ac4ef31e534e96319da9980745f5169252
|
Disable some Pylint errors
|
tests/integration/modules/pillar.py
|
tests/integration/modules/pillar.py
|
# -*- coding: utf-8 -*-
# Import Python Libs
from distutils.version import LooseVersion # pylint: disable=import-error,no-name-in-module
# Import Salt Testing libs
from salttesting import skipIf
from salttesting.helpers import (
ensure_in_syspath,
requires_network
)
ensure_in_syspath('../../')
# Import salt libs
import integration
GIT_PYTHON = '0.3.2'
HAS_GIT_PYTHON = False
try:
import git
if LooseVersion(git.__version__) >= LooseVersion(GIT_PYTHON):
HAS_GIT_PYTHON = True
except ImportError:
pass
class PillarModuleTest(integration.ModuleCase):
'''
Validate the pillar module
'''
def test_data(self):
'''
pillar.data
'''
grains = self.run_function('grains.items')
pillar = self.run_function('pillar.data')
self.assertEqual(pillar['os'], grains['os'])
self.assertEqual(pillar['monty'], 'python')
if grains['os'] == 'Fedora':
self.assertEqual(pillar['class'], 'redhat')
else:
self.assertEqual(pillar['class'], 'other')
@requires_network()
@skipIf(HAS_GIT_PYTHON is False,
'GitPython must be installed and >= version {0}'.format(GIT_PYTHON))
def test_two_ext_pillar_sources_override(self):
'''
https://github.com/saltstack/salt/issues/12647
'''
self.assertEqual(
self.run_function('pillar.data')['info'],
'bar'
)
@requires_network()
@skipIf(HAS_GIT_PYTHON is False,
'GitPython must be installed and >= version {0}'.format(GIT_PYTHON))
def test_two_ext_pillar_sources(self):
'''
https://github.com/saltstack/salt/issues/12647
'''
self.assertEqual(
self.run_function('pillar.data')['abc'],
'def'
)
def test_issue_5449_report_actual_file_roots_in_pillar(self):
'''
pillar['master']['file_roots'] is overwritten by the master
in order to use the fileclient interface to read the pillar
files. We should restore the actual file_roots when we send
the pillar back to the minion.
'''
self.assertIn(
integration.TMP_STATE_TREE,
self.run_function('pillar.data')['master']['file_roots']['base']
)
def test_ext_cmd_yaml(self):
'''
pillar.data for ext_pillar cmd.yaml
'''
self.assertEqual(
self.run_function('pillar.data')['ext_spam'], 'eggs'
)
def test_issue_5951_actual_file_roots_in_opts(self):
self.assertIn(
integration.TMP_STATE_TREE,
self.run_function('pillar.data')['test_ext_pillar_opts']['file_roots']['base']
)
def no_test_issue_10408_ext_pillar_gitfs_url_update(self):
import os
from salt.pillar import git_pillar
import git
original_url = 'git+ssh://original@example.com/home/git/test'
changed_url = 'git+ssh://changed@example.com/home/git/test'
rp_location = os.path.join(self.master_opts['cachedir'], 'pillar_gitfs/0/.git')
opts = {
'ext_pillar': [{'git': 'master {0}'.format(original_url)}],
'cachedir': self.master_opts['cachedir'],
}
git_pillar.GitPillar('master', original_url, opts)
opts['ext_pillar'] = [{'git': 'master {0}'.format(changed_url)}]
grepo = git_pillar.GitPillar('master', changed_url, opts)
repo = git.Repo(rp_location)
self.assertEqual(grepo.rp_location, repo.remotes.origin.url)
if __name__ == '__main__':
from integration import run_tests
run_tests(PillarModuleTest)
|
# -*- coding: utf-8 -*-
# Import Python Libs
from distutils.version import LooseVersion
# Import Salt Testing libs
from salttesting import skipIf
from salttesting.helpers import (
ensure_in_syspath,
requires_network
)
ensure_in_syspath('../../')
# Import salt libs
import integration
GIT_PYTHON = '0.3.2'
HAS_GIT_PYTHON = False
try:
import git
if LooseVersion(git.__version__) >= LooseVersion(GIT_PYTHON):
HAS_GIT_PYTHON = True
except ImportError:
pass
class PillarModuleTest(integration.ModuleCase):
'''
Validate the pillar module
'''
def test_data(self):
'''
pillar.data
'''
grains = self.run_function('grains.items')
pillar = self.run_function('pillar.data')
self.assertEqual(pillar['os'], grains['os'])
self.assertEqual(pillar['monty'], 'python')
if grains['os'] == 'Fedora':
self.assertEqual(pillar['class'], 'redhat')
else:
self.assertEqual(pillar['class'], 'other')
@requires_network()
@skipIf(HAS_GIT_PYTHON is False,
'GitPython must be installed and >= version {0}'.format(GIT_PYTHON))
def test_two_ext_pillar_sources_override(self):
'''
https://github.com/saltstack/salt/issues/12647
'''
self.assertEqual(
self.run_function('pillar.data')['info'],
'bar'
)
@requires_network()
@skipIf(HAS_GIT_PYTHON is False,
'GitPython must be installed and >= version {0}'.format(GIT_PYTHON))
def test_two_ext_pillar_sources(self):
'''
https://github.com/saltstack/salt/issues/12647
'''
self.assertEqual(
self.run_function('pillar.data')['abc'],
'def'
)
def test_issue_5449_report_actual_file_roots_in_pillar(self):
'''
pillar['master']['file_roots'] is overwritten by the master
in order to use the fileclient interface to read the pillar
files. We should restore the actual file_roots when we send
the pillar back to the minion.
'''
self.assertIn(
integration.TMP_STATE_TREE,
self.run_function('pillar.data')['master']['file_roots']['base']
)
def test_ext_cmd_yaml(self):
'''
pillar.data for ext_pillar cmd.yaml
'''
self.assertEqual(
self.run_function('pillar.data')['ext_spam'], 'eggs'
)
def test_issue_5951_actual_file_roots_in_opts(self):
self.assertIn(
integration.TMP_STATE_TREE,
self.run_function('pillar.data')['test_ext_pillar_opts']['file_roots']['base']
)
def no_test_issue_10408_ext_pillar_gitfs_url_update(self):
import os
from salt.pillar import git_pillar
import git
original_url = 'git+ssh://original@example.com/home/git/test'
changed_url = 'git+ssh://changed@example.com/home/git/test'
rp_location = os.path.join(self.master_opts['cachedir'], 'pillar_gitfs/0/.git')
opts = {
'ext_pillar': [{'git': 'master {0}'.format(original_url)}],
'cachedir': self.master_opts['cachedir'],
}
git_pillar.GitPillar('master', original_url, opts)
opts['ext_pillar'] = [{'git': 'master {0}'.format(changed_url)}]
grepo = git_pillar.GitPillar('master', changed_url, opts)
repo = git.Repo(rp_location)
self.assertEqual(grepo.rp_location, repo.remotes.origin.url)
if __name__ == '__main__':
from integration import run_tests
run_tests(PillarModuleTest)
|
Python
| 0.000001
|
abb72e478fd0bdf5929111d3bc782b94e98819ab
|
Revert b00b3c4 (but keep addition to allow_failure list in test_valid_docs())
|
tests/integration/modules/sysmod.py
|
tests/integration/modules/sysmod.py
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
import re
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
# Import 3rd-party libs
import salt.ext.six as six
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
nodoc = set()
noexample = set()
allow_failure = (
'cp.recv',
'libcloud_dns.get_driver',
'lxc.run_cmd',
'ipset.long_range',
'pkg.expand_repo_def',
'runtests_decorators.depends',
'runtests_decorators.depends_will_fallback',
'runtests_decorators.missing_depends',
'runtests_decorators.missing_depends_will_fallback',
'swift.head',
'glance.warn_until',
'yumpkg.expand_repo_def',
'yumpkg5.expand_repo_def',
'container_resource.run',
'nspawn.stop',
'nspawn.restart',
'lowpkg.bin_pkg_info',
'state.apply',
'cmd.win_runas',
'status.list2cmdline'
)
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if fun in allow_failure:
continue
if not isinstance(docs[fun], six.string_types):
nodoc.add(fun)
elif not re.search(r'([E|e]xample(?:s)?)+(?:.*)::?', docs[fun]):
noexample.add(fun)
if not nodoc and not noexample:
return
raise AssertionError(
'There are some functions which do not have a docstring or do not '
'have an example:\nNo docstring:\n{0}\nNo example:\n{1}\n'.format(
'\n'.join([' - {0}'.format(f) for f in sorted(nodoc)]),
'\n'.join([' - {0}'.format(f) for f in sorted(noexample)]),
)
)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
import re
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
# Import 3rd-party libs
import salt.ext.six as six
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
# Get all functions
funcs = self.run_function('sys.list_functions')
self.assertIn('hosts.list_hosts', funcs)
self.assertIn('pkg.install', funcs)
# Just pkg
funcs = self.run_function('sys.list_functions', ('pkg.',))
self.assertNotIn('sys.doc', funcs)
self.assertIn('pkg.install', funcs)
# Just sys
funcs = self.run_function('sys.list_functions', ('sys.',))
self.assertNotIn('pkg.install', funcs)
self.assertIn('sys.doc', funcs)
# Starting with sys
funcs = self.run_function('sys.list_functions', ('sys',))
self.assertNotIn('sysctl.get', funcs)
self.assertIn('sys.doc', funcs)
def test_list_modules(self):
'''
sys.list_modules
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
def test_list_modules_with_arg_glob(self):
'''
sys.list_modules u*
Tests getting the list of modules with 'u*', and looking for the
"user" module
'''
mods = self.run_function('sys.list_modules', ['u*'])
self.assertNotIn('bigip', mods)
self.assertIn('user', mods)
def test_list_modules_with_arg_exact_match(self):
'''
sys.list_modules user
Tests getting the list of modules looking for the "user" module with
an exact match of 'user' being passed at the CLI instead of something
with '*'.
'''
mods = self.run_function('sys.list_modules', ['user'])
self.assertEqual(mods, ['user'])
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
nodoc = set()
noexample = set()
allow_failure = (
'cp.recv',
'libcloud_dns.get_driver',
'lxc.run_cmd',
'ipset.long_range',
'pkg.expand_repo_def',
'runtests_decorators.depends',
'runtests_decorators.depends_will_fallback',
'runtests_decorators.missing_depends',
'runtests_decorators.missing_depends_will_fallback',
'swift.head',
'glance.warn_until',
'yumpkg.expand_repo_def',
'yumpkg5.expand_repo_def',
'container_resource.run',
'nspawn.stop',
'nspawn.restart',
'lowpkg.bin_pkg_info',
'state.apply',
'cmd.win_runas',
'status.list2cmdline'
)
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if fun in allow_failure:
continue
if not isinstance(docs[fun], six.string_types):
nodoc.add(fun)
elif not re.search(r'([E|e]xample(?:s)?)+(?:.*)::?', docs[fun]):
noexample.add(fun)
if not nodoc and not noexample:
return
raise AssertionError(
'There are some functions which do not have a docstring or do not '
'have an example:\nNo docstring:\n{0}\nNo example:\n{1}\n'.format(
'\n'.join([' - {0}'.format(f) for f in sorted(nodoc)]),
'\n'.join([' - {0}'.format(f) for f in sorted(noexample)]),
)
)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
Python
| 0
|
c98ef2e8b64b7daa94c0e883f71813a3e3226a78
|
set current day to none if there is none
|
riskgame/context_processors.py
|
riskgame/context_processors.py
|
# -*- coding: utf-8
from riskgame.models import Player, TeamPlayer, EpisodeDay, Game
def player(request):
returnDict = {}
if request.user.is_authenticated():
try:
currentPlayer = Player.objects.get(user=request.user)
except Player.DoesNotExist:
currentPlayer = Player.objects.create(user=request.user)
returnDict['current_player'] = currentPlayer
try:
returnDict['current_teamplayer'] = TeamPlayer.objects.get(player=currentPlayer)
except TeamPlayer.DoesNotExist:
pass
try:
returnDict['current_day'] = EpisodeDay.objects.get(current=True)
except EpisodeDay.DoesNotExist:
returnDict['current_day'] = None
try:
returnDict['current_game'] = Game.objects.get_latest_game()
except Game.DoesNotExist:
pass
# try:
# game = Game.objects.get_latest_game()
# returnDict['game'] = game
# except:
# pass
return returnDict
|
# -*- coding: utf-8
from riskgame.models import Player, TeamPlayer, EpisodeDay, Game
def player(request):
returnDict = {}
if request.user.is_authenticated():
try:
currentPlayer = Player.objects.get(user=request.user)
except Player.DoesNotExist:
currentPlayer = Player.objects.create(user=request.user)
returnDict['current_player'] = currentPlayer
try:
returnDict['current_teamplayer'] = TeamPlayer.objects.get(player=currentPlayer)
except TeamPlayer.DoesNotExist:
pass
try:
returnDict['current_day'] = EpisodeDay.objects.get(current=True)
except EpisodeDay.DoesNotExist:
pass
try:
returnDict['current_game'] = Game.objects.get_latest_game()
except Game.DoesNotExist:
pass
# try:
# game = Game.objects.get_latest_game()
# returnDict['game'] = game
# except:
# pass
return returnDict
|
Python
| 0.020175
|
734c7c9ad2d1290a202e48ebb5535a0bc371bd75
|
fix iarc constant test
|
mkt/constants/tests/test_ratingsbodies.py
|
mkt/constants/tests/test_ratingsbodies.py
|
from contextlib import contextmanager
from nose.tools import eq_
from tower import activate
import amo.tests
import mkt.constants.ratingsbodies as ratingsbodies
class TestRatingsBodies(amo.tests.TestCase):
def test_all_ratings_waffle_off(self):
ratings = ratingsbodies.ALL_RATINGS()
# Assert only CLASSIND and GENERIC ratings are present.
assert ratingsbodies.CLASSIND_L in ratings
assert ratingsbodies.GENERIC_3 in ratings
assert ratingsbodies.ESRB_E not in ratings
assert ratingsbodies.PEGI_3 not in ratings
assert ratingsbodies.USK_0 not in ratings
def test_all_ratings_waffle_on(self):
self.create_switch('iarc')
ratings = ratingsbodies.ALL_RATINGS()
# Assert all ratings bodies are present.
assert ratingsbodies.CLASSIND_L in ratings
assert ratingsbodies.GENERIC_3 in ratings
assert ratingsbodies.ESRB_E in ratings
assert ratingsbodies.PEGI_3 in ratings
assert ratingsbodies.USK_0 in ratings
def test_ratings_by_name_waffle(self):
without_waffle = ratingsbodies.RATINGS_BY_NAME()
self.create_switch('iarc', db=True)
with_waffle = ratingsbodies.RATINGS_BY_NAME()
# Test waffle off excludes ratings.
assert len(without_waffle) < len(with_waffle)
def test_ratings_by_name_lazy_translation(self):
generic_3_choice = ratingsbodies.RATINGS_BY_NAME()[6]
eq_(generic_3_choice[1], 'Generic - For ages 3+')
def test_ratings_has_ratingsbody(self):
eq_(ratingsbodies.GENERIC_3.ratingsbody, ratingsbodies.GENERIC)
eq_(ratingsbodies.CLASSIND_L.ratingsbody, ratingsbodies.CLASSIND)
eq_(ratingsbodies.ESRB_E.ratingsbody, ratingsbodies.ESRB)
eq_(ratingsbodies.USK_0.ratingsbody, ratingsbodies.USK)
eq_(ratingsbodies.PEGI_3.ratingsbody, ratingsbodies.PEGI)
def test_dehydrate_rating(self):
self.create_switch('iarc')
for rating in ratingsbodies.ALL_RATINGS():
rating = ratingsbodies.dehydrate_rating(rating)
assert isinstance(rating.name, unicode), rating
assert rating.label and rating.label != str(None), rating
def test_dehydrate_ratings_body(self):
self.create_switch('iarc')
for k, body in ratingsbodies.RATINGS_BODIES.iteritems():
body = ratingsbodies.dehydrate_ratings_body(body)
assert isinstance(body.name, unicode)
assert body.label and body.label != str(None)
assert isinstance(body.description, unicode)
@contextmanager
def tower_activate(self, region):
try:
activate(region)
yield
finally:
activate('en-US')
def test_dehydrate_rating_language(self):
self.create_switch('iarc')
with self.tower_activate('es'):
rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
eq_(rating.name, 'Adolescente')
with self.tower_activate('fr'):
rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
eq_(rating.name, 'Adolescents')
rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
eq_(rating.name, 'Teen')
|
from contextlib import contextmanager
from nose.tools import eq_
from tower import activate
import amo.tests
import mkt.constants.ratingsbodies as ratingsbodies
class TestRatingsBodies(amo.tests.TestCase):
def test_all_ratings_waffle_off(self):
ratings = ratingsbodies.ALL_RATINGS()
# Assert only CLASSIND and GENERIC ratings are present.
assert ratingsbodies.CLASSIND_L in ratings
assert ratingsbodies.GENERIC_3 in ratings
assert ratingsbodies.ESRB_E not in ratings
assert ratingsbodies.PEGI_3 not in ratings
assert ratingsbodies.USK_0 not in ratings
def test_all_ratings_waffle_on(self):
self.create_switch('iarc')
ratings = ratingsbodies.ALL_RATINGS()
# Assert all ratings bodies are present.
assert ratingsbodies.CLASSIND_L in ratings
assert ratingsbodies.GENERIC_3 in ratings
assert ratingsbodies.ESRB_E in ratings
assert ratingsbodies.PEGI_3 in ratings
assert ratingsbodies.USK_0 in ratings
def test_ratings_by_name_waffle(self):
without_waffle = ratingsbodies.RATINGS_BY_NAME()
self.create_switch('iarc', db=True)
with_waffle = ratingsbodies.RATINGS_BY_NAME()
# Test waffle off excludes ratings.
assert len(without_waffle) < len(with_waffle)
def test_ratings_by_name_lazy_translation(self):
generic_3_choice = ratingsbodies.RATINGS_BY_NAME()[6]
eq_(generic_3_choice[1], 'Generic - For ages 3+')
def test_ratings_has_ratingsbody(self):
eq_(ratingsbodies.GENERIC_3.ratingsbody, ratingsbodies.GENERIC)
eq_(ratingsbodies.CLASSIND_L.ratingsbody, ratingsbodies.CLASSIND)
eq_(ratingsbodies.ESRB_E.ratingsbody, ratingsbodies.ESRB)
eq_(ratingsbodies.USK_0.ratingsbody, ratingsbodies.USK)
eq_(ratingsbodies.PEGI_3.ratingsbody, ratingsbodies.PEGI)
def test_dehydrate_rating(self):
self.create_switch('iarc')
for rating in ratingsbodies.ALL_RATINGS():
rating = ratingsbodies.dehydrate_rating(rating)
assert isinstance(rating.name, unicode), rating
assert rating.label and rating.label != str(None), rating
assert isinstance(rating.description, unicode), rating
def test_dehydrate_ratings_body(self):
self.create_switch('iarc')
for k, body in ratingsbodies.RATINGS_BODIES.iteritems():
body = ratingsbodies.dehydrate_ratings_body(body)
assert isinstance(body.name, unicode)
assert body.label and body.label != str(None)
assert isinstance(body.description, unicode)
@contextmanager
def tower_activate(self, region):
try:
activate(region)
yield
finally:
activate('en-US')
def test_dehydrate_rating_language(self):
self.create_switch('iarc')
with self.tower_activate('es'):
rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
eq_(rating.name, 'Adolescente')
with self.tower_activate('fr'):
rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
eq_(rating.name, 'Adolescents')
rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
eq_(rating.name, 'Teen')
|
Python
| 0.000002
|
7bb6ac8e1d6e742c2c02c3cc489ea350175dc8cd
|
Refactor source grab's try/catch
|
vizydrop/rest/base.py
|
vizydrop/rest/base.py
|
from http.client import INTERNAL_SERVER_ERROR
import json
from vizydrop.rest import VizydropAppRequestHandler
from vizydrop.sdk.source import StreamingDataSource
from tornado.gen import coroutine
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError
from . import TpaHandlerMixin
class BaseHandler(VizydropAppRequestHandler, TpaHandlerMixin):
def options(self):
self.set_status(204)
self.finish()
def get(self):
meta = self.tpa.Meta
self.finish({
"version": meta.version,
"tags": meta.tags,
"name": meta.name,
"color": meta.color or None,
"description": meta.description,
"site": meta.website,
"sources": [source.get_schema() for source in meta.sources],
"authentication": [account.get_schema() for account in meta.authentication]
})
@coroutine
def post(self):
post_body = json.loads(self.request.body.decode('utf-8'))
source = post_body.get('source')
account_fields = post_body.get('account')
account_identifier = account_fields.pop('auth')
filter_fields = post_body.get('filter')
limit = post_body.get('limit', 100)
skip = post_body.get('skip', 0)
account_type = self.tpa.get_auth(account_identifier)
account = account_type(account_fields)
source_type = self.tpa.get_source(source)
filter = source_type.Meta.filter(filter_fields)
try:
if issubclass(source_type, StreamingDataSource):
source_type.stream_callback = self.on_stream_data
yield source_type.get_data(account, filter, limit=limit, skip=skip)
self.flush()
self.finish('')
else:
data = yield source_type.get_data(account, filter, limit=limit, skip=skip)
self.finish(data, encode=False)
except HTTPError as e:
self.set_status(e.code)
self.finish(str(e), encode=False)
except Exception as e:
self.set_status(INTERNAL_SERVER_ERROR)
self._handle_request_exception(e)
def on_stream_data(self, data):
self.write(data)
self.flush()
|
from http.client import INTERNAL_SERVER_ERROR
import json
from vizydrop.rest import VizydropAppRequestHandler
from vizydrop.sdk.source import StreamingDataSource
from tornado.gen import coroutine
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError
from . import TpaHandlerMixin
class BaseHandler(VizydropAppRequestHandler, TpaHandlerMixin):
def options(self):
self.set_status(204)
self.finish()
def get(self):
meta = self.tpa.Meta
self.finish({
"version": meta.version,
"tags": meta.tags,
"name": meta.name,
"color": meta.color or None,
"description": meta.description,
"site": meta.website,
"sources": [source.get_schema() for source in meta.sources],
"authentication": [account.get_schema() for account in meta.authentication]
})
@coroutine
def post(self):
post_body = json.loads(self.request.body.decode('utf-8'))
source = post_body.get('source')
account_fields = post_body.get('account')
account_identifier = account_fields.pop('auth')
filter_fields = post_body.get('filter')
limit = post_body.get('limit', 100)
skip = post_body.get('skip', 0)
account_type = self.tpa.get_auth(account_identifier)
account = account_type(account_fields)
source_type = self.tpa.get_source(source)
filter = source_type.Meta.filter(filter_fields)
if issubclass(source_type, StreamingDataSource):
source_type.stream_callback = self.on_stream_data
try:
yield source_type.get_data(account, filter, limit=limit, skip=skip)
self.flush()
self.finish('')
except HTTPError as e:
raise e
except Exception as e:
self.set_status(INTERNAL_SERVER_ERROR)
self._handle_request_exception(e)
else:
try:
data = yield source_type.get_data(account, filter, limit=limit, skip=skip)
self.finish(data, encode=False)
except HTTPError as e:
raise e
except Exception as e:
self.set_status(INTERNAL_SERVER_ERROR)
self._handle_request_exception(e)
def on_stream_data(self, data):
self.write(data)
self.flush()
|
Python
| 0
|
546d4ac0bbf14da2bc5610aa09b5a75627b297a6
|
Add 131
|
100_to_199/euler_131.py
|
100_to_199/euler_131.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Problem 131
There are some prime values, p, for which there exists a positive integer, n, such that the expression n3 + n2p is a perfect cube.
For example, when p = 19, 83 + 82×19 = 123.
What is perhaps most surprising is that for each prime with this property the value of n is unique, and there are only four such primes below one-hundred.
How many primes below one million have this remarkable property?
'''
from util import prime_sieve, is_prime
from itertools import count
def is_perfect_cube(x):
# x = abs(x)
return int(round(x ** (1. / 3))) ** 3 == x
def p131_slow(): # Answer: 173, 68.54s Mac pro 2016
cnt = 0
primes = prime_sieve(1000000)
for p in primes:
for i in count(1):
n = i ** 3
if is_perfect_cube(n + p):
if is_perfect_cube(n ** 2):
# print('[great] ', [p, n, i], n**2, n+p)
cnt += 1
break
if i > 600:
break
print(cnt)
def p131():
# n**3 + p = (n+1)**3
# p = 3n**2 + 3n + 1
cnt = 0
for n in count(1):
p = 3 * (n ** 2) + 3 * n + 1
if p >= 1000000:
break
if is_prime(p):
cnt += 1
print(cnt)
p131()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Problem 131
There are some prime values, p, for which there exists a positive integer, n, such that the expression n3 + n2p is a perfect cube.
For example, when p = 19, 83 + 82×19 = 123.
What is perhaps most surprising is that for each prime with this property the value of n is unique, and there are only four such primes below one-hundred.
How many primes below one million have this remarkable property?
'''
from util import prime_sieve
from itertools import count
def is_perfect_cube(x):
# x = abs(x)
return int(round(x ** (1. / 3))) ** 3 == x
def p131(): # Answer: 173, 68.54s Mac pro 2016
cnt = 0
primes = prime_sieve(1000000)
for p in primes:
for i in count(1):
n = i ** 3
if is_perfect_cube(n + p):
if is_perfect_cube(n ** 2):
# print('[great] ', [p, n, i], n**2, n+p)
cnt += 1
break
if i > 600:
break
print(cnt)
p131()
|
Python
| 0.999937
|
6e04d3ab8a6d967c14afaf45869152c93d94d1ec
|
Refactor remote URI formatting functions
|
gutenberg/textsource.py
|
gutenberg/textsource.py
|
"""Module providing implementations of the api.TextSource interface."""
from __future__ import absolute_import
from . import beautify
from . import api
from .common import wget
import itertools
import logging
import os
import rdflib
import tarfile
def _is_legacy_uid(uid):
return 0 < uid < 10
def _format_uri(uid):
if _is_legacy_uid(uid):
raise ValueError('should use legacy URI format for UIDs in (0..10)')
uid = str(uid)
return '{root}/{path}/{uid}/{uid}.txt'.format(
root=r'http://www.gutenberg.lib.md.us',
path='/'.join(uid[:len(uid) - 1]),
uid=uid)
def _format_legacy_uri(uid):
if not _is_legacy_uid(uid):
raise ValueError('should use non-legacy URI format for UIDs >= 10')
legacy_files = (
'when11',
'bill11',
'jfk11',
'getty11',
'const11',
'liber11',
'mayfl11',
'linc211',
'linc111',
)
return '{root}/{path}/{name}.txt'.format(
root=r'http://www.gutenberg.lib.md.us',
path='etext90',
name=legacy_files[uid - 1])
def remote_uri_formatter(uid):
if _is_legacy_uid(uid):
return _format_legacy_uri
return _format_uri
class GutenbergEbooks(api.TextSource):
"""Implementation of api.TextSource that fetches books from Project
Gutenberg.
"""
RDF_URL = r'http://www.gutenberg.org/cache/epub/feeds/rdf-files.tar.bz2'
def cleanup_text(self, lines):
return beautify.strip_headers(lines)
def _raw_source(self, start, stop, step):
logging.info('fetching meta-data archive (this might take a while)')
filename, _ = wget.grab(GutenbergEbooks.RDF_URL)
with tarfile.open(filename) as archive:
for tarinfo in itertools.islice(archive, start, stop, step):
graph = rdflib.Graph()
graph.parse(archive.extractfile(tarinfo))
yield graph
def _format_remote_uris(self, text_info):
uri_formatter = remote_uri_formatter(text_info.uid)
yield uri_formatter(text_info.uid)
def textinfo_converter(self, rdf_graph):
ebook = next(iter(rdf_graph.query('''
SELECT
?ebook
?author
?title
WHERE {
?ebook a pgterms:ebook.
OPTIONAL { ?ebook dcterms:creator [ pgterms:name ?author ]. }
OPTIONAL { ?ebook dcterms:title ?title. }
}
LIMIT 1
''')))
return api.TextInfo(
uid=int(os.path.basename(ebook.ebook.toPython())),
author=ebook.author.toPython() if ebook.author else None,
title=ebook.title.toPython() if ebook.title else None)
|
"""Module providing implementations of the api.TextSource interface."""
from __future__ import absolute_import
from . import beautify
from . import api
from .common import wget
import itertools
import logging
import os
import rdflib
import tarfile
class GutenbergEbooks(api.TextSource):
"""Implementation of api.TextSource that fetches books from Project
Gutenberg.
"""
RDF_URL = r'http://www.gutenberg.org/cache/epub/feeds/rdf-files.tar.bz2'
def cleanup_text(self, lines):
return beautify.strip_headers(lines)
def _raw_source(self, start, stop, step):
logging.info('fetching meta-data archive (this might take a while)')
filename, _ = wget.grab(GutenbergEbooks.RDF_URL)
with tarfile.open(filename) as archive:
for tarinfo in itertools.islice(archive, start, stop, step):
graph = rdflib.Graph()
graph.parse(archive.extractfile(tarinfo))
yield graph
def _format_remote_uris(self, text_info):
if 0 < text_info.uid < 10:
basic_url = '{root}/{path}/{file}.txt'.format(
root=r'http://www.gutenberg.lib.md.us',
path='etext90',
file=["when11", "bill11", "jfk11", "getty11",
"const11", "liber11", "mayfl11",
"linc211", "linc111"][text_info.uid-1])
else:
uid = str(text_info.uid)
basic_url = '{root}/{path}/{uid}/{uid}.txt'.format(
root=r'http://www.gutenberg.lib.md.us',
path='/'.join(uid[:len(uid)-1]),
uid=text_info.uid)
yield basic_url
def textinfo_converter(self, rdf_graph):
ebook = next(iter(rdf_graph.query('''
SELECT
?ebook
?author
?title
WHERE {
?ebook a pgterms:ebook.
OPTIONAL { ?ebook dcterms:creator [ pgterms:name ?author ]. }
OPTIONAL { ?ebook dcterms:title ?title. }
}
LIMIT 1
''')))
return api.TextInfo(
uid=int(os.path.basename(ebook.ebook.toPython())),
author=ebook.author.toPython() if ebook.author else None,
title=ebook.title.toPython() if ebook.title else None)
|
Python
| 0
|
10318a11dded5e69c3d9c98325613700c9b3db63
|
Fix for dependent package detection.
|
lib/spack/spack/cmd/dependents.py
|
lib/spack/spack/cmd/dependents.py
|
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description = "Show dependent packages."
def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.")
def dependents(parser, args):
specs = spack.cmd.parse_specs(args.spec, concretize=True)
if len(specs) != 1:
tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#'
deps = [d.format(fmt) for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt), *deps)
|
Python
| 0
|
|
4f3c3755d9fcbfd9ce0551c19bb893e7ba73db91
|
Add missing setup.py
|
numpy/numarray/setup.py
|
numpy/numarray/setup.py
|
from os.path import join
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numarray',parent_package,top_path)
config.add_data_files('include/numarray/*.h')
# Configure fftpack_lite
config.add_extension('_capi',
sources=['_capi.c']
)
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Python
| 0.000003
|
|
735ce7f11b70bf8e916bad5610093b5886c57db6
|
Add quickseg.py
|
cyvlfeat/quickshift/quickseg.py
|
cyvlfeat/quickshift/quickseg.py
|
import numpy as np
from cyvlfeat.quickshift import quickshift
from cyvlfeat.quickshift import flatmap
from cyvlfeat.quickshift import imseg
def quickseg(image, ratio, kernel_size, max_dist):
"""
Produce a quickshift segmentation of a greyscale image.
Parameters
----------
image : [H, W] or [H, W, 1] `float64` `ndarray`
Input image, Greyscale. A single channel, greyscale,
`float64` numpy array (ndarray).
ratio : `double`
Trade-off between spatial consistency and color consistency.
Small ratio gives more importance to the spatial component.
Note that distance calculations happen in unnormalized image
coordinates, so RATIO should be adjusted to compensate for
larger images.
kernel_size : `double`
The standard deviation of the parzen window density estimator.
max_dist : `double`
The maximum distance between nodes in the quickshift tree. Used
to cut links in the tree to form the segmentation.
Returns
-------
i_seg :
A color image where each pixel is labeled by the mean color in its
region.
labels : [H, W] `float64` `ndarray`.
Array of the same size of image.
A labeled image where the number corresponds to the cluster identity.
maps : [H, W] `float64` `ndarray`.
Array of the same size of image.
`maps` as returned by `quickshift`: For each pixel, the pointer to the
nearest pixel which increases the estimate of the density.
gaps : [H, W] `float64` `ndarray`.
Array of the same size of image.
`gaps` as returned by `quickshift`: For each pixel, the distance to
the nearest pixel which increases the estimate of the density.
estimate : [H, W] `float64` `ndarray`.
Array of the same size of image.
`estimate` as returned by `quickshift`: The estimate of the density.
"""
# validate image
if image.dtype != np.float64:
raise ValueError('Image array must be of Double precision')
# image = np.asarray(image, dtype=np.float64)
# Add less than one pixel noise to break ties caused by
# constant regions in an arbitrary fashions
noise = np.random.random(image.shape) / 2250
image += noise
# For now we're dealing with Greyscale images only.
if image.shape[2] == 1:
imagex = ratio * image
# Perform quickshift to obtain the segmentation tree, which is already cut by
# maxdist. If a pixel has no nearest neighbor which increases the density, its
# parent in the tree is itself, and gaps is inf.
(maps, gaps, estimate) = quickshift(image, kernel_size, max_dist)
# Follow the parents of the tree until we have reached the root nodes
# mapped: a labeled segmentation where the labels are the indices of the modes
# in the original image.
# labels: mapped after having been renumbered 1: nclusters and reshaped into a
# vector
(mapped, labels) = flatmap(maps)
labels = np.resize(labels, maps.shape)
# imseg builds an average description of the region by color
i_seg = imseg(image, labels)
return i_seg, labels, maps, gaps, estimate
|
Python
| 0
|
|
766569894a5c849c449971aca5c7d999e7019aef
|
check required packages
|
check_packages.py
|
check_packages.py
|
"""
Checking and reinstalling the external packages
"""
import os
import sys
if sys.platform =='win32':
IS_WIN = True
else:
IS_WIN = False
try:
import setuptools
print "==> setuptools-%s installed."% setuptools.__version__
#if setuptools.__version__ != '0.6c11':
# print "!!! Recommend to install 0.6c11."
except:
print "!!! setuptools-0.6c11 not installed."
try:
import pyparsing
if pyparsing.__version__ == '1.5.5':
print "==> pyparsing-%s installed."% pyparsing.__version__
else:
print "!!! 1.5.5 recommended."
except:
print "==> pyparsing-1.5.5 not installed."
try:
import html5lib
if html5lib.__version__.count('0.95') > 0:
print "==> html5lib-%s installed."% html5lib.__version__
else:
print "!!! 0.95 recommended."
except:
print "!!! html5lib-0.95 not installed."
try:
import pyPdf
print "==> pyPdf installed."
except:
print "!!! pyPdf-1.13 not installed (optional)."
try:
import reportlab
print "==> reportlab installed."
except:
print "!!! reportlab-2.5 not installed."
try:
import lxml
print "==> lxml installed."
except:
print "!!! lxml-2.3 not installed."
try:
import PIL
print "==> PIL installed."
except:
print "!!! PIL-1.1.7 not installed."
try:
import pylint
print "==> pylint installed."
except:
print "!!! pylint not installed (optional)."
#os.system("START /B C:\python26\Scripts\easy_install pylint==0.25.0")
try:
import periodictable
print "==> periodictable-%s installed."% periodictable.__version__
if periodictable.__version__ != '1.3.0':
print "!!! Recommend to install 1.3.0."
except:
print "!!! periodictable-1.3.0 is not installed."
try:
import numpy
print "==> numpy-%s installed."% numpy.__version__
if numpy.__version__ != '1.6.1':
print "==> Recommend to install 1.6.1 (1.5.1 for MAC)."
except:
print "!!! numpy-1.6.1 not installed (1.5.1 for MAC)."
try:
import scipy
print "==> scipy-%s installed."% scipy.__version__
if scipy.__version__ != '0.10.1':
print "!!! Recommend to install 0.10.1 (1.10.0 for MAC)."
except:
print "!!! scipy-0.10.1 not installed (1.10.0 for MAC)."
try:
import wx
print "==> wxpython-%s installed."% wx.__version__
if wx.__version__ != '2.8.12.1':
print "!!! Recommend to install unicode-2.8.12.1."
except:
print "!!! wxpython-unicode-2.8.12.1 not installed."
try:
import matplotlib
print "==> matplotlib-%s installed."% matplotlib.__version__
if matplotlib.__version__ != '1.1.0':
print "!!! Recommend to install 1.1.0 (1.0.1 for MAC) or higher."
except:
print "!!! matplotlib-1.1.0 not installed (1.0.1 for MAC)."
try:
from ho import pisa
if pisa.__version__ == '3.0.27':
print "==> pisa-%s installed."% pisa.__version__
else:
print "!!! Incorrect version of pisa installed."
print "!!! 3.0.27 required."
except:
print "!!! pisa-3.0.27 not installed."
if IS_WIN:
try:
import pywin
print "==> pywin32 installed."
except:
print "!!! pywin32 not installed. Please install pywin32-217."
try:
import py2exe
print "==> py2exe-%s installed."% py2exe.__version__
if py2exe.__version__ != '0.6.9':
print "!!! Recommend to install 0.6.9."
except:
print "!!! py2exe-0.6.9 not installed. Installing..."
try:
import comtypes
print "==> comtypes-%s installed."% comtypes.__version__
if comtypes.__version__ != '0.6.2':
print "!!! Recommend to install 0.6.2."
except:
print "!!! comtypes-0.6.2 not installed. Installing..."
print "==> Require subversion = 1.6.0 or lower version."
print "Installed:"
os.system("CALL svn --version --quiet")
print "==> Checking gcc compiler ( >= 4.2 required):"
os.system("CALL gcc -v")
|
Python
| 0
|
|
8080128d2ca5718ac971160bf964c3ca73b235b7
|
add downloader
|
operators/downloader.py
|
operators/downloader.py
|
import os
from drivers import driver
from file_utils import file_util
class Downloader(object):
def __init__(self, server_driver):
"""Init a Uploader object
Args:
server_driver: a driver already connected to cloud service
"""
if not issubclass(driver.Driver, driver.Driver):
raise TypeError('Driver should be a subclass of drivers.driver.Driver')
self.driver = server_driver
def download(self, remote_filename, local_filename=None, local_dir=None):
if local_filename is None and local_dir is None:
raise AttributeError('Need at least one of local_filename or local_dir.')
if local_dir:
local_filename = os.path.join(local_dir, file_util.path_leaf(remote_filename))
self.driver.download(local_filename=local_filename,
remote_filename=remote_filename)
|
Python
| 0.000001
|
|
49071cbeda14133019b0969e7499ea8f26cdb1cf
|
Create OogOrderedSet.py
|
OogOrderedSet.py
|
OogOrderedSet.py
|
# -*- coding: utf8-*-
# coding: utf-8
__author__ = "Dominique Dutoit"
__date__ = "$5 march 2014$"
from collections import MutableSet
class OrderedSet(MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
"""def __getslice__(self, x, y):
'''x>=0 & y>=0...'''
if x>len(self) or y <= x : return []
count=-1 ; res = []
for i in self.__iter__():
count+=1
if x > count : continue
elif y <= count : break
else: res.append(i)
return res"""
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
from time import clock
s = OrderedSet('abracadaba')
t = OrderedSet('simsbalabim')
assert list(s) == ['a', 'b', 'r', 'c', 'd']
assert list(s.__reversed__()) == ['d', 'c', 'r', 'b', 'a']
assert len(s) == 5
print '__iter__ and __ reversed : pass'
assert s | t == OrderedSet(['a', 'b', 'r', 'c', 'd', 's', 'i', 'm', 'l'])
assert s & t == OrderedSet(['b', 'a'])
assert t & s == OrderedSet(['a', 'b']) #
assert s - t == OrderedSet(['r', 'c', 'd'])
print 'logical operators : pass'
mylist = []
for i in xrange(10000):
[mylist.append(i) for j in xrange(int(i**0.3))]
from random import shuffle
shuffle(mylist)
assert len(mylist) == 116864
begin = clock()
a=set()
for i in mylist:
a.add(i)
assert len(a) == 9999
setduration = clock()-begin
print 'duration for ', a.__class__.__name__, setduration
a = OrderedSet()
begin = clock()
for i in mylist:
a.add(i)
assert len(a) == 9999
orderedsetduration = clock()-begin
print 'duration for ', a.__class__.__name__, orderedsetduration
#count=0
#for i in a:
# print i,
# count+=1
# if count>=10:break
#print
#print a[0:10]
from collections import defaultdict
begin = clock()
dico=defaultdict(OrderedSet)
for i in mylist:
dico['truc'].add(i)
assert len(a) == 9999
defaultdicduration = clock()-begin
print 'duration for ', dico.__class__.__name__, defaultdicduration
try:
assert 2.5*setduration > orderedsetduration #python 2.7
except: pass #pypy donne *10 parce que le set de base de pypy est beaucoup plus rapide
|
Python
| 0
|
|
5be32f4022135a10585cf094b6fb8118dd87a2f6
|
Add files via upload (#396)
|
ciphers/Atbash.py
|
ciphers/Atbash.py
|
def Atbash():
inp=raw_input("Enter the sentence to be encrypted ")
output=""
for i in inp:
extract=ord(i)
if extract>=65 and extract<=90:
output+=(unichr(155-extract))
elif extract>=97 and extract<=122:
output+=(unichr(219-extract))
else:
output+=i
print output
Atbash() ;
|
Python
| 0
|
|
8d1016437e87794fb39b447b51427bae98a51bc2
|
Add one public IP provider
|
classes/jsonip.py
|
classes/jsonip.py
|
from json import load
from urllib2 import urlopen
class JsonIp:
def __init__(self):
url = 'https://jsonip.com/'
uri = urlopen(url)
response = load(uri)
self.ip = response["ip"]
# self.ip = '1.1.1.1'
|
Python
| 0
|
|
b1be1bbf785406f4d286c7eb85ea459309ea03a2
|
Fix file hierarchy.
|
batch_image_resizer2/batch_image_resizer.py
|
batch_image_resizer2/batch_image_resizer.py
|
"""Resize images in a folder using imagemagick command line tools.
http://hakanu.net
"""
import glob
import os
def main():
print 'Started'
images = glob.glob("/home/h/Desktop/all_karikatur_resized/*.jpg")
counter = 0
for image in images:
print 'Processing: ', image
index = image[image.rfind('/') + 1:image.rfind('.jpg')]
print 'index: ', index
os.system("convert " + index + ".jpg -resize 128x128 resize_128_" + index + ".jpg")
counter += 1
if counter % 100 == 0:
print 'Completed: ', counter
print '\n'
main()
|
Python
| 0
|
|
6cbc9230e241511ccc922eb179f62e08db78bf14
|
1689. Partitioning Into Minimum Number Of Deci-Binary Numbers
|
LeetCode/PartitioningIntoMinimumNumberOfDeciBinaryNumbers.py
|
LeetCode/PartitioningIntoMinimumNumberOfDeciBinaryNumbers.py
|
""" a convoluted way of describing finding the biggest decimal digit :D """
class Solution:
def minPartitions(self, n: str) -> int:
return max(int(d) for d in str(n))
|
Python
| 0.999999
|
|
3b9d85b9b41636a43821b0551e7162894131cbf5
|
add new file for clusterisation
|
clusterisation.py
|
clusterisation.py
|
import sys
import os
import random
import math
import statistics
from collections import namedtuple
from fasta import *
from fastq import *
from log_progress import *
Read = namedtuple('Read', 'index seq qual')
def replace_low_quality(data, q_threshold):
res = ''
for nuc, q in zip(data.seq, data.qual):
if ord(q) - ord('!') <= q_threshold:
res += 'N'
else:
res += nuc
return FASTQData(seq = res, qual = data.qual, name = data.name, attr = data.attr)
def median(data):
return statistics.median([ord(q) - ord('!') for q in data.qual])
def hamm(alpha, beta, max_err = 0):
# if len(alpha) != len(beta):
# print("!!!!")
err = 0
for a, b in zip(alpha, beta):
err += (a != b) and (a != 'N') and (b != 'N')
if err > max_err:
return False
return True
def make_best_pos_consensus(orig_seq, orig_num, seq_list):
res = ''
for i in range(len(orig_seq)):
d = {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0}
d[orig_seq[i]] += orig_num
for seq, cnt in seq_list:
d[seq[i]] += cnt
d.pop('N')
res += max(d.items(), key = lambda x: x[1])[0]
return res
def classify(fqdata, low_threshold, high_threshold):
qmed = median(fqdata)
if qmed >= low_threshold and qmed < high_threshold:
return 1
elif qmed >= high_threshold:
return 2
return 0
def classify_fastq_file(filepath, replace_threshold, low_threshold, high_threshold):
cls_seq = [{}, {}, {}]
for fqdata in FASTQParser(filepath):
fqdata = replace_low_quality(fqdata, replace_threshold)
cls = classify(fqdata, low_threshold, high_threshold)
cls_seq[cls][fqdata.seq] = cls_seq[cls].get(fqdata.seq, 0) + 1
return cls_seq
def merge_with_clusters(seq_dict, seq_threshold, qmed_threshold):
print("sum pre filter", sum(seq_dict.values()))
seq_dict = dict(filter(lambda x: x[1] >= seq_threshold, seq_dict.items()))
print("sum post filter", sum(seq_dict.values()))
keys = list(seq_dict.keys())
key_ind = dict([(keys[i], i) for i in range(len(keys))])
cands = {}
merged_seq = {x: set([]) for x in keys}
for i in range(len(seq_dict) - 1):
for j in range(i + 1, len(seq_dict)):
if hamm(keys[i], keys[j]):
key = i
append_to = j
if seq_dict[keys[i]] > seq_dict[keys[j]]:
key = j
append_to = i
if key not in cands: cands[key] = []
cands[key].append(append_to)
while cands:
for seq, cnt in sorted(seq_dict.items(), key = lambda x: x[1]):
if key_ind[seq] in cands:
for append_to in cands[key_ind[seq]]:
# seq_dict[keys[append_to]] += math.ceil(seq_dict[seq] / len(cands[key_ind[seq]]))
seq_dict[keys[append_to]] += seq_dict[seq] / len(cands[key_ind[seq]])
if merged_seq[seq]:
merged_seq[keys[append_to]] = merged_seq[keys[append_to]].union(merged_seq[seq])
# merged_seq[keys[append_to]].add((key_ind[seq], math.ceil(seq_dict[seq] / len(cands[key_ind[seq]]))))
merged_seq[keys[append_to]].add((key_ind[seq], seq_dict[seq] / len(cands[key_ind[seq]])))
seq_dict[seq] = 0
to_pop = set([x for x in cands])
for source in cands:
for to_append in cands[source]:
if seq_dict[keys[source]] and source in to_pop:
to_pop.remove(source)
for p in to_pop:
cands.pop(p)
merged_seq.pop(keys[p])
seq_dict.pop(keys[p])
print("Making consensuses...")
print("sum pre merging", sum(seq_dict.values()))
n_merged = 0
new_seq_dict = {}
for seq, seq_ls in merged_seq.items():
new_seq = make_best_pos_consensus(seq, seq_dict[seq], [(keys[x[0]], x[1]) for x in seq_ls])
if new_seq not in new_seq_dict:
new_seq_dict[new_seq] = 0
else:
n_merged += 1
new_seq_dict[new_seq] += seq_dict[seq]
print("# merged:", n_merged)
print("sum post merging", sum(new_seq_dict.values()))
return new_seq_dict
def aggregate_sequences(f1, max_sequences, qpos_threshold, qmed_threshold, seq_threshold, out_seq = "tmp.topseq1.txt", out_blast = "tmp.blast1.txt"):
prefix = f1[:f1.find(".fastq")]
majors = {}
minors = {}
r = FASTQParser(f1)
print("Searching for unique sequences..."); d1 = {}
# Divide reads by major and minor
for data1 in r:
data1 = replace_low_quality(data1, qpos_threshold)
# Divide sequences to two groups: below (minor) median and above (major) median group
if median(data1) >= qmed_threshold:
majors[data1.seq] = majors.get(data1.seq, 0) + 1
else:
minors[data1.seq] = minors.get(data1.seq, 0) + 1
d1[data1.seq] = d1.get(data1.seq, 0) + 1
print("Clustering error sequences...")
d1 = merge_with_clusters(d1, seq_threshold, qmed_threshold)
print("Writing results...")
with open(out_seq, 'w') as file:
i = 0
for key, val in reversed(sorted(d1.items(), key = lambda x: x[1])):
# print(val, " (", round(100 * val / sum(d1.values()), 4), "%)", sep = '')
print(val, key, sep = '\t', file = file)
i += 1
if i == max_sequences: break
# if val < 2: break
ls = []
i = 0
for key, val in reversed(sorted(d1.items(), key = lambda x: x[1])):
ls.append(faseq(name = "sequence" + str(i) + "_" + str(val) + "_(" + str(round(100 * val / sum(d1.values()), 4)) + ")", seq = key, comm = ''))
i += 1
if i == max_sequences: break
write_fasta(f1 + ".seq.fasta.txt", ls)
os.system("blastn -query " + f1 + ".seq.fasta.txt" + " -db hlabase/hlabase.fasta -outfmt 6 -num_alignments 4 > " + out_blast)
if __name__ == '__main__':
aggregate_sequences(sys.argv[1], 50, 7, 5, "tmp.topseq1.txt", "tmp.blast1.txt")
aggregate_sequences(sys.argv[2], 50, 7, 5, "tmp.topseq2.txt", "tmp.blast2.txt")
|
Python
| 0
|
|
19ca3a0a16b321f0b5caadb09a3db78d857c4d0c
|
Create jrcsv2gncsv.py
|
jrcsv2gncsv.py
|
jrcsv2gncsv.py
|
'''
Created on Feb 3, 2014
This code converts from the klm like hierarchical csv format used by junar open data platform
to a flat csv format.
The script reads in a junar csv and emits a standard csv, paths given by fnamein and fnameout
Code tested with Sacramento Cites locations-of-city-trees.csv and PARKI-SPACE.csv
@author: jay venti
Created for codw4sac a Code 4 America brigade
'''
#fnamein = "D:\Storage\Project and Reasch\Work Consulting\Code Sacramento\sac city apis\PARKI-SPACE.csv"
#fnameout = "D:\Storage\Project and Reasch\Work Consulting\Code Sacramento\sac city apis\gen-PARKI-SPACE.csv"
fnamein = "D:\Storage\Project and Reasch\Work Consulting\Code Sacramento\sac city apis\80137-locations-of-city-trees.csv"
fnameout = "D:\Storage\Project and Reasch\Work Consulting\Code Sacramento\sac city apis\gen-locations-of-city-trees.csv"
from time import gmtime, strftime
#from pprint import pprint
def htmlstr2rowheaderstr(htmlstring):
from bs4 import BeautifulSoup # @UnresolvedImport
soup = BeautifulSoup(htmlstring)
delm ='"'
key = ''
i =0
for tables in soup.find_all('table'):
i +=1
if i > 1000 : break
rowstr = '';
# if first row has data, then process this table
for trow in tables.find_all('tr'): # get each table row
tds = trow.find_all('td')
if len(tds) == 2 : # then first row has a key vale pair
key = tds[0].text
# print key + " = " + val
# fillin column entries
rowstr = rowstr + delm + key.encode('utf-8') + delm +','
# if first or any other html row has other than 2 elements
else : break
# last key val pair
# last html table
rowstr = rowstr.rstrip(',')
return rowstr
def htmlstr2rowdatastr(htmlstring):
from bs4 import BeautifulSoup # @UnresolvedImport
soup = BeautifulSoup(htmlstring)
delm ='"'
val = ''
i =0
for tables in soup.find_all('table'):
i +=1
if i > 1000 : break
rowstr = '';
# if first row has data, then process this table
for trow in tables.find_all('tr'): # get each table row
tds = trow.find_all('td')
if len(tds) == 2 : # then first row has a key vale pair
val = tds[1].text
if val == '<Null>': val = 'NULL'
# print key + " = " + val
# fillin column entries
rowstr = rowstr + delm + val.encode('utf-8')+delm + ','
# if first or any other html row has other than 2 elements
else : break
# last key val pair
# last html table
rowstr = rowstr.rstrip(',')
return rowstr
def sumtagfrq(row,tgfrq):
for key in row:
if key not in tgfrq:
tgfrq[key.encode('utf-8')] = 0
tgfrq[key.encode('utf-8')] += 1
return tgfrq
import csv
delm ='"'
csvfile = open(fnamein, "rb")
reader = csv.reader(csvfile)
print strftime("%b %d %Y %H:%M:%S", gmtime()), 'reader'
print
csvout = open(fnameout, "w")
rownum = 0
csvheadstr = ''
csvdatastr = ''
firsthit = True
for row in reader:
if rownum == 0:
header = row
# Save header row but omits the hierarchical element 'Description' from csvheadstr output string
for e in row:
if e != 'Description':
csvheadstr = csvheadstr + delm + e + delm + ','
else:
colnum = 0
csvdatastr = ''
for col in row:
#print '%-8s: %s' % (header[colnum], col)
if header[colnum]=='Description': # processes a hierarchical element stored within the html code
if firsthit == True: # the first time record the rest of the csvheadstr header string
csvheadstr = csvheadstr + htmlstr2rowheaderstr(col)
#print csvheadstr
csvout.write(csvheadstr+'\n')
firsthit = False
csvdatastr = csvdatastr + htmlstr2rowdatastr(col)
#print csvdatastr
csvout.write(csvdatastr+'\n')
else: # processes the non-hierarchical non-html elements
csvdatastr = csvdatastr + delm+col.encode('utf-8')+delm + ','
colnum += 1
if rownum % 1000 == 0 : #to test on a limited run set 0 t0 999 and uncommon to the break below
print 'rownum =',rownum
print strftime("%b %d %Y %H:%M:%S", gmtime()), 'write'
#break
rownum += 1
csvfile.close()
csvout.close()
|
Python
| 0.000001
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.