commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
9cc13ca511987584ea4f52cf0c2e57e6b98a9e8b | Add lc0350_intersection_of_two_arrays_ii.py | lc0350_intersection_of_two_arrays_ii.py | lc0350_intersection_of_two_arrays_ii.py | """Leetcode 350. Intersection of Two Arrays II
Easy
URL: https://leetcode.com/problems/intersection-of-two-arrays-ii/
Given two arrays, write a function to compute their intersection.
Example 1:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2,2]
Example 2:
Input: nums1 = [4,9,5], nums2 = [9,4,9,8,4]
Output: [4,9]
Note:
- Each element in the result should appear as many times as it shows in both arrays.
- The result can be in any order.
Follow up:
- What if the given array is already sorted? How would you optimize your algorithm?
- What if nums1's size is small compared to nums2's size? Which algorithm is better?
- What if elements of nums2 are stored on disk, and the memory is limited such that
you cannot load all elements into the memory at once?
"""
class Solution(object):
    def intersect(self, nums1, nums2):
        """Compute the multiset intersection of two integer arrays.

        Each element appears in the result as many times as it occurs in
        both inputs; the order of the result is unspecified.

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]

        Time: O(m + n); Space: O(m) for the counter over nums1.
        """
        from collections import Counter
        counts = Counter(nums1)
        result = []
        for value in nums2:
            # Consume one occurrence from nums1's counter per match so an
            # element is emitted min(count1, count2) times.
            if counts[value] > 0:
                counts[value] -= 1
                result.append(value)
        return result
def main():
    """Script entry point; intentionally a no-op for now."""
    return None


if __name__ == '__main__':
    main()
| Python | 0.000351 | |
eb3882051241843716ef9b7ceef8aeb6ee2a35c6 | add mysqlproxy.py | misc/mysqlproxy.py | misc/mysqlproxy.py | #!/usr/bin/env python3
##############################################################################
#The MIT License (MIT)
#
#Copyright (c) 2016 Hajime Nakagami
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
##############################################################################
import sys
import socket
import binascii
def recv_mysql_packet(sock):
    """Read one complete MySQL wire packet from *sock* and return its raw bytes.

    A MySQL packet starts with a 4-byte header: a 3-byte little-endian
    payload length followed by a 1-byte sequence id.  The returned value is
    the header plus the full payload.

    Raises ConnectionError if the peer closes the connection before a whole
    packet arrives.  (The original loop hung forever on EOF, because
    recv() returns b'' and ``n`` never decreased.)
    """
    head = sock.recv(4)
    if len(head) < 4:
        raise ConnectionError('socket closed while reading packet header')
    n = int.from_bytes(head[:3], byteorder='little')
    received = b''
    while n:
        bs = sock.recv(n)
        if not bs:
            raise ConnectionError('socket closed mid-packet')
        received += bs
        n -= len(bs)
    return head + received
def asc_dump(s):
    """Print *s* (a bytes object) as printable ASCII in brackets.

    Each non-printable byte is shown as '.'.  Nothing is printed for
    empty input.
    """
    text = ''.join(chr(c) if 32 <= c < 128 else '.' for c in s)
    if text:
        print('[' + text + ']')
def proxy_wire(server_name, server_port, listen_host, listen_port):
    """Accept one client connection and relay MySQL packets between the
    client and the real server, hex-dumping each packet for debugging.

    The relay is strictly lock-step (one client packet, then one server
    packet), which suits the simple request/response phases this
    debugging proxy targets.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind((listen_host, listen_port))
    sock.listen(1)
    client_sock, addr = sock.accept()
    server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_sock.connect((server_name, server_port))
    while True:
        client_data = recv_mysql_packet(client_sock)
        # sendall() retries on partial writes; plain send() may transmit
        # only part of the packet and silently corrupt the relayed stream.
        server_sock.sendall(client_data)
        print('>>', binascii.b2a_hex(client_data).decode('ascii'))
        asc_dump(client_data)
        server_data = recv_mysql_packet(server_sock)
        client_sock.sendall(server_data)
        print('<<', binascii.b2a_hex(server_data).decode('ascii'))
        asc_dump(server_data)
if __name__ == '__main__':
    if len(sys.argv) < 3:
        print('Usage : ' + sys.argv[0] + ' server[:port] [listen_host:]listen_port')
        sys.exit()
    # First argument: "host" or "host:port"; MySQL's default port is 3306.
    server_spec = sys.argv[1].split(':')
    server_name = server_spec[0]
    server_port = 3306 if len(server_spec) == 1 else int(server_spec[1])
    # Second argument: "port" or "host:port"; default host is localhost.
    listen_spec = sys.argv[2].split(':')
    if len(listen_spec) == 1:
        listen_host = 'localhost'
        listen_port = int(listen_spec[0])
    else:
        listen_host = listen_spec[0]
        listen_port = int(listen_spec[1])
    proxy_wire(server_name, server_port, listen_host, listen_port)
| Python | 0.000002 | |
f1c389a0028c6f92300573bef587c084204e858f | Create circlecli.py | mocks/circlecli.py | mocks/circlecli.py | # -*- coding: utf-8 -*-
"""
Mocks for the CircleCLI API library tests.
"""
from httmock import response, urlmatch
NETLOC = r'(.*\.)?circleci\.com$'
HEADERS = {'content-type': 'application/json'}
GET = 'get'
class Resource:
    """A CircleCli resource backed by a file on disk.

    :param path: The file path to the resource.
    """

    def __init__(self, path):
        self.path = path

    def get(self):
        """Perform a GET request on the resource, i.e. read the backing file.

        :rtype: str
        """
        with open(self.path, 'r') as handle:
            return handle.read()
@urlmatch(netloc=NETLOC, method=GET)
def resource_get(url, request):
    """httmock handler: serve GET requests to *.circleci.com from local
    fixture files, mapping host + URL path onto a relative file path.

    Returns a 200 response with the file's content, or a 404 if the
    fixture file cannot be read.
    """
    file_path = url.netloc + url.path
    try:
        content = Resource(file_path).get()
    except EnvironmentError:
        # catch any environment errors (i.e. file does not exist) and return a
        # 404.
        return response(404, {}, HEADERS, None, 5, request)
    return response(200, content, HEADERS, None, 5, request)
| Python | 0 | |
efc76b2a0abe82cad0e1074bc51b6e9a6c5b5b35 | add test when no description in object | tests/check-no-description.py | tests/check-no-description.py | #!/usr/bin/python3
# coding: utf-8
import sys
import json
import requests
import pprint
import unittest
import string
import random
import os
import json
import time
import datetime
import base64
import uuid
import argparse
import string
# Shared pretty-printer used by pprnt() below.
pp = pprint.PrettyPrinter(depth=6)

# NOTE(review): the flag is spelled "--quite" but presumably means
# "--quiet"; renaming would change the CLI, so it is only flagged here.
parser = argparse.ArgumentParser()
parser.add_argument('--quite',
                    help='Just print an OK at the end and fade out the printed data',
                    action='store_true')
# Parsed at import time (module doubles as a script).
args = parser.parse_args()
def pprnt(data):
    """Pretty-print *data* unless output was suppressed via --quite."""
    if not args.quite:
        pp.pprint(data)
def random_image():
    """Return the base64-encoded content of the fixture image data/plot.png.

    Reads the file relative to the current working directory, so the
    script must be run from the tests directory.
    """
    with open("data/plot.png", "rb") as f:
        content = f.read()
    return base64.b64encode(content)
def random_id():
    """Return the first five characters of a fresh random UUID4 string."""
    return str(uuid.uuid4())[:5]
def random_title(words):
    """Return a one-word title chosen from a fixed word list.

    NOTE: the *words* argument is immediately shadowed by the hard-coded
    list, so the caller's value is ignored (behaviour preserved).
    """
    words = ['Foo', 'Bar']
    return ' '.join(random.choice(words) for _ in range(1))
def random_result():
    """Return a random test verdict: 'passed', 'failed' or 'nonapplicable'."""
    # random.choice is the idiomatic equivalent of
    # d[random.randint(0, len(d) - 1)].
    return random.choice(['passed', 'failed', 'nonapplicable'])
def random_submitter():
    """Return a submitter name; the candidate list currently has one entry."""
    # random.choice keeps the extension point (add names to the list)
    # while replacing the manual randint-index idiom.
    return random.choice(['john_doe'])
def query_full(id, sub_id):
    """GET object <id>/<sub_id> from the local API and pretty-print the
    JSON response.  Network side effects only; returns None.
    """
    url = 'http://localhost:8080/api/v1/object/{}/{}'.format(id, sub_id)
    data = ''' '''
    headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
    r = requests.get(url, data=data, headers=headers)
    print("\nStatus Code:")
    print(r.status_code)
    print("\nRet Data:")
    data = r.json()
    pprnt(data)
def add_n(n):
    """POST up to *n* randomly generated objects to the local API, then
    list all stored objects and return the listing's HTTP status code.

    NOTE(review): the unconditional ``sys.exit(0)`` at the end of the loop
    body means only a single object is ever submitted and the trailing
    listing code is unreachable — confirm whether that is intentional.
    """
    url = 'http://localhost:8080/api/v1/object'
    headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
    for i in range(n):
        # Assemble one object-item payload with random metadata.
        data = dict()
        data["submitter"] = random_submitter()
        data["object-item"] = dict()
        data["object-item"]['categories'] = [ "team:orange", "topic:ip", "subtopic:route-cache" ]
        data["object-item"]['version'] = random.randint(0,1)
        data['object-item']['title'] = "{}".format(random_title(3))
        data['object-item']['data'] = list()
        desc_data = dict()  # NOTE(review): built but never used
        # base64 requires a byte array for encoding -> .encode('utf-8')
        # json requires a string -> convert to UTF-8
        img_data = dict()
        img_data['name'] = 'image.png'
        img_data['mime-type'] = 'image/png'
        img_data['data'] = random_image().decode("utf-8")
        data['object-item']['data'].append(img_data)
        # Second data entry: a small embedded pcap blob (base64 literal).
        img_data = dict()
        img_data['name'] = 'trace.pcap'
        img_data['mime-type'] = 'application/vnd.tcpdump.pcap'
        img_data['data'] = "R0lGODlhDwAPAKECAAAAzxzM/////wAAACwAAAAADwAPAAACIISPeQHsrZ5ModrLlN48CXF8m2iQ3YmmKqVlRtW4MLwWACH+H09wdGltaXplZCBieSBVbGVhZCBTbWFydFNhdmVyIQAAOw=="
        data['object-item']['data'].append(img_data)
        # Attachment metadata: external references, tags and ownership.
        data["attachment"] = dict()
        data["attachment"]['references'] = [ "doors:234236", "your-tool:4391843" ]
        data["attachment"]['tags'] = [ "ip", "route", "cache", "performance" ]
        data["attachment"]['categories'] = [ "team:orange", "topic:ip", "subtopic:route-cache" ]
        data["attachment"]['responsible'] = data["submitter"]
        achievement = dict()
        achievement["test-date"] = datetime.datetime.now().isoformat('T')
        achievement["result"] = random_result()
        # 1/4 of all achievements are anchored
        if random.randint(0, 3) == 0:
            achievement["anchor"] = random_id()
        # add data entry to achievement, can be everything
        # starting from images, over log files to pcap files, ...
        achievement['data'] = list()
        log_data = dict()
        log_data['name'] = 'result-trace.pcap'
        log_data['mime-type'] = 'application/vnd.tcpdump.pcap'
        log_data['data'] = "R0lGODlhDwAPAKECAAABzMzM/////wAAACwAAAAADwAPAAACIISPeQHsrZ5ModrLlN48CXF8m2iQ3YmmKqVlRtW4MLwWACH+H09wdGltaXplZCBieSBVbGVhZCBTbWFydFNhdmVyIQAAOw=="
        achievement['data'].append(log_data)
        # 1/4 of achievements carry environment ("variety") details.
        if random.randint(0, 3) == 0:
            variety = dict()
            variety['os-version'] = 'rhel23'
            variety['platform'] = 'xeon-foo'
            achievement["variety"] = variety
        data["achievements"] = list()
        data["achievements"].append(achievement)
        #os.system('cls' if os.name == 'nt' else 'clear')
        print("New Data:\n-----------\n")
        print(json.dumps(data, sort_keys=True, separators=(',', ': '), indent=4))
        print("\n-----------\n")
        dj = json.dumps(data, sort_keys=True, separators=(',', ': '))
        r = requests.post(url, data=dj, headers=headers)
        print("Return Data:\n-----------\n")
        ret_data = r.json()
        print(json.dumps(ret_data, sort_keys=True, separators=(',', ': '), indent=4))
        assert len(ret_data['data']['id']) > 0
        processing_time = ret_data['processing-time']
        sys.stderr.write("\nHTTPStatusCode: {} ServerProcTime {}s\n".format(r.status_code, processing_time))
        # Round-trip check: re-fetch the object that was just created.
        query_full(ret_data['data']['id'], ret_data['data']['sub_id'])
        time.sleep(1)
        print("\r\n\n")
        sys.exit(0)  # NOTE(review): terminates after the first iteration
    print("\r\n\n")
    # List all stored objects and report the final HTTP status.
    url = 'http://localhost:8080/api/v1/objects'
    data = '''
{
"limit": 0,
"ordering": "by-submitting-date-reverse",
"maturity-level": "all"
}
'''
    headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
    r = requests.get(url, data=data, headers=headers)
    print("\nStatus Code:")
    print(r.status_code)
    print("\nRet Data:")
    data = r.json()
    pprnt(data)
    return r.status_code
if __name__ == '__main__':
    # Smoke-test: submit objects, then report overall success by the
    # HTTP status of the final listing request.
    status = add_n(10000)
    if status==200:
        print("OK")
    else:
        print("FAIL")
| Python | 0 | |
30359b6e9ec105b2938cedd59127e5fa40964396 | Create setrun.py | rect-shelf/setrun.py | rect-shelf/setrun.py | Python | 0.000001 | ||
39d2a5eec167e659cd30f5522a9e4e9ca11a620a | Create layoutUVPlus.py | af_scripts/uv/layoutUVPlus.py | af_scripts/uv/layoutUVPlus.py | import pymel.core as pm
import math
# Maya / PyMEL script: lay out the UV shells of the currently selected
# meshes side by side in a row, separated by a small gap.
sels = pm.ls(sl=1)
gap = 0.003
for i, x in enumerate(sels):
    x=x.getShape()
    # Select every UV of the shape, then measure its 2D bounding box
    # (polyEvaluate b2=1 returns ((umin, umax), (vmin, vmax))).
    pm.select('{0}.map[:]'.format(x), r=1)
    buv = pm.polyEvaluate(x,b2=1)
    w = abs(buv[0][1] - buv[0][0])
    # Move each shell to the origin, then offset it to the right of the
    # previous one.  NOTE(review): the offset w*i assumes all shells
    # share the same width as the current one — confirm for mixed sizes.
    if i==0:
        pm.polyEditUV(u=-buv[0][0]+(gap*(i+1)),v=-buv[1][0]+gap)
    else:
        pm.polyEditUV(u=-buv[0][0]+(w*i+gap*(i+1)),v=-buv[1][0]+gap)
# Restore the user's original selection.
pm.select(sels,r=1)
eee6b08e07e60a8ec1f3c2fa2e156344e01737d2 | clean out | avatar/admin.py | avatar/admin.py | from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.template.loader import render_to_string
from avatar.models import Avatar
from avatar.signals import avatar_updated
from avatar.util import get_user_model
class AvatarAdmin(admin.ModelAdmin):
    """Django admin configuration for Avatar objects.

    Shows a rendered thumbnail in the change list and broadcasts the
    avatar_updated signal whenever an avatar is saved via the admin.
    """
    list_display = ('get_avatar', 'user', 'primary', "date_uploaded")
    list_filter = ('primary',)
    search_fields = ('user__%s' % getattr(get_user_model(), 'USERNAME_FIELD', 'username'),)
    list_per_page = 50

    def get_avatar(self, avatar_in):
        """Render the avatar via the avatar_tag template for list_display."""
        context = dict({
            'user': avatar_in.user,
            'url': avatar_in.avatar.url,
            'alt': six.text_type(avatar_in.user),
            'size': 80,
        })
        return render_to_string('avatar/avatar_tag.html', context)
    get_avatar.short_description = _('Avatar')
    get_avatar.allow_tags = True

    def save_model(self, request, obj, form, change):
        """Save as usual, then notify listeners that the avatar changed."""
        super(AvatarAdmin, self).save_model(request, obj, form, change)
        avatar_updated.send(sender=Avatar, user=request.user, avatar=obj)


admin.site.register(Avatar, AvatarAdmin)
| from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.template.loader import render_to_string
from avatar.models import Avatar
from avatar.signals import avatar_updated
from avatar.util import get_user_model
class AvatarAdmin(admin.ModelAdmin):
list_display = ('get_avatar', 'user', 'primary', "date_uploaded")
list_filter = ('primary',)
search_fields = ('user__%s' % getattr(get_user_model(), 'USERNAME_FIELD', 'username'),)
list_per_page = 50
def get_avatar(self, avatar_in):
context = dict({
'user': avatar_in.user,
'url': avatar_in.avatar.url,
'alt': six.text_type(avatar_in.user),
'size': 80,
})
return render_to_string('avatar/avatar_tag.html',context)
get_avatar.short_description = _('Avatar')
get_avatar.allow_tags = True
def save_model(self, request, obj, form, change):
super(AvatarAdmin, self).save_model(request, obj, form, change)
avatar_updated.send(sender=Avatar, user=request.user, avatar=obj)
admin.site.register(Avatar, AvatarAdmin)
| Python | 0.000002 |
2d9300aeefc840e007d7c615ce48ad36343038f2 | Add "--optimize-autoloader" to `composer.phar install` command | php-silex/setup.py | php-silex/setup.py | import subprocess
import sys
import setup_util
from os.path import expanduser
home = expanduser("~")
def start(args):
    """Prepare and launch the php-silex benchmark application.

    Rewrites absolute paths and the database host into the app and
    web-server configs, installs Composer dependencies with an optimized
    autoloader, then starts php-fpm and nginx.
    Returns 0 on success, 1 if any command fails.
    """
    setup_util.replace_text("php-silex/web/index.php", "192.168.100.102", "" + args.database_host + "")
    setup_util.replace_text("php-silex/deploy/php-silex", "\".*\/FrameworkBenchmarks", "\"" + home + "/FrameworkBenchmarks")
    setup_util.replace_text("php-silex/deploy/php-silex", "Directory .*\/FrameworkBenchmarks", "Directory " + home + "/FrameworkBenchmarks")
    setup_util.replace_text("php-silex/deploy/nginx.conf", "root .*\/FrameworkBenchmarks", "root " + home + "/FrameworkBenchmarks")
    try:
        #subprocess.check_call("sudo cp cake/deploy/cake /etc/apache2/sites-available/", shell=True)
        #subprocess.check_call("sudo a2ensite cake", shell=True)
        #subprocess.check_call("sudo chown -R www-data:www-data cake", shell=True)
        #subprocess.check_call("sudo /etc/init.d/apache2 start", shell=True)
        subprocess.check_call("composer.phar install --optimize-autoloader", shell=True, cwd="php-silex")
        subprocess.check_call("sudo php-fpm --fpm-config config/php-fpm.conf -g " + home + "/FrameworkBenchmarks/php-silex/deploy/php-fpm.pid", shell=True)
        subprocess.check_call("sudo /usr/local/nginx/sbin/nginx -c " + home + "/FrameworkBenchmarks/php-silex/deploy/nginx.conf", shell=True)
        return 0
    except subprocess.CalledProcessError:
        return 1
def stop():
    """Stop nginx and php-fpm for the php-silex app.

    Best-effort shutdown (plain subprocess.call); returns 0 on success,
    1 only if a checked command raises CalledProcessError.
    """
    try:
        subprocess.call("sudo /usr/local/nginx/sbin/nginx -s stop", shell=True)
        subprocess.call("sudo kill -QUIT $( cat php-silex/deploy/php-fpm.pid )", shell=True)
        #subprocess.check_call("sudo a2dissite cake", shell=True)
        #subprocess.check_call("sudo /etc/init.d/apache2 stop", shell=True)
        #subprocess.check_call("sudo chown -R $USER:$USER cake", shell=True)
        return 0
    except subprocess.CalledProcessError:
        return 1
|
import subprocess
import sys
import setup_util
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("php-silex/web/index.php", "192.168.100.102", "" + args.database_host + "")
setup_util.replace_text("php-silex/deploy/php-silex", "\".*\/FrameworkBenchmarks", "\"" + home + "/FrameworkBenchmarks")
setup_util.replace_text("php-silex/deploy/php-silex", "Directory .*\/FrameworkBenchmarks", "Directory " + home + "/FrameworkBenchmarks")
setup_util.replace_text("php-silex/deploy/nginx.conf", "root .*\/FrameworkBenchmarks", "root " + home + "/FrameworkBenchmarks")
try:
#subprocess.check_call("sudo cp cake/deploy/cake /etc/apache2/sites-available/", shell=True)
#subprocess.check_call("sudo a2ensite cake", shell=True)
#subprocess.check_call("sudo chown -R www-data:www-data cake", shell=True)
#subprocess.check_call("sudo /etc/init.d/apache2 start", shell=True)
subprocess.check_call("composer.phar install", shell=True, cwd="php-silex")
subprocess.check_call("sudo php-fpm --fpm-config config/php-fpm.conf -g " + home + "/FrameworkBenchmarks/php-silex/deploy/php-fpm.pid", shell=True)
subprocess.check_call("sudo /usr/local/nginx/sbin/nginx -c " + home + "/FrameworkBenchmarks/php-silex/deploy/nginx.conf", shell=True)
return 0
except subprocess.CalledProcessError:
return 1
def stop():
try:
subprocess.call("sudo /usr/local/nginx/sbin/nginx -s stop", shell=True)
subprocess.call("sudo kill -QUIT $( cat php-silex/deploy/php-fpm.pid )", shell=True)
#subprocess.check_call("sudo a2dissite cake", shell=True)
#subprocess.check_call("sudo /etc/init.d/apache2 stop", shell=True)
#subprocess.check_call("sudo chown -R $USER:$USER cake", shell=True)
return 0
except subprocess.CalledProcessError:
return 1
| Python | 0.000004 |
26011563bf0880206269582a87f9fff61f262c83 | add a new migration | osf/migrations/0056_citationstyle_has_bibliography.py | osf/migrations/0056_citationstyle_has_bibliography.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-29 14:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the boolean field CitationStyle.has_bibliography (default False)."""

    dependencies = [
        ('osf', '0055_auto_20170823_1648'),
    ]

    operations = [
        migrations.AddField(
            model_name='citationstyle',
            name='has_bibliography',
            field=models.BooleanField(default=False),
        ),
    ]
| Python | 0.000001 | |
d637cbe9c904fb0f0b67fbc10f66db299d153f4e | Add basic smoke tests for doc generation | tests/functional/test_docs.py | tests/functional/test_docs.py | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
from boto3.docs import docs_for
class TestDocs(unittest.TestCase):
    """Basic smoke tests that generated service docs contain the expected
    resource, client and waiter sections (using the bundled S3 model)."""

    def setUp(self):
        # Real botocore session; docs are generated from its service models.
        self.session = botocore.session.get_session()

    def test_resource_docs_generated(self):
        docs_str = docs_for('s3', self.session)
        self.assertIn('Service Resource', docs_str)
        self.assertIn('A resource representing Amazon Simple Storage Service',
                      docs_str)

    def test_client_docs_generated(self):
        docs_str = docs_for('s3', self.session)
        self.assertIn('s3.Client', docs_str)
        self.assertIn(
            'A low-level client representing Amazon Simple Storage Service',
            docs_str)

    def test_waiter_docs_generated(self):
        docs_str = docs_for('s3', self.session)
        self.assertIn('Waiter', docs_str)
        self.assertIn('bucket_exists', docs_str)
| Python | 0 | |
8fb94de6b72847bdb618ffa60fa037d16bab443e | Add closing tests module | tests/plantcv/test_closing.py | tests/plantcv/test_closing.py | import pytest
import cv2
import numpy as np
from plantcv.plantcv import closing
def test_closing(test_data):
    """closing() on a binary image preserves shape and stays binary."""
    # Read in test data
    bin_img = cv2.imread(test_data.small_bin_img, -1)
    filtered_img = closing(gray_img=bin_img)
    # Assert that the output image has the dimensions of the input image and is binary
    assert bin_img.shape == filtered_img.shape and np.array_equal(np.unique(filtered_img), np.array([0, 255]))
def test_closing_grayscale(test_data):
    """closing() accepts a grayscale image plus an explicit kernel."""
    # Read in test data
    gray_img = cv2.imread(test_data.small_gray_img, -1)
    filtered_img = closing(gray_img=gray_img, kernel=np.ones((4, 4), np.uint8))
    # Pixel-sum regression value for the fixture image.
    assert np.sum(filtered_img) == 33160632
def test_closing_bad_input(test_data):
    """closing() rejects RGB input with a RuntimeError."""
    # Read in test data
    rgb_img = cv2.imread(test_data.small_rgb_img)
    with pytest.raises(RuntimeError):
        _ = closing(gray_img=rgb_img)
| Python | 0 | |
548f4f6512ced9a9c41a074a3c8382f87ccafa66 | add image resizing to python script and give it a better name | xkcd1110_stitch.py | xkcd1110_stitch.py | #! /usr/bin/env python
"""Combines resized tiles grabbed from xkcd 1110 into one large png"""
import Image
import os
import sys
def coord(image_x, image_y, tilesize):
    """Map xkcd-1110 map coordinates to a tile file path.

    Positive x is east and non-positive x is west (1-based naming);
    positive y is north, non-positive y is south.  When the tile file
    does not exist, a solid white tile is substituted in the sky
    (north) and a solid black tile underground (south).

    The *tilesize* parameter is unused here; it is kept for interface
    parity with merge_images().
    """
    image_dir = "images/"
    lng = "%se" % image_x if image_x > 0 else "%sw" % (-image_x + 1)
    lat = "%sn" % image_y if image_y > 0 else "%ss" % (-image_y + 1)
    tile_path = image_dir + lat + lng + ".png"
    if os.path.isfile(tile_path):
        return tile_path
    # insert black or white tiles in the empty spots
    return image_dir + ("white.png" if image_y > 0 else "black.png")
def merge_images(xmin, xmax, ymin, ymax, tilesize) :
    '''
    Combines the tile grid [xmin..xmax] x [ymin..ymax] into one large PNG,
    resizing every tile to tilesize x tilesize first.
    '''
    out = Image.new('RGB', ((xmax-xmin+1) * tilesize, (ymax-ymin+1) * tilesize))
    imx = 0
    for image_x in range(xmin, xmax+1) :
        imy = 0
        for image_y in range(ymin, ymax+1) :
            tile = Image.open(coord(image_x, -image_y, tilesize))
            resized_tile = tile.resize((tilesize, tilesize))
            # Bug fix: the resized tile was computed but the original,
            # unresized tile was pasted; paste the resized one so the
            # requested tilesize actually takes effect.
            out.paste(resized_tile, (imx, imy))
            imy += tilesize
        imx += tilesize
    out.save("xkcd_1110_combined_%s.png" % (tilesize))
try:
    # First CLI argument: desired tile size; only 1..2048 is stitched.
    input_arg = int(sys.argv[1])
    if 0 < input_arg <= 2048:
        merge_images(-32, 48, -13, 18, input_arg)
except ValueError:
    # Non-integer argument aborts.  NOTE(review): a missing argument
    # raises IndexError, which is NOT caught here.
    sys.exit(-1)
| Python | 0.000001 | |
c32d62eb82bbe6d728d66d7544c45dfb296afcd4 | Implement regression test runner in Python | tests/run_regression_tests.py | tests/run_regression_tests.py | #!/usr/bin/env python3
# this file is an experiment to run regression tests in parallel
# using Python for cross platform to run with the same script on all platforms
# (currently .sh for Linux, .bat for Windows, ??? for Mac)
import os
import queue
import subprocess
import threading
class TestRunner(object):
    """Runs wallet regression tests in parallel worker threads.

    Tests are discovered under ``regression_tests/`` and dispatched to a
    pool of threads via a work queue; (name, result) pairs come back on a
    second queue and are printed as they arrive.
    """
    def __init__(self):
        # Directory holding one sub-directory per regression test.
        self.basedir = "regression_tests"
        # Work queue of test names and result queue of (name, rc) pairs.
        self.q_test = queue.Queue()
        self.q_test_result = queue.Queue()
        # Per-test wall-clock limit in seconds.
        self.test_timeout = 5*60
        return

    def list_tests(self):
        """Return the names of runnable tests.

        Skips "_"- and "."-prefixed entries, non-directories, tests
        without a test.config, and tests marked work-in-progress by the
        presence of a ``wip`` file.
        """
        test_list = []
        print(os.listdir(self.basedir))
        for regression_test in os.listdir(self.basedir):
            if regression_test.startswith("_"):
                print("failed _")
                continue
            if regression_test.startswith("."):
                print("failed .")
                continue
            if not os.path.isdir(os.path.join(self.basedir, regression_test)):
                print("failed isdir")
                continue
            if not os.path.exists(os.path.join(self.basedir, regression_test, "test.config")):
                print("failed exists test.config")
                continue
            if os.path.exists(os.path.join(self.basedir, regression_test, "wip")):
                print("failed exists wip")
                continue
            test_list.append(regression_test)
        print("here is tests:", test_list)
        return test_list

    def run_regression_test(self, name):
        """Run one named test via the wallet_tests binary.

        Returns the process exit code; raises subprocess.TimeoutExpired
        if the test exceeds self.test_timeout.
        """
        result = subprocess.call(["./wallet_tests", "-t", "regression_tests_without_network/"+name],
                                 stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                                 timeout=self.test_timeout,
                                 )
        return result

    def run_worker_thread(self):
        """Worker loop: pull test names until the queue is empty and push
        (name, result) tuples; "TIMEOUT" stands in for a timed-out run."""
        while True:
            try:
                test_name = self.q_test.get_nowait()
            except queue.Empty:
                break
            try:
                result = self.run_regression_test(test_name)
            except subprocess.TimeoutExpired:
                result = "TIMEOUT"
            self.q_test_result.put((test_name, result))
        return

    def run_parallel_regression_tests(self, test_list, parallel_jobs=8):
        """Fan *test_list* out over *parallel_jobs* worker threads, print
        each result as it arrives, and block until all workers finish."""
        worker_threads = []
        for test_name in test_list:
            print("adding test:", test_name)
            self.q_test.put(test_name)
        for n in range(parallel_jobs):
            t = threading.Thread(target=self.run_worker_thread, name="test-worker-"+str(n))
            worker_threads.append(t)
            t.start()
        # block and show results
        while True:
            # are there any results?
            while True:
                try:
                    test_name, test_result = self.q_test_result.get(block=False)
                except queue.Empty:
                    break
                if test_result == 0:
                    condition = "passed"
                elif test_result == "TIMEOUT":
                    condition = "timed out"
                else:
                    condition = "failed"
                print("test "+test_name+" "+condition+" (rc="+repr(test_result)+")")
            # Poll the workers briefly; exit once none are alive.
            for t in worker_threads:
                if t.is_alive():
                    t.join(0.050)
                    break
            else:
                # no threads were alive
                break
        return
if __name__ == "__main__":
runner = TestRunner()
runner.run_parallel_regression_tests(runner.list_tests())
| Python | 0.000001 | |
c226835aa56a2d5ba8583e63c4b75765cd24711d | add new package (#27971) | var/spack/repos/builtin/packages/py-zipfile-deflate64/package.py | var/spack/repos/builtin/packages/py-zipfile-deflate64/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyZipfileDeflate64(PythonPackage):
    """Extract Deflate64 ZIP archives with Python's zipfile API."""

    homepage = "https://github.com/brianhelba/zipfile-deflate64"
    pypi = "zipfile-deflate64/zipfile-deflate64-0.2.0.tar.gz"

    version('0.2.0', sha256='875a3299de102edf1c17f8cafcc528b1ca80b62dc4814b9cb56867ec59fbfd18')

    # Build/runtime requirements mirroring the upstream build metadata.
    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools@42:', type='build')
    depends_on('py-setuptools-scm@3.4:+toml', type='build')
| Python | 0 | |
8c6646d75ec6f9345e1582c02611984a1d953582 | add reproducing case | tests/trac/test-issue-0092.py | tests/trac/test-issue-0092.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import pyxb.binding.generate
import pyxb.utils.domutils
import xml.dom.minidom as dom
if __name__ == '__main__':
    logging.basicConfig()
_log = logging.getLogger(__name__)

# Inline schema: HOST holds an integer ID and an xs:anyType TEMPLATE.
xsd = '''<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:element name="HOST">
<xs:complexType>
<xs:sequence>
<xs:element name="ID" type="xs:integer"/>
<xs:element name="TEMPLATE" type="xs:anyType"/>
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>
'''

# Generate pyxb bindings from the schema and exec them into this
# module's namespace (defines CreateFromDocument / CreateFromDOM below).
code = pyxb.binding.generate.GeneratePython(schema_text=xsd)
rv = compile(code, 'test', 'exec')
eval(rv)
import unittest
class TestIssue0092 (unittest.TestCase):
    """Regression tests for pyxb issue 92: round-tripping xs:anyType
    elements (including CDATA content) through toDOM()."""

    def testCreateEmptyTemplate (self):
        # An empty TEMPLATE element must still parse.
        xmlt = '<HOST><ID>1</ID><TEMPLATE/></HOST>';
        xmld = xmlt.encode('utf-8');
        doc = CreateFromDocument(xmld);
        self.assertEqual(doc.ID,1)

    def testCreateToDom (self):
        # anyType content must serialise back unchanged via toDOM().
        xmlt = '<HOST><ID>1</ID><TEMPLATE><NODE>1</NODE></TEMPLATE></HOST>';
        xmld = xmlt.encode('utf-8');
        doc = CreateFromDocument(xmld);
        templateFragment=doc.TEMPLATE.toDOM()
        self.assertEqual(templateFragment.toxml(), '''<?xml version="1.0" ?><TEMPLATE><NODE>1</NODE></TEMPLATE>''')

    def testCreateWithCDATAToDom (self):
        # CDATA sections should serialise back as plain text.
        xmlt = '<HOST><ID>1</ID><TEMPLATE><NODE><![CDATA[text]]></NODE></TEMPLATE></HOST>';
        xmld = xmlt.encode('utf-8');
        doc = CreateFromDocument(xmld);
        templateFragment=doc.TEMPLATE.toDOM()
        self.assertEqual(templateFragment.toxml(), '''<?xml version="1.0" ?><TEMPLATE><NODE>text</NODE></TEMPLATE>''')

    def testCreateFromDOMWithCDATAToDom (self):
        # Same check, but entering through an xml.dom.minidom document.
        xmlt = '<HOST><ID>1</ID><TEMPLATE><NODE><![CDATA[text]]></NODE></TEMPLATE></HOST>';
        xmld = xmlt.encode('utf-8');
        domDoc=dom.parseString(xmld);
        doc = CreateFromDOM(domDoc);
        templateFragment=doc.TEMPLATE.toDOM()
        self.assertEqual(templateFragment.toxml(), '''<?xml version="1.0" ?><TEMPLATE><NODE>text</NODE></TEMPLATE>''')
if __name__ == '__main__':
    # Run the regression tests when executed directly.
    unittest.main()
| Python | 0.000141 | |
86d8f0fd48ccb577a8300362ea9d181e63d2fa5d | Add unit tests for bandit.core.issue | tests/unit/core/test_issue.py | tests/unit/core/test_issue.py | # -*- coding:utf-8 -*-
#
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import bandit
from bandit.core import issue
class IssueTests(testtools.TestCase):
    """Unit tests for bandit.core.issue.Issue."""

    def test_issue_create(self):
        new_issue = _get_issue_instance()
        self.assertIsInstance(new_issue, issue.Issue)

    def test_issue_str(self):
        # str() should summarise text, plugin, severity/confidence, location.
        test_issue = _get_issue_instance()
        self.assertEqual(
            ("Issue: 'Test issue' from bandit_plugin: Severity: MEDIUM "
             "Confidence: MEDIUM at code.py:1"),
            str(test_issue)
        )

    def test_issue_as_dict(self):
        # as_dict() must expose all reporting attributes.
        test_issue = _get_issue_instance()
        test_issue_dict = test_issue.as_dict(with_code=False)
        self.assertIsInstance(test_issue_dict, dict)
        for attr in [
            'filename', 'test_name', 'issue_severity', 'issue_confidence',
            'issue_text', 'line_number', 'line_range'
        ]:
            self.assertIn(attr, test_issue_dict)

    def test_issue_filter(self):
        # filter(sev, conf) is True only when the issue meets BOTH thresholds.
        test_issue = _get_issue_instance()
        result = test_issue.filter(bandit.HIGH, bandit.HIGH)
        self.assertFalse(result)
        result = test_issue.filter(bandit.MEDIUM, bandit.MEDIUM)
        self.assertTrue(result)
        result = test_issue.filter(bandit.LOW, bandit.LOW)
        self.assertTrue(result)
        result = test_issue.filter(bandit.LOW, bandit.HIGH)
        self.assertFalse(result)
        result = test_issue.filter(bandit.HIGH, bandit.LOW)
        self.assertFalse(result)
def _get_issue_instance():
    """Build a MEDIUM-severity / MEDIUM-confidence test Issue at code.py:1."""
    new_issue = issue.Issue(bandit.MEDIUM, bandit.MEDIUM, 'Test issue')
    new_issue.fname = 'code.py'
    new_issue.test = 'bandit_plugin'
    new_issue.lineno = 1
    return new_issue
| Python | 0.000001 | |
3d935fcc7d2d2afb004348a8839f2ec7813fe78c | Add unbound performance plugin | satori-rules/plugin/unbound/30_unbound.py | satori-rules/plugin/unbound/30_unbound.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# -- prioritized --
import sys
import os.path
# sys.path.append(os.path.join(os.path.dirname(__file__), '../libs'))
# -- stdlib --
import json
import re
import socket
import subprocess
import time
# -- third party --
# -- own --
# -- code --
endpoint = socket.gethostname()
ts = int(time.time())
proc = subprocess.Popen(['/usr/sbin/unbound-control', 'stats'], stdout=subprocess.PIPE)
stats = {
match[0]: float(match[1])
for match in re.findall(r'(.*)\=(.*)', proc.stdout.read(), re.MULTILINE)
}
rst = {
'uptime': stats['time.up'],
'queries.total': stats['total.num.queries'],
'queries.pending': stats['total.requestlist.current.all'],
'queries.hit_rate': (stats['total.num.cachehits'] / stats['total.num.queries']) * 100,
}
print json.dumps([
{
"metric": "unbound.{}".format(k),
"endpoint": endpoint,
"timestamp": ts,
"step": 30,
"value": int(v),
"tags": {"server": endpoint},
}
for k, v in rst.items()
])
| Python | 0 | |
a0714c8754c769c4fee868f2b449d9dc69d144a9 | Add Welcome plugin to welcome new members | plugins/welcome.py | plugins/welcome.py | import json
from plugin import Plugin
class Welcome(Plugin):
    """
    Welcomes new members when they join the Slack team
    """

    def __init__(self):
        Plugin.__init__(self)
        # React to Slack's team_join event.
        self.event_type = 'team_join'

    def on_event(self, bot, event, response):
        """Post a welcome message mentioning the new user in the team's
        general channel (no-op if no general channel is found)."""
        # Get list of all channels (don't include archived channels)
        channel_response = bot.sc.api_call('channels.list', **{'exclude_archived': 1})
        # Convert string response to JSON
        channel_response = json.loads(channel_response)
        # Find general channel
        general_channel = None
        if channel_response.get('ok'):
            for channel in channel_response['channels']:
                if channel.get('is_general'):
                    general_channel = channel['id']
        # Post welcome to general channel if one found
        if general_channel:
            user = event['user']['id']
            response['channel'] = general_channel
            response['link_names'] = 1  # Enables linking of names
            response['text'] = 'Welcome to the Slack team <@%s>!' % user
            bot.sc.api_call('chat.postMessage', **response)
| Python | 0 | |
3ce048f8c0346c30173b52a691bd18ece1cbc13d | Add a TensorFlow Probability sample | scripts/stock_price/tough_question_tfp.py | scripts/stock_price/tough_question_tfp.py | #!/usr/bin/python3
# coding: utf-8
'''
Implementation of the article below with TensorFlow Probability
'Bayesian Methods for Hackers'
https://github.com/CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Chapter2_MorePyMC/Ch2_MorePyMC_PyMC3.ipynb
Based on an example of TensorFlow Probability
https://github.com/tensorflow/probability/tree/master/tensorflow_probability/python/edward2
https://www.hellocybernetics.tech/entry/2018/11/09/231817
'''
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
## from tensorflow_probability import edward2 as ed
# Short alias for the TensorFlow Probability distributions namespace.
tfd = tfp.distributions
N = 1000           # total number of Bernoulli observations
X = 300            # number of ones among the observations
N_RESULTS = 2000   # MCMC samples to keep
N_BURNIN = 1000    # MCMC samples discarded as burn-in
## Explanatory variable(s)
# Uniform(0, 1) initial value for the unknown success probability.
true_prob = tf.random_uniform([], minval=0.0, maxval=1.0)
## Observed data
# X ones and N-X zeros, shuffled into a random order.
observations = tf.random.shuffle(tf.concat([tf.ones(X, dtype=tf.int32), tf.zeros(N-X, dtype=tf.int32)], 0))
def target_log_prob_fn(true_prob):
    # Unnormalized log posterior: each observation comes either from
    # Bernoulli(true_prob) or from a fair-coin Bernoulli(0.5) branch,
    # with the branch indicator itself Bernoulli(0.5) distributed.
    log_prob_parts = [
        tfd.Bernoulli(probs=0.5).log_prob(tf.fill([N], 1)) + tfd.Bernoulli(probs=true_prob).log_prob(observations),
        tfd.Bernoulli(probs=0.5).log_prob(tf.fill([N], 0)) + tfd.Bernoulli(probs=0.5).log_prob(observations)
    ]
    # Marginalize the branch indicator per observation, then sum over data.
    sum_log_prob = tf.reduce_sum(tf.reduce_logsumexp(log_prob_parts, 0))
    return sum_log_prob
# Hamiltonian Monte Carlo transition kernel over the target above.
hmc_kernel = tfp.mcmc.HamiltonianMonteCarlo(
    target_log_prob_fn=target_log_prob_fn,
    step_size=0.01,
    num_leapfrog_steps=5)
states, kernels_results = tfp.mcmc.sample_chain(
    num_results=N_RESULTS,
    current_state=[true_prob],
    kernel=hmc_kernel,
    num_burnin_steps=N_BURNIN)
# TF1-style graph execution: run the chain, then plot the posterior samples.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    states_, results_ = sess.run([states, kernels_results])
plt.hist(states_[0], bins=50)
plt.show()
| Python | 0.000184 | |
e42c115f8a612b3995e30b3606913acb7e7b0f63 | Create 2-off.py | Code/2-off.py | Code/2-off.py | import RPi.GPIO as GPIO
# BCM pin numbers of the three light channels driven by this script.
LIGHT_PINS = (18, 23, 24)

GPIO.setmode(GPIO.BCM)    # address pins by Broadcom (BCM) numbering
GPIO.setwarnings(False)   # silence "channel already in use" warnings

# Configure every light pin as an output.
for pin in LIGHT_PINS:
    GPIO.setup(pin, GPIO.OUT)

# Fix: print() works on both Python 2 and 3; `print "..."` is Py2-only.
print("Lights off")

# Drive each channel low, turning the lights off.
for pin in LIGHT_PINS:
    GPIO.output(pin, GPIO.LOW)

GPIO.cleanup()  # release the GPIO channels back to their default state
| Python | 0.000001 | |
a2214039defb1094d47b7ce0abc4e56032136508 | Add merge migration | osf/migrations/0137_merge_20181011_1525.py | osf/migrations/0137_merge_20181011_1525.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-10-11 15:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration: joins two divergent 0136 leaf nodes of the osf
    app's migration graph back into a single history (no schema changes).
    """

    # Both parent migrations; two entries here is what makes this a
    # Django "merge" migration.
    dependencies = [
        ('osf', '0136_merge_20181010_2242'),
        ('osf', '0136_add_ember_auth_register_waffle_flag'),
    ]

    # Intentionally empty: a merge migration performs no operations.
    operations = [
    ]
| Python | 0.000001 | |
1a3b9eec2a947a8f036fdea80a4d7de4b7549211 | Add prime functions library | prime_functions.py | prime_functions.py | import numpy as np
from math import sqrt
def get_primes_below(n):
    # Wheel-factorized sieve of Eratosthenes, adapted from
    # http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188
    """ Input n>=6, Returns a array of primes, 2 <= p < n

    Fixes vs. the original: uses range/floor-division (``//``) so the
    function runs on Python 3 as well as 2, and plain ``bool`` instead of
    ``np.bool``, which was removed in NumPy 1.24.
    """
    # sieve[i] tracks candidates of the form 6k±1; 2 and 3 are appended at the end.
    sieve = np.ones(n // 3 + (n % 6 == 2), dtype=bool)
    sieve[0] = False  # slot 0 maps to the number 1, which is not prime
    for i in range(int(n**0.5) // 3 + 1):
        if sieve[i]:
            k = 3 * i + 1 | 1  # the candidate prime represented by slot i
            # Strike out multiples of k on both residue tracks, starting at k*k.
            sieve[((k * k) // 3)::2 * k] = False
            sieve[(k * k + 4 * k - 2 * k * (i & 1)) // 3::2 * k] = False
    return np.r_[2, 3, ((3 * np.nonzero(sieve)[0] + 1) | 1)]
def is_prime(n):
    """Return True when n is a prime number, False otherwise."""
    if n <= 1:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    # Trial division by odd candidates up to sqrt(n).
    limit = int(sqrt(n)) + 1
    return all(n % divisor for divisor in range(3, limit, 2))
| Python | 0.000005 | |
def applyPrefix( prefix, name):
    """Return `name` guaranteed to begin with `prefix` (prepended if absent)."""
    if not name.startswith(prefix):
        name = prefix + name
    return name
class renamePrefixDialog(MQWidget.Dialog):
    """Modal dialog asking for a prefix string and a 'recursive' flag."""
    def __init__(self, parent):
        MQWidget.Dialog.__init__(self, parent)
        self.title = "Add Prefix to Selected Objects"
        # First row: prefix text field plus the recursive checkbox.
        self.frame0 = self.createHorizontalFrame(self)
        self.label = MQWidget.Label(self.frame0)
        self.label.text = "Prefix:"
        self.pText = MQWidget.Edit(self.frame0)
        self.pText.text = ""
        self.plabel = MQWidget.Label(self.frame0)
        self.plabel.text = "Recursive:"
        self.rCheck = MQWidget.CheckBox(self.frame0)
        self.rCheck.checked = 1
        # Second row: OK / Cancel buttons, uniformly sized and centered.
        self.frame1 = self.createHorizontalFrame(self)
        self.frame1.uniformSize = True
        self.okbtn = MQWidget.Button(self.frame1)
        self.okbtn.text = MQSystem.getResourceString("OK")
        self.okbtn.modalResult = "ok"
        self.okbtn.default = 1
        self.okbtn.fillBeforeRate = 1
        self.cancelbtn = MQWidget.Button(self.frame1)
        self.cancelbtn.text = MQSystem.getResourceString("Cancel")
        self.cancelbtn.modalResult = "cancel"
        self.cancelbtn.default = 1
        self.cancelbtn.fillAfterRate = 1
# For all objects that are selected, rename them with the prefix.
# If recursive is on, rename objects nested under them too.
dlg = renamePrefixDialog(MQWidget.getMainWindow())
if dlg.execute() == "ok":
    recursiveApply = dlg.rCheck.checked
    prefix = dlg.pText.text
    # Normalize the prefix so it always ends with an underscore.
    # NOTE(review): prefix[len(prefix) - 1] raises IndexError when the
    # field is left empty -- consider guarding against an empty prefix.
    if prefix[len(prefix) - 1] != '_':
        prefix += '_'
    doc = MQSystem.getDocument()
    for i in range(0, len(doc.object)):
        if doc.object[i] is None: continue
        if doc.object[i].select == 1:
            doc.object[i].name = applyPrefix(prefix, doc.object[i].name)
            if recursiveApply == 1:
                # NOTE(review): scanning from index i forward assumes
                # descendants appear after their ancestor in document
                # order -- confirm against Metasequoia's object list.
                for k in range(i, len(doc.object)):
                    if doc.object[k] is None: continue
                    if doc.isAncestorObject( doc.object[i], doc.object[k]):
                        doc.object[k].name = applyPrefix(prefix, doc.object[k].name)
# Earlier (dead) child-based implementation kept for reference:
'''
currentObj.name = applyPrefix(prefix, currentObj.name)
if recursiveApply == 1:
    chd = doc.getChildObjectCount(doc.object[i])
    for j in range(0, chd):
        childObj = doc.getChildObject( doc.object[i] , j )
        childObj.name = applyPrefix(prefix, childObj.name)
'''
| Python | 0.000005 | |
451a65d6b5cbc182418f00703e2da84b7c346a70 | Create Dictionary._((glas+python?glaskrypt)) | Dictionary.py | Dictionary.py | #!/usr/bin/env python
#
# Basic hack
#
# What?Needed??
# Bison like parser for js
# _WHY? Because I forget things easily
class Dict(dict):
    """dict subclass that stashes its keyword arguments on ``_context``.

    Fix: the original class header was missing its trailing colon, which
    made the whole module a SyntaxError.
    """
    def __init__(self, keyd, *arguments,**context):
        # dict.__init__ is never called, so the mapping itself starts out
        # empty; `keyd` and `arguments` are currently unused.
        self._context = context
# Elaborate on that
class Elaboration(Dict):
    """Placeholder specialization of Dict (no behavior added yet).

    Fix: the original header was missing its trailing colon (SyntaxError).
    """
    pass
# To bind them together... I have no idea what the hell I am doing, here
class Dictionary(Elaboration):
    """Empty-initialized Elaboration."""
    def __init__(self, *args, **kw):
        # NOTE(review): overrides Elaboration/Dict.__init__ without calling
        # super(), so instances never get a _context attribute -- confirm
        # whether that is intentional.
        pass
#
# Read the input(STDIN), and translate it on the output(STDOUT)
#
class py(object):
    """Namespace-style holder for code-related helper types."""

    class code(dict):
        """Mapping representing a unit of (Python) code."""
#
#... Interface !translate
#
class Rune(py.code, dict, Dictionary):
    # Ties the code-mapping and Dictionary hierarchies together; adds no
    # behavior of its own yet.
    pass
def translate(outputs, *runes):
    # NOTE(review): looks unfinished -- `output` (singular) is never
    # defined, next() on the tuple `runes` raises TypeError, and the
    # recursion has no base case. Confirm intent before relying on it.
    rune = next(runes)
    output.append(
        translate(next(outputs), rune)
    )
# Dictionary: `py; ~ translate: ``glas.cup
| Python | 0.00002 | |
0ebddf569f291ceca050972fe9cfd3d9e498e87c | add timeout decorator | pyannote/audio/utils/timeout.py | pyannote/audio/utils/timeout.py | #!/usr/bin/env python
# encoding: utf-8
# Shamelessly stolen from
# https://gist.github.com/TySkby/143190ad1b88c6115597c45f996b030c
"""Easily put time restrictions on things
Note: Requires Python 3.x
Usage as a context manager:
```
with timeout(10):
something_that_should_not_exceed_ten_seconds()
```
Usage as a decorator:
```
@timeout(10)
def something_that_should_not_exceed_ten_seconds():
do_stuff_with_a_timeout()
```
Handle timeouts:
```
try:
with timeout(10):
something_that_should_not_exceed_ten_seconds()
except TimeoutError:
log('Got a timeout, couldn't finish')
```
Suppress TimeoutError and just die after expiration:
```
with timeout(10, suppress_timeout_errors=True):
something_that_should_not_exceed_ten_seconds()
print('Maybe exceeded 10 seconds, but finished either way')
```
"""
import contextlib
import errno
import os
import signal
DEFAULT_TIMEOUT_MESSAGE = os.strerror(errno.ETIME)


class timeout(contextlib.ContextDecorator):
    """Raise TimeoutError when the wrapped block runs longer than ``seconds``
    (whole seconds; fractional values truncate, and 0 disables the alarm).

    Pass ``suppress_timeout_errors=True`` to swallow the TimeoutError instead
    of propagating it. Relies on SIGALRM, so it is Unix-only and must be used
    from the main thread.
    """

    def __init__(self, seconds, *, timeout_message=DEFAULT_TIMEOUT_MESSAGE, suppress_timeout_errors=False):
        self.seconds = int(seconds)
        self.timeout_message = timeout_message
        self.suppress = bool(suppress_timeout_errors)

    def _timeout_handler(self, signum, frame):
        # Invoked by SIGALRM once the deadline has passed.
        raise TimeoutError(self.timeout_message)

    def __enter__(self):
        # Fix: remember whatever handler was installed before, so __exit__
        # can put it back; the original left our handler registered forever.
        self._previous_handler = signal.signal(signal.SIGALRM, self._timeout_handler)
        signal.alarm(self.seconds)

    def __exit__(self, exc_type, exc_val, exc_tb):
        signal.alarm(0)  # cancel any still-pending alarm
        signal.signal(signal.SIGALRM, self._previous_handler)
        if self.suppress and exc_type is TimeoutError:
            return True
| Python | 0.000001 | |
152def3ad56af928b81586867e3c8ee85cbf5311 | Add a client application class | blitz/client.py | blitz/client.py | __author__ = 'Will Hart'
from blitz.io.database import DatabaseClient
from blitz.io.tcp import TcpClient
import blitz.web.api as blitz_api
import blitz.web.http as blitz_http
import logging
#import json
import os.path
import tornado.httpserver
import tornado.ioloop
import tornado.web
class Config(object):
    """
    Holds configuration for a client application.

    Behaves like a mapping (``cfg["key"]``) over a settings dict that is
    populated with defaults at construction time.
    """
    settings = {}
    def __init__(self):
        """
        Sets up default settings; paths are resolved relative to this file.
        """
        self.settings = {
            "template_path": os.path.join(os.path.dirname(__file__), "templates"),
            "static_path": os.path.join(os.path.dirname(__file__), "static"),
            "database_path": os.path.join(os.path.dirname(__file__), "data", "app.db"),
            #"schema_path": os.path.join(
            #    os.path.dirname(__file__), "data", "client_schema.sql"),
            #"settings_path": os.path.join(
            #    os.path.dirname(__file__), "data", "settings.txt"),
            "autoescape": None,
            "debug": True
        }
    def get(self, key):
        """
        Gets an item from settings
        :raises: KeyError if the item doesn't exist
        :returns: A value corresponding to the given key
        """
        # Fix: membership must be tested against the mapping itself.
        # `key in self.settings.keys` compared against the bound *method*
        # object and raised TypeError on every lookup.
        if key in self.settings:
            return self.settings[key]
        raise KeyError("Unknown configuration setting - " + key)
    def set(self, key, value):
        """
        Sets the given configuration key to value
        :param key: the key to set
        :param value: the value to set the key to
        :returns: the value that was set
        """
        self.settings[key] = value
        return value
    def __getitem__(self, item):
        """
        Dict-style read access; delegates to get().
        """
        return self.get(item)
    def __setitem__(self, key, value):
        """
        Dict-style write access; delegates to set().
        """
        return self.set(key, value)
class Application(object):
    """
    A basic application which exposes the Api and HTTP request handlers
    provided by Tornado
    """
    def __init__(self):
        """
        Create a new client web application, setting defaults
        """
        # create a file logger and set it up for logging to file
        self.logger = logging.getLogger('Application')
        log_handler = logging.FileHandler(os.path.join(os.path.dirname(__file__), "log.txt"))
        # Fix: Formatter lives on the logging module, not on Logger
        # instances; `self.logger.Formatter` raised AttributeError.
        log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        log_handler.setFormatter(log_formatter)
        self.logger.addHandler(log_handler)
        self.logger.setLevel(logging.INFO)

        # load configuration
        self.config = Config()

        # create a database connection
        self.data = DatabaseClient()
        self.logger.info("Initialised client database")

        # create a TCP connection
        self.socket = TcpClient()
        self.logger.info("Initialised TCP socket - not connected")

        # create an application
        # Fix: the route patterns below were plain strings starting with
        # the characters "r/" (e.g. 'r/categories') instead of raw strings
        # like r'/categories', so none of these handlers could ever match.
        self.application = tornado.web.Application([
            (r'/', blitz_http.IndexHandler),
            (r'/categories', blitz_api.CategoriesHandler),
            (r'/cache/(?P<since>[^\/]+)', blitz_api.CacheHandler),
            (r'/download/(?P<session_id>[^\/]+)', blitz_api.DownloadHandler),
            (r'/session/(?P<session_id>[^\/]+)', blitz_api.SessionHandler),
            (r'/sessions', blitz_api.SessionsHandler),
            (r'/config', blitz_api.ConfigHandler)
        ], **self.config.settings)
        self.logger.info("Initialised client application")

        # create an HTTP server
        self.http_server = tornado.httpserver.HTTPServer(self.application)
        self.logger.info("Initialised client HTTP server")

    def start(self):
        """
        Starts the application: listens on the configured port and enters
        the Tornado IO loop (blocks until the loop is stopped).
        """
        # start listening on the configured port and IP
        self.http_server.listen(self.config['port'])
        # Fix: use %s so a numeric port value does not break string
        # concatenation.
        self.logger.info("HTTP server started listening on port %s" % self.config['port'])

        # start the IO loop
        self.logger.info("HTTP server starting IO loop")
        tornado.ioloop.IOLoop.instance().start()
| Python | 0.000001 | |
927a49da0ac7fe633c72f6d08ed93710c1d71630 | Refactor image alias tests to reduce copypasta | pylxd/tests/test_image_alias.py | pylxd/tests/test_image_alias.py | # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ddt import data
from ddt import ddt
import mock
from pylxd import connection
from pylxd.tests import annotated_data
from pylxd.tests import fake_api
from pylxd.tests import LXDAPITestBase
@ddt
@mock.patch.object(connection.LXDConnection, 'get_object')
class LXDAPIImageAliasTestObject(LXDAPITestBase):
    """Alias API calls that fetch an object; the class-level patch hands
    every test the mocked LXDConnection.get_object as ``ms``."""
    def test_alias_list(self, ms):
        # alias_list should unwrap the alias names from the API payload.
        ms.return_value = ('200', fake_api.fake_alias_list())
        self.assertEqual(['ubuntu'], self.lxd.alias_list())
        ms.assert_called_once_with('GET', '/1.0/images/aliases')
    def test_alias_show(self, ms):
        # alias_show returns (status, payload); only the payload is checked.
        ms.return_value = ('200', fake_api.fake_alias())
        self.assertEqual(
            fake_api.fake_alias(), self.lxd.alias_show('fake')[1])
        ms.assert_called_once_with('GET', '/1.0/images/aliases/fake')
@ddt
@mock.patch.object(connection.LXDConnection, 'get_status')
class LXDAPIImageAliasTestStatus(LXDAPITestBase):
    """Alias API calls that only report a status; ``ms`` is the mocked
    LXDConnection.get_status."""
    @data(True, False)
    def test_alias_defined(self, expected, ms):
        # alias_defined mirrors whatever boolean the connection reports.
        ms.return_value = expected
        self.assertEqual(expected, self.lxd.alias_defined('fake'))
        ms.assert_called_once_with('GET', '/1.0/images/aliases/fake')
    # One case per mutating operation:
    # (api suffix, HTTP verb, URL suffix, call args, expected payload args)
    @annotated_data(
        ('create', 'POST', '', ('fake',), ('"fake"',)),
        ('update', 'PUT', '/test-alias',
         ('test-alias', 'fake',), ('"fake"',)),
        ('rename', 'POST', '/test-alias',
         ('test-alias', 'fake',), ('"fake"',)),
        ('delete', 'DELETE', '/test-alias', ('test-alias',), ()),
    )
    def test_alias_operations(self, method, http, path, args, call_args, ms):
        self.assertTrue(getattr(self.lxd, 'alias_' + method)(*args))
        ms.assert_called_once_with(
            http,
            '/1.0/images/aliases' + path,
            *call_args
        )
| # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ddt import data
from ddt import ddt
import mock
import unittest
from pylxd import api
from pylxd import connection
from pylxd.tests import annotated_data
from pylxd.tests import fake_api
@ddt
class LXDUnitTestAlias(unittest.TestCase):
    """Unit tests for the image-alias methods of the pylxd API wrapper."""
    def setUp(self):
        super(LXDUnitTestAlias, self).setUp()
        # Fresh API client per test; connection calls are mocked per-test.
        self.lxd = api.API()
    def test_alias_list(self):
        # alias_list should unwrap the alias names from the API payload.
        with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
            ms.return_value = ('200', fake_api.fake_alias_list())
            self.assertEqual(['ubuntu'], self.lxd.alias_list())
            ms.assert_called_once_with('GET', '/1.0/images/aliases')
    @data(True, False)
    def test_alias_defined(self, expected):
        # alias_defined mirrors whatever boolean the connection reports.
        with mock.patch.object(connection.LXDConnection, 'get_status') as ms:
            ms.return_value = expected
            self.assertEqual(expected, self.lxd.alias_defined('fake'))
            ms.assert_called_once_with('GET', '/1.0/images/aliases/fake')
    def test_alias_show(self):
        # alias_show returns (status, payload); only the payload is checked.
        with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
            ms.return_value = ('200', fake_api.fake_alias())
            self.assertEqual(
                fake_api.fake_alias(), self.lxd.alias_show('fake')[1])
            ms.assert_called_once_with('GET', '/1.0/images/aliases/fake')
    # One case per mutating operation:
    # (api suffix, HTTP verb, URL suffix, call args[, payload args])
    @annotated_data(
        ('create', 'POST', '', ('fake',), ('"fake"',)),
        ('update', 'PUT', '/test-alias',
         ('test-alias', 'fake',), ('"fake"',)),
        ('rename', 'POST', '/test-alias',
         ('test-alias', 'fake',), ('"fake"',)),
        ('delete', 'DELETE', '/test-alias', ('test-alias',)),
    )
    def test_alias_operations(self, method, http, path, args, call_args=()):
        with mock.patch.object(connection.LXDConnection, 'get_status') as ms:
            ms.return_value = True
            self.assertTrue(getattr(self.lxd, 'alias_' + method)(*args))
            ms.assert_called_once_with(
                http,
                '/1.0/images/aliases' + path,
                *call_args
            )
| Python | 0 |
ca0bec705a6c68c7540c9b7f0a02972e1f26723c | Create py-递归设置.py | py-递归设置.py | py-递归设置.py | #!/usr/bin/python
# -*- encoding:utf-8 -*-
import sys
sys.setrecursionlimit(1500) # set the maximum depth as 1500
def recursion(n):
    """Print n, n-1, ..., 1 (one per line), recursing until n reaches 0."""
    if n <= 0:
        return
    # Fix: print() works on both Python 2 and 3; the original `print n`
    # statement is a SyntaxError on Python 3.
    print(n)
    recursion(n - 1)
if __name__ == "__main__":
    # 1200 recursion levels, safely within the limit of 1500 raised above.
    recursion(1200)
| Python | 0.000001 | |
cc5cf15bbbdcf6a4481bc88854b3a80f1fc99241 | rename file openFileRooms.py in openfilerooms.py | trunk/editor/openfilerooms.py | trunk/editor/openfilerooms.py | #!/usr/bin/env python
from xml.etree import ElementTree
#to use OrderedDict in python < 2.7
try:
from collections import OrderedDict
except ImportError:
from misc.dict import OrderedDict
from structData.area import Area
from structData.action import Action
from structData.param import Param
from structData.item import Item
from structData.event import Event
from structData.room import Room
from structData.var import Var
from structData.image import Image
from structData.information import Information
from structData.varRequirement import VarRequirement
from structData.itemRequirement import ItemRequirement
from structData.world import g_world
from upgradeVersion import upgradeVersion
def loadRooms(xml_file):
    """Parse the <rooms> section into an OrderedDict of Room keyed by id.

    Each <room> may contain <area> children that are appended to the
    room's areas. Raises ValueError on any unexpected tag.
    """
    rooms = OrderedDict()
    room = None
    for line in list(xml_file.find("rooms")):
        if line.tag == "room":
            room = Room(line.attrib["id"], line.attrib["bg"],
                        line.attrib["bgm"])
            for child in line:
                if child.tag == "area":
                    area = Area(child.attrib["id"],
                                child.attrib["x"],
                                child.attrib["y"],
                                child.attrib["height"],
                                child.attrib["width"],
                                child.attrib["event"])
                else:
                    raise ValueError("invalid tag %s in room" % child.tag)
                room.areas.append(area)
            rooms[room.id] = room
        else:
            raise ValueError("invalid tag %s in rooms" % line.tag)
    return rooms
def loadEvents(xml_file):
    """Parse the <events> section into an OrderedDict of Event keyed by id.

    Each <event> may carry <item_req>/<var_req> requirements and <action>
    children (with nested <param>). Raises ValueError on unexpected tags.
    """
    events = OrderedDict()
    event = None
    for line in list(xml_file.find("events")):
        if line.tag == "event":
            event = Event(line.attrib["id"])
            events[event.id] = event
            for child in line:
                if child.tag == "item_req":
                    requirement = ItemRequirement(child.attrib['id'],
                                                  child.attrib['value'])
                    event.requirements.append(requirement)
                elif child.tag == "var_req":
                    requirement = VarRequirement(child.attrib['id'],
                                                 child.attrib['value'])
                    event.requirements.append(requirement)
                elif child.tag == "action":
                    action = Action(child.attrib['id'])
                    event.actions.append(action)
                    for second_child in child:
                        if second_child.tag == "param":
                            param = Param(second_child.attrib['value'])
                            action.params.append(param)
                        else:
                            raise ValueError("invalid tag %s in action"
                                             % second_child.tag)
                else:
                    raise ValueError("invalid tag %s in event" % child.tag)
        else:
            raise ValueError("invalid tag %s in events" % line.tag)
    return events
def loadItems(xml_file):
    """Parse the <items> section into an OrderedDict of Item keyed by id.

    Raises ValueError on any unexpected tag. (Fix: the error message used
    to say "in events" -- a copy/paste slip from loadEvents.)
    """
    items = OrderedDict()
    for line in list(xml_file.find("items")):
        if line.tag == "item":
            item = Item(line.attrib['id'],
                        line.attrib["x"],
                        line.attrib["y"],
                        line.attrib["height"],
                        line.attrib["width"],
                        line.attrib["room"],
                        line.attrib["image"],
                        line.attrib['event'])
            items[item.id] = item
        else:
            raise ValueError("invalid tag %s in items" % line.tag)
    return items
def loadInformation(xml_file):
    """Build an Information object from the attributes of the <world> node.

    Walks the tree for 'world' elements (the last one found wins) and
    raises ValueError when none is present.
    """
    informations = None
    for node in xml_file.iter('world'):
        informations = Information(node.attrib['version'],
                                   node.attrib['name'],
                                   node.attrib['width'],
                                   node.attrib['height'],
                                   node.attrib['start'])
    if not informations:
        raise ValueError("invalid file format")
    return informations
def loadImages(xml_file):
    """Parse the <images> section into a dict mapping file name -> Image.

    Raises ValueError on any unexpected tag.
    """
    images = {}
    for node in list(xml_file.find("images")):
        if node.tag != "img":
            raise ValueError("invalid tag %s in images" % node.tag)
        images[node.attrib['file']] = Image(node.attrib['file'])
    return images
def loadVars(xml_file):
    """Parse the <vars> section into a dict mapping id -> Var.

    Raises ValueError on any unexpected tag. (Fix: the original built the
    ValueError but never raised it, silently skipping bad tags -- every
    sibling loader raises.)
    """
    variable = {}
    for line in list(xml_file.find("vars")):
        if line.tag == "var":
            variable[line.attrib['id']] = Var(line.attrib['id'],
                                              line.attrib['value'])
        else:
            raise ValueError("invalid tag %s in vars" % line.tag)
    return variable
def openFileRooms(path_file):
    """
    Load the data saved in a .rooms file.

    Takes the path of the file to load, upgrades it to the latest format
    when it was written by an earlier version, and populates the global
    g_world with the rooms, events, items, vars and images found in it.
    """
    xml_file = upgradeVersion(path_file)
    g_world.informations = loadInformation(xml_file)
    # The world's start room becomes the initially selected room.
    g_world.selected_room = g_world.informations.start
    g_world.images = loadImages(xml_file)
    g_world.items = loadItems(xml_file)
    g_world.vars = loadVars(xml_file)
    g_world.events = loadEvents(xml_file)
    g_world.rooms = loadRooms(xml_file)
| Python | 0.000004 | |
61419ddc8db5f393bd79d200fc09424721877729 | Change BG color of None state TIs | airflow/utils/state.py | airflow/utils/state.py | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from builtins import object
class State(object):
    """
    Static holder for task-instance state constants, plus helpers mapping
    each state to display colors, so callers never hardcode either.
    """

    # scheduler
    NONE = None
    REMOVED = "removed"
    SCHEDULED = "scheduled"

    # set by the executor (t.b.d.)
    # LAUNCHED = "launched"

    # set by a task
    QUEUED = "queued"
    RUNNING = "running"
    SUCCESS = "success"
    SHUTDOWN = "shutdown"  # External request to shut down
    FAILED = "failed"
    UP_FOR_RETRY = "up_for_retry"
    UPSTREAM_FAILED = "upstream_failed"
    SKIPPED = "skipped"

    task_states = (
        SUCCESS,
        RUNNING,
        FAILED,
        UPSTREAM_FAILED,
        UP_FOR_RETRY,
        QUEUED,
        NONE,
    )

    dag_states = (
        SUCCESS,
        RUNNING,
        FAILED,
    )

    # Background color associated with each known state.
    state_color = {
        QUEUED: 'gray',
        RUNNING: 'lime',
        SUCCESS: 'green',
        SHUTDOWN: 'blue',
        FAILED: 'red',
        UP_FOR_RETRY: 'gold',
        UPSTREAM_FAILED: 'orange',
        SKIPPED: 'pink',
        REMOVED: 'lightgrey',
        SCHEDULED: 'white',
        NONE: 'lightblue',
    }

    @classmethod
    def color(cls, state):
        """Background color for *state* ('white' for unknown states)."""
        return cls.state_color.get(state, 'white')

    @classmethod
    def color_fg(cls, state):
        """Foreground color that stays readable on top of color(state)."""
        return 'white' if cls.color(state) in ('green', 'red') else 'black'

    @classmethod
    def finished(cls):
        """
        A list of states indicating that a task started and completed a
        run attempt. Note that the attempt could have resulted in failure or
        have been interrupted; in any case, it is no longer running.
        """
        return [cls.SUCCESS, cls.SHUTDOWN, cls.FAILED, cls.SKIPPED]

    @classmethod
    def unfinished(cls):
        """
        A list of states indicating that a task either has not completed
        a run or has not even started.
        """
        return [
            cls.NONE,
            cls.SCHEDULED,
            cls.QUEUED,
            cls.RUNNING,
            cls.UP_FOR_RETRY,
        ]
| # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from builtins import object
class State(object):
    """
    Static class with task instance states constants and color method to
    avoid hardcoding.
    """

    # scheduler
    NONE = None
    REMOVED = "removed"
    SCHEDULED = "scheduled"

    # set by the executor (t.b.d.)
    # LAUNCHED = "launched"

    # set by a task
    QUEUED = "queued"
    RUNNING = "running"
    SUCCESS = "success"
    SHUTDOWN = "shutdown"  # External request to shut down
    FAILED = "failed"
    UP_FOR_RETRY = "up_for_retry"
    UPSTREAM_FAILED = "upstream_failed"
    SKIPPED = "skipped"

    task_states = (
        SUCCESS,
        RUNNING,
        FAILED,
        UPSTREAM_FAILED,
        UP_FOR_RETRY,
        QUEUED,
    )

    dag_states = (
        SUCCESS,
        RUNNING,
        FAILED,
    )

    # Color associated with each known state (used by color() below).
    state_color = {
        QUEUED: 'gray',
        RUNNING: 'lime',
        SUCCESS: 'green',
        SHUTDOWN: 'blue',
        FAILED: 'red',
        UP_FOR_RETRY: 'gold',
        UPSTREAM_FAILED: 'orange',
        SKIPPED: 'pink',
        REMOVED: 'lightgrey',
        SCHEDULED: 'white',
    }

    @classmethod
    def color(cls, state):
        """Return the color for *state*, falling back to 'white'."""
        if state in cls.state_color:
            return cls.state_color[state]
        else:
            return 'white'

    @classmethod
    def color_fg(cls, state):
        """Return a foreground color readable on top of color(state)."""
        color = cls.color(state)
        if color in ['green', 'red']:
            return 'white'
        else:
            return 'black'

    @classmethod
    def finished(cls):
        """
        A list of states indicating that a task started and completed a
        run attempt. Note that the attempt could have resulted in failure or
        have been interrupted; in any case, it is no longer running.
        """
        return [
            cls.SUCCESS,
            cls.SHUTDOWN,
            cls.FAILED,
            cls.SKIPPED,
        ]

    @classmethod
    def unfinished(cls):
        """
        A list of states indicating that a task either has not completed
        a run or has not even started.
        """
        return [
            cls.NONE,
            cls.SCHEDULED,
            cls.QUEUED,
            cls.RUNNING,
            cls.UP_FOR_RETRY
        ]
| Python | 0 |
ad7d04f73637d6228b82fbb89d51c13844cb1025 | Fix styling of the invites icon | shell/view/frame/ActivitiesBox.py | shell/view/frame/ActivitiesBox.py | # Copyright (C) 2006, Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import hippo
import logging
import conf
from sugar.graphics.canvasicon import CanvasIcon
from sugar.presence import PresenceService
from sugar.graphics import style
class ActivityItem(CanvasIcon):
    """Frame icon for an installed activity that can be launched."""
    def __init__(self, activity):
        icon_name = activity.get_icon()
        CanvasIcon.__init__(self, icon_name=icon_name)
        style.apply_stylesheet(self, 'frame.ActivityIcon')
        self._activity = activity
    def get_bundle_id(self):
        return self._activity.get_id()
class InviteItem(CanvasIcon):
    """Frame icon for a pending invitation, tinted with the invite's color."""
    def __init__(self, invite):
        CanvasIcon.__init__(self, icon_name=invite.get_icon())
        style.apply_stylesheet(self, 'frame.ActivityIcon')
        # Set after the stylesheet so the invite's own color takes effect.
        self.props.color = invite.get_color()
        self._invite = invite
    def get_activity_id(self):
        return self._invite.get_activity_id()
    def get_bundle_id(self):
        return self._invite.get_bundle_id()
    def get_invite(self):
        return self._invite
class ActivitiesBox(hippo.CanvasBox):
    """Horizontal strip of activity launchers and pending-invite icons,
    kept in sync with the shell model's invite list."""
    def __init__(self, shell):
        hippo.CanvasBox.__init__(self, orientation=hippo.ORIENTATION_HORIZONTAL)
        self._shell = shell
        self._invite_to_item = {}  # invite -> InviteItem, used for removal
        self._invites = self._shell.get_model().get_invites()
        # One launcher icon per installed activity that wants one shown.
        registry = conf.get_activity_registry()
        for activity in registry.list_activities():
            if activity.get_show_launcher():
                self.add_activity(activity)
        # Icons for invites that already arrived, then track changes.
        for invite in self._invites:
            self.add_invite(invite)
        self._invites.connect('invite-added', self._invite_added_cb)
        self._invites.connect('invite-removed', self._invite_removed_cb)
    def _activity_clicked_cb(self, icon):
        self._shell.start_activity(icon.get_bundle_id())
    def _invite_clicked_cb(self, icon):
        # Accepting an invite consumes it and joins the shared activity.
        self._invites.remove_invite(icon.get_invite())
        self._shell.join_activity(icon.get_bundle_id(),
                                  icon.get_activity_id())
    def _invite_added_cb(self, invites, invite):
        self.add_invite(invite)
    def _invite_removed_cb(self, invites, invite):
        self.remove_invite(invite)
    def add_activity(self, activity):
        item = ActivityItem(activity)
        item.connect('activated', self._activity_clicked_cb)
        self.append(item, 0)
    def add_invite(self, invite):
        item = InviteItem(invite)
        item.connect('activated', self._invite_clicked_cb)
        self.append(item, 0)
        self._invite_to_item[invite] = item
    def remove_invite(self, invite):
        self.remove(self._invite_to_item[invite])
        del self._invite_to_item[invite]
| # Copyright (C) 2006, Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import hippo
import logging
import conf
from sugar.graphics.canvasicon import CanvasIcon
from sugar.presence import PresenceService
from sugar.graphics import style
class ActivityItem(CanvasIcon):
    """Frame icon for an installed activity that can be launched."""
    def __init__(self, activity):
        icon_name = activity.get_icon()
        CanvasIcon.__init__(self, icon_name=icon_name)
        style.apply_stylesheet(self, 'frame.ActivityIcon')
        self._activity = activity
    def get_bundle_id(self):
        return self._activity.get_id()
class InviteItem(CanvasIcon):
    """Frame icon for a pending invitation, colored with the invite's color."""
    def __init__(self, invite):
        CanvasIcon.__init__(self, icon_name=invite.get_icon(),
                            color=invite.get_color())
        self._invite = invite
    def get_activity_id(self):
        return self._invite.get_activity_id()
    def get_bundle_id(self):
        return self._invite.get_bundle_id()
    def get_invite(self):
        return self._invite
class ActivitiesBox(hippo.CanvasBox):
    """Horizontal strip of activity launchers and pending-invite icons,
    kept in sync with the shell model's invite list."""
    def __init__(self, shell):
        hippo.CanvasBox.__init__(self, orientation=hippo.ORIENTATION_HORIZONTAL)
        self._shell = shell
        self._invite_to_item = {}  # invite -> InviteItem, used for removal
        self._invites = self._shell.get_model().get_invites()
        # One launcher icon per installed activity that wants one shown.
        registry = conf.get_activity_registry()
        for activity in registry.list_activities():
            if activity.get_show_launcher():
                self.add_activity(activity)
        # Icons for invites that already arrived, then track changes.
        for invite in self._invites:
            self.add_invite(invite)
        self._invites.connect('invite-added', self._invite_added_cb)
        self._invites.connect('invite-removed', self._invite_removed_cb)
    def _activity_clicked_cb(self, icon):
        self._shell.start_activity(icon.get_bundle_id())
    def _invite_clicked_cb(self, icon):
        # Accepting an invite consumes it and joins the shared activity.
        self._invites.remove_invite(icon.get_invite())
        self._shell.join_activity(icon.get_bundle_id(),
                                  icon.get_activity_id())
    def _invite_added_cb(self, invites, invite):
        self.add_invite(invite)
    def _invite_removed_cb(self, invites, invite):
        self.remove_invite(invite)
    def add_activity(self, activity):
        item = ActivityItem(activity)
        item.connect('activated', self._activity_clicked_cb)
        self.append(item, 0)
    def add_invite(self, invite):
        item = InviteItem(invite)
        item.connect('activated', self._invite_clicked_cb)
        self.append(item, 0)
        self._invite_to_item[invite] = item
    def remove_invite(self, invite):
        self.remove(self._invite_to_item[invite])
        del self._invite_to_item[invite]
| Python | 0 |
c926a130853f33155c57e621b52ab62aecef8049 | 20170104 initial commit py2mysql.py | python/py2mysql.py | python/py2mysql.py | # -*- coding: utf-8 -*-
"""
#
# author : jiankaiwang (http://welcome-jiankaiwang.rhcloud.com/)
# source code in github : seed (https://github.com/jiankaiwang/seed)
# document in gitbook : seed (https://www.gitbook.com/book/jiankaiwang/seed/details)
#
# desc : function to print key-value query data
def dictRes(a):
for item in range(0, len(a['data']), 1):
for key in range(0, len(a['data'][item].keys()), 1):
print a['data'][item].keys()[key], a['data'][item].values()[key]
# desc : function to print query data without key
def nonDictRes(a):
for item in range(0, len(a['data']), 1):
for each in range(0, len(a['data'][item]), 1):
print a['data'][item][each]
# desc : connect to the mysql server
py2my = py2mysql("127.0.0.1", "3306", "user", "password", "database_name")
print py2my.checkConnectionValid()
# desc : query data
queryData = py2my.execsql("select * from table where year = %s limit 10;", (2000,), True, True)
dictRes(queryData)
# desc : insert data
insertData = py2my.execsql("insert into table (year, week, value) values (%s, %s, %s);", (2000, 1, 'value'), False, False)
print insertData
# desc : update data
updateData = py2my.execsql("update table set value = %s where year = %s and week = %s;", ('new_value', 2000, 1), False, False)
print updateData
# desc : delete data
deleteData = py2my.execsql("delete from table where year = %s and week = %s;", (2000, 1), False, False)
print deleteData
"""
import mysql.connector
class py2mysql:
    """Small helper around mysql-connector for running SQL against one server.

    Every public method returns a uniform status envelope:
        {"state": "success"|"failure", "info": <message>, "data": <payload>}
    """

    # ----------
    # private
    # ----------
    __host = ""
    __port = ""
    __user = ""
    __pass = ""
    __dbname = ""
    __connectionValid = False
    __msg = ""

    def __retStatus(self, state, info, data):
        # Uniform return envelope used by all entry points.
        return {"state": state, "info": info, "data": data}

    def __connect(self):
        # Open a fresh connection with the stored credentials.
        # Raises mysql.connector.Error on failure.
        return mysql.connector.connect(
            host=self.__host,
            port=self.__port,
            user=self.__user,
            password=self.__pass,
            database=self.__dbname)

    def __checkConnect(self):
        # Probe the server once and record the outcome in
        # __connectionValid / __msg; always closes the probe connection.
        conn = None
        try:
            conn = self.__connect()
            self.__connectionValid = True
        except mysql.connector.Error as err:
            self.__connectionValid = False
            self.__msg = "{}".format(err)
        finally:
            if conn is not None:
                conn.close()

    # ----------
    # public
    # ----------
    def __init__(self, host, port, user, pwd, dbname):
        """Store the credentials and immediately probe the connection."""
        self.__host = host
        self.__port = port
        self.__user = user
        self.__pass = pwd
        self.__dbname = dbname
        self.__connectionValid = False
        self.__msg = ""
        # check connect
        self.__checkConnect()

    def checkConnectionValid(self):
        """Report whether the constructor's connection probe succeeded."""
        if not self.__connectionValid:
            return self.__retStatus("failure", self.__msg, "")
        else:
            return self.__retStatus("success", "Connection is valid.", "")

    def execsql(self, sqlCmd, parameterInSeq, isQueryFlag, asdict=True):
        """Execute *sqlCmd* with bound parameters *parameterInSeq* (a tuple).

        isQueryFlag=True  -> fetch rows; returned as a list of dicts when
                             asdict, otherwise a header row followed by
                             value rows.
        isQueryFlag=False -> commit the statement (insert/update/delete).
        """
        if not self.__connectionValid:
            return self.__retStatus("failure", self.__msg, "")
        if not (isinstance(sqlCmd, str)
                and isinstance(parameterInSeq, tuple)
                and isinstance(isQueryFlag, bool)
                and isinstance(asdict, bool)):
            return self.__retStatus("failure", "Parameters passed are wrong.", "")
        conn = None
        try:
            conn = self.__connect()
            cursor = conn.cursor()
            cursor.execute(sqlCmd, parameterInSeq)
            if isQueryFlag:
                curInfo = [desc[0] for desc in cursor.description]
                rawData = cursor.fetchall()
                if asdict:
                    retData = [dict(zip(curInfo, row)) for row in rawData]
                else:
                    retData = [curInfo] + [list(row) for row in rawData]
                return self.__retStatus("success", "Complete query.", retData)
            else:
                conn.commit()
                return self.__retStatus(
                    "success", "Complete non-query sql command.", "")
        except mysql.connector.Error as err:
            return self.__retStatus("failure", "{}".format(err), "")
        finally:
            # BUG FIX: the original returned from inside the try block on the
            # query path without ever closing the connection, leaking one
            # server connection per call.
            if conn is not None:
                conn.close()
| Python | 0.999371 | |
5ade8c78e6ab875047ca29779dc37f9029a9f0d6 | Create set_auth.py | bluemix/set_auth.py | bluemix/set_auth.py | import airflow
from airflow import models, settings
from airflow.contrib.auth.backends.password_auth import PasswordUser
user = PasswordUser(models.User())
user.username = 'username'
user.email = 'your@email.com'
user.password = 'pwd'
session = settings.Session()
session.add(user)
session.commit()
session.close()
| Python | 0.000003 | |
6ce3ae4ef4a274e76bf1f6d76f0675bec2391d17 | add first pass of maria DB slave nagiors monitor - slave_sql and slave_io checks are done | check_mariadb_slaves.py | check_mariadb_slaves.py | #!/usr/bin/env python
"""MariaDB slave status checker"""
import sys
import argparse
import MySQLdb
class SlaveStatusCheck(object):
"""Class to help us run slave status queries against MariaDB"""
REPLICATION_LAG_MODE = 'replication_lag'
SLAVESQL_MODE = 'slave_sql'
SLAVEIO_MODE = 'slave_io'
MODES = (REPLICATION_LAG_MODE,
SLAVESQL_MODE,
SLAVEIO_MODE)
def __init__(self, hostname, username, password, slave_conn,
mode, verbose=False, warning=None, critical=None):
self.hostname = hostname
self.username = username
self.password = password
self.warning = warning
self.critical = critical
self.verbose = verbose
self.mode = mode
# Execute the query and store the results
self._result = {}
self.get_slave_status(slave_conn)
def run_check(self):
"""Execute the check against the given mode"""
check_fn = getattr(self, self.mode)
check_fn()
def replication_lag(self):
pass
def slave_sql(self):
"""Check that Slave_SQL_Running = Yes"""
if self._result.get('Slave_SQL_Running') == "Yes":
print "OK - Slave sql is running"
sys.exit(0)
else:
print "CRITICAL - Slave sql is not running"
sys.exit(2)
def slave_io(self):
"""Check that Slave_IO_Running = Yes"""
if self._result.get('Slave_IO_Running') == "Yes":
print "OK - Slave io is running"
sys.exit(0)
else:
print "CRITICAL - Slave io is not running"
sys.exit(2)
def get_slave_status(self, slave_connection):
"""Run the query!"""
try:
sql = 'SHOW SLAVE "{0}" STATUS'.format(slave_connection)
conn = None
conn = MySQLdb.Connection(
self.hostname,
self.username,
self.password)
curs = conn.cursor(MySQLdb.cursors.DictCursor)
curs.execute(sql)
conn.commit()
self._result = curs.fetchall()[0]
if self.verbose:
print self._result
except MySQLdb.Error, exc:
print "ERROR - {0}: {1}".format(exc.args[0], exc.args[1])
sys.exit(1)
finally:
if conn:
conn.close()
def main():
    """Entry point: parse the CLI options and run the requested check."""
    arg_parser = argparse.ArgumentParser(
        description='MariaDB slave status checker')
    arg_parser.add_argument('--hostname', default='localhost', type=str,
                            help="MariaDB hostname")
    arg_parser.add_argument('--username', type=str, help="MariaDB username")
    arg_parser.add_argument('--password', type=str, help="MariaDB password")
    arg_parser.add_argument('--connection', required=True, type=str,
                            help="MariaDB slave connection")
    arg_parser.add_argument('--mode', type=str, required=True,
                            choices=SlaveStatusCheck.MODES,
                            help="slave state to check")
    arg_parser.add_argument('-w', '--warning', type=int, default=None,
                            help="warning limit")
    arg_parser.add_argument('-c', '--critical', type=int, default=None,
                            help="critical limit")
    arg_parser.add_argument('--verbose', action='store_true', default=False,
                            help="enable verbose mode")
    opts = arg_parser.parse_args()
    checker = SlaveStatusCheck(opts.hostname, opts.username, opts.password,
                               opts.connection, opts.mode, opts.verbose,
                               opts.warning, opts.critical)
    checker.run_check()
| Python | 0 | |
cb5b85fc4a011f7eb9628b7099311b399f4d033d | Create born_on_a_friday.py | born_on_a_friday.py | born_on_a_friday.py | #!/usr/bin/env python3
from datetime import datetime
from typing import Tuple
def ask_month_day_year(prompt: str = "Enter your birthday") -> Tuple[int, int, int]:
    """Prompt for a date typed as MM/DD/YYYY and return (month, day, year)."""
    raw = input(f"{prompt} in the format: MM/DD/YYYY ")
    # Whitespace around each field is tolerated.
    month, day, year = [int(field.strip()) for field in raw.split("/")]
    return month, day, year
def day_of_the_week(year, month, day):
    """Return the full weekday name (e.g. 'Friday') for the given date."""
    return datetime(year, month, day).strftime("%A")
# Script entry: ask for a birthday, then report the weekday it fell on.
month, day, year = ask_month_day_year()
print(f"You were born on a {day_of_the_week(year, month, day)}.")
| Python | 0.000048 | |
576dd7270714ec63beab9ce6af22f94e20dc1dd5 | Add admin model classes to forum_tracking app | machina/apps/forum_tracking/admin.py | machina/apps/forum_tracking/admin.py | # -*- coding: utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
# Third party imports
from django.contrib import admin
# Local application / specific library imports
from machina.core.db.models import get_model
ForumReadTrack = get_model('forum_tracking', 'ForumReadTrack')
TopicReadTrack = get_model('forum_tracking', 'TopicReadTrack')
class ForumReadTrackAdmin(admin.ModelAdmin):
    # Admin list options for per-forum read markers.
    list_display = ('__str__', 'user', 'forum', 'mark_time',)
    list_filter = ('mark_time',)
class TopicReadTrackAdmin(admin.ModelAdmin):
    # Admin list options for per-topic read markers.
    list_display = ('__str__', 'user', 'topic', 'mark_time',)
    list_filter = ('mark_time',)
admin.site.register(ForumReadTrack, ForumReadTrackAdmin)
admin.site.register(TopicReadTrack, TopicReadTrackAdmin)
| Python | 0 | |
2f7e3cf34e8460565d572507c2f97b98ac653036 | Allow overriding of DEFAULT_ENV_PREFIX | cbs/__init__.py | cbs/__init__.py |
from functools import partial
import importlib
import inspect
import os
from django.utils import six
from .utils import as_bool
DEFAULT_ENV_PREFIX = ''
class env(object):
    '''
    Decorator to make environ based settings simpler.

        @env
        def SOMETHING_KEY(self):
            return 'default'

    Keyword forms::

        @env(key='OTHER_NAME')   # read a different environ key
        @env(prefix='MY_')       # env key prefix not used in settings
        @env(type=int)           # type caster / validator for the value

    ``key`` and ``prefix`` can be used together.
    '''

    def __new__(cls, *args, **kwargs):
        # Called with keyword arguments only (``@env(key=...)``): return a
        # partially-applied constructor so the parameterised decorator form
        # works; the getter arrives on the second call.
        if not args:
            return partial(env, **kwargs)
        return object.__new__(cls)

    def __init__(self, getter, key=None, type=None, prefix=None):
        self.getter = getter
        self.type = type
        key = key or getter.__name__
        if prefix is None:
            prefix = DEFAULT_ENV_PREFIX
        self.key = ''.join([prefix, key])

    def __get__(self, obj, type=None):
        if obj is None:
            return self
        try:
            value = os.environ[self.key]
        except KeyError:
            # NOTE(review): the getter receives the descriptor, not the
            # settings instance -- confirm getters never rely on ``self``.
            value = self.getter(self)
        if self.type:
            value = self.type(value)
        # BUG FIX: cache the *converted* value.  The original cached the raw
        # value first; since env is a non-data descriptor the cached entry
        # shadows __get__ afterwards, so every read after the first returned
        # the unconverted value.
        obj.__dict__[self.getter.__name__] = value
        return value
class envbool(env):
    '''
    A special case of env that casts its value to bool (via as_bool).
    '''

    def __init__(self, *args, **kwargs):
        # BUG FIX: dict.setdefault takes (key, default) positionally; the
        # original ``kwargs.setdefault(type=as_bool)`` raised
        # ``TypeError: setdefault() takes no keyword arguments`` on every use.
        kwargs.setdefault('type', as_bool)
        super(envbool, self).__init__(*args, **kwargs)
def apply(name, to):
    '''
    Apply settings to ``to``, which is expected to be globals().

    ``name`` may be a settings class, the name of a class already present in
    ``to``, or a dotted import path to a settings class.  Place at the end of
    settings.py / settings/__init__.py to apply a given settings class.
    '''
    if not isinstance(name, six.string_types):
        target = name
    elif '.' in name:
        module_path, _, attr = name.rpartition('.')
        target = getattr(importlib.import_module(module_path), attr)
    else:
        target = to.get(name)

    if target is None:
        raise ValueError('Could not find settings class: %r', name)

    settings = target()

    def resolved(value):
        # Callable settings are invoked; plain values pass through untouched.
        return value() if callable(value) else value

    # Only UPPER_CASE attributes are treated as settings.
    to.update({
        attr: resolved(getattr(settings, attr))
        for attr in dir(settings)
        if attr == attr.upper()
    })
from django import VERSION
base = importlib.import_module('cbs.base.django{}{}'.format(*VERSION[:2]))
BaseSettings = getattr(base, 'Base{}{}Settings'.format(*VERSION[:2]))
|
from functools import partial
import importlib
import inspect
import os
from django.utils import six
from .utils import as_bool
DEFAULT_ENV_PREFIX = ''
class env(object):
    '''
    Decorator to make environ based settings simpler.

        @env
        def SOMETHING_KEY(self):
            return 'default'

    Keyword forms::

        @env(key='OTHER_NAME')   # read a different environ key
        @env(prefix='MY_')       # env key prefix not used in settings
        @env(type=int)           # type caster / validator for the value

    ``key`` and ``prefix`` can be used together.
    '''

    def __new__(cls, *args, **kwargs):
        # Called with keyword arguments only (``@env(key=...)``): return a
        # partially-applied constructor so the parameterised decorator form
        # works; the getter arrives on the second call.
        if not args:
            return partial(env, **kwargs)
        return object.__new__(cls)

    def __init__(self, getter, key=None, type=None, prefix=DEFAULT_ENV_PREFIX):
        self.getter = getter
        self.type = type
        key = key or getter.__name__
        self.key = ''.join([prefix, key])

    def __get__(self, obj, type=None):
        if obj is None:
            return self
        try:
            value = os.environ[self.key]
        except KeyError:
            # NOTE(review): the getter receives the descriptor, not the
            # settings instance -- confirm getters never rely on ``self``.
            value = self.getter(self)
        if self.type:
            value = self.type(value)
        # BUG FIX: cache the *converted* value.  The original cached the raw
        # value first; since env is a non-data descriptor the cached entry
        # shadows __get__ afterwards, so every read after the first returned
        # the unconverted value.
        obj.__dict__[self.getter.__name__] = value
        return value
class envbool(env):
    '''
    A special case of env that casts its value to bool (via as_bool).
    '''

    def __init__(self, *args, **kwargs):
        # BUG FIX: dict.setdefault takes (key, default) positionally; the
        # original ``kwargs.setdefault(type=as_bool)`` raised
        # ``TypeError: setdefault() takes no keyword arguments`` on every use.
        kwargs.setdefault('type', as_bool)
        super(envbool, self).__init__(*args, **kwargs)
def apply(name, to):
    '''
    Apply settings to ``to``, which is expected to be globals().

    ``name`` may be a settings class, the name of a class already present in
    ``to``, or a dotted import path to a settings class.  Place at the end of
    settings.py / settings/__init__.py to apply a given settings class.
    '''
    if not isinstance(name, six.string_types):
        target = name
    elif '.' in name:
        module_path, _, attr = name.rpartition('.')
        target = getattr(importlib.import_module(module_path), attr)
    else:
        target = to.get(name)

    if target is None:
        raise ValueError('Could not find settings class: %r', name)

    settings = target()

    def resolved(value):
        # Callable settings are invoked; plain values pass through untouched.
        return value() if callable(value) else value

    # Only UPPER_CASE attributes are treated as settings.
    to.update({
        attr: resolved(getattr(settings, attr))
        for attr in dir(settings)
        if attr == attr.upper()
    })
from django import VERSION
base = importlib.import_module('cbs.base.django{}{}'.format(*VERSION[:2]))
BaseSettings = getattr(base, 'Base{}{}Settings'.format(*VERSION[:2]))
| Python | 0.00027 |
115d17320e9fcd3eab4e60f222f382a7d551948b | Add unittests for magnumclient.v1.client module | magnumclient/tests/v1/test_client.py | magnumclient/tests/v1/test_client.py | # Copyright (c) 2015 Thales Services SAS
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from magnumclient.v1 import client
class ClientTest(testtools.TestCase):
    """Unit tests for magnumclient.v1.client.Client construction paths."""

    # Canned keystone service catalog: the "container" service endpoint that
    # get_keystone_client is expected to discover.
    ENDPOINTS = {'container': [{'publicURL': 'http://myurl/'}]}

    @mock.patch('magnumclient.common.httpclient.HTTPClient')
    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_with_token_and_url(self, keystone_client, http_client):
        # Token + explicit magnum URL: keystone must not be contacted.
        client.Client(input_auth_token='mytoken', magnum_url='http://myurl/')
        self.assertFalse(keystone_client.called)
        http_client.assert_called_once_with(
            'http://myurl/', token='mytoken', auth_ref=None)

    @mock.patch('magnumclient.common.httpclient.HTTPClient')
    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_with_token(self, keystone_client, http_client):
        # Token but no URL: the endpoint is looked up in the service catalog.
        mocked = mock.Mock()
        mocked.service_catalog.get_endpoints.return_value = self.ENDPOINTS
        keystone_client.return_value = mocked
        client.Client(input_auth_token='mytoken', auth_url='authurl')
        keystone_client.assert_called_once_with(
            token='mytoken', username=None, api_key=None,
            project_name=None, project_id=None,
            auth_url='authurl')
        http_client.assert_called_once_with(
            'http://myurl/', token='mytoken', auth_ref=None)

    @mock.patch('magnumclient.common.httpclient.HTTPClient')
    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_with_user(self, keystone_client, http_client):
        # Username/password: keystone authenticates and supplies the token.
        mocked = mock.Mock()
        mocked.auth_token = 'mytoken'
        mocked.service_catalog.get_endpoints.return_value = self.ENDPOINTS
        keystone_client.return_value = mocked
        client.Client(username='user', api_key='pass', project_name='prj',
                      auth_url='authurl')
        keystone_client.assert_called_once_with(
            username='user', api_key='pass',
            project_name='prj', project_id=None,
            auth_url='authurl')
        http_client.assert_called_once_with(
            'http://myurl/', token='mytoken', auth_ref=None)

    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_unauthorized(self, keystone_client):
        # Keystone returning no token must surface as RuntimeError.
        mocked = mock.Mock()
        mocked.auth_token = None
        keystone_client.return_value = mocked
        self.assertRaises(
            RuntimeError, client.Client,
            username='user', api_key='pass', project_name='prj',
            auth_url='authurl')

    def _test_get_keystone_client(self, auth_url, keystone_client):
        # Helper: call get_keystone_client on an instance whose __init__ is
        # disabled, then verify the version-matched keystone constructor ran.
        class FakeClient(client.Client):
            def __init__(self):
                # Disable parent __init__
                pass
        FakeClient().get_keystone_client(
            username='user', api_key='pass', project_name='prj',
            auth_url=auth_url)
        self.assertTrue(keystone_client.called)

    @mock.patch('keystoneclient.v2_0.client.Client')
    def test_get_keystone_client_v2(self, keystone_client):
        self._test_get_keystone_client(
            'http://authhost/v2.0', keystone_client)

    @mock.patch('keystoneclient.v3.client.Client')
    def test_get_keystone_client_v3(self, keystone_client):
        self._test_get_keystone_client(
            'http://authhost/v3', keystone_client)

    def test_get_keystone_client_no_url(self):
        # No auth_url at all is a configuration error.
        self.assertRaises(RuntimeError,
                          self._test_get_keystone_client,
                          None, None)
| Python | 0.000001 | |
a59f86ea4905534237f9a1e055bce6a3a3d5fb81 | add migration so edit_messaging is set to True when the role has edit_data set to True | corehq/apps/users/migrations/0037_add_edit_messaging_permission.py | corehq/apps/users/migrations/0037_add_edit_messaging_permission.py |
from django.db import migrations
from corehq.apps.users.models_role import SQLPermission, UserRole
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def migrate_edit_migrations_permissions(apps, schema_editor):
    # Ensure the 'edit_messaging' permission exists, then grant it
    # (allow_all=True) to every role that already holds 'edit_data'.
    permission, created = SQLPermission.objects.get_or_create(value='edit_messaging')
    edit_data_permission = SQLPermission.objects.get(value='edit_data')
    role_ids_with_edit_data = set(UserRole.objects.filter(rolepermission__permission_fk_id=edit_data_permission.id)
                                  .values_list("id", flat=True))
    for role in UserRole.objects.filter(id__in=role_ids_with_edit_data):
        # get_or_create keeps the migration idempotent on re-runs.
        role.rolepermission_set.get_or_create(permission_fk=permission, defaults={"allow_all": True})
class Migration(migrations.Migration):

    dependencies = [
        ('users', '0036_reset_user_history_records'),
    ]

    operations = [
        # Reverse is a no-op: the forward step only adds permission grants.
        migrations.RunPython(migrate_edit_migrations_permissions, migrations.RunPython.noop)
    ]
| Python | 0.012809 | |
a824039003abd693ff568d753c60bb6025bfd868 | Add cleanup_slaves.py script. | scripts/tools/cleanup_slaves.py | scripts/tools/cleanup_slaves.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Removes checkouts from try slaves."""
import os
import subprocess
import sys
ROOT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..')
def parse_master(master):
    # Import the masters' fake test environment, exec the given master's
    # master.cfg in a fresh namespace, and return its BuildmasterConfig `c`.
    # NOTE: mutates global state (sys.path and the process cwd).
    sys.path.append(os.path.join(ROOT_DIR, 'scripts', 'master', 'unittests'))
    import test_env  # pylint: disable=F0401,W0612
    masterpath = os.path.join(ROOT_DIR, 'masters', master)
    os.chdir(masterpath)
    variables = {}
    master = os.path.join(masterpath, 'master.cfg')
    execfile(master, variables)
    return variables['c']
def main():
    """It starts a fake in-process buildbot master just enough to parse
    master.cfg.

    Then it queries all the builders and all the slaves to determine the current
    configuration and process accordingly.
    """
    c = parse_master('master.tryserver.chromium')

    # Create a mapping of slavebuilddir with each slaves connected to it.
    slavebuilddirs = {}
    # Slaves per OS
    all_slaves = {}
    for builder in c['builders']:
        # OS is the prefix of the builder name; cros/android build on linux.
        builder_os = builder['name'].split('_', 1)[0]
        if builder_os in ('cros', 'android'):
            builder_os = 'linux'
        slavenames = set(builder['slavenames'])
        all_slaves.setdefault(builder_os, set())
        all_slaves[builder_os] |= slavenames
        slavebuilddir = builder.get('slavebuilddir', builder['name'])
        slavebuilddirs.setdefault(builder_os, {})
        slavebuilddirs[builder_os].setdefault(slavebuilddir, set())
        slavebuilddirs[builder_os][slavebuilddir] |= slavenames

    # Queue of commands to run, per slave: a slave should delete every build
    # dir of its OS that it is *not* currently attached to.
    queue = {}
    for builder_os, slavebuilddirs in slavebuilddirs.iteritems():
        os_slaves = all_slaves[builder_os]
        for slavebuilddir, slaves in slavebuilddirs.iteritems():
            for slave in os_slaves - slaves:
                queue.setdefault((builder_os, slave), []).append(slavebuilddir)
    print 'Out of %d slaves, %d will be cleaned' % (len(c['slaves']), len(queue))

    # Build one ssh removal command per slave, batching all its directories.
    commands = []
    for key in sorted(queue):
        slave_os, slavename = key
        dirs = queue[key]
        if slave_os == 'win':
            cmd = 'cmd.exe /c rd /q %s' % ' '.join(
                'e:\\b\\build\\slave\\%s' % s for s in dirs)
        else:
            cmd = 'rm -rf %s' % ' '.join('/b/build/slave/%s' % s for s in dirs)
        commands.append(('ssh', slavename, cmd))

    # TODO(maruel): Use pssh.
    failed = []
    for command in commands:
        if subprocess.call(command):
            failed.append(command[1])
    if failed:
        print 'These slaves failed:'
        for i in failed:
            print ' %s' % i
    return 0
if __name__ == '__main__':
sys.exit(main())
| Python | 0.000008 | |
72467acd590ae5a3494e5059ce2ba99cf656baaa | Add IsAdminOrReadOnly permission class | registries/permissions.py | registries/permissions.py | from rest_framework.permissions import IsAdminUser, SAFE_METHODS
class IsAdminOrReadOnly(IsAdminUser):
    """
    Allows read-only access to all users (including anonymous users) and write access to admin users only
    """

    def has_permission(self, request, view):
        # Admins get every method; everyone else only the safe (read-only)
        # HTTP verbs (GET, HEAD, OPTIONS).
        is_admin = super().has_permission(request, view)
        return is_admin or request.method in SAFE_METHODS
| Python | 0.000002 | |
d34b2a13b454ca2c08bd5e8bc3b38d80fb5367c6 | add initial mockup of curses UI | pyfs/ui.py | pyfs/ui.py | import curses
import os
import sys
import pyfs
class SimplePager(object):
    """Curses-based fuzzy-find UI; run() returns the selected match."""

    def __init__(self):
        # Redirect stdout to stderr so curses can draw on the terminal while
        # keeping a dup of the real stdout fd to restore (and print the
        # selection to) later.  Reads come from /dev/tty so the program still
        # works when fd 0 is a pipe.
        self._old_stdout = sys.__stdout__
        self._old_stdout_fd = os.dup(sys.stdout.fileno())
        os.dup2(sys.stderr.fileno(), sys.stdout.fileno())
        tty = open('/dev/tty')
        os.dup2(tty.fileno(), 0)
        self._scr = curses.initscr()
        curses.noecho()
        curses.cbreak()
        curses.raw()
        self._scr.keypad(1)
        self._config = {
            'default': 'find ./'
        }

    def cleanup(self):
        # Undo the curses terminal modes and restore the saved stdout.
        self._scr.keypad(0)
        curses.nocbreak()
        curses.echo()
        curses.endwin()
        os.dup2(self._old_stdout_fd, sys.stdout.fileno())
        sys.stdout = self._old_stdout

    def run(self):
        # Scan for candidate files, then incrementally filter them with a
        # fuzzy matcher as the user types; Enter accepts the top match.
        scanner = pyfs.Scanner(self._config)
        scorer = pyfs.WeightedDistanceScore()
        self._scr.addstr("Scanning ...")
        self._scr.refresh()
        files = scanner.scan()
        # Reserve one line (status/overflow) below the match list.
        max_y, _ = self._scr.getmaxyx()
        max_y -= 1
        self._scr.clear()
        for line, match in enumerate(files[:max_y]):
            self._scr.addstr(line, 0, match)
        self._scr.refresh()
        fm = pyfs.FuzzyMatch(files=files, scorer=scorer)
        search = ''
        while True:
            c = self._scr.getch()
            if c in (curses.KEY_ENTER, ord('\n')):
                break
            elif c in (curses.KEY_DC, curses.KEY_BACKSPACE):
                if len(search):
                    search = search[:-1]
            else:
                # NOTE(review): chr(c) assumes a printable key; special keys
                # other than the ones handled above will be appended raw.
                search += chr(c)
            fm.update_scores(search)
            self._scr.clear()
            for line, match in enumerate(fm.top_matches(max_y)):
                self._scr.addstr(line, 0, match)
            self._scr.refresh()
        self._scr.refresh()
        self.cleanup()
        return fm.top_matches(1)[0]
def main():
    # Run the pager UI and write the selected path to the restored stdout.
    ui = SimplePager()
    result = ui.run()
    sys.stdout.write(result.strip())
if __name__ == '__main__':
main()
| Python | 0.000001 | |
961fe99941860665d3c5f5782540d996eecdce94 | Prepare experimental data. | qm/expt.py | qm/expt.py | """ experimental data """
from collections import defaultdict
import logging
import pickle
from urllib.request import urlopen
import numpy as np
import yaml
from . import cachedir, systems
class HEPData:
    """
    Interface to a HEPData yaml file.
    """
    def __init__(self, inspire_rec, table, version=1):
        # Lazily-built centrality cache (see cent()).
        self._cent = None
        # Downloaded tables are cached on disk, keyed by record and table.
        cachefile = (
            cachedir / 'hepdata' /
            'ins{}_table{}.pkl'.format(inspire_rec, table)
        )
        logging.debug('loading hepdata record %s table %s', inspire_rec, table)
        if cachefile.exists():
            logging.debug('reading from cache')
            with cachefile.open('rb') as f:
                self.data = pickle.load(f)
        else:
            logging.debug('not found in cache, downloading from hepdata.net')
            cachefile.parent.mkdir(exist_ok=True)
            # NOTE(review): yaml.load without an explicit Loader runs the
            # full loader on remote data -- consider yaml.safe_load.
            with cachefile.open('wb') as f, urlopen(
                'https://hepdata.net/download/table/'
                'ins{}/Table{}/{}/yaml'.format(inspire_rec, table, version)
            ) as u:
                self.data = yaml.load(u)
                pickle.dump(self.data, f, protocol=pickle.HIGHEST_PROTOCOL)

    def x(self, name, case=True):
        """
        Get an independent variable ("x" data) with the given name.

        Returns None when nothing matches; case=False lowercases the header
        name before comparing.
        """
        for x in self.data['independent_variables']:
            x_name = x['header']['name']
            if (x_name if case else x_name.lower()) == name:
                return x['values']

    def y(self, name=None, **quals):
        """
        Get a dependent variable ("y" data) with the given name and qualifiers.

        Returns the first match (or None); name=None matches any name.
        """
        for y in self.data['dependent_variables']:
            if name is None or y['header']['name'] == name:
                y_quals = {q['name']: q['value'] for q in y['qualifiers']}
                if all(y_quals[k] == v for k, v in quals.items()):
                    return y['values']

    def cent(self):
        """
        Return a dict containing the centrality bins as a list of (low, high)
        tuples and the midpoints (x values) as a 1D np.array.
        """
        if self._cent is None:
            cent = [
                tuple(v[k] for k in ['low', 'high'])
                for v in self.x('centrality', case=False)
            ]
            self._cent = dict(
                cent=cent,
                x=np.array([(a + b)/2 for a, b in cent])
            )
        return self._cent

    def dataset(self, name=None, **quals):
        """
        Return a dict containing y values and errors along with centrality
        data. Arguments are passed directly to self.y().
        """
        y = []
        yerr = defaultdict(list)
        for v in self.y(name, **quals):
            y.append(v['value'])
            for e in v['errors']:
                # Errors with no label are accumulated under 'sum'.
                yerr[e.get('label', 'sum')].append(e['symerror'])
        return dict(
            y=np.array(y),
            yerr={k: np.array(v) for k, v in yerr.items()},
            **self.cent()
        )
def get_all_data():
    # Assemble the experimental datasets for every collision system:
    # {system: {observable: dataset-or-{subkey: dataset}}}.
    data = {s: {} for s in systems}

    # PbPb2760 and PbPb5020 dNch/deta
    for system, args, name in [
            ('PbPb2760', (880049, 1), 'D(N)/DETARAP'),
            ('PbPb5020', (1410589, 2),
             r'$\mathrm{d}N_\mathrm{ch}/\mathrm{d}\eta$'),
    ]:
        data[system]['dNch_deta'] = HEPData(*args).dataset(name)

    # PbPb2760 identified dN/dy and mean pT: particle/antiparticle datasets
    # are combined (summed yields, averaged mean pT).
    system = 'PbPb2760'
    for obs, table, combine_func in [
            ('dN_dy', 31, np.sum),
            ('mean_pT', 32, np.mean),
    ]:
        data[system][obs] = {}
        d = HEPData(1222333, table)
        for key, re_products in [
                ('pion', ['PI+', 'PI-']),
                ('kaon', ['K+', 'K-']),
                ('proton', ['P', 'PBAR']),
        ]:
            dsets = [
                d.dataset(RE='PB PB --> {} X'.format(i))
                for i in re_products
            ]
            data[system][obs][key] = dict(
                y=combine_func([d['y'] for d in dsets], axis=0),
                yerr={
                    e: combine_func([d['yerr'][e] for d in dsets], axis=0)
                    for e in dsets[0]['yerr']
                },
                **d.cent()
            )

    # PbPb2760 and PbPb5020 flows: v2, v3, v4 from the listed tables.
    for system, tables in [
            ('PbPb5020', [1, 2, 2]),
            ('PbPb2760', [3, 4, 4]),
    ]:
        data[system]['vn'] = {}
        for n, t in enumerate(tables, start=2):
            data[system]['vn'][n] = HEPData(1419244, t).dataset(
                'V{}{{2, |DELTAETA|>1}}'.format(n)
            )

    return data
data = get_all_data()
def print_data(d, indent=0):
    """
    Pretty print the nested data dict.
    """
    pad = ' ' * indent
    for key in sorted(d):
        value = d[key]
        label = pad + str(key)
        if isinstance(value, dict):
            # Nested dict: print the heading and recurse one level deeper.
            print(label)
            print_data(value, indent + 1)
            continue
        if label.endswith('cent'):
            # Centrality bins: render as (low, high) tuples, collapsing
            # integral floats to plain ints.
            value = ' '.join(
                str(tuple(int(j) if j.is_integer() else j for j in i))
                for i in value
            )
        elif isinstance(value, np.ndarray):
            value = str(value).replace('\n', '')
        print(label, '=', value)
if __name__ == '__main__':
print_data(data)
| Python | 0 | |
01b9d4a491e2d732e9684d0782dcbf38df5eeec9 | Add adapters.py to new channelworm directory | channelworm/adapters.py | channelworm/adapters.py | # configure django to use default settings
# note that this can also be done using an environment variable
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Configure Django before any model imports: keep the existing settings if
# they are already configured, otherwise fall back to the web_app defaults.
if hasattr(settings, 'DEBUG'):
    # settings are configured already
    pass
else:
    # load default settings if they're not set
    from web_app import settings as defaults
    settings.configure(default_settings=defaults, DEBUG=True)
import ion_channel.models as C
import PyOpenWorm as P
from django.forms.models import model_to_dict
class PatchClampAdapter(object):
    """Map a channelworm model to a pyopenworm model"""

    def __init__(self, cw_obj):
        # initialize PyOpenWorm connection so we can access its API
        P.connect()
        self.channelworm_object = cw_obj
        cw_dict = model_to_dict(self.channelworm_object)
        experiment_id = cw_dict.pop('experiment')
        # Popped so it does not end up among the conditions; otherwise unused.
        patch_clamp_id = cw_dict.pop('id')
        self.pyopenworm_object = P.Experiment()
        # get the CW model's experiment
        cw_evidence = C.Experiment.objects.get(id=experiment_id)
        # make a PyOW evidence object with it
        pow_evidence = P.Evidence(doi=cw_evidence.doi)
        # add it to the PyOW experiment model
        self.pyopenworm_object.reference(pow_evidence)
        # Every remaining model field becomes an experiment condition.
        for key, value in cw_dict.iteritems():
            self.pyopenworm_object.conditions.set(key, value)
        # we no longer need the PyOW API so we can kill the connection
        P.disconnect()

    def get_pow(self):
        # Accessor for the constructed PyOpenWorm experiment.
        return self.pyopenworm_object

    def get_cw(self):
        # Accessor for the original ChannelWorm model instance.
        return self.channelworm_object
| Python | 0 | |
f4689709f55a5e3209de7221853cb35a36699dcd | add file | check_sphinx.py | check_sphinx.py | import py
import subprocess
def test_linkcheck(tmpdir):
    # Run Sphinx's linkcheck builder into a temp dir; -W makes warnings fatal
    # so check_call fails the test on any broken link.
    doctrees = tmpdir.join("_build/doctrees")
    htmldir = tmpdir.join("_build/html")
    subprocess.check_call(["sphinx-build", "-W", "-blinkcheck", "-d",
                           str(doctrees), "source", str(htmldir)])
def test_build_docs(tmpdir):
    # Build the HTML docs; -n (nitpicky) and -W make warnings fatal so
    # check_call fails the test on any build problem.
    doctrees = tmpdir.join("_build/doctrees")
    htmldir = tmpdir.join("_build/html")
    subprocess.check_call([ "sphinx-build", "-n", "-W", "-bhtml", "-d",
                           str(doctrees), "source", str(htmldir)])
| Python | 0.000001 | |
45a1efa21162a5f1c39d8255d13d2bbca8f6c0ca | Create 7_ten_row_abacus.py | 7_ten_row_abacus.py | 7_ten_row_abacus.py | #########################################################################
# 10-row School abacus
# by
# Michael H
#########################################################################
# Description partially extracted from from wikipedia
#
# Around the world, abaci have been used in pre-schools and elementary
#
# In Western countries, a bead frame similar to the Russian abacus but
# with straight wires and a vertical frame has been common (see image).
# Helps schools as an aid in teaching the numeral system and arithmetic
#
# |00000***** | row factor 1000000000
# |00000***** | row factor 100000000
# |00000***** | row factor 10000000
# |00000***** | row factor 1000000
# |00000***** | row factor 100000
# |00000***** | row factor 10000
# |00000***** | row factor 1000
# |00000**** *| row factor 100 * 1
# |00000*** **| row factor 10 * 2
# |00000** ***| row factor 1 * 3
# -----------
# Sum 123
#
# Each row represents a different row factor, starting with x1 at the
# bottom, ascending up to x1000000000 at the top row.
######################################################################
# TASK:
# Define a procedure print_abacus(integer) that takes a positive integer and prints a visual representation (image) of an abacus setup for a given positive integer value.
# 任务:
# 定义一个函数print_abacus(整数),向它传入一个正整数,打印出与该给定值相对应算盘的图形。
# Ranking
# 1 STAR: solved the problem!
# 2 STARS: 6 < lines <= 9
# 3 STARS: 3 < lines <= 6
# 4 STARS: 0 < lines <= 3
# 评分
# 1 星: solved the problem!
# 2 星: 6 < 代码行数 <= 9
# 3 星: 3 < 代码行数 <= 6
# 4 星: 0 < 代码行数 <= 3
def print_abacus(value):
    # Template for one row: frame, five '0' beads, five '*' beads, frame.
    #row_list = ["|", "0", "0", "0", "0", "0", "*", "*", "*", "*", "*", " ", " ", " ", "|"]
    row_list_const = ["|", "0", "0", "0", "0", "0", "*", "*", "*", "*", "*", "|"]
    value_next = value
    # Rows from the 10**9 factor (top) down to 10**0 (bottom).
    for i in range(9, -1, -1):
        # Python 2 integer division: the digit displayed on this row.
        index_0 = value_next / (10 ** i)
        # Insert a 3-space gap so exactly `digit` beads sit right of the gap.
        index_1 = 0 - (index_0 + 1)
        row_list_let = row_list_const[:]
        row_list_let.insert(index_1, (" " * 3))
        row_str = "".join(row_list_let)
        # value_next = value_next % (10 ** i)
        value_next %= (10 ** i)
        print row_str
| Python | 0.998716 | |
9207041afb78f8d36442b7ee19b95055ebbc99cd | add test forms | app/tests/test_form.py | app/tests/test_form.py | from django.test import TestCase
from app.forms import FormAppOne
class TestForm(TestCase):
    """Validation behaviour of FormAppOne (name and description fields)."""

    def test_invalid_name_form(self):
        # A numeric name must fail the form's text-only validation.
        form = FormAppOne({'name': '1234', 'description': 'validate name'})
        self.assertFalse(form.is_valid())
        self.assertEquals(form.errors, {'name': [u'Name must be only text']})

    def test_invalid_description_form(self):
        # Omitting description triggers the required-field error.
        form = FormAppOne({'name': 'asd'})
        self.assertFalse(form.is_valid())
        self.assertEquals(form.errors, {'description':
                                        [u'This field is required.']})

    def test_required_fields(self):
        # Empty payload: both fields are reported as required.
        form = FormAppOne({})
        self.assertFalse(form.is_valid())
        self.assertEquals(form.errors, {'name': [u'This field is required.'],
                                        'description': [u'This field is required.']})

    def test_valid_form(self):
        # A fully-populated, text-only payload validates cleanly.
        form = FormAppOne({'name': 'valder', 'description': 'validate name'})
        self.assertTrue(form.is_valid())
| Python | 0 | |
a460b73861d406b14519b3e391190d1b8d7e57a9 | Add maximal margin classifier. | max_margin_classifier.py | max_margin_classifier.py | import numpy as np
import matplotlib.pylab as plt
from sklearn.svm import SVC
# Toy data set: seven 2-D points in two linearly separable classes.
X = np.array([[3, 4], [2, 2], [4, 4], [1, 4], [2, 1], [4, 3], [4, 1]])
y = np.array(['Red', 'Red', 'Red', 'Red', 'Blue', 'Blue', 'Blue'])
# A very large C approximates a hard-margin (maximal margin) classifier.
linear_svm = SVC(kernel = 'linear', C = 2 ** 15)
linear_svm.fit(X, y)
## w0 * X_1 + w1 * X_2 + b = 0 <=> X_2 = -w0 / w1 * X_1 - b / w1
w = linear_svm.coef_[0]
# Distance from the boundary to each gutter is 1 / ||w||.
print('Margin: %s'%(1.0 / np.linalg.norm(w)))
b = linear_svm.intercept_
slope = -w[0] / w[1]
## points in the separating line
xx = np.linspace(np.amin(X[:, 0]), np.amax(X[:, 0]))
yy = slope * xx - b / w[1]
## points in the two gutters (offset +/- 1/w1 vertically from the boundary)
yy_top = yy + 1.0 / w[1]
yy_bottom = yy - 1.0 / w[1]
## canvas
fig, ax = plt.subplots(1, 1)
ax.set_title('Maximal margin classifier')
# draw points, colored by class label
ax.scatter(X[:, 0], X[:, 1], c = y)
# draw separating line
ax.plot(xx, yy, 'k-')
# draw gutters
ax.plot(xx, yy_top, 'k--')
ax.plot(xx, yy_bottom, 'k--')
# circle the support vectors
ax.scatter(linear_svm.support_vectors_[:, 0], linear_svm.support_vectors_[:, 1],
           s = 100, facecolors = 'none')
# set labels
ax.set_xlabel('X_1')
ax.set_ylabel('X_2')
plt.show() | Python | 0.000007 | |
2e821ab48542c89ac41ebc17036bddc164506a22 | Backup of some unused code | combine_data/cartesianProductOfIDs.py | combine_data/cartesianProductOfIDs.py | import argparse
import itertools
if __name__ == '__main__':
    # Read two files of integer IDs, deduplicate and sort them, then write
    # every cross pair as "min<TAB>max" lines.
    parser = argparse.ArgumentParser(description='Generate the cartesian product of two ID files')
    parser.add_argument('--idFileA',required=True,type=str,help='First file of IDs')
    parser.add_argument('--idFileB',required=True,type=str,help='Second file of IDS')
    parser.add_argument('--outFile',required=True,type=str,help='Output file')
    args = parser.parse_args()
    with open(args.idFileA) as f:
        idsA = [ int(line.strip()) for line in f ]
    with open(args.idFileB) as f:
        idsB = [ int(line.strip()) for line in f ]
    # deduplicate and order each list before pairing
    idsA = sorted(list(set(idsA)))
    idsB = sorted(list(set(idsB)))
    with open(args.outFile,'w') as outF:
        for a,b in itertools.product(idsA,idsB):
            # normalize pair order so (a,b) and (b,a) collapse to one form
            outF.write("%d\t%d\n" % (min(a,b),max(a,b)))
    # Fix: `print "..."` was Python 2 statement syntax; the call form
    # behaves identically under both Python 2 and 3.
    print("Processing complete.")
| Python | 0.000001 | |
178dff5f0af375f4f49416aeb41b5d7a718d69e8 | Add python script to demystify compiler output | scripts/demystify.py | scripts/demystify.py | #!/usr/bin/env python
# This script demystifies C++ compiler output for CAF by
# replacing cryptic `typed_mpi<...>` templates with
# `replies_to<...>::with<...>` and `atom_constant<...>`
# with human-readable representation of the actual atom.
import sys
# Maps a 6-bit code point (0-63) to its ASCII character: space, digits,
# upper-case letters, underscore, lower-case letters (64 entries total).
DECODING_TABLE = ' 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'
# Prefix of the CAF atom-constant template as it appears in compiler output.
ATOM_CONSTANT_SUFFIX = "caf::atom_constant<"
# `pos` points to first character after '<':
# template_name<...>
#              ^
# and returns the position of the closing '>'
def end_of_template(x, pos):
    """Return the index in *x* of the '>' matching the '<' just before *pos*.

    Tracks nesting depth so nested templates are skipped.
    Fix: the original scanned the module-global ``line`` instead of the
    *x* argument, which only worked by accident when ``x is line``.
    """
    open_templates = 1
    while open_templates > 0:
        if x[pos] == '<':
            open_templates += 1
        elif x[pos] == '>':
            open_templates -= 1
        pos += 1
    # exclude final '>'
    return pos - 1
def next_element(x, pos, last):
    """Return the index of the next top-level ',' in x[pos:last], or *last*."""
    # scan for ',' that isn't inside <...>
    while pos < last and x[pos] != ',':
        if x[pos] == '<':
            # jump over the whole nested <...> template
            pos = end_of_template(x, pos + 1)
        else:
            pos += 1
    return pos
def atom_read(x):
    """Decode a 64-bit CAF atom value into its (up to 10 character) string.

    The value packs ten 6-bit characters.  Decoding is active from the
    start when the top nibble is the 0xF marker; otherwise it begins
    after the first 6-bit group equal to 0xF.
    """
    pieces = []
    reading = ((x >> 60) & 0xF) == 0xF
    for shift in range(54, -1, -6):
        group = (x >> shift) & 0x3F
        if reading:
            pieces.append(DECODING_TABLE[group])
        elif group == 0xF:
            reading = True
    return "".join(pieces)
def decompose_type_list(x, first, last):
    """Split x[first:last] into its elements, separated by top-level ", "."""
    elements = []
    start = first
    while start != last:
        stop = next_element(x, start, last)
        elements.append(x[start:stop])
        # the separator is ", " (comma plus space): step over both characters
        start = min(stop + 2, last)
    return elements
def stringify(x):
    """Render one type name: atom constants become 'atomname', others pass through."""
    if x.startswith(ATOM_CONSTANT_SUFFIX):
        # the numeric template argument sits between the '<' and the final '>'
        begin = len(ATOM_CONSTANT_SUFFIX)
        end = len(x) - 1
        res = "'"
        res += atom_read(int(x[begin:end]))
        res += "'"
        return res
    return x
def stringify_list(xs):
    """Join the stringified entries of *xs* with ", ", trimming stray spaces."""
    return ", ".join(stringify(entry.strip(' ')) for entry in xs)
def decompose_typed_actor(x, first, last):
    """Render the typed_mpi<...> span x[first:last] as replies_to<ins>::with<outs>."""
    needle = "caf::detail::type_list<"
    # first type list -> input types
    j = x.find(needle, first) + len(needle)
    k = end_of_template(x, j)
    inputs = decompose_type_list(x, j, k)
    # second type list -> outputs
    j = x.find(needle, k) + len(needle)
    k = end_of_template(x, j)
    outputs = decompose_type_list(x, j, k)
    # replace all 'caf::atom_constant<...>' entries in inputs
    res = "replies_to<"
    res += stringify_list(inputs)
    res += ">::with<"
    res += stringify_list(outputs)
    res += ">"
    return res
# Filter loop: rewrite each compiler-output line from stdin to stdout.
for line in sys.stdin:
    # replace "std::__1" with "std::" (Clang libc++)
    line = line.replace("std::__1", "std::")
    needle = "caf::typed_mpi<"
    idx = line.find(needle)
    # rewrite every typed_mpi<...> occurrence in place, left to right
    while idx != -1:
        # find end of typed_actor<...>
        first = idx + len(needle)
        last = end_of_template(line, first)
        updated = decompose_typed_actor(line, first, last)
        prefix = line[:idx]
        suffix = line[last:]  # keeps the template's closing '>'
        line = prefix + updated + suffix
        idx = line.find(needle, idx + len(updated))
    # finally drop remaining caf:: qualifiers for readability
    sys.stdout.write(line.replace("caf::", ""))
| Python | 0.000002 | |
01e551bdfbe298cdbd30734a52305be25b65147a | add docker build.py | docker/build.py | docker/build.py | """
This script builds Docker images for various combinations of
parameters. Should be run from inside the git tree.
"""
import sys
from os import path
from argparse import ArgumentParser
from subprocess import run, PIPE, Popen
from time import time
import shutil
def parse_args(argv=None):
    """Parse command-line options for the Docker build script.

    argv: argument list WITHOUT the program name; defaults to
    sys.argv[1:].  Fix: the original passed the whole argv list as the
    ArgumentParser ``prog`` name and then ignored it when parsing.
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = ArgumentParser()
    parser.add_argument("--targets", type=lambda x: x.split(','),
                        default=['release', 'jupyter'],
                        help='Targets to build, delimited by commas.')
    parser.add_argument("--hardware", type=lambda x: x.split(','),
                        default=['cpu', 'gpu'],
                        help='Whether to build the CPU and/or GPU versions.')
    parser.add_argument("-v", "--verbose", action='store_true',
                        help='Show build output.')
    return parser.parse_args(argv)
def main():
    """Build every requested (target, hardware) image from a clean clone.

    Clones the current git tree into /tmp, runs ``docker build`` for each
    combination, streams or buffers the output, and removes the clone at
    the end.
    """
    args = parse_args()
    git_branch = run(["git", "rev-parse", "--abbrev-ref", "HEAD"],
                     capture_output=True, text=True, check=True).stdout.strip()
    git_commit = run(["git", "describe", "--always"],
                     capture_output=True, text=True, check=True).stdout.strip()
    # checkout a clean version to build from
    git_root = run(["git", "rev-parse", "--show-toplevel"],
                   capture_output=True, text=True, check=True).stdout.strip()
    build_dir = "/tmp/dnm_docker_build"
    if path.exists(build_dir):
        # avoid clobbering a previous build directory
        build_dir += '_'+str(int(time()))
    run(["git", "clone", git_root, build_dir], check=True)
    completed = []
    # run builds
    for target in args.targets:
        for hardware in args.hardware:
            dockerfile = f"Dockerfile-{hardware}"
            tag = "latest"
            if hardware == 'gpu':
                tag += '-cuda'
            if target == 'jupyter':
                tag += '-jupyter'
            cmd = [
                "docker", "build",
                "--build-arg", f"GIT_BRANCH={git_branch}",
                "--build-arg", f"GIT_COMMIT={git_commit}",
                "-f", f"docker/{dockerfile}",
                "--target", target,
                "-t", f"gdmeyer/dynamite:{tag}",
                "."
            ]
            print(f"Building '{tag}'...", end="")
            if args.verbose:
                print()
                print()
            build_output = ""
            prev_time = 0
            with Popen(cmd, cwd=build_dir, stdout=PIPE, bufsize=1, text=True) as sp:
                for line in sp.stdout:
                    if args.verbose:
                        print(line, end="")
                    else:
                        # quiet mode: buffer output, print a dot about once per second
                        build_output += line
                        if time() - prev_time > 1:
                            print('.', end="", flush=True)
                            prev_time = time()
            print()
            if sp.returncode != 0:
                print("Build failed!")
                if not args.verbose:
                    print("Output:")
                    print()
                    print(build_output)
                sys.exit()
            else:
                completed.append(tag)
    print("Removing build files...")
    if not build_dir.startswith("/tmp"):
        # something has gone horribly wrong
        print("not removing build files, not in /tmp")
    else:
        shutil.rmtree(build_dir)
    print("Successfully completed builds", ", ".join(completed))
if __name__ == '__main__':
    main()
| Python | 0.000003 | |
d3210b3d25a2eef7c4d066878d444b9b381243eb | add roulette | modules/ruletti.py | modules/ruletti.py | # -*- coding: ISO-8859-15 -*-
from twisted.internet import reactor
from core.Uusipuu import UusipuuModule
import random
class Module(UusipuuModule):
    """IRC Russian-roulette plugin: !ruletti kick-bans the caller on a loss."""
    def startup(self):
        # Holds pending delayed-unban calls so they can be tracked.
        self.scheduled['unban'] = []
    def cmd_ruletti(self, user, target, params):
        nick = user.split('!', 1)[0]
        # NOTE(review): 3 of 6 chambers "fire" -- a 50% chance.  A real
        # six-chamber roulette would be `== 0`; confirm the odds are intended.
        if random.choice(range(0, 6)) < 3:
            self.bot.mode(self.channel, True, 'b %s!*@*' % nick)
            self.bot.kick(self.channel, nick, 'naps!')
            self.log('%s - Nyt napsahti!' % nick)
            # lift the ban automatically after 5 seconds
            d = reactor.callLater(5, self.unban, nick)
            self.scheduled['unban'].append(d)
        else:
            self.chanmsg('%s: klik!' % nick)
            self.log('%s - Klik!' % nick)
    def unban(self, nick):
        # Remove the nick!*@* ban that was set when the "gun" fired.
        self.bot.mode(self.channel, False, 'b %s!*@*' % nick)
# vim: set et sw=4:
| Python | 0.000387 | |
432cbfc65ea1e6c1b9079915cce20769b88502fe | add wrapper script to run dbify module | scripts/run_dbify.py | scripts/run_dbify.py | import logging
import os
import sys
import argparse
import _mypath
from bripipetools import genlims
from bripipetools import dbify
def parse_input_args(parser=None):
    """Define and parse this script's command-line arguments.

    Accepts an existing ArgumentParser, or creates one when *parser* is
    None (the original default crashed with AttributeError).  Returns the
    parsed Namespace.
    """
    if parser is None:
        parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--import_path',
                        required=True,
                        default=None,
                        help=("path to flowcell run folder - e.g., "
                              "/mnt/genomics/Illumina/"
                              "150218_D00565_0081_BC5UF5ANXX/ - "
                              "or workflow batch file"))
    parser.add_argument('-d', '--debug',
                        action='store_true',
                        help=("Set logging level to debug"))
    # Parse exactly once: the original called parse_args() twice and
    # discarded the first result.
    return parser.parse_args()
def main(argv):
    """Entry point: parse args, configure logging, run the dbify import.

    NOTE(review): *argv* is accepted but unused -- parse_input_args()
    reads sys.argv directly; confirm whether argv should be forwarded.
    """
    parser = argparse.ArgumentParser()
    args = parse_input_args(parser)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)
    logger.info("importing data based on path {}"
                .format(args.import_path))
    # ImportManager decides whether the path is a flowcell run folder or a
    # workflow batch file.
    dbify.ImportManager(path=args.import_path, db=genlims.db).run()
if __name__ == "__main__":
    main(sys.argv[1:])
| Python | 0 | |
d5b3bce22aec3e84b59fad379859aa636f2d3f1a | Create ColorConvolution.py | ColorConvolution.py | ColorConvolution.py | import numpy
def ColorConvolution ( I, W ):
    '''
    Reconstructs a color image from the stain matrix "W" and the stain
    intensity images generated by ColorDeconvolution.
    *Inputs:
        I (rgbimage) - an RGB image of type unsigned char.
        W (matrix) - a 3x3 matrix containing the color vectors in columns.
                     For two stain images the third column is zero and will be
                     complemented using cross-product. Minimum two nonzero
                     columns required.
    *Outputs:
        RGB (rgbimage) - a reconstructed RGB image with values ranging from
                         [0, 255], suitable for display.
    *Related functions:
        ComplementStainMatrix, OpticalDensityFwd, OpticalDensityInv, ColorDeconvolution
    NOTE(review): OpticalDensityFwd / OpticalDensityInv are not defined or
    imported in this file -- they must come from the enclosing package.
    '''
    # flatten the m x n x 3 stain image into an (m*n) x 3 pixel matrix
    m = I.shape[0]
    n = I.shape[1]
    I = numpy.reshape(I, (m*n,3))
    # to optical density, convolve with the stain vectors, then invert back
    I = I.astype(dtype=numpy.float32)
    ODfwd = OpticalDensityFwd(I)
    ODdeconv = numpy.dot(ODfwd, numpy.transpose(W))
    ODinv = OpticalDensityInv(ODdeconv)
    # restore the image shape, clamp to display range, convert to 8-bit
    RGB = numpy.reshape(ODinv, (m,n,3))
    RGB[RGB > 255] = 255
    RGB = RGB.astype(numpy.uint8)
    return(RGB)
| Python | 0 | |
67d4bc38778632b482d9c372efb41104ecabedfa | add test cases for aggregate.py | test/test_aggregate.py | test/test_aggregate.py | from MrBam.bam import get_reads
from MrBam.aggregate import aggregate_reads
from helper import make_bam
from argparse import Namespace
from pysam import AlignmentFile
# NOTE(review): the read tuples appear to be (name, base, qual, pos, end,
# mate_pos, insert_size, <flag>, is_paired) -- confirm against MrBam.bam.
def test_aggregate_reads_1():
    "it should aggregate pairs"
    o = Namespace(verbos=False, qual=20)
    reads = (
        ("r1", 'A', 60, 2, 11, 4, 11, False, True),
        ("r1", 'A', 60, 4, 13, 2, -11, False, True),
        ("r2", 'C', 60, 2, 11, 4, 11, False, True),
        ("r2", 'C', 60, 4, 13, 2, -11, False, True)
    )
    unique_pairs, *_ = aggregate_reads(o, reads)
    assert len(unique_pairs) == 1
def test_aggregate_reads_2():
    "it should aggregate singles"
    o = Namespace(verbos=False, qual=20)
    reads = (
        ("r1", 'A', 60, 2, 11, 0, 0, False, False),
        ("r2", 'C', 60, 2, 11, 0, 0, False, False),
        ("r3", 'C', 60, 2, 11, 0, 0, True, False)
    )
    _, unique_single, *_ = aggregate_reads(o, reads)
    assert len(unique_single) == 2
def test_aggregate_reads_3():
    "it should ignore when 3+ reads share the same name"
    o = Namespace(verbos=False, qual=20)
    reads = (
        ("r1", 'A', 60, 2, 11, 2, 9, False, True),
        ("r1", 'C', 60, 2, 11, 2, -9, False, True),
        ("r1", 'C', 60, 2, 11, 2, 9, True, True),
        ("r2", 'C', 60, 2, 11, 0, 0, True, False)
    )
    unique_pairs, unique_single, _, nerror, *_ = aggregate_reads(o, reads)
    assert len(unique_pairs) == 0
    assert len(unique_single) == 1
    assert nerror == 3
def test_aggregate_reads_4():
    "it should ignore when base in overlap area inconsistent between two reads"
    o = Namespace(verbos=False, qual=20)
    reads = (
        ("r1", 'A', 60, 2, 11, 4, 11, False, True),
        ("r1", 'C', 60, 4, 13, 2, -11, False, True),
        ("r2", 'C', 60, 3, 12, 5, 11, False, True),
        ("r2", 'C', 60, 5, 14, 3, -11, False, True)
    )
    unique_pairs, unique_single, *_, ninconsis = aggregate_reads(o, reads)
    assert len(unique_pairs) == 1
    assert ninconsis == 2
def test_aggregate_reads_5(tmpdir):
    "work with reads returned by MrBam.bam"
    # Fix: ``tmpdir`` was referenced without being declared, which raised
    # NameError at runtime; it must be requested as a pytest fixture.
    o = Namespace(verbos=False, qual=20)
    make_bam(tmpdir.strpath, """
         123456789_123456789_12
    r1 +  ...........
    r1 -      ......*....
    r2 +   .........*.
    r2 -        .....*.......
    r3 +  ...........
    r3 -     ....*......
    r4 +  ...........
    r4 -  ...........
         123456789_123456789_12
    """)
    sam = AlignmentFile(tmpdir.join("test.bam").strpath)
    unique_pairs, unique_single, *_ = aggregate_reads(o, get_reads(o, sam, 'ref', '12'))
    assert len(unique_pairs) == 3
| Python | 0.000005 | |
8f718c536897711663051a613e7f50d564fb4cbc | Call repair as part of upgrade | src/sentry/management/commands/upgrade.py | src/sentry/management/commands/upgrade.py | """
sentry.management.commands.upgrade
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.core.management import call_command
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
    # Django management command: applies pending migrations, then repairs.
    help = 'Performs any pending database migrations and upgrades'
    option_list = BaseCommand.option_list + (
        make_option('--noinput',
            action='store_true',
            dest='noinput',
            default=False,
            help='Tells Django to NOT prompt the user for input of any kind.',
        ),
    )
    def handle(self, **options):
        # Apply schema changes first ...
        call_command(
            'syncdb',
            migrate=True,
            interactive=(not options['noinput']),
            traceback=options['traceback'],
            verbosity=options['verbosity'],
        )
        # ... then run data repairs so fixes land on the migrated schema.
        call_command(
            'repair',
            interactive=(not options['noinput']),
            traceback=options['traceback'],
            verbosity=options['verbosity'],
        )
| """
sentry.management.commands.upgrade
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.core.management import call_command
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
help = 'Performs any pending database migrations and upgrades'
option_list = BaseCommand.option_list + (
make_option('--noinput',
action='store_true',
dest='noinput',
default=False,
help='Tells Django to NOT prompt the user for input of any kind.',
),
)
def handle(self, **options):
call_command(
'syncdb',
migrate=True,
interactive=(not options['noinput']),
traceback=options['traceback'],
verbosity=options['verbosity'],
)
| Python | 0 |
6569d7e36693512fdaadfb22a5aaf6f11fe0e084 | migrate dataregistry repeater | corehq/motech/repeaters/management/commands/migrate_dataregistrycaseupdaterepeater.py | corehq/motech/repeaters/management/commands/migrate_dataregistrycaseupdaterepeater.py | from corehq.motech.repeaters.management.commands.migrate_caserepeater import Command as MigrateCaseRepeaters
from corehq.motech.repeaters.models import SQLDataRegistryCaseUpdateRepeater
class Command(MigrateCaseRepeaters):
    """Couch-to-SQL migration command for DataRegistryCaseUpdateRepeater."""
    @classmethod
    def couch_doc_type(cls):
        # Couch document type being migrated.
        return 'DataRegistryCaseUpdateRepeater'
    @classmethod
    def sql_class(cls):
        # Destination SQL model class.
        return SQLDataRegistryCaseUpdateRepeater
| Python | 0 | |
5aa5ac33d2b841fa1d9c707681a9d024168672c4 | Create cdbtabledef.py | cdbtabledef.py | cdbtabledef.py | """cdbtabledef.py
Developer: Noelle Todd
Last Updated: June 5, 2014
This module will create 4 tables for the client database, using the
sqlalchemy module, and the sqlite database. This module is still in
early testing stages, and as such, is subject to many changes, and
probably contains bugs.
"""
from sqlalchemy import Column, DateTime, String, Integer, ForeignKey, func
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Engine / session plumbing for the sqlite test database.
engine = create_engine('sqlite:///test_db.sqlite')
# NOTE(review): `session` here is a sessionmaker *factory*, not a session
# instance -- the name is misleading but kept for compatibility.
session = sessionmaker()
session.configure(bind=engine)
base = declarative_base()
class Household(base):
    """
    One row per household address; Person rows link here via HH_ID.
    """
    __tablename__ = 'household'
    id = Column(Integer, primary_key = True)
    street_address = Column(String)
    apt = Column(String)
    city = Column(String, default = 'Troy')
    state = Column(String, default = 'NY')
    # NOTE(review): the default is a *string* on an Integer column --
    # confirm the intended type.
    zip = Column(Integer, default = '12180')
    #contact_ID = Column(Integer, ForeignKey('person.id'))
    date_verified = Column(DateTime)
class Person(base):
    """
    One row per individual; belongs to a Household via HH_ID.
    """
    __tablename__ = 'person'
    id = Column(Integer, primary_key = True)
    first_name = Column(String)
    last_name = Column(String)
    DOB = Column(DateTime)
    age = Column(Integer)
    phone = Column(Integer)
    date_joined = Column(DateTime)
    HH_ID = Column(Integer, ForeignKey('household.id'))
    # back-reference: household.members lists every Person in the household
    household = relationship(Household, backref=backref('members',
                             uselist = True))
class Volunteer(base):
    """
    One row per volunteer who records visits.
    """
    __tablename__ = 'volunteer'
    id = Column(Integer, primary_key = True)
    first_name = Column(String)
    last_name = Column(String)
    phone = Column(Integer)
class Visit(base):
    """
    Visit history: links a person, their household, and the volunteer.
    """
    __tablename__ = 'visit'
    id = Column(Integer, primary_key = True)
    I_ID = Column(Integer, ForeignKey('person.id'))
    HH_ID = Column(Integer, ForeignKey('household.id'))
    Vol_ID = Column(Integer, ForeignKey('volunteer.id'))
    date = Column(DateTime, default = func.now())
# Create all four tables at import time.
base.metadata.create_all(engine)
| Python | 0 | |
4fd42a6fc600d0db60c074f53d299b4488f637e4 | Create pdf2txt.py | luowang/Data_Processing/pdf2txt.py | luowang/Data_Processing/pdf2txt.py | # -*- coding: utf-8 -*-
from pdfminer.pdfparser import PDFParser ### ´ÓÒ»¸öÎļþÖлñÈ¡Êý¾Ý
from pdfminer.pdfdocument import PDFDocument ### ±£´æ»ñÈ¡µÄÊý¾Ý£¬ºÍPDFParser ÊÇ»¥Ïà¹ØÁªµÄ
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfpage import PDFTextExtractionNotAllowed
from pdfminer.pdfinterp import PDFResourceManager
from pdfminer.pdfinterp import PDFPageInterpreter ### ´¦ÀíÒ³ÃæÄÚÈÝ
#from pdfminer.pdfdevice import PDFDevice
#from pdfminer.layout import *
from pdfminer.layout import LAParams
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LTTextBoxHorizontal
from pdfminer.converter import TextConverter
import os
def pdfTotxt1(pdf_path, txt_path):
    '''
    Extract text from a PDF by walking the page layout structure.
    pdf_path: path of the source PDF file to read
    txt_path: path of the text file the extracted text is appended to
    '''
    '''
    if os.path.exists(txt_path):
        return
    '''
    #content=''
    #import os
    #os.chdir(r'F:\test')
    fp = open(pdf_path, 'rb')
    # create a PDF parser for the file
    parser = PDFParser(fp)
    # create a PDF document object storing the document structure
    document = PDFDocument(parser)
    # check whether the document allows text extraction
    if not document.is_extractable:
        raise PDFTextExtractionNotAllowed
    else:
        # resource manager for shared resources
        rsrcmgr=PDFResourceManager()
        # layout-analysis parameters
        laparams=LAParams()
        # create a PDF device (page aggregator) object
        # device=PDFDevice(rsrcmgr)
        device=PDFPageAggregator(rsrcmgr,laparams=laparams)
        # create a PDF interpreter object
        interpreter=PDFPageInterpreter(rsrcmgr,device)
        # process each page
        for page in PDFPage.create_pages(document):
            interpreter.process_page(page)
            # receive the LTPage object for this page
            layout=device.get_result()
            for x in layout:
                #print type(x)
                if(isinstance(x, LTTextBoxHorizontal)):
                    #content += x.get_text().encode('utf-8')+'\n'
                    with open(txt_path,'a') as f:
                        f.write(x.get_text().encode('utf-8')+'\n')
                        f.close()
    print ('create %s successully' % pdf_path)
    fp.close()
    return
    #return content
def pdfTotxt2(pdf_path, txt_path) :
    '''
    Append the text of *pdf_path* to *txt_path* using pdfminer's
    TextConverter.  Python 2 code: relies on the `file` builtin and
    print statements.
    '''
    content =''
    # output file name; only a single document is handled here
    try:
        outfile = txt_path
        args =[pdf_path]
        debug = 0
        pagenos = set()
        password = ''
        maxpages = 0
        rotation = 0
        codec = 'utf-8' # output encoding
        caching = True
        imagewriter = None
        laparams = LAParams()
        #
        PDFResourceManager.debug = debug
        PDFPageInterpreter.debug = debug
        rsrcmgr = PDFResourceManager(caching=caching)
        outfp = file(outfile,'a')
        # PDF conversion device writing decoded text into outfp
        device = TextConverter(rsrcmgr, outfp, codec=codec, laparams=laparams, imagewriter=imagewriter)
        for fname in args:
            fp = file(fname,'rb')
            interpreter = PDFPageInterpreter(rsrcmgr, device)
            # process the content of every page in the document
            for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password, caching=caching, check_extractable=False) :
                page.rotate = (page.rotate+rotation) % 360
                interpreter.process_page(page)
            fp.close()
        device.close()
        outfp.close()
        print 'create successful:', pdf_path
    except Exception:
        print 'Error!!!:', pdf_path
        return
if __name__=='__main__':
    pdf_path='/home/luowang/financial_reports_data/attach/00001/2002/Annual/Report of the Chairman and the Managing Director.pdf'
    txt_path='/home/a.txt'
    # Fix: the script called the undefined name ``pdfTotxt`` (NameError);
    # the layout-based extractor defined above is ``pdfTotxt1``.
    pdfTotxt1(pdf_path, txt_path)
| Python | 0 | |
1172811d073e544d249aeba64f2b6828ee75bd5d | test geometry | tests/test_geometry.py | tests/test_geometry.py | import numpy as np
from numpy.testing import assert_allclose
from geonet.geometry import unit_vector, angle_between
def test_unit_vector():
    # An already-unit vector is returned unchanged.
    v1 = np.array([1, 0, 0])
    assert_allclose(unit_vector(v1), v1)
    # Any nonzero vector is scaled to unit length.
    v2 = np.array([1, 1, 0])
    u2 = unit_vector(v2)
    assert_allclose(np.linalg.norm(u2), 1.0)
def test_angle_between():
    v1 = np.array([1, 1])
    v2 = np.array([1, 0])
    v3 = np.array([0, 1])
    # a vector forms a zero angle with itself
    for v in (v1, v2, v3):
        assert_allclose(angle_between(v, v), 0.0, atol=1e-6)
    # the angle is symmetric in its arguments
    assert_allclose(angle_between(v1, v2), np.pi/4, atol=1e-6)
    assert_allclose(angle_between(v2, v1), np.pi/4, atol=1e-6)
    assert_allclose(angle_between(v2, v3), np.pi/2, atol=1e-6)
    assert_allclose(angle_between(v3, v2), np.pi/2, atol=1e-6)
| Python | 0.000002 | |
f5aab57e443e5b5a7c2507f10c0c6f608d677500 | Add simple unparser test | tests/test_unparser.py | tests/test_unparser.py | # -*- coding: utf-8 -*-
from pyrql.parser import parser
from pyrql.unparser import unparser
import pytest
@pytest.mark.parametrize('func', ['eq', 'lt', 'le', 'gt', 'ge', 'ne'])
def test_cmp_functions(func):
    """Each comparison operator unparses as `op(name,value)`."""
    parsed = {'name': func, 'args': ['a', 1]}
    assert unparser.unparse(parsed) == '%s(a,1)' % func
    # a tuple argument is rendered as a parenthesised list
    parsed = {'name': func, 'args': [('a', 'b', 'c'), 1]}
    assert unparser.unparse(parsed) == '%s((a,b,c),1)' % func
| Python | 0.00003 | |
0d623e0029dc7d7d6fb0bf9634904b23f2a11732 | Add files via upload | XMLAnalyze.py | XMLAnalyze.py | # Author: Andrew Sainz
#
# Purpose: XMLParser is designed to iterate through a collection of Post data collected from Stack Overflow
# forums. Data collected to analize the code tagged information to find the language of the code
# being utilized.
#
# How to use: To run from command line input "python XMLParser.py [XML file name].xml"
import xml.etree.ElementTree as ET
import sys
import re
from nltk.util import ngrams
def parseBodyForTagCode(body):
    """Return every '<code>...</code>' span found in *body*, tags included.

    e.g. ['<code>EXCEPT</code>', '<code>LEFT JOIN</code>'].  Returns None
    if matching raises AttributeError (mirrors the original contract).
    """
    try:
        # group(0) is the full non-greedy match, identical to slicing the
        # body with m.start()/m.end().
        return [m.group(0) for m in re.finditer('<code>(.+?)</code>', body)]
    except AttributeError:
        return None
# Known list tag fields
knownJava = []
knownC = []
knownCSharp = []
knownPython = []
# first CLI argument is the Stack Overflow posts XML dump
xmldoc = sys.argv[1]
tree = ET.parse(xmldoc)
root = tree.getroot()
# print (root.attrib)
myList = []
# for each row in the xml document gather body information
for row in root:
    # Body holds all comment information from post
    body = row.get('Body')
    rowId = row.get('Id')
    # Tags for comment post
    tags = row.get('Tags')
    # parse body to find code tags
    code = parseBodyForTagCode(body)
    # Encode list information about code into UTF8
    codeUni = repr([x.encode('UTF8') for x in code])
    # If code isn't present ignore
    if codeUni == '[]':
        continue
    # print (codeUni)
    if tags != None:
        # Assign all known code to list
        # NOTE(review): the substring tests ("java" in body, "C" in body,
        # "C#" in body) match anywhere in the text -- "C" in particular
        # fires on any capital C; confirm this looseness is intended.
        if ("<java>" in tags) or ("java" in body):
            knownJava.append(rowId+'`'+codeUni+'`'+tags)
        if ("<python>" in tags) or ("python" in body):
            knownPython.append(rowId+'`'+codeUni+'`'+tags)
        if ("<C>" in tags) or ("C" in body):
            knownC.append(rowId+'`'+codeUni+'`'+tags)
        if ("<C#>" in tags) or ("C#" in body):
            knownCSharp.append(rowId+'`'+codeUni+'`'+tags)
        myList.append(rowId+'`'+codeUni+'`'+tags)
    else:
        myList.append(rowId+'`'+codeUni)
# Ngram section
# print(myList)
############################################################################
for item in myList:
    allCodeTags = [item[m.start():m.end()] for m in re.finditer('<code>(.+?)</code>', item)]
    for code in allCodeTags:
        # strip the <code> markers before tokenizing
        cleanCode = re.sub('<code>|</code>','',code)
        # print (cleanCode)
        trigrams = ngrams(cleanCode.split(), 3)
        for grams in trigrams:
            print (grams)
# break | Python | 0 | |
5fb7d1912eda9d6381af3e0cfa7655ed2d6795f2 | Create Xclipboard.py | Xclipboard.py | Xclipboard.py | from tkinter import Tk
__all__=['copy','paste','clear']
__author__='Calvin(Martin)Adyezik adyezik@gmail.com'
__doc__="""simple Module to work with clipboard based on tkinter -Python 3"""
# NOTE(review): reassigning __name__ breaks `if __name__ == '__main__'`
# checks and import bookkeeping for this module -- confirm it is needed.
__name__='Xclipboard'
def copy(text):
    """copy text to clipboard """
    # Fix: if Tk() itself raised, `root` was never bound and the finally
    # clause crashed with NameError; guard the cleanup.
    root = None
    try:
        root = Tk()
        root.withdraw()
        root.clipboard_append(text)
    except Exception as e:
        print ('Error: ',e)
    finally:
        if root is not None:
            root.destroy()
def paste():
    """paste text from clipboad"""
    # Fix: guard cleanup so a failed Tk() construction does not raise
    # NameError from the finally clause.  Returns None on error.
    root = None
    try:
        root = Tk()
        root.withdraw()
        return root.clipboard_get()
    except Exception as e:
        print ('Error: ',e)
    finally:
        if root is not None:
            root.destroy()
def clear():
    """clear clipboard"""
    # Fix: guard cleanup so a failed Tk() construction does not raise
    # NameError from the finally clause.
    root = None
    try:
        root = Tk()
        root.withdraw()
        root.clipboard_clear()
    except Exception as e:
        print ('Error: ',e)
    finally:
        if root is not None:
            root.destroy()
| Python | 0.000002 | |
bbf73c8db9a2af114beb29766d0ca2e16818175b | fix 192: failure in test_disk on linux | test/_linux.py | test/_linux.py | #!/usr/bin/env python
#
# $Id$
#
import unittest
import subprocess
import sys
from test_psutil import sh
import psutil
class LinuxSpecificTestCase(unittest.TestCase):
    """Cross-checks psutil's Linux memory/disk numbers against free/df."""
    def test_cached_phymem(self):
        # test psutil.cached_phymem against "cached" column of free
        # command line utility
        p = subprocess.Popen("free", shell=1, stdout=subprocess.PIPE)
        output = p.communicate()[0].strip()
        if sys.version_info >= (3,):
            output = str(output, sys.stdout.encoding)
        free_cmem = int(output.split('\n')[1].split()[6])
        psutil_cmem = psutil.cached_phymem() / 1024
        self.assertEqual(free_cmem, psutil_cmem)
    def test_phymem_buffers(self):
        # test psutil.phymem_buffers against "buffers" column of free
        # command line utility
        p = subprocess.Popen("free", shell=1, stdout=subprocess.PIPE)
        output = p.communicate()[0].strip()
        if sys.version_info >= (3,):
            output = str(output, sys.stdout.encoding)
        free_cmem = int(output.split('\n')[1].split()[5])
        psutil_cmem = psutil.phymem_buffers() / 1024
        self.assertEqual(free_cmem, psutil_cmem)
    def test_disks(self):
        # test psutil.disk_usage() and psutil.disk_partitions()
        # against "df -a"
        def df(path):
            # -P forces POSIX single-line output, keeping the split safe
            out = sh('df -P -B 1 "%s"' % path).strip()
            lines = out.split('\n')
            lines.pop(0)
            line = lines.pop(0)
            dev, total, used, free = line.split()[:4]
            if dev == 'none':
                dev = ''
            total, used, free = int(total), int(used), int(free)
            return dev, total, used, free
        for part in psutil.disk_partitions(all=False):
            usage = psutil.disk_usage(part.mountpoint)
            dev, total, used, free = df(part.mountpoint)
            self.assertEqual(part.device, dev)
            self.assertEqual(usage.total, total)
            # 10 MB tollerance
            # Fix: the format arguments were outside the parentheses, so
            # self.fail() raised TypeError instead of reporting the values.
            if abs(usage.free - free) > 10 * 1024 * 1024:
                self.fail("psutil=%s, df=%s" % (usage.free, free))
            if abs(usage.used - used) > 10 * 1024 * 1024:
                self.fail("psutil=%s, df=%s" % (usage.used, used))
if __name__ == '__main__':
    # Run this module's tests directly with a verbose text runner.
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.makeSuite(LinuxSpecificTestCase))
    unittest.TextTestRunner(verbosity=2).run(test_suite)
| #!/usr/bin/env python
#
# $Id$
#
import unittest
import subprocess
import sys
from test_psutil import sh
import psutil
class LinuxSpecificTestCase(unittest.TestCase):
def test_cached_phymem(self):
# test psutil.cached_phymem against "cached" column of free
# command line utility
p = subprocess.Popen("free", shell=1, stdout=subprocess.PIPE)
output = p.communicate()[0].strip()
if sys.version_info >= (3,):
output = str(output, sys.stdout.encoding)
free_cmem = int(output.split('\n')[1].split()[6])
psutil_cmem = psutil.cached_phymem() / 1024
self.assertEqual(free_cmem, psutil_cmem)
def test_phymem_buffers(self):
# test psutil.phymem_buffers against "buffers" column of free
# command line utility
p = subprocess.Popen("free", shell=1, stdout=subprocess.PIPE)
output = p.communicate()[0].strip()
if sys.version_info >= (3,):
output = str(output, sys.stdout.encoding)
free_cmem = int(output.split('\n')[1].split()[5])
psutil_cmem = psutil.phymem_buffers() / 1024
self.assertEqual(free_cmem, psutil_cmem)
def test_disks(self):
# test psutil.disk_usage() and psutil.disk_partitions()
# against "df -a"
def df(path):
out = sh('df -B 1 "%s"' % path).strip()
lines = out.split('\n')
lines.pop(0)
line = lines.pop(0)
dev, total, used, free = line.split()[:4]
if dev == 'none':
dev = ''
total, used, free = int(total), int(used), int(free)
return dev, total, used, free
for part in psutil.disk_partitions(all=False):
usage = psutil.disk_usage(part.mountpoint)
dev, total, used, free = df(part.mountpoint)
self.assertEqual(part.device, dev)
self.assertEqual(usage.total, total)
# 10 MB tollerance
if abs(usage.free - free) > 10 * 1024 * 1024:
self.fail("psutil=%s, df=%s" % usage.free, free)
if abs(usage.used - used) > 10 * 1024 * 1024:
self.fail("psutil=%s, df=%s" % usage.used, used)
if __name__ == '__main__':
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(LinuxSpecificTestCase))
unittest.TextTestRunner(verbosity=2).run(test_suite)
| Python | 0.000006 |
501454e30a93b6ec706add520a6b106940b537d9 | Create card_pick.py | FiveThirtyEightRiddler/2017-04-21/card_pick.py | FiveThirtyEightRiddler/2017-04-21/card_pick.py | import random
from collections import Counter
import matplotlib.pyplot as plt
from multiprocessing import Pool
import numpy as np
import itertools
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
def simulate_single_run(num_cards, hand_perc, stop_percentage):
hand_size = int(num_cards * hand_perc)
remaining_cards = list(range(1, num_cards + 1))
hand = random.sample(remaining_cards, hand_size)
seen_cards = []
# print(hand, max(hand))
for num_card, card in enumerate(hand, start=1):
seen_cards.append(card)
remaining_cards.remove(card)
high_card_so_far = max(seen_cards)
prob_draw_higher_than_highest = len([c for c in remaining_cards if c > high_card_so_far]) / len(remaining_cards)
prob_any_remaining_higher = 1 - ((1 - prob_draw_higher_than_highest) ** (hand_size - num_card))
# print(seen_cards, high_card_so_far, prob_draw_higher_than_highest, prob_any_remaining_higher)
if prob_any_remaining_higher <= stop_percentage:
return card == max(hand)
def simulate_single_percentage(num_cards, hand_perc, stop_percentage, trials):
    # Fraction of `trials` runs that stopped on the hand's best card.
    return Counter(simulate_single_run(num_cards, hand_perc, stop_percentage) for _ in range(trials))[True] / trials
def trail_multiple_percentages(num_cards, hand_perc, stop_percentages, trials):
    # Best win rate over all candidate stop thresholds.  (Name kept for
    # compatibility; "trail" is presumably a typo for "trial".)
    result = 0
    for pct in stop_percentages:
        result = max(result, simulate_single_percentage(num_cards, hand_perc, pct, trials))
    print(num_cards, hand_perc, result)
    return result
if __name__ == '__main__':
    # Sweep deck sizes and hand fractions; plot the best achievable win
    # rate as a 3-D surface.
    #NUM_CARDS = np.logspace(2, 5, num=4, dtype=int)
    NUM_CARDS = np.linspace(100, 1000, num=4, dtype=int)
    HAND_PERC = np.linspace(.2, .7, num=6, dtype=float)
    PERCENTAGES = np.linspace(0, 1, num=10, dtype=float)
    SAMPLE_SIZE = 1000
    # evaluate every (deck size, hand fraction) combination in parallel
    with Pool(4) as p:
        results = p.starmap(trail_multiple_percentages,
                            [(num_cards, hand_size, PERCENTAGES, SAMPLE_SIZE) for num_cards, hand_size in
                             itertools.product(NUM_CARDS, HAND_PERC)])
    results = np.array(results).reshape((len(NUM_CARDS), len(HAND_PERC))).T
    NUM_CARDS, HAND_PERC = np.meshgrid(NUM_CARDS, HAND_PERC)
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(NUM_CARDS, HAND_PERC, results, linewidth=0, antialiased=False, cmap=cm.coolwarm)
    plt.show()
| Python | 0.000001 | |
49f5b1c0111426a3366d59ca702ce5ec307fc3a8 | add rmodel | rmodel/__init__.py | rmodel/__init__.py | from jinja2 import Environment, FileSystemLoader
import os
from subprocess import Popen, PIPE
def hi():
    """Print a greeting; smoke-test helper for the package."""
    greeting = 'hi'
    print(greeting)
# Directory containing this module; templates are looked up in ../templates.
PATH = os.path.dirname(os.path.abspath(__file__))
# Jinja environment for rendering shell scripts and namelists (not HTML,
# hence autoescape is off).
TEMPLATE_ENVIRONMENT = Environment(
    autoescape=False,
    loader=FileSystemLoader(os.path.join(PATH, '../templates')),
    trim_blocks=False)
def diff_month(d1, d2):
    """Number of calendar months from d2 to d1 (i.e. d1 - d2)."""
    years_apart = d1.year - d2.year
    return years_apart * 12 + (d1.month - d2.month)
def forcing_present(path, BUSER, BEXP, date, KSA):
    """Check that the monthly forcing tar file exists.

    The expected file is {path}/a{BUSER}{BEXP}a{YYYY}{MM}.tar for the
    month of `date`.  No check is performed when KSA <= 0.

    Raises:
        NameError: if KSA > 0 and the tar file does not exist (existing
            callers catch NameError, so the exception type is kept).
    """
    if KSA > 0:
        ff = '{}/a{}{}a{}{}.tar'.format(path, BUSER, BEXP, str(date.year),
                                        str(date.month).zfill(2))
        # Fixed: was a Python 2 print statement ("print ff"), inconsistent
        # with the print() calls used everywhere else in this module.
        print(ff)
        if os.path.isfile(ff):
            print('Forcing tar file exist')
        else:
            raise NameError('Forcing tar file do not exist')
def restart_present(path, USER, EXP, date, KSA):
    """Check that both monthly restart files (f and g) exist under
    {path}/xf/.  No check is performed when KSA <= 0; raises NameError
    when either file is missing."""
    if KSA <= 0:
        return
    month_tag = '{}{}'.format(date.year, str(date.month).zfill(2))
    ffile = '{}/xf/e{}{}f{}0100'.format(path, USER, EXP, month_tag)
    gfile = '{}/xf/e{}{}g{}0100'.format(path, USER, EXP, month_tag)
    print(ffile)
    print(gfile)
    if os.path.isfile(ffile) and os.path.isfile(gfile):
        print('Restart files exist')
    else:
        raise NameError('Restart files do not exist')
def preprocessing(PFADFRC, DIR, BUSER, BEXP, date, ndate):
    """Render preprocessing.sh from its Jinja template for the month of
    `date` (`ndate` is the following month) and execute it, echoing the
    script's stdout and stderr."""
    context = {
        'year': str(date.year),
        'mon': str(date.month).zfill(2),
        'nyear': str(ndate.year),
        'nmon': str(ndate.month).zfill(2),
        'PFADFRC': PFADFRC,
        'DIR': DIR,
        'BUSER': BUSER,
        'BEXP': BEXP,
    }
    with open('preprocessing.sh', 'w') as script:
        script.write(
            TEMPLATE_ENVIRONMENT.get_template('preprocessing_template')
            .render(**context))
    os.system('chmod +x ./preprocessing.sh')
    print('Begin with preprocessing')
    process = Popen('./preprocessing.sh', shell=True,
                    stdout=PIPE, stderr=PIPE)
    out, err = process.communicate()
    print(out)
    print(err)
    print('Preprocessing is over')
def generate_INPUT(KSA, KSE):
    """Render the INPUT namelist file from its Jinja template with the
    given KSA/KSE values."""
    rendered = TEMPLATE_ENVIRONMENT.get_template('INPUT_template').render(
        KSA=KSA, KSE=KSE)
    with open('INPUT', 'w') as handle:
        handle.write(rendered)
    print("INPUT file is generated")
def postprocessing(MYWRKSHR, PFADFRC, PFADRES, DIR, USER, EXP, date, jobid):
    """Render postprocessing.sh from its Jinja template for the month of
    `date` and execute it, echoing the script's stdout and stderr."""
    context = {
        'year': str(date.year),
        'mon': str(date.month).zfill(2),
        'PFADFRC': PFADFRC,
        'DIR': DIR,
        'USER': USER,
        'EXP': EXP,
        'MYWRKSHR': MYWRKSHR,
        'PFADRES': PFADRES,
        'jobid': jobid,
    }
    with open('postprocessing.sh', 'w') as script:
        script.write(
            TEMPLATE_ENVIRONMENT.get_template('postprocessing_template')
            .render(**context))
    os.system('chmod +x ./postprocessing.sh')
    print('postprocessing start')
    process = Popen('./postprocessing.sh', shell=True,
                    stdout=PIPE, stderr=PIPE)
    out, err = process.communicate()
    print(out)
    print(err)
    print('postprocessing over')
def postprocessing_pure(MYWRKSHR, PFADFRC, PFADRES, DIR, USER, EXP, date, jobid):
    """Render postprocessing_pure.sh from its Jinja template for the month
    of `date` and execute it, echoing the script's stdout and stderr."""
    context = {
        'year': str(date.year),
        'mon': str(date.month).zfill(2),
        'PFADFRC': PFADFRC,
        'DIR': DIR,
        'USER': USER,
        'EXP': EXP,
        'MYWRKSHR': MYWRKSHR,
        'PFADRES': PFADRES,
        'jobid': jobid,
    }
    with open('postprocessing_pure.sh', 'w') as script:
        script.write(
            TEMPLATE_ENVIRONMENT.get_template('postprocessing_pure_template')
            .render(**context))
    os.system('chmod +x ./postprocessing_pure.sh')
    print('postprocessing start')
    process = Popen('./postprocessing_pure.sh', shell=True,
                    stdout=PIPE, stderr=PIPE)
    out, err = process.communicate()
    print(out)
    print(err)
    print('postprocessing over')
| Python | 0.000001 | |
4d44d58c91e6a4fdf9ab16acac6320dd5d1f6bb9 | Add senlin-manage service list/clean for engine status | senlin/cmd/manage.py | senlin/cmd/manage.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
CLI interface for senlin management.
"""
import sys
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
from senlin.common.i18n import _
from senlin.db import api
from senlin import version
CONF = cfg.CONF
def do_db_version():
    """Print the database's current migration level."""
    engine = api.get_engine()
    print(api.db_version(engine))
def do_db_sync():
    """Place the database under migration control and upgrade it.

    The DB is created first if necessary.
    """
    engine = api.get_engine()
    api.db_sync(engine, CONF.command.version)
class ServiceManageCommand(object):
    """Implements the senlin-manage 'service' subcommands (list / clean)."""

    def _format_service(self, service):
        """Map a service DB record to a plain dict, deriving an 'up'/'down'
        status from how recently the service reported in."""
        if service is None:
            return

        status = 'down'
        # Alive if the last heartbeat is within one periodic interval.
        # NOTE(review): assumes updated_at and timeutils.utcnow() use the
        # same (naive UTC) convention -- confirm.
        if ((timeutils.utcnow() - service.updated_at).total_seconds() <=
                CONF.periodic_interval):
            status = 'up'

        result = {
            'service_id': service.id,
            'binary': service.binary,
            'host': service.host,
            'topic': service.topic,
            'created_at': service.created_at,
            'updated_at': service.updated_at,
            'status': status
        }
        return result

    def service_list(self):
        """Print a fixed-width table of all registered engine services."""
        services = [self._format_service(service)
                    for service in api.service_get_all()]

        print_format = "%-36s %-24s %-16s %-16s %-10s %-24s %-24s"

        print(print_format % (_('Service ID'),
                              _('Host'),
                              _('Binary'),
                              _('Topic'),
                              _('Status'),
                              _('Created At'),
                              _('Updated At')))

        for svc in services:
            print(print_format % (svc['service_id'],
                                  svc['host'],
                                  svc['binary'],
                                  svc['topic'],
                                  svc['status'],
                                  svc['created_at'],
                                  svc['updated_at']))

    def service_clean(self):
        """Delete every service record whose status is 'down'."""
        for service in api.service_get_all():
            svc = self._format_service(service)
            if svc['status'] == 'down':
                print(_('Dead service %s is removed.') % svc['service_id'])
                api.service_delete(svc['service_id'])

    @staticmethod
    def add_service_parsers(subparsers):
        # Registers: senlin-manage service {list,clean}
        service_parser = subparsers.add_parser('service')
        service_parser.set_defaults(command_object=ServiceManageCommand)
        service_subparsers = service_parser.add_subparsers(dest='action')
        list_parser = service_subparsers.add_parser('list')
        list_parser.set_defaults(func=ServiceManageCommand().service_list)
        remove_parser = service_subparsers.add_parser('clean')
        remove_parser.set_defaults(func=ServiceManageCommand().service_clean)
def add_command_parsers(subparsers):
    """Register the senlin-manage subcommands (db_version, db_sync and the
    service group) on the given argparse subparsers object."""
    db_version_parser = subparsers.add_parser('db_version')
    db_version_parser.set_defaults(func=do_db_version)

    db_sync_parser = subparsers.add_parser('db_sync')
    db_sync_parser.set_defaults(func=do_db_sync)

    ServiceManageCommand.add_service_parsers(subparsers)

    # Optional positional migration versions for db_sync.
    db_sync_parser.add_argument('version', nargs='?')
    db_sync_parser.add_argument('current_version', nargs='?')
# Oslo.config sub-command option: dispatches to the parsers registered by
# add_command_parsers (db_version, db_sync, service).
command_opt = cfg.SubCommandOpt('command',
                                title='Commands',
                                help='Show available commands.',
                                handler=add_command_parsers)
def main():
    """senlin-manage entry point: set up logging/config, parse the CLI and
    dispatch to the selected subcommand."""
    logging.register_options(CONF)
    logging.setup(CONF, 'senlin-manage')
    CONF.register_cli_opt(command_opt)

    try:
        config_files = cfg.find_config_files('senlin', 'senlin-engine')
        CONF(sys.argv[1:],
             project='senlin',
             prog='senlin-manage',
             version=version.version_info.version_string(),
             default_config_files=config_files)
    except RuntimeError as exc:
        sys.exit("ERROR: %s" % exc)

    try:
        CONF.command.func()
    except Exception as exc:
        sys.exit("ERROR: %s" % exc)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
CLI interface for senlin management.
"""
import sys
from oslo_config import cfg
from oslo_log import log as logging
from senlin.db import api
from senlin import version
# NOTE: this is the pre-change revision of senlin/cmd/manage.py (the
# dataset's old_contents column), before the 'service list/clean'
# subcommands were added.  Kept byte-identical apart from comments.
CONF = cfg.CONF


def do_db_version():
    '''Print database's current migration level.'''
    print(api.db_version(api.get_engine()))


def do_db_sync():
    '''Place a database under migration control and upgrade.

    DB is created first if necessary.
    '''
    api.db_sync(api.get_engine(), CONF.command.version)


def add_command_parsers(subparsers):
    # Registers db_version and db_sync; the optional positional version
    # arguments are attached to the db_sync parser.
    parser = subparsers.add_parser('db_version')
    parser.set_defaults(func=do_db_version)

    parser = subparsers.add_parser('db_sync')
    parser.set_defaults(func=do_db_sync)
    parser.add_argument('version', nargs='?')
    parser.add_argument('current_version', nargs='?')


command_opt = cfg.SubCommandOpt('command',
                                title='Commands',
                                help='Show available commands.',
                                handler=add_command_parsers)


def main():
    # Entry point: configure logging/config, parse CLI, dispatch.
    logging.register_options(CONF)
    logging.setup(CONF, 'senlin-manage')
    CONF.register_cli_opt(command_opt)
    try:
        default_config_files = cfg.find_config_files('senlin', 'senlin-engine')
        CONF(sys.argv[1:], project='senlin', prog='senlin-manage',
             version=version.version_info.version_string(),
             default_config_files=default_config_files)
    except RuntimeError as e:
        sys.exit("ERROR: %s" % e)

    try:
        CONF.command.func()
    except Exception as e:
        sys.exit("ERROR: %s" % e)
| Python | 0.000001 |
1730cecbc05928e93e2dec826a67139bf6765cc6 | Add a src-side script to display a summary of host info. | testing/scripts/host_info.py | testing/scripts/host_info.py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import multiprocessing
import os
import platform
import subprocess
import sys
import common
def is_linux():
  """Return True when the host platform is Linux."""
  current_platform = sys.platform
  return current_platform.startswith('linux')
def get_free_disk_space(failures):
  """Returns the amount of free space on the current disk, in GiB.

  On unsupported OSes, appends an error message to |failures| and
  returns 0.

  Returns:
    The amount of free space on the current disk, measured in GiB.
  """
  if os.name == 'posix':
    # Stat the current path for info on the current disk.
    stat_result = os.statvfs('.')
    # f_frsize * f_bavail is the number of free bytes available to an
    # unprivileged user.  Divide by 1024^3 to express it in GiB.  The
    # previous code divided by (1024.0 / 1024.0 / 1024.0) == 1/1024,
    # which *multiplied* the byte count by 1024 instead.
    return stat_result.f_frsize * stat_result.f_bavail / (
        1024.0 * 1024.0 * 1024.0)
  failures.append('get_free_disk_space: OS %s not supported.' % os.name)
  return 0
def get_num_cpus(failures):
  """Returns the number of logical CPUs on this machine.

  Records 'get_num_cpus' in |failures| when the count cannot be
  determined.

  Returns:
    The number of logical CPUs on this machine, or 'unknown' if
    indeterminate.
  """
  try:
    count = multiprocessing.cpu_count()
  except NotImplementedError:
    failures.append('get_num_cpus')
    return 'unknown'
  return count
def get_device_info(args, failures):
  """Parses the device info for each attached Android device.

  Runs build/android/buildbot/bb_device_status_check.py and summarizes
  its JSON output.  Cross-device mismatches (name, build version, build
  type) are reported via |failures|.

  Returns:
    A dict describing the attached devices, or {} on non-Linux hosts or
    when the status check fails.
  """
  if not is_linux():
    return {}

  with common.temporary_file() as tempfile_path:
    script = os.path.join(args.paths['checkout'], 'build', 'android',
                          'buildbot', 'bb_device_status_check.py')
    rc = common.run_command([sys.executable, script,
                             '--json-output', tempfile_path])
    if rc:
      failures.append('bb_device_status_check')
      return {}

    with open(tempfile_path, 'r') as src:
      device_info = json.load(src)

  results = {'devices': sorted(v['serial'] for v in device_info)}
  details = [v['build_detail'] for v in device_info]

  def unique_build_details(index):
    # build_detail is 'name:version:type'; collect the distinct values of
    # one field across all devices.
    return sorted(set(v.split(':')[index] for v in details))

  parsed_details = {
      'device_names': unique_build_details(0),
      'build_versions': unique_build_details(1),
      'build_types': unique_build_details(2),
  }
  for key, values in parsed_details.iteritems():
    if len(values) == 1:
      results[key] = values[0]
    else:
      results[key] = 'MISMATCH'
      results['%s_list' % key] = values
      failures.append(key)
  return results
def main_run(args):
  """Collect host diagnostics and write a JSON summary to args.output.

  Returns True when any check failed (non-zero exit convention).
  """
  failures = []
  # Evaluation order matters only for the order of entries in |failures|;
  # it matches the original sequential assignments.
  host_info = {
      'os_system': platform.system(),
      'os_release': platform.release(),
      'processor': platform.processor(),
      'num_cpus': get_num_cpus(failures),
      'free_disk_space': get_free_disk_space(failures),
      'python_version': platform.python_version(),
      'python_path': sys.executable,
  }
  host_info['devices'] = get_device_info(args, failures)

  json.dump({
      'valid': True,
      'failures': failures,
      '_host_info': host_info,
  }, args.output)

  return len(failures) != 0
def main_compile_targets(args):
  """This script needs nothing built; emit an empty JSON target list."""
  targets = []
  json.dump(targets, args.output)
if __name__ == '__main__':
  # Dispatch table used by the shared recipe-script runner in common.py.
  funcs = {
    'run': main_run,
    'compile_targets': main_compile_targets,
  }
  sys.exit(common.run_script(sys.argv[1:], funcs))
| Python | 0.999999 | |
ab31fea8a0d30bc4b68813ce635880d4682cfc2f | Write base classes for collection profiles. | cohydra/profile.py | cohydra/profile.py | import abc
import logging
import six
class Profile(six.with_metaclass(abc.ABCMeta)):
    """Base class for all collection profiles.

    Attributes:
      top_dir: Where this profile's files will be stored.
      parent: The profile from which this profile is derived, or
        None for a root profile.
      children: List of child profiles.
    """

    def __init__(self, top_dir, parent):
        """Create a profile and register it with its parent (if any)."""
        self.top_dir = top_dir
        self.parent = parent
        self.children = []
        if parent is not None:
            parent.children.append(self)

    def __str__(self):
        parent_dir = self.parent.top_dir if self.parent is not None else None
        return '%s.%s(top_dir=%r, parent=%r)' % (
            type(self).__module__,
            type(self).__name__,
            self.top_dir,
            parent_dir,
        )

    def generate_all(self, depth=0):
        """Generate this profile, then recursively all of its children."""
        logging.info('%sGenerating %s', ' ' * depth, self)
        self.generate()
        # TODO: parallelize?
        for child in self.children:
            child.generate_all(depth + 1)

    def print_all(self, depth=0):
        """Print the whole profile tree, for debugging."""
        print(' ' * depth + str(self))
        for child in self.children:
            child.print_all(depth + 1)

    def log(self, level, msg, *args, **kwargs):
        """Log a message prefixed with this profile's description."""
        logging.log(
            level,
            '%s: %s' % (self, msg),
            *args,
            **kwargs)

    @abc.abstractmethod
    def generate(self):
        """Generate this profile from its parent.

        This method assumes that the parent is up-to-date.
        """
class RootProfile(Profile):
    """Root profile.

    A directory of original files, rather than a profile derived from
    another profile's files -- so there is nothing to generate.
    """

    def __init__(self, top_dir):
        super(RootProfile, self).__init__(top_dir, None)

    def generate(self):
        # The root's files are the originals; generation is a no-op.
        pass
| Python | 0 | |
8fec4b6eef7f1f4ef5840504f6abcdf0d2f9f80d | Adding the concept of a target platform. | tools/cr/cr/base/platform.py | tools/cr/cr/base/platform.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module for the target platform support."""
from importlib import import_module
import os
import cr
# Base configuration shared by all platforms: depot_tools location,
# resolved against the {GOOGLE_CODE} config variable at substitution time.
DEFAULT = cr.Config.From(
    DEPOT_TOOLS=os.path.join('{GOOGLE_CODE}', 'depot_tools'),
)
class Platform(cr.Plugin, cr.Plugin.Type):
  """Base class for implementing cr platforms.

  A platform is the target operating system being compiled for (linux android).
  """

  # Stdlib 'platform' module, resolved at class-creation time so it is not
  # shadowed by this module's own name.
  _platform_module = import_module('platform', None)
  # Config/environment key that selects the active platform plugin.
  SELECTOR = 'CR_PLATFORM'

  @classmethod
  def AddArguments(cls, parser):
    # Adds --platform; a chosen value overrides the CR_PLATFORM setting.
    parser.add_argument(
        '--platform', dest=cls.SELECTOR,
        choices=cls.Choices(),
        default=None,
        help='Sets the target platform to use. Overrides ' + cls.SELECTOR
    )

  @classmethod
  def System(cls):
    # Host OS name (e.g. 'Linux'), via the stdlib platform module.
    return cls._platform_module.system()

  def __init__(self):
    super(Platform, self).__init__()

  def Activate(self, context):
    # Register the PATH fixup hook exactly once per context.
    # NOTE(review): hook/plugin semantics come from the cr.Plugin
    # framework (not visible here) -- confirm against its docs.
    super(Platform, self).Activate(context)
    if _PathFixup not in context.fixup_hooks:
      context.fixup_hooks.append(_PathFixup)

  @cr.Plugin.activemethod
  def Prepare(self, context):
    pass

  @property
  def paths(self):
    # Extra directories the platform wants prepended to PATH; overridden
    # by concrete platform subclasses.
    return []
def _PathFixup(context, key, value):
  """A context fixup that applies platform-specific PATH modifications."""
  if key != 'PATH':
    return value
  entries = []
  # Platform-provided directories come first, after variable substitution,
  # with duplicates dropped.
  for raw in Platform.GetActivePlugin(context).paths:
    resolved = context.Substitute(raw)
    if resolved not in entries:
      entries.append(resolved)
  # Then the inherited PATH entries, minus any goma directory and
  # anything already present.
  for part in value.split(os.path.pathsep):
    if part.endswith(os.path.sep + 'goma'):
      continue
    if part not in entries:
      entries.append(part)
  return os.path.pathsep.join(entries)
| Python | 0.999998 | |
c2dac9161705e6edbf5b059fd8a442c2754577ff | Add Lambda job | refusereminder.py | refusereminder.py | #!/usr/bin/env python
from __future__ import print_function
import boto3
import json
import os
from mkerefuse.refuse import RefuseQuery
from mkerefuse.refuse import RefuseQueryAddress
DEFAULT_SNS_TOPIC = 'mke-trash-pickup'
"""Default topic to notify for pickup changes"""
DEFAULT_S3_BUCKET = 'mke-trash-pickup'
"""Default S3 bucket name for storing persistent data"""
DEFAULT_S3_PREFIX = ''
"""Default S3 key prefix for persistent data"""
DEFAULT_S3_KEY = 'mke-trash-pickup.json'
"""Default S3 key for persistent data"""
def get_sns_topic_arn(topic_name, aws_region=None, aws_account_num=None):
    """Build the ARN for an SNS topic.

    Region and account number are looked up from the active boto3
    session / STS caller identity when not supplied.
    """
    region = aws_region
    if region is None:
        region = boto3.session.Session().region_name
    account = aws_account_num
    if account is None:
        account = boto3.client('sts').get_caller_identity()['Account']
    parts = ['arn', 'aws', 'sns', region, account, topic_name]
    return ':'.join(parts)
def notify_pickup_change(pickup, sns_topic):
    """
    Produces a notification for a garbage pickup change
    """
    print("Notifying SNS: {}".format(sns_topic.arn))
    notify_msg = """
    Garbage: {garbage}
    Recycle (After): {recycle_after}
    Recycle (Before): {recycle_before}""".format(
        garbage=pickup.next_pickup_garbage,
        recycle_after=pickup.next_pickup_recycle_after,
        recycle_before=pickup.next_pickup_recycle_before).strip()

    print("\n{}\n".format(notify_msg))
    # NOTE(review): this early return makes the sns_topic.publish() call
    # below unreachable, so the function currently only prints.  It looks
    # like a debugging leftover -- confirm intent before removing it.
    return

    sns_topic.publish(
        Subject='Garbage Day Update',
        Message=notify_msg)
def lambda_handler(event, context):
    """
    Detects garbage day changes & updates them
    """
    # Compose the address
    address = RefuseQueryAddress(
        house_number=event['house_number'],
        direction=event['direction'],
        street_name=event['street_name'],
        street_type=event['street_type'])
    print("Querying address: {num} {d} {name} {t}".format(
        num=address.house_number,
        d=address.direction,
        name=address.street_name,
        t=address.street_type))

    # Query for the collection schedule
    pickup = RefuseQuery.Execute(address)

    # Create an S3 resource for fetching/storing persistent data
    s3 = boto3.resource('s3')

    # Attempt reading the last pickup information
    s3_bucket = event.get('s3_bucket', DEFAULT_S3_BUCKET)
    # lstrip('/') guards against an empty prefix producing a key that
    # starts with '/'.
    s3_key = os.path.join(
        event.get('s3_prefix', DEFAULT_S3_PREFIX),
        event.get('s3_key', DEFAULT_S3_KEY)).lstrip('/')
    s3_object = s3.Object(s3_bucket, s3_key)
    last_data = json.loads('{}')
    try:
        print("Loading previous pickup data from s3://{b}/{k}".format(
            b=s3_object.bucket_name,
            k=s3_object.key))
        last_data = json.loads(s3_object.get()['Body'].read().decode('utf-8'))
    except Exception as e:
        # Failed to load old data for some reason
        # Ignore it and assume a change in dates
        print("Failed to load previous pickup data")
        print(e)

    # Overwrite previous pickup data with the new data
    s3_object.put(Body=json.dumps(pickup.to_dict()))

    # If the information differs, notify of the changes
    if last_data != pickup.to_dict():
        print("Pickup change detected")
        sns = boto3.resource('sns')
        notify_pickup_change(
            pickup,
            sns_topic=sns.Topic(
                get_sns_topic_arn(event.get('sns_topic', DEFAULT_SNS_TOPIC))))
| Python | 0.000046 | |
8cac10350cdbc33d243a561ba06c25f5d01e9a04 | fix for lists | Scripts/SearchIncidents_5.0/SearchIncidents.py | Scripts/SearchIncidents_5.0/SearchIncidents.py | from typing import Dict, List
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
# Characters that may legally follow a backslash in argument values.
special = ['n', 't', '\\', '"', '\'', '7', 'r']


def check_if_found_incident(res: List):
    """Validate the getIncidents response; raise DemistoException when the
    response is malformed or contains no incidents."""
    well_formed = bool(res) and isinstance(res, list) \
        and isinstance(res[0].get('Contents'), dict)
    if not well_formed:
        raise DemistoException(f'failed to get incidents from demisto.\nGot: {res}')
    contents = res[0]['Contents']
    if 'data' not in contents:
        raise DemistoException(res[0].get('Contents'))
    if contents['data'] is None:
        raise DemistoException("Incidents not found.")
def is_valid_args(args: Dict):
    """Validate backslash escape sequences in incident-search arguments.

    List-valued values of the known array arguments are joined with
    commas before scanning.  Returns True when every argument parses;
    raises DemistoException listing each argument that failed.
    """
    array_args: List[str] = ['id', 'name', 'status', 'notstatus', 'reason', 'level', 'owner', 'type', 'query']
    error_msg: List[str] = []
    for _key, value in args.items():
        if _key in array_args:
            value = ','.join(value)
        i = 0
        while i < len(value):
            if value[i] == '\\':
                # A trailing backslash has no escape character to consume;
                # previously value[i + 1] raised IndexError -- report it as
                # a parse error instead.
                if i + 1 >= len(value) or value[i + 1] not in special:
                    error_msg.append(f'Error while parsing the argument: "{_key}" '
                                     f'\nSucceeded parsing untill:\n- "{value[0:i]}"')
                else:
                    i += 1
            i += 1

    if len(error_msg) != 0:
        raise DemistoException('\n'.join(error_msg))

    return True
def search_incidents(args: Dict):
    """Run getIncidents with the (validated) arguments and publish a
    markdown table, context entry and raw results."""
    if not is_valid_args(args):
        return
    res: List = demisto.executeCommand('getIncidents', args)
    check_if_found_incident(res)

    incidents: Dict = res[0]['Contents']['data']
    columns: List[str] = ['id', 'name', 'severity', 'status', 'owner', 'created', 'closed']
    table: str = tableToMarkdown(name="Incidents found", t=incidents, headers=columns)
    return_outputs(table, {'foundIncidents': incidents}, res)
def main():
    """Script entry point: run the incident search, surfacing any
    DemistoException as a script error."""
    try:
        search_incidents(demisto.args())
    except DemistoException as error:
        return_error(str(error), error)


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| from typing import Dict, List
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
# NOTE: this is the pre-fix revision (the dataset's old_contents column):
# is_valid_args did not yet join list-valued arguments, so lists raised
# TypeError during scanning.  Kept byte-identical apart from comments.
special = ['n', 't', '\\', '"', '\'', '7', 'r']


def check_if_found_incident(res: List):
    # Raises DemistoException when the response is malformed or empty.
    if res and isinstance(res, list) and isinstance(res[0].get('Contents'), dict):
        if 'data' not in res[0]['Contents']:
            raise DemistoException(res[0].get('Contents'))
        elif res[0]['Contents']['data'] is None:
            raise DemistoException("Incidents not found.")

    else:
        raise DemistoException(f'failed to get incidents from demisto.\nGot: {res}')


def is_valid_args(args: Dict):
    # Scans each value for backslash escapes; only the characters in
    # `special` may follow a backslash.
    error_msg: List[str] = []
    for _key, value in args.items():
        i = 0
        while i < len(value):
            if value[i] == '\\':
                if value[i + 1] not in special:
                    error_msg.append(f'Error while parsing the argument: "{_key}" '
                                     f'\nSucceeded parsing untill:\n- "{value[0:i]}"')
                else:
                    i += 1
            i += 1

    if len(error_msg) != 0:
        raise DemistoException('\n'.join(error_msg))

    return True


def search_incidents(args: Dict):
    # Query incidents and publish a markdown table plus context entry.
    if is_valid_args(args):
        res: List = demisto.executeCommand('getIncidents', args)
        check_if_found_incident(res)

        data: Dict = res[0]['Contents']['data']
        context_entry: Dict = {'foundIncidents': data}
        headers: List[str] = ['id', 'name', 'severity', 'status', 'owner', 'created', 'closed']
        md: str = tableToMarkdown(name="Incidents found", t=data, headers=headers)

        return_outputs(md, context_entry, res)


def main():
    args: Dict = demisto.args()
    try:
        search_incidents(args)
    except DemistoException as error:
        return_error(str(error), error)


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| Python | 0.000001 |
37bd6459bff3f9b079897b1392c04681c65fa24e | Fix #8 chat.reply doesn't work in groups | aiotg/chat.py | aiotg/chat.py | import json
import logging
from functools import partialmethod
logger = logging.getLogger("aiotg")
class Chat:
    """
    Wrapper for telegram chats, passed to most callbacks.
    """

    def send_text(self, text, **options):
        """
        Send a text message to the chat, for available options see
        https://core.telegram.org/bots/api#sendmessage
        """
        return self.bot.send_message(self.id, text, **options)

    def reply(self, text, markup=None, parse_mode=None):
        """Reply to the message this chat was created from.

        An empty dict (not None) is serialized as the default reply
        markup so the Bot API receives "{}" rather than "null", which
        made replies fail in group chats (issue #8).  The mutable
        default argument is avoided by normalizing None here.
        """
        if markup is None:
            markup = {}
        return self.send_text(
            text,
            reply_to_message_id=self.message["message_id"],
            disable_web_page_preview='true',
            reply_markup=json.dumps(markup),
            parse_mode=parse_mode
        )

    def _send_to_chat(self, method, **options):
        # Generic helper: invoke the given Bot API method for this chat id.
        return self.bot.api_call(
            method,
            chat_id=str(self.id),
            **options
        )

    send_audio = partialmethod(_send_to_chat, "sendAudio")
    send_photo = partialmethod(_send_to_chat, "sendPhoto")
    send_video = partialmethod(_send_to_chat, "sendVideo")
    send_document = partialmethod(_send_to_chat, "sendDocument")
    send_sticker = partialmethod(_send_to_chat, "sendSticker")
    send_voice = partialmethod(_send_to_chat, "sendVoice")
    # "send_locaton" is a historical typo kept for backward compatibility;
    # a correctly spelled alias is provided alongside it.
    send_locaton = partialmethod(_send_to_chat, "sendLocation")
    send_location = partialmethod(_send_to_chat, "sendLocation")
    send_chat_action = partialmethod(_send_to_chat, "sendChatAction")

    def forward_message(self, from_chat_id, message_id):
        """Forward a message from another chat into this one."""
        return self.bot.api_call(
            "forwardMessage",
            chat_id=self.id,
            from_chat_id=from_chat_id,
            message_id=message_id
        )

    def is_group(self):
        """True when this chat is a group chat."""
        return self.type == "group"

    def __init__(self, bot, chat_id, chat_type="private", src_message=None):
        """
        :param bot: bot instance used for API calls
        :param chat_id: Telegram chat id
        :param chat_type: chat type string ("private", "group", ...)
        :param src_message: the update message this chat was derived from
        """
        self.bot = bot
        self.message = src_message
        # Chats built without a source message get a placeholder sender so
        # logging never fails.
        sender = src_message['from'] if src_message else {"first_name": "N/A"}
        self.sender = Sender(sender)
        self.id = chat_id
        self.type = chat_type

    @staticmethod
    def from_message(bot, message):
        """Build a Chat from a raw Telegram update message."""
        chat = message["chat"]
        return Chat(bot, chat["id"], chat["type"], message)


class TgChat(Chat):
    """Deprecated alias for Chat; logs a warning on construction."""
    def __init__(self, *args, **kwargs):
        logger.warning("TgChat is depricated, use Chat instead")
        super().__init__(*args, **kwargs)


class Sender(dict):
    """A small wrapper for sender info, mostly used for logging"""
    def __repr__(self):
        uname = " (%s)" % self["username"] if "username" in self else ""
        return self['first_name'] + uname


class TgSender(Sender):
    """Deprecated alias for Sender; logs a warning on construction."""
    def __init__(self, *args, **kwargs):
        logger.warning("TgSender is depricated, use Sender instead")
        super().__init__(*args, **kwargs)
| import json
import logging
from functools import partialmethod
logger = logging.getLogger("aiotg")
# NOTE: this is the pre-fix revision (the dataset's old_contents column):
# reply() defaulted markup to None, so json.dumps(None) produced "null"
# and group replies failed (issue #8).  Kept byte-identical apart from
# comments.
class Chat:
    """
    Wrapper for telegram chats, passed to most callbacks
    """

    def send_text(self, text, **options):
        """
        Send a text message to the chat, for available options see
        https://core.telegram.org/bots/api#sendmessage
        """
        return self.bot.send_message(self.id, text, **options)

    def reply(self, text, markup=None, parse_mode=None):
        # markup=None is serialized to "null" here -- the bug fixed later.
        return self.send_text(
            text,
            reply_to_message_id=self.message["message_id"],
            disable_web_page_preview='true',
            reply_markup=json.dumps(markup),
            parse_mode=parse_mode
        )

    def _send_to_chat(self, method, **options):
        # Generic helper: invoke the given Bot API method for this chat id.
        return self.bot.api_call(
            method,
            chat_id=str(self.id),
            **options
        )

    send_audio = partialmethod(_send_to_chat, "sendAudio")
    send_photo = partialmethod(_send_to_chat, "sendPhoto")
    send_video = partialmethod(_send_to_chat, "sendVideo")
    send_document = partialmethod(_send_to_chat, "sendDocument")
    send_sticker = partialmethod(_send_to_chat, "sendSticker")
    send_voice = partialmethod(_send_to_chat, "sendVoice")
    send_locaton = partialmethod(_send_to_chat, "sendLocation")
    send_chat_action = partialmethod(_send_to_chat, "sendChatAction")

    def forward_message(self, from_chat_id, message_id):
        # Forward a message from another chat into this one.
        return self.bot.api_call(
            "forwardMessage",
            chat_id=self.id,
            from_chat_id=from_chat_id,
            message_id=message_id
        )

    def is_group(self):
        # True when this chat is a group chat.
        return self.type == "group"

    def __init__(self, bot, chat_id, chat_type="private", src_message=None):
        self.bot = bot
        self.message = src_message
        # Placeholder sender when no source message is available.
        sender = src_message['from'] if src_message else {"first_name": "N/A"}
        self.sender = Sender(sender)
        self.id = chat_id
        self.type = chat_type

    @staticmethod
    def from_message(bot, message):
        # Build a Chat from a raw Telegram update message.
        chat = message["chat"]
        return Chat(bot, chat["id"], chat["type"], message)


class TgChat(Chat):
    # Deprecated alias for Chat.
    def __init__(self, *args, **kwargs):
        logger.warning("TgChat is depricated, use Chat instead")
        super().__init__(*args, **kwargs)


class Sender(dict):
    """A small wrapper for sender info, mostly used for logging"""
    def __repr__(self):
        uname = " (%s)" % self["username"] if "username" in self else ""
        return self['first_name'] + uname


class TgSender(Sender):
    # Deprecated alias for Sender.
    def __init__(self, *args, **kwargs):
        logger.warning("TgSender is depricated, use Sender instead")
        super().__init__(*args, **kwargs)
| Python | 0 |
d88a473030d52529ad0abc626776bbb5d4886067 | Add tests of raw FIPS U2F commands | test/on_yubikey/test_fips_u2f_commands.py | test/on_yubikey/test_fips_u2f_commands.py | import unittest
from fido2.hid import (CTAPHID)
from ykman.util import (TRANSPORT)
from ykman.driver_fido import (FIPS_U2F_CMD)
from .util import (DestructiveYubikeyTestCase, is_fips, open_device)
# CTAP HID command byte used by the raw message channel.
HID_CMD = 0x03


@unittest.skipIf(not is_fips(), 'FIPS YubiKey required.')
class TestFipsU2fCommands(DestructiveYubikeyTestCase):
    """Exercises raw FIPS U2F applet commands over CTAPHID.MSG.

    Requires a physical FIPS YubiKey; the PIN and reset tests mutate
    device state (hence the destructive test base class).
    """

    def test_echo_command(self):
        dev = open_device(transports=TRANSPORT.FIDO)

        res = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.ECHO], 0, 0, *[0, 0, 6], *b'012345'])

        # Payload echoed back, followed by status word 0x9000 (success).
        self.assertEqual(res, b'012345\x90\x00')

    def test_pin_commands(self):
        # Assumes no PIN is set at beginning of test
        dev = open_device(transports=TRANSPORT.FIDO)

        verify_res1 = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.VERIFY_PIN], 0, 0, *[0, 0, 6], *b'012345'])
        res = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.SET_PIN], 0, 0, *[0, 0, 7], *[6, *b'012345']])
        verify_res2 = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.VERIFY_PIN], 0, 0, *[0, 0, 6], *b'543210'])
        verify_res3 = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.VERIFY_PIN], 0, 0, *[0, 0, 6], *b'012345'])

        self.assertEqual(verify_res1, b'\x69\x86')  # PIN not set
        self.assertEqual(res, b'\x90\x00')  # Success
        self.assertEqual(verify_res2, b'\x63\xc0')  # Incorrect PIN
        self.assertEqual(verify_res3, b'\x90\x00')  # Success

    def test_reset_command(self):
        dev = open_device(transports=TRANSPORT.FIDO)

        res = dev.driver._dev.call(
            CTAPHID.MSG, [*[0, FIPS_U2F_CMD.RESET], 0, 0])

        # 0x6985: Touch required
        # 0x6986: Power cycle required
        # 0x9000: Success
        self.assertIn(res, [b'\x69\x85', b'\x69\x86', b'\x90\x00'])

    def test_verify_fips_mode_command(self):
        dev = open_device(transports=TRANSPORT.FIDO)

        res = dev.driver._dev.call(
            CTAPHID.MSG, [*[0, FIPS_U2F_CMD.VERIFY_FIPS_MODE], 0, 0])

        # 0x6a81: Function not supported (PIN not set - not FIPS mode)
        # 0x9000: Success (PIN set - FIPS mode)
        self.assertIn(res, [b'\x6a\x81', b'\x90\x00'])
| Python | 0.000001 | |
9ca93016ffa0f3c876f19fbb9bd5edaee3f86303 | Add AntiGraph as a subclass example. | examples/subclass/antigraph.py | examples/subclass/antigraph.py | """ Complement graph class for small footprint when working on dense graphs.
This class allows you to add the edges that *do not exist* in the dense
graph. However, when applying algorithms to this complement graph data
structure, it behaves as if it were the dense version. So it can be used
directly in several NetworkX algorithms.
This subclass has only been tested for k-core, connected_components,
and biconnected_components algorithms but might also work for other
algorithms.
"""
# Copyright (C) 2015 by
# Jordi Torrents <jtorrents@milnou.net>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.exception import NetworkXError
__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>'])
__all__ = ['AntiGraph']
class AntiGraph(nx.Graph):
"""
Class for complement graphs.
The main goal is to be able to work with big and dense graphs with
a low memory foodprint.
In this class you add the edges that *do not exist* in the dense graph,
the report methods of the class return the neighbors, the edges and
the degree as if it was the dense graph. Thus it's possible to use
an instance of this class with some of NetworkX functions.
"""
    # Every edge in the complement view shares this single attribute dict
    # (weight 1); sharing keeps the memory footprint tiny.
    all_edge_dict = {'weight': 1}

    def single_edge_dict(self):
        # Factory hook used by networkx whenever it needs an edge-attribute
        # dict; always hands back the shared singleton.
        return self.all_edge_dict
    edge_attr_dict_factory = single_edge_dict
    def __getitem__(self, n):
        """Return a dict of neighbors of node n in the dense graph.

        Parameters
        ----------
        n : node
           A node in the graph.

        Returns
        -------
        adj_dict : dictionary
           The adjacency dictionary for nodes connected to n.
        """
        # Complement adjacency: every stored node except n itself and the
        # "anti-edges" recorded in self.adj[n]; all edges share the
        # singleton attribute dict.
        return dict((node, self.single_edge_dict()) for node in
                    set(self.adj) - set(self.adj[n]) - set([n]))
    def neighbors(self, n):
        """Return a list of the nodes connected to the node n in
           the dense graph.

        Parameters
        ----------
        n : node
           A node in the graph

        Returns
        -------
        nlist : list
            A list of nodes that are adjacent to n.

        Raises
        ------
        NetworkXError
            If the node n is not in the graph.
        """
        try:
            # Complement of the stored anti-edges, excluding n itself.
            return list(set(self.adj) - set(self.adj[n]) - set([n]))
        except KeyError:
            raise NetworkXError("The node %s is not in the graph."%(n,))
def neighbors_iter(self, n):
"""Return an iterator over all neighbors of node n in the
dense graph.
"""
try:
return iter(set(self.adj) - set(self.adj[n]) - set([n]))
except KeyError:
raise NetworkXError("The node %s is not in the graph."%(n,))
def degree(self, nbunch=None, weight=None):
"""Return the degree of a node or nodes in the dense graph.
"""
if nbunch in self: # return a single node
return next(self.degree_iter(nbunch,weight))[1]
else: # return a dict
return dict(self.degree_iter(nbunch,weight))
def degree_iter(self, nbunch=None, weight=None):
"""Return an iterator for (node, degree) in the dense graph.
The node degree is the number of edges adjacent to the node.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
nd_iter : an iterator
The iterator returns two-tuples of (node, degree).
See Also
--------
degree
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> list(G.degree_iter(0)) # node 0 with degree 1
[(0, 1)]
>>> list(G.degree_iter([0,1]))
[(0, 1), (1, 2)]
"""
if nbunch is None:
nodes_nbrs = ((n, {v: self.single_edge_dict() for v in
set(self.adj) - set(self.adj[n]) - set([n])})
for n in self.nodes_iter())
else:
nodes_nbrs= ((n, {v: self.single_edge_dict() for v in
set(self.nodes()) - set(self.adj[n]) - set([n])})
for n in self.nbunch_iter(nbunch))
if weight is None:
for n,nbrs in nodes_nbrs:
yield (n,len(nbrs)+(n in nbrs)) # return tuple (n,degree)
else:
# AntiGraph is a ThinGraph so all edges have weight 1
for n,nbrs in nodes_nbrs:
yield (n, sum((nbrs[nbr].get(weight, 1) for nbr in nbrs)) +
(n in nbrs and nbrs[n].get(weight, 1)))
def adjacency_iter(self):
"""Return an iterator of (node, adjacency set) tuples for all nodes
in the dense graph.
This is the fastest way to look at every edge.
For directed graphs, only outgoing adjacencies are included.
Returns
-------
adj_iter : iterator
An iterator of (node, adjacency set) for all nodes in
the graph.
"""
for n in self.adj:
yield (n, set(self.adj) - set(self.adj[n]) - set([n]))
if __name__ == '__main__':
    # Smoke tests: build several pairs of graphs, a regular graph
    # and the AntiGraph of its complement, which must behave
    # as if it were the original graph.
    Gnp = nx.gnp_random_graph(20,0.8)
    Anp = AntiGraph(nx.complement(Gnp))
    Gd = nx.davis_southern_women_graph()
    Ad = AntiGraph(nx.complement(Gd))
    Gk = nx.karate_club_graph()
    Ak = AntiGraph(nx.complement(Gk))
    GA = [(Gnp, Anp), (Gd, Ad), (Gk, Ak)]
    # test connected components: every component found on the AntiGraph
    # must also be a component of the original dense graph
    for G, A in GA:
        gc = [set(c) for c in nx.connected_components(G)]
        ac = [set(c) for c in nx.connected_components(A)]
        for comp in ac:
            assert comp in gc
    # test biconnected components
    for G, A in GA:
        gc = [set(c) for c in nx.biconnected_components(G)]
        ac = [set(c) for c in nx.biconnected_components(A)]
        for comp in ac:
            assert comp in gc
    # test degree: single node, whole graph, weighted, and nbunch forms
    for G, A in GA:
        node = list(G.nodes())[0]
        nodes = list(G.nodes())[1:4]
        assert G.degree(node) == A.degree(node)
        assert sum(G.degree().values()) == sum(A.degree().values())
        # AntiGraph is a ThinGraph, so all the weights are 1
        assert sum(A.degree().values()) == sum(A.degree(weight='weight').values())
        assert sum(G.degree(nodes).values()) == sum(A.degree(nodes).values())
| Python | 0 | |
7f1f8c53508d7cbaf3513b738dff21e6ea6e5ca1 | add script to generate multiple instance catalogs in different bands | twinkles/InstcatGenerator.py | twinkles/InstcatGenerator.py | """
Based upon examples by Scott Daniel (scottvalscott@gmail.com) found here:
https://stash.lsstcorp.org/projects/SIM/repos/sims_catutils/browse/python/lsst/sims/
catUtils/exampleCatalogDefinitions/phoSimCatalogExamples.py
"""
from lsst.sims.catalogs.measures.instance import CompoundInstanceCatalog
from lsst.sims.catalogs.generation.db import CatalogDBObject
from lsst.sims.catUtils.baseCatalogModels import OpSim3_61DBObject
from lsst.sims.catUtils.exampleCatalogDefinitions.phoSimCatalogExamples import \
PhoSimCatalogPoint, PhoSimCatalogSersic2D, PhoSimCatalogZPoint
from lsst.sims.catUtils.utils import ObservationMetaDataGenerator
#from sprinkler import sprinklerCompound
class InstcatGenerator(object):
    """Generate phoSim instance catalogs for visits drawn from an OpSim DB.

    The OpSim database is queried once at construction time; the instance
    can then be called repeatedly to write one instance catalog per visit.
    """

    # Star catalog object ids included in every visit's catalog.
    _starObjNames = ['msstars', 'bhbstars', 'wdstars', 'rrlystars',
                     'cepheidstars']

    def __init__(self, opsim_db, fieldRA, fieldDec, boundLength=0.3):
        """Query OpSim for all observations of the given field region."""
        metadata_gen = ObservationMetaDataGenerator(database=opsim_db,
                                                    driver='sqlite')
        self.obs_md_results = metadata_gen.getObservationMetaData(
            fieldRA=fieldRA, fieldDec=fieldDec, boundLength=boundLength)

    def find_visits(self, bandpass, nmax=None):
        """Return up to nmax observations taken in the given bandpass.

        Visits are returned in the order the OpSim query produced them.
        """
        matches = []
        for obs_md in self.obs_md_results:
            if nmax is not None and len(matches) >= nmax:
                break
            if obs_md.bandpass == bandpass:
                matches.append(obs_md)
        return matches

    def __call__(self, outfile, obs_metadata):
        """Write star and galaxy phoSim catalogs for one visit to outfile."""
        # Point sources: one catalog per star population.
        catalogs = [PhoSimCatalogPoint(CatalogDBObject.from_objid(name),
                                       obs_metadata=obs_metadata)
                    for name in self._starObjNames]
        # Extended sources and AGN, appended in a fixed order.
        galaxy_specs = (('galaxyBulge', PhoSimCatalogSersic2D),
                        ('galaxyDisk', PhoSimCatalogSersic2D),
                        ('galaxyAgn', PhoSimCatalogZPoint))
        for objid, catalog_class in galaxy_specs:
            db_obj = CatalogDBObject.from_objid(objid)
            catalogs.append(catalog_class(db_obj, obs_metadata=obs_metadata))
        # Append every catalog to the same file; only the first one
        # writes the phoSim header.
        for index, catalog in enumerate(catalogs):
            catalog.write_catalog(outfile, write_mode='a',
                                  write_header=(index == 0),
                                  chunk_size=20000)
if __name__ == '__main__':
    import os
    import pickle
    import time
    # This following is a deep drilling field ID for enigma_1189, but
    # fieldID is not one of the selection options in
    # getObservationMetaData(...), so we need to continue using
    # fieldRA, fieldDec
    fieldID = 1427
    fieldRA = (53, 54)
    fieldDec = (-29, -27)
    opsim_db = '/nfs/slac/g/ki/ki18/jchiang/DESC/Twinkles/work/enigma_1189_sqlite.db'
    pickle_file = 'instcat_generator_enigma_1189_%(fieldID)i.pickle' % locals()
    t0 = time.time()
    # Cache the (slow) OpSim visit query in a pickle keyed by field ID so
    # repeated runs skip the database query.
    if not os.path.isfile(pickle_file):
        print "Extracting visits from %s:" % os.path.basename(opsim_db)
        generator = InstcatGenerator(opsim_db, fieldRA, fieldDec)
        pickle.dump(generator, open(pickle_file, 'w'))
        print "execution time:", time.time() - t0
    else:
        print "Loading pickle file with visits:", pickle_file
        generator = pickle.load(open(pickle_file))
        print "execution time:", time.time() - t0
    # Write at most one instance catalog per LSST band.
    nmax = 1
    for bandpass in 'ugrizy':
        print "band pass:", bandpass
        visits = generator.find_visits(bandpass, nmax=nmax)
        for visit in visits:
            obshistid = visit.phoSimMetaData['Opsim_obshistid'][0]
            outfile = 'phosim_input_%s_%07i.txt' % (bandpass, obshistid)
            print outfile
            generator(outfile, visit)
| Python | 0 | |
1675ecd5ea2d4aaf8d8b6aa76d007d081f92eba6 | add context processor for static | nurseconnect/context_processors.py | nurseconnect/context_processors.py | from django.conf import settings
def compress_settings(request):
    """Template context processor exposing STATIC_URL and ENV settings."""
    context = {
        'STATIC_URL': settings.STATIC_URL,
        'ENV': settings.ENV,
    }
    return context
| Python | 0.000002 | |
a1820a0e5f9bd891b20f70ab68dfd4bb385047a0 | Add utils to allow multiclass classification. | utils/multiclassification.py | utils/multiclassification.py | from __future__ import division
import numpy as np
from sklearn.multiclass import OneVsOneClassifier
from sklearn.multiclass import _fit_binary
from sklearn.externals.joblib import Parallel, delayed
from unbalanced_dataset import SMOTE
def _fit_ovo_binary(estimator, X, y, i, j, sampling=None):
    """Fit a single binary estimator (one-vs-one).

    Restricts (X, y) to samples labeled i or j, relabels them to {0, 1},
    optionally rebalances with SMOTE, then fits a clone of `estimator`
    on the resulting binary problem via sklearn's _fit_binary.
    """
    # Keep only samples belonging to class i or class j.
    cond = np.logical_or(y == i, y == j)
    y = y[cond]
    # Relabel: class i -> 0, class j -> 1.
    y_binary = np.empty(y.shape, np.int)
    y_binary[y == i] = 0
    y_binary[y == j] = 1
    ind = np.arange(X.shape[0])
    X_values = X[ind[cond]]
    y_values = y_binary
    if sampling == 'SMOTE':
        print 'SMOTE'
        # ratio=1 oversamples so the two classes are balanced before fitting.
        ratio = 1
        smote = SMOTE(ratio=ratio)
        X_values, y_values = smote.fit_transform(X_values, y_values)
    return _fit_binary(estimator, X_values, y_values, classes=[i, j])
class CustomOneVsOneClassifier(OneVsOneClassifier):
    """One-vs-one classifier with optional SMOTE oversampling per class pair.

    Parameters
    ----------
    estimator : estimator object
        Base binary estimator fitted for every unordered class pair.
    n_jobs : int
        Number of parallel jobs used to fit the pairwise estimators.
    sampling : str or None
        If 'SMOTE', each pairwise training set is rebalanced with SMOTE
        before fitting (see _fit_ovo_binary).
    """

    def __init__(self, estimator, n_jobs=1, sampling=None):
        self.estimator = estimator
        self.n_jobs = n_jobs
        self.sampling = sampling

    def predict_proba(self, X):
        # NOTE(review): despite the name, this returns the one-vs-one
        # decision_function values, not calibrated probabilities.
        return super(CustomOneVsOneClassifier, self).decision_function(X)

    def fit(self, X, y):
        """Fit underlying estimators.

        Parameters
        ----------
        X : (sparse) array-like, shape = [n_samples, n_features]
            Data.

        y : array-like, shape = [n_samples]
            Multi-class targets.

        Returns
        -------
        self
        """
        y = np.asarray(y)
        self.classes_ = np.unique(y)
        n_classes = self.classes_.shape[0]
        # One binary estimator per unordered class pair (i < j), fitted
        # in parallel with the configured sampling strategy.
        self.estimators_ = Parallel(n_jobs=self.n_jobs)(
            delayed(_fit_ovo_binary)(
                self.estimator, X, y,
                self.classes_[i], self.classes_[j], sampling=self.sampling)
            for i in range(n_classes) for j in range(i + 1, n_classes))

        return self
| Python | 0 | |
175554c4af88e4ba634976e893510f04b43442b7 | Add tool for plotting a connection box lookahead delay matrix. | utils/plot_connection_box.py | utils/plot_connection_box.py | import argparse
import capnp
import os.path
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
import numpy as np
def get_connection_box(cost_map, segment, connection_box):
    """Return the (cost matrix, (x_off, y_off)) entry for one
    segment/connection-box pair of the lookahead cost map.

    Returns None if the requested pair is out of range.
    """
    # Cost and offset maps must be indexed identically.
    assert cost_map.costMap.dims[0] == cost_map.offset.dims[0]
    assert cost_map.costMap.dims[1] == cost_map.offset.dims[1]

    num_segments = cost_map.costMap.dims[0]
    num_boxes = cost_map.costMap.dims[1]

    # Walk both flat data arrays in lock-step, row-major over
    # (segment, connection box).
    cost_iter = iter(cost_map.costMap.data)
    offset_iter = iter(cost_map.offset.data)
    for seg in range(num_segments):
        for box in range(num_boxes):
            cost_entry = next(cost_iter).value
            offset_entry = next(offset_iter).value
            if seg == segment and box == connection_box:
                return cost_entry, (offset_entry.x, offset_entry.y)
def plot_connection_box(cost_map, segment, connection_box):
    """Plot the delay entries of one connection-box cost matrix, both as a
    pcolormesh and as a filled contour plot, then show the figure."""
    m, (x_off, y_off) = get_connection_box(cost_map, segment, connection_box)
    assert len(m.dims) == 2
    x_dim = m.dims[0]
    y_dim = m.dims[1]

    # generate 2 2d grids for the x & y bounds
    y, x = np.mgrid[slice(y_off, y_off + y_dim), slice(x_off, x_off + x_dim)]

    delay = np.zeros((y_dim, x_dim))
    # NOTE(review): congestion is gathered below but never plotted.
    congestion = np.zeros((y_dim, x_dim))

    # Unpack the flat cost entries into the (y, x) grids.
    itr = iter(m.data)
    for x_idx in range(x_dim):
        for y_idx in range(y_dim):
            value = next(itr)
            x_val = x_idx + x_off
            y_val = y_idx + y_off
            delay[(x == x_val) & (y == y_val)] = value.value.delay
            congestion[(x == x_val) & (y == y_val)] = value.value.congestion

    print(delay)

    # x and y are bounds, so z should be the value *inside* those bounds.
    # Therefore, remove the last value from the z array.
    delay_levels = MaxNLocator(nbins=50).tick_values(delay.min(), delay.max())

    # pick the desired colormap, sensible levels, and define a normalization
    # instance which takes data values and translates those into levels.
    cmap = plt.get_cmap('PiYG')
    norm = BoundaryNorm(delay_levels, ncolors=cmap.N, clip=True)

    fig, (ax0, ax1) = plt.subplots(nrows=2)

    im = ax0.pcolormesh(x, y, delay, cmap=cmap, norm=norm)
    ax0.autoscale(False)  # To avoid that the scatter changes limits
    # Mark entries with infinite delay as scatter points.
    inf_idx = delay == float('inf')
    ax0.scatter(x[inf_idx], y[inf_idx])
    fig.colorbar(im, ax=ax0)
    ax0.set_title('pcolormesh with levels')

    # contours are *point* based plots, so convert our bound into point
    # centers
    cf = ax1.contourf(
        x + 1. / 2., y + 1. / 2., delay, levels=delay_levels, cmap=cmap
    )
    fig.colorbar(cf, ax=ax1)
    ax1.set_title('contourf with levels')

    # adjust spacing between subplots so `ax1` title and `ax0` tick labels
    # don't overlap
    fig.tight_layout()

    plt.show()
def main():
    """CLI entry: load a VprCostMap capnp file and plot one
    (segment, connection box) entry of it."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--schema_path', help='Path to connection map schema', required=True
    )
    parser.add_argument('--lookahead_map', required=True)
    parser.add_argument('--segment', required=True, type=int)
    parser.add_argument('--connection_box', required=True, type=int)

    args = parser.parse_args()

    # Load the capnp schema at runtime from the provided directory.
    connection_map = capnp.load(
        os.path.join(args.schema_path, 'connection_map.capnp')
    )

    with open(args.lookahead_map, 'rb') as f:
        # Large maps exceed capnp's default traversal limit, so raise it.
        cost_map = connection_map.VprCostMap.read(
            f, traversal_limit_in_words=1024 * 1024 * 1024
        )

    plot_connection_box(cost_map, args.segment, args.connection_box)


if __name__ == "__main__":
    main()
83f0e2ae541b6b08b3eb6d5d8097bdfd85ccbd3c | Create ROUSE_simulation.py | ROUSE_simulation.py | ROUSE_simulation.py | import numpy as np
from scipy.linalg import toeplitz
from scipy.optimize import curve_fit
import scipy.linalg as sla
from matplotlib import pyplot as plt
from numba import jit
from sys import argv
from tqdm import tqdm
from numba import guvectorize, float64, jit
def rouse_mat(n):
    """Return the n x n Rouse connectivity matrix of a linear chain.

    The matrix is the graph Laplacian of a path of n beads: 2 on the
    diagonal (1 at the two free ends) and -1 on the first off-diagonals.

    Replaces the previous O(n^2) Python-list construction with repeated
    ``np.append`` (which copies the whole array on every row) by a direct
    vectorized diagonal build.  Assumes n >= 2; for n == 1 this returns
    the 1x1 matrix [[1]] instead of the previous (malformed) 2x2 result.
    """
    mat = (np.diag(np.full(n, 2))
           - np.diag(np.ones(n - 1, dtype=int), 1)
           - np.diag(np.ones(n - 1, dtype=int), -1))
    # The two free chain ends have only one neighbor.
    mat[0, 0] = 1
    mat[-1, -1] = 1
    return mat
def zeta_mat(n, alpha, delta):
    """Return expm(-delta * T), where T is the symmetric Toeplitz matrix
    built from the exponentially decaying sequence exp(-alpha * k)."""
    decay = np.exp(-alpha * np.arange(n))
    return sla.expm(-delta * toeplitz(decay))
def Roessler2010_SRK2_rouse(A, B, y0, t, dW=None):
    '''Integrate the linear SDE dX = A X dt + B dW on the time grid `t`.

    For ROUSE systems:
        dr_i = 1/z_i * -((k_{i-1}(r_{i-1}-r_i)+k_i(r_{i+1}-r_i)) dt
               + 1/z_i \sqrt{2k_BTz_i} dW
    with constant spring constants k_i and mobilities z_i folded into A
    and B.

    :param A: matrix in the drift term of the equations
    :param B: noise amplitude in the equations
    :param y0: initial positions, shape (n_beads, n_dim)
    :param t: time grid; assumed uniformly spaced (dt = t[1] - t[0])
    :param dW: optional pre-drawn Wiener increments of shape
               (len(t) - 1, *y0.shape); drawn from N(0, dt) if omitted
    '''
    n_steps = t.shape[0]
    dt = t[1] - t[0]
    A_sq = A.dot(A)
    if dW is None:
        dW = np.random.normal(0, dt**0.5, (n_steps - 1, *y0.shape))
    traj = np.zeros((n_steps, *y0.shape))
    traj[0] = y0
    # Second-order deterministic Taylor step plus additive noise increment.
    for step in range(n_steps - 1):
        cur = traj[step]
        drift = A.dot(cur) * dt + 0.5 * A_sq.dot(cur) * dt ** 2
        traj[step + 1] = cur + drift + dW[step] * B
    return traj
# Simulation setup: 3-D chains; one batch of simulations per chain length
# given on the command line.
ndim = 3
ns = np.asarray(argv[1:], dtype=np.int)
# Total simulated time T, sampled in steps of 0.02.
T = 100
nT = int(T/0.02)
t=np.linspace(0,T,nT,endpoint=False)
for n in ns:
    ret = np.zeros((nT, ndim))
    msd = np.zeros((nT,))
    R = rouse_mat(n)
    # Average the chain's center-of-mass trajectory and its squared
    # displacement over 1000 independent noise realizations.
    for i in tqdm(range(1000), ascii=True, desc='Chian length of %d' % (n)):
        r = Roessler2010_SRK2_rouse(-3*R, np.ones((n,1))*np.sqrt(2), np.zeros((n,ndim)), t).mean(axis=1)
        ret += r
        msd += np.sum(r ** 2, axis=-1)
    # Write the ensemble-averaged trajectory and MSD, one column per axis.
    np.savetxt('traj_cm_%d.txt' % (n), np.vstack([t,ret.T/1000]).T)
    np.savetxt('msd_cm_%d.txt' % (n), np.vstack([t,msd/1000]).T)
| Python | 0 | |
18d11a06e1e84ece32277e0860229555aae321f0 | Add a snippet (Python). | python/cross-platform_home_directory.py | python/cross-platform_home_directory.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
# Print the current user's home directory (portable across platforms).
print(os.path.expanduser("~"))
| Python | 0.000035 | |
c8b78cab7a32a300e418033185595fd79a290823 | add bmf study | qlcoder/image_processing/bmp_python3.py | qlcoder/image_processing/bmp_python3.py | import binascii
# Read the BMP: the first 1078 bytes are kept verbatim as the header
# (presumably 14-byte file header + 40-byte info header + 1024-byte
# palette of an 8-bit BMP -- verify against the input file).
fi = open("aaa.bmp", "rb")
header = fi.read(1078)
line = fi.read()
fi.close()
# Concatenate every remaining pixel byte as an 8-bit binary string.
binline = ''
for i in range(0,len(line)):
    binline += bin(line[i])[2:].zfill(8)
# Insert a '0' bit before every 7th bit, i.e. re-expand a 7-bit-packed
# stream into 8-bit bytes (presumably decodes data hidden in the pixel
# stream -- confirm against the puzzle description).
newbinline = ''
for i in range(len(binline)):
    if(i%7 == 0):
        newbinline+='0'
    newbinline+=binline[i]
newhexline = hex(int(newbinline, 2))[2:]
# Prepend '0' before converting to bytes; assumes the hex string has odd
# length here -- TODO confirm for even-length inputs.
newhexline = '0' + newhexline
newbyteline = bytes().fromhex(newhexline)
# Write the original header followed by the transformed pixel data.
fo = open("out.bmp", "wb")
outbmp = header + newbyteline
line = fo.write(outbmp)
fo.close() | Python | 0.000001 | |
c9b3bd8309d3d1448823787160021a8688e8f3c1 | Add python to make vv h5 file | vv_h5_setup.py | vv_h5_setup.py | import tables
# Column layout for the per-slot statistics table (presumably V&V =
# verification & validation data -- confirm with the archive docs).
# `pos` fixes the column order in the HDF5 table.
vv_desc = dict(
    obsid=tables.IntCol(pos=0),
    revision=tables.IntCol(pos=1),
    most_recent=tables.IntCol(pos=2),
    slot=tables.IntCol(pos=3),
    type=tables.StringCol(10,pos=4),
    n_pts=tables.IntCol(pos=5),
    rad_off=tables.FloatCol(pos=6),
    frac_dy_big=tables.FloatCol(pos=7),
    frac_dz_big=tables.FloatCol(pos=8),
    frac_mag_big=tables.FloatCol(pos=9),
    mean_y =tables.FloatCol(pos=10),
    mean_z =tables.FloatCol(pos=11),
    dy_mean=tables.FloatCol(pos=12),
    dy_med =tables.FloatCol(pos=13),
    dy_rms =tables.FloatCol(pos=14),
    dz_mean=tables.FloatCol(pos=15),
    dz_med =tables.FloatCol(pos=16),
    dz_rms =tables.FloatCol(pos=17),
    mag_mean=tables.FloatCol(pos=18),
    mag_med =tables.FloatCol(pos=19),
    mag_rms =tables.FloatCol(pos=20),
    )

# Open in append mode and create the 'vv' table at the file root.
h5f = tables.openFile('/data/aca/archive/vv/vv.h5', 'a')
tbl = h5f.createTable('/', 'vv', vv_desc)
# Index obsid for fast lookups by observation id.
tbl.cols.obsid.createIndex()
h5f.close()
| Python | 0.000001 | |
836c7aa92cd9d35e7d54046e835f285410780b84 | Create nodejs.py | wigs/nodejs.py | wigs/nodejs.py | class nodejs(Wig):
git_uri = 'https://github.com/nodejs/node'
tarball_uri = 'https://github.com/nodejs/node/archive/v$RELEASE_VERSION$.tar.gz'
last_release_version = 'v4.7.2'
| Python | 0.000032 | |
1edf0898422c74173a9b6526c789b140938664b5 | add main module | server/__main__.py | server/__main__.py | import server | Python | 0.000001 | |
d48035b06b952b9ac4d95897d08de50d5977bf9f | Add basic test for OrderedDict. | tests/basics/ordereddict1.py | tests/basics/ordereddict1.py | try:
from collections import OrderedDict
except ImportError:
try:
from _collections import OrderedDict
except ImportError:
print("SKIP")
import sys
sys.exit()
# Mixed key types; iteration must follow insertion order.
d = OrderedDict([(10, 20), ("b", 100), (1, 2)])
print(list(d.keys()))
print(list(d.values()))
# Deleting an item must preserve the relative order of the rest.
del d["b"]
print(list(d.keys()))
print(list(d.values()))
| Python | 0 | |
45ccdce362694f50c43828e3923fc9e3fa32c8bb | Add list_callbacks.py | scripts/list_callbacks.py | scripts/list_callbacks.py | #!/usr/bin/env python2
import sys
from parse_header import *
def main(argv):
    """Print the name of every callback parsed from the header on stdin."""
    # get_callbacks yields (type, name, args, attrs); only names are printed.
    for type, name, args, attrs in get_callbacks(sys.stdin.read()):
        print name

if __name__ == "__main__":
    main(sys.argv)
| Python | 0.000003 | |
f075f21b53e13d53fc26e38bcf995d55ea44df67 | Patch bump for pypi | exa/__init__.py | exa/__init__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
'''
Exa
#########
This package creates a systematic infrastructure for an ecosystem of packages,
tailored to specific industry or academic displines, for organizing, processing,
analyzing, and visualizing data. It is built with minimal dependencies, leverages
established open-source packages, is itself extensible, and is targeted at both
industry and academic applications.
At a high level, data objects such as series or dataframes (i.e. `pandas`_
like objects) are organized into containers which track relationships between
these objects and provide methods for computation, conversion to other formats,
analysis, and visualization within the `Jupyter notebook`_ environment.
.. _pandas: http://pandas.pydata.org/pandas-docs/stable/index.html
.. _Jupyter notebook: http://jupyter.org/
'''
__exa_version__ = (0, 2, 5)
__version__ = '.'.join((str(v) for v in __exa_version__))
from exa import _config
from exa import log
from exa import test
from exa import relational
from exa import widget
from exa import math
from exa import distributed
from exa import mpl, tex
from exa import error
# User API
from exa.numerical import Series, DataFrame, Field3D, SparseSeries, SparseDataFrame
from exa.container import Container
from exa.editor import Editor
from exa.filetypes import CSV
| # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
'''
Exa
#########
This package creates a systematic infrastructure for an ecosystem of packages,
tailored to specific industry or academic displines, for organizing, processing,
analyzing, and visualizing data. It is built with minimal dependencies, leverages
established open-source packages, is itself extensible, and is targeted at both
industry and academic applications.
At a high level, data objects such as series or dataframes (i.e. `pandas`_
like objects) are organized into containers which track relationships between
these objects and provide methods for computation, conversion to other formats,
analysis, and visualization within the `Jupyter notebook`_ environment.
.. _pandas: http://pandas.pydata.org/pandas-docs/stable/index.html
.. _Jupyter notebook: http://jupyter.org/
'''
__exa_version__ = (0, 2, 4)
__version__ = '.'.join((str(v) for v in __exa_version__))
from exa import _config
from exa import log
from exa import test
from exa import relational
from exa import widget
from exa import math
from exa import distributed
from exa import mpl, tex
from exa import error
# User API
from exa.numerical import Series, DataFrame, Field3D, SparseSeries, SparseDataFrame
from exa.container import Container
from exa.editor import Editor
from exa.filetypes import CSV
| Python | 0 |
ec91d5106bfab93e0540e5dc4a0bbd9b7cea151b | add script to update old .h5 files to support normalization | scripts/update_data_h5.py | scripts/update_data_h5.py | # update h5 files created by old versions of pyannote-speaker-embedding
# estimate mu/sigma and save it back to the file
# usage: update_data_h5.py /path/to/file.h5
import sys
import h5py
import numpy as np
from tqdm import tqdm
data_h5 = sys.argv[1]

# First pass (read-only): accumulate per-sequence statistics so the global
# mean/std can be computed without loading all sequences at once.
with h5py.File(data_h5, mode='r') as fp:

    X = fp['X']

    weights, means, squared_means = zip(*(
        (len(x), np.mean(x, axis=0), np.mean(x**2, axis=0))
        for x in tqdm(X)))

# Length-weighted global mean and standard deviation:
# sigma^2 = E[x^2] - (E[x])^2.
mu = np.average(means, weights=weights, axis=0)
squared_mean = np.average(squared_means, weights=weights, axis=0)
sigma = np.sqrt(squared_mean - mu ** 2)

# Second pass (read-write): store the normalization statistics as
# attributes on the dataset itself, as newer code expects.
with h5py.File(data_h5, mode='r+') as fp:
    X = fp['X']
    X.attrs['mu'] = mu
    X.attrs['sigma'] = sigma
| Python | 0 | |
e63eb3f4dd194e98622d614a18a6f7bb21d839c9 | Add generic image service tests. | nova/tests/test_image.py | nova/tests/test_image.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Linux2Go
# Author: Soren Hansen
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from nova import context
from nova import exception
from nova import test
import nova.image
class _ImageTestCase(test.TestCase):
    """Service-agnostic behavioral tests for nova image services.

    Subclasses must set ``self.image_service`` in ``setUp`` before these
    tests run (see ``FakeImageTestCase``).
    """

    def setUp(self):
        super(_ImageTestCase, self).setUp()
        self.context = context.get_admin_context()

    def test_index(self):
        """index() returns only 'id' and 'name' per image."""
        res = self.image_service.index(self.context)
        for image in res:
            self.assertEquals(set(image.keys()), set(['id', 'name']))

    def test_detail(self):
        """detail() returns the full metadata set with correct types."""
        res = self.image_service.detail(self.context)
        for image in res:
            keys = set(image.keys())
            self.assertEquals(keys, set(['id', 'name', 'created_at',
                                         'updated_at', 'deleted_at', 'deleted',
                                         'status', 'is_public', 'properties']))

            self.assertTrue(isinstance(image['created_at'], datetime.datetime))
            self.assertTrue(isinstance(image['updated_at'], datetime.datetime))

            if not (isinstance(image['deleted_at'], datetime.datetime) or
                    image['deleted_at'] is None):
                self.fail('image\'s "deleted_at" attribute was neither a '
                          'datetime object nor None')

            def check_is_bool(image, key):
                # Fixed: this previously looked up 'deleted' regardless of
                # `key`, so the 'is_public' check silently re-tested the
                # 'deleted' attribute.
                val = image.get(key)
                if not isinstance(val, bool):
                    self.fail('image\'s "%s" attribute wasn\'t '
                              'a bool: %r' % (key, val))

            check_is_bool(image, 'deleted')
            check_is_bool(image, 'is_public')

    def test_index_and_detail_have_same_results(self):
        """index() and detail() agree on the set of (id, name) pairs."""
        index = self.image_service.index(self.context)
        detail = self.image_service.detail(self.context)
        index_set = set([(i['id'], i['name']) for i in index])
        detail_set = set([(i['id'], i['name']) for i in detail])
        self.assertEqual(index_set, detail_set)

    def test_show_raises_imagenotfound_for_invalid_id(self):
        self.assertRaises(exception.ImageNotFound,
                          self.image_service.show,
                          self.context,
                          'this image does not exist')

    def test_show_by_name(self):
        self.assertRaises(exception.ImageNotFound,
                          self.image_service.show_by_name,
                          self.context,
                          'this image does not exist')

    def test_create_adds_id(self):
        """create() without an id assigns one."""
        index = self.image_service.index(self.context)
        image_count = len(index)

        self.image_service.create(self.context, {})

        index = self.image_service.index(self.context)
        self.assertEquals(len(index), image_count + 1)

        self.assertTrue(index[0]['id'])

    def test_create_keeps_id(self):
        """create() with an explicit id preserves it."""
        self.image_service.create(self.context, {'id': '34'})
        self.image_service.show(self.context, '34')

    def test_create_rejects_duplicate_ids(self):
        self.image_service.create(self.context, {'id': '34'})
        self.assertRaises(exception.Duplicate,
                          self.image_service.create,
                          self.context,
                          {'id': '34'})

        # Make sure there's still one left
        self.image_service.show(self.context, '34')

    def test_update(self):
        self.image_service.create(self.context,
                                  {'id': '34', 'foo': 'bar'})
        self.image_service.update(self.context, '34',
                                  {'id': '34', 'foo': 'baz'})
        img = self.image_service.show(self.context, '34')
        self.assertEquals(img['foo'], 'baz')

    def test_delete(self):
        self.image_service.create(self.context, {'id': '34', 'foo': 'bar'})
        self.image_service.delete(self.context, '34')
        self.assertRaises(exception.NotFound,
                          self.image_service.show,
                          self.context,
                          '34')

    def test_delete_all(self):
        self.image_service.create(self.context, {'id': '32', 'foo': 'bar'})
        self.image_service.create(self.context, {'id': '33', 'foo': 'bar'})
        self.image_service.create(self.context, {'id': '34', 'foo': 'bar'})
        self.image_service.delete_all()
        index = self.image_service.index(self.context)
        self.assertEquals(len(index), 0)
class FakeImageTestCase(_ImageTestCase):
    """Run the generic image-service tests against the in-memory fake."""

    def setUp(self):
        super(FakeImageTestCase, self).setUp()
        self.image_service = nova.image.fake.FakeImageService()
| Python | 0 | |
651628a3f9d89a9365b6ba869db72400c3a6a63f | add one file I forgot | simulation_test.py | simulation_test.py | #!/usr/bin/env python
import unittest
import simulation
def state(visitors_per_bucket, baseline_conversions, treatment_conversions):
    """Build an ExperimentState from visitor and per-bucket conversion counts."""
    baseline_failures = visitors_per_bucket - baseline_conversions
    treatment_failures = visitors_per_bucket - treatment_conversions
    return simulation.ExperimentState(
        baseline_conversions,
        baseline_failures,
        treatment_conversions,
        treatment_failures,
    )
class ChisqDecisionTest(unittest.TestCase):
    """Tests for simulation.ChisqDecision (frequentist stopping rule)."""

    def test_sample_size_calculation(self):
        # test values from http://www.stat.ubc.ca/~rollin/stats/ssize/b2.html
        self.assertEqual(
            14751,
            simulation.ChisqDecision(0.95, 0.1).necessary_sample_size_per_bucket(0.1),
        )
        self.assertEqual(
            9780,
            simulation.ChisqDecision(0.85, 0.1).necessary_sample_size_per_bucket(0.1),
        )
        self.assertEqual(
            2507,
            simulation.ChisqDecision(0.95, 0.25).necessary_sample_size_per_bucket(0.1),
        )
        self.assertEqual(
            6510,
            simulation.ChisqDecision(0.95, 0.1).necessary_sample_size_per_bucket(0.2),
        )

    def test_decision(self):
        """Decision must be 'keep running' below sample size, then pick
        whichever bucket is significantly better."""
        baseline_rate = 0.5
        chisq_decision = simulation.ChisqDecision(0.95, 0.1)

        # sanity checks
        self.assertEqual('keep running', chisq_decision.decision(state(20, 7, 10), baseline_rate))
        self.assertEqual(
            'baseline',
            chisq_decision.decision(state(10000, 5000, 5000), baseline_rate),
        )
        self.assertEqual(
            'baseline',
            chisq_decision.decision(state(10000, 6000, 4000), baseline_rate),
        )
        self.assertEqual(
            'treatment',
            chisq_decision.decision(state(10000, 4000, 6000), baseline_rate),
        )

        # some close calls, using Chi-squared values from
        # http://www.graphpad.com/quickcalcs/contingency1.cfm
        self.assertEqual(
            'baseline',
            chisq_decision.decision(state(10000, 5000, 5100), baseline_rate),
        )
        self.assertEqual(
            'treatment',
            chisq_decision.decision(state(10000, 5000, 5150), baseline_rate),
        )
class BayesianDecisionTest(unittest.TestCase):
    """Tests for simulation.BayesianDecision (posterior probability and
    expected-loss stopping rule)."""

    def setUp(self):
        self.decision = simulation.BayesianDecision(0.01)

    def test_posterior_probability_treatment_is_better(self):
        # sanity checks: extreme and symmetric cases
        self.assertAlmostEqual(
            1,
            self.decision.posterior_probability_treatment_is_better(state(1000, 1, 999)),
        )
        self.assertAlmostEqual(
            0,
            self.decision.posterior_probability_treatment_is_better(state(1000, 999, 1)),
        )
        self.assertAlmostEqual(
            0.5,
            self.decision.posterior_probability_treatment_is_better(state(100, 50, 50)),
        )
        self.assertGreater(
            self.decision.posterior_probability_treatment_is_better(state(100, 50, 51)),
            0.5,
        )
        self.assertLess(
            self.decision.posterior_probability_treatment_is_better(state(100, 50, 49)),
            0.5,
        )

        # some less obvious ones which might be wrong (generated using my own implementation), but
        # useful for catching unintended changes at least
        self.assertAlmostEqual(
            0.92318343,
            self.decision.posterior_probability_treatment_is_better(state(1000, 100, 120)),
        )
        self.assertAlmostEqual(
            0.22343071,
            self.decision.posterior_probability_treatment_is_better(state(1000, 100, 90)),
        )

    def test_expected_loss_from_choosing_treatment(self):
        # sanity checks: near-certain loss and near-zero loss
        self.assertAlmostEqual(
            0.9,
            self.decision.expected_loss_from_choosing_treatment(state(1000, 950, 50)),
            places=2,
        )
        self.assertAlmostEqual(
            0,
            self.decision.expected_loss_from_choosing_treatment(state(1000, 1, 999)),
        )

        # some values from Chris Stucchio's numerical integration code
        # https://gist.github.com/stucchio/9090456
        # see stucchio.py in this repository
        self.assertAlmostEqual(
            0.017,
            self.decision.expected_loss_from_choosing_treatment(state(100, 10, 10)),
            places=3,
        )
        self.assertAlmostEqual(
            0.0005,
            self.decision.expected_loss_from_choosing_treatment(state(100, 10, 20)),
            places=4,
        )
        self.assertAlmostEqual(
            0.1,
            self.decision.expected_loss_from_choosing_treatment(state(100, 20, 10)),
            places=1,
        )
if __name__ == '__main__':
unittest.main()
| Python | 0.000001 | |
50ba17b46c7fcc7eb42a48a5ec82e295fdbeae13 | Add missing migration | migrations/versions/25ecf1c9b3fb_introduce_deploykey_entity.py | migrations/versions/25ecf1c9b3fb_introduce_deploykey_entity.py | """Introduce DeployKey entity
Revision ID: 25ecf1c9b3fb
Revises: 1c314d48261a
Create Date: 2014-02-08 02:56:34.174597
"""
# revision identifiers, used by Alembic.
revision = '25ecf1c9b3fb'
down_revision = '1c314d48261a'
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from kozmic.models import db, DeployKey, Project
def upgrade():
    """Create the deploy_key table and migrate per-project key columns.

    For every private (non-public) project the RSA key pair and GitHub
    key id are copied into a new DeployKey row; afterwards the old
    columns are dropped from the project table.
    """
    op.create_table('deploy_key',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('rsa_private_key', sa.Text(), nullable=False),
        sa.Column('rsa_public_key', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # Copy existing key material; public projects carry no deploy key.
    select = db.select(['id', 'is_public', 'rsa_public_key', 'rsa_private_key', 'gh_key_id'],
                       from_obj=Project.__tablename__)
    for id, is_public, rsa_public_key, rsa_private_key, gh_key_id \
            in db.session.execute(select).fetchall():
        if is_public:
            continue
        insert = DeployKey.__table__.insert().values(
            project_id=id,
            rsa_public_key=rsa_public_key,
            rsa_private_key=rsa_private_key,
            gh_id=gh_key_id)
        db.session.execute(insert)
    db.session.commit()

    op.drop_column(u'project', 'rsa_public_key')
    op.drop_column(u'project', 'rsa_private_key')
    op.drop_column(u'project', 'gh_key_id')
def downgrade():
    """Restore the project key columns and drop deploy_key.

    NOTE(review): no data is copied back from deploy_key here, so the
    restored NOT NULL columns come back empty -- this downgrade loses
    the migrated key material.
    """
    op.add_column(u'project', sa.Column('gh_key_id', mysql.INTEGER(display_width=11), nullable=False))
    op.add_column(u'project', sa.Column('rsa_private_key', mysql.MEDIUMTEXT(), nullable=False))
    op.add_column(u'project', sa.Column('rsa_public_key', mysql.MEDIUMTEXT(), nullable=False))
    op.drop_table('deploy_key')
| Python | 0.0002 | |
e9d2730e69228f458b6743515226b3060bf0012a | Create linode2do.py | linode2do.py | linode2do.py | from linode.api import Api
import requests
class APIError(Exception):
    """Raised when a DigitalOcean API response reports a non-OK status."""
class DigitalOceanEndpoint(object):
    def __init__(self, client_id, api_key):
        # Credentials attached to every API request via credentials().
        self.client_id = client_id
        self.api_key = api_key
def credentials(self, params={}):
params.update({
"client_id": self.client_id,
"api_key": self.api_key,
})
return params
    def assert_ok(self, response):
        """Raise APIError unless the response dict has status 'OK'.

        The error description falls back from 'error_message' to
        'message' to a dump of the whole response.
        """
        if response['status'] != 'OK':
            raise APIError(
                "API Error: {}".format(
                    response.get(
                        'error_message',
                        response.get(
                            'message',
                            "Unknown error ({})".format(response),
                        )
                    )
                )
            )
class Domains(list):
    """All domains in the DigitalOcean (API v1) account.

    The collection fetches every domain at construction time; individual
    zones and their DNS records are exposed through the nested
    Domain / Records / Record classes.
    """

    class Domain(dict):
        """A single DNS zone, fetched by id (or name) at construction time."""

        class Records(list):
            """The DNS records of one Domain."""

            class Record(dict):
                """One DNS record inside a Domain."""

                def __init__(self, domain, data=None):
                    self.domain = domain
                    # BUG FIX: dict.update(None) raises TypeError; treat a
                    # missing payload as an empty record instead.
                    self.update(data or {})

                def edit(self, record_type, data, **kwargs):
                    """Update this record on the API and refresh local state."""
                    url = "https://api.digitalocean.com/domains/{}/records/{}".format(
                        self.domain['id'],
                        self['id'],
                    )
                    params = self.domain.api.credentials({
                        "record_type": record_type,
                        "data": data,
                    })
                    for param in kwargs:
                        if param not in ("name", "priority", "port", "weight"):
                            raise Exception("Unknown parameter {}".format(param))
                        params[param] = kwargs[param]
                    r = requests.get(url, data=params).json()
                    self.domain.api.assert_ok(r)
                    self.update(r['record'])
                    return self

                def destroy(self):
                    """Delete this record from the zone; return the decoded response."""
                    url = "https://api.digitalocean.com/domains/{}/records/{}/destroy".format(
                        self.domain['id'],
                        self['id'],
                    )
                    # BUG FIX: the response must be decoded with .json()
                    # before assert_ok() subscripts it (it was previously
                    # handed the raw requests.Response object).
                    r = requests.get(url, data=self.domain.credentials()).json()
                    self.domain.assert_ok(r)
                    return r

            def __init__(self, domain):
                self.domain = domain
                # Only a fully-fetched domain (one with an 'id') can list
                # its records.
                if 'id' in domain:
                    url = "https://api.digitalocean.com/domains/{}/records".format(
                        domain['id']
                    )
                    r = requests.get(url, data=domain.api.credentials()).json()
                    domain.api.assert_ok(r)
                    self.extend(r['records'])

            def new(self, record_type, data, **kwargs):
                """Create a new DNS record in this domain and return its payload."""
                url = "https://api.digitalocean.com/domains/{}/records/new".format(self.domain['id'])
                params = self.domain.api.credentials({
                    "record_type": record_type,
                    "data": data,
                })
                for param in kwargs:
                    if param not in ("name", "priority", "port", "weight"):
                        raise Exception("Unknown parameter {}".format(param))
                    params[param] = kwargs[param]
                r = requests.get(url, data=params).json()
                self.domain.api.assert_ok(r)
                return(r['record'])

            def __call__(self, record_id):
                """Fetch a single record by id."""
                url = 'https://api.digitalocean.com/domains/{}/records/{}'.format(
                    self.domain['id'],
                    record_id,
                )
                # BUG FIX: Domain.credentials() used to require a positional
                # params argument, so this no-argument call raised TypeError.
                r = requests.get(url, data=self.domain.credentials()).json()
                self.domain.assert_ok(r)
                return self.Record(self.domain, r['record'])

        def __init__(self, api, domain_id):
            self.api = api
            url = "https://api.digitalocean.com/domains/{}".format(domain_id)
            r = requests.get(url, data=api.credentials()).json()
            api.assert_ok(r)
            self.update(r['domain'])
            self.records = self.Records(self)

        def credentials(self, params=None):
            """Merge `params` (optional) with the account credentials.

            BUG FIX: `params` used to be required, which broke the
            no-argument calls in Record.destroy and Records.__call__.
            """
            return self.api.credentials({} if params is None else params)

        def assert_ok(self, response):
            return self.api.assert_ok(response)

        def destroy(self):
            """Delete this domain (zone) from the account."""
            url = "https://api.digitalocean.com/domains/{}/destroy".format(
                self['id']
            )
            r = requests.get(url, data=self.api.credentials()).json()
            self.api.assert_ok(r)
            return r

        def __repr__(self):
            return "<DigitalOceanDomain {}>".format(self['name'])

    def __init__(self, api):
        self.api = api
        url = 'https://api.digitalocean.com/domains'
        domain_list = requests.get(url, data=api.credentials()).json()
        api.assert_ok(domain_list)
        self[:] = [self.Domain(self.api, d['id']) for d in domain_list['domains']]

    def __call__(self, domain_id):
        """Fetch a single domain by id or name."""
        return self.Domain(self.api, domain_id)

    def new(self, name, ip_address):
        """Create a domain with an initial A record and append it locally."""
        url = "https://api.digitalocean.com/domains/new"
        r = requests.get(url, data=self.api.credentials(
            {"name": name, "ip_address": ip_address}
        )).json()
        self.api.assert_ok(r)
        domain = self.Domain(self.api, r['domain']['id'])
        self.append(domain)
        return domain
class Client(DigitalOceanEndpoint):
    """DigitalOcean API v1 client; `client.domains` is fetched lazily."""

    def __init__(self, client_id, api_key):
        super(Client, self).__init__(client_id, api_key)

    def __getattr__(self, name):
        # Only fires for attributes not found through normal lookup;
        # `domains` is re-fetched from the API on every access.
        if name != 'domains':
            raise AttributeError("Attribute not found: {}".format(name))
        return Domains(self)
if __name__ == "__main__":
ln = Api(raw_input("Enter your Linode API key: "))
do = Client(
client_id=raw_input("Enter your DigitalOcean client ID: "),
api_key=raw_input("Enter your DigitalOcean API key: "),
)
for domain in ln.domain_list():
domain_name = domain['DOMAIN']
domain_id = domain['DOMAINID']
records = ln.domain_resource_list(domainID=domain['DOMAINID'])
www = [record for record in records if (
(record['NAME'] == 'www' and record['TYPE'].upper() == 'A') or
(record['NAME'] == '' and record['TYPE'].upper() == 'A')
)]
if not www:
print "\tPulando", domain_name
print "\tTransfering", domain_name
ip = www[0]['TARGET']
try:
do_domain = do.domains(domain['DOMAIN'])
except APIError:
do_domain = do.domains.new(domain_name, ip)
for record in records:
record_type = record['TYPE'].upper()
if record_type in ('NS', 'SOA'):
continue
print "\t\tRecord: {} {}".format(record_type, record['TARGET'])
if record_type in ('MX', 'CNAME'):
if "." in record['TARGET']:
if record['TARGET'][-1] != '.':
record['TARGET'] += '.'
else:
record['TARGET'] += '.' + domain_name + '.'
if not record['NAME']:
record['NAME'] = '@'
do_domain.records.new(
record_type=record_type,
data=record['TARGET'],
name=record['NAME'],
priority=record['PRIORITY'],
port=record['PORT'],
weight=record['WEIGHT'],
)
| Python | 0.000001 | |
0e833de83903c26fb3ca04c10b140c712350a12f | Create tests.py | unit-3-mixed-reading-and-assignment-lessons/lesson-3-assignment-one-code-block/tests.py | unit-3-mixed-reading-and-assignment-lessons/lesson-3-assignment-one-code-block/tests.py | import unittest
class ConvertTemperatureTestCase(unittest.TestCase):
    """Tests for a `convert_temperature(value, to=...)` function.

    NOTE(review): `convert_temperature` is neither defined nor imported in
    this file — it must be supplied by the lesson's solution code for these
    tests to run.
    """

    def test_fahrenheit_to_celsius(self):
        # 32°F is the freezing point: 0°C.
        self.assertEqual(convert_temperature(32, to='celsius'), 0)

    def test_celsius_to_fahrenheit(self):
        # 40°C -> 104°F.
        self.assertEqual(convert_temperature(40, to='fahrenheit'), 104)

    def test_default_parameter_is_celsius(self):
        # Omitting `to` must behave like to='celsius'.
        self.assertEqual(convert_temperature(32), 0)
| Python | 0.000001 | |
6c6d3d365e021918fe88450136a75bbac7a21d5c | add .percol.d | .percol.d/rc.py | .percol.d/rc.py | # Emacs like
# Register Emacs-style keybindings.  `percol` is injected into this rc
# file's namespace by percol when it loads the configuration.
percol.import_keymap({
    # Query-line editing.
    "C-h" : lambda percol: percol.command.delete_backward_char(),
    "C-d" : lambda percol: percol.command.delete_forward_char(),
    "C-k" : lambda percol: percol.command.kill_end_of_line(),
    "C-y" : lambda percol: percol.command.yank(),
    "C-t" : lambda percol: percol.command.transpose_chars(),
    # Cursor movement on the query line.
    "C-a" : lambda percol: percol.command.beginning_of_line(),
    "C-e" : lambda percol: percol.command.end_of_line(),
    "C-b" : lambda percol: percol.command.backward_char(),
    "C-f" : lambda percol: percol.command.forward_char(),
    # Word-wise movement and deletion.
    "M-f" : lambda percol: percol.command.forward_word(),
    "M-b" : lambda percol: percol.command.backward_word(),
    "M-d" : lambda percol: percol.command.delete_forward_word(),
    "M-h" : lambda percol: percol.command.delete_backward_word(),
    # Candidate-list navigation.
    "C-n" : lambda percol: percol.command.select_next(),
    "C-p" : lambda percol: percol.command.select_previous(),
    "C-v" : lambda percol: percol.command.select_next_page(),
    "M-v" : lambda percol: percol.command.select_previous_page(),
    "M-<" : lambda percol: percol.command.select_top(),
    "M->" : lambda percol: percol.command.select_bottom(),
    # Accept / cancel.
    "C-m" : lambda percol: percol.finish(),
    "C-j" : lambda percol: percol.finish(),
    "C-g" : lambda percol: percol.cancel(),
})
| Python | 0.000005 | |
d7595d6d80468ec5f0e4bde86db8a431c4384ad3 | Solve 41. | 041/solution.py | 041/solution.py | # coding: utf-8
""" Project Euler problem #41. """
import math as mt
def problem():
    u""" Solve the problem.

    We shall say that an n-digit number is pandigital if it makes use of all
    the digits 1 to n exactly once. For example, 2143 is a 4-digit pandigital
    and is also prime.

    What is the largest n-digit pandigital prime that exists?

    Solution: an integer is divisible by 3 (or 9) when its digit sum is, and
    the digit sums of 8- and 9-digit pandigitals (36 and 45) are divisible
    by 3, so only odd numbers in (4321, 7654321) need checking.

    Answer: 7652413
    """
    candidate = 7654321
    while candidate > 4321:
        if is_pandigital(candidate) and is_prime(candidate):
            return candidate
        candidate -= 2
def is_pandigital(*args):
    """ Check numbers is pandigital through 9.

    The combined digits of all arguments, sorted, must form a prefix of
    '123456789' (each digit 1..n exactly once).
    """
    digits = sorted(''.join(str(arg) for arg in args))
    return '123456789'.startswith(''.join(digits))
def is_prime(num):
    """ Check number is prime.

    Returns False for all num < 2 (the previous version raised ValueError
    on negative input via sqrt of a negative number).  Trial division only
    tests odd divisors up to sqrt(num).
    """
    if num < 2:
        return False
    if num == 2:
        return True
    if num % 2 == 0:
        return False
    for dd in range(3, int(mt.sqrt(num)) + 1, 2):
        if num % dd == 0:
            return False
    return True
def is_even(num):
    """ Check for number is even. """
    return not num % 2
# Script entry point.  Python 2 print statement — this file predates
# Python 3.
if __name__ == '__main__':
    print problem()
| Python | 0.998706 | |
7b381e309f6ea6b4dde3526c455f3f5bf88d6bf3 | Add check_https.py | check_https.py | check_https.py | #!/usr/bin/env python
""" @todo docstring me """
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals
)
import fnmatch
import glob
import io
import json
import re
import os
import sys
import pprint
import requests
from jsoncomment import JsonComment
from jsonschema import validate
from six.moves.urllib.parse import urlsplit, urlunsplit
class CheckURLs(object):
    """Scan JSON manifests and rewrite http:// URLs to https:// in place.

    For each manifest the raw file text is edited with a literal
    search-and-replace (so formatting and comments survive); a URL is only
    upgraded when the https variant answers with a 2xx status.
    """

    def __init__(self):
        self.file = ''  # manifest currently being processed
        self.last = ''  # last manifest a change was printed for

    def is_https(self, url):
        """Return True if `url` already uses the https scheme."""
        return re.search(r'^https', url, re.I) is not None

    def httpsify(self, url):
        """Return `url` with its scheme forced to https (no-op if already https).

        BUG FIX: removed a duplicated scheme check and a duplicated
        `parts[0] = 'https'` assignment left over from earlier edits.
        """
        if self.is_https(url):
            return url
        parts = list(urlsplit(url))
        parts[0] = 'https'
        return urlunsplit(parts)

    def check_url(self, url, key, data, get=True, hash='', desc=''):
        """Try to upgrade one URL inside the manifest text `data`.

        Returns (new_data, changed).  When `get` is true the https variant
        is fetched first and the rewrite only happens on a 2xx response;
        any fetch failure leaves the data untouched.  When `get` is false
        nothing is rewritten.  `hash` is accepted for signature
        compatibility but is currently unused.
        """
        if desc:
            key += '.' + desc
        if self.is_https(url):
            return (data, False)
        new_url = self.httpsify(url)
        if get:
            try:
                r = requests.get(new_url)
                if r.status_code < 200 or r.status_code > 299:
                    return (data, False)
                # Print the manifest header once per file.
                if self.file != self.last:
                    print(" %s:" % self.file)
                print(" %s: %s: %s" % (r.status_code, key, url))
            except Exception:
                # Broad by design: any network/SSL failure means "keep the
                # http URL as-is".
                return (data, False)
            new_data = re.sub(re.escape(url), new_url, data)
            return (new_data, new_data != data)
        return (data, False)

    def check_urls(self, url_or_list, key, data, get=True, hash='', desc=''):
        """check_url over either a single URL or a list of URLs."""
        if isinstance(url_or_list, list):
            updated = False
            for index, url in enumerate(url_or_list):
                # A list of URLs may come with a parallel list of hashes.
                if isinstance(hash, list):
                    hash_value = hash[index]
                else:
                    hash_value = ''
                (data, changed) = self.check_url(url, key, data, get, hash_value, desc)
                if changed:
                    updated = True
            return (data, updated)
        # BUG FIX: `desc` was previously passed positionally into the
        # `hash` parameter slot, dropping the description from log output.
        return self.check_url(url_or_list, key, data, get, hash, desc)

    def process(self, j, key, data, get=True, hash='', desc=''):
        """Https-ify manifest field `key` within the raw text `data`.

        The field may be a URL string, a list of URLs, or a dict carrying
        'url' (and optionally 'hash') entries.  Returns (data, changed).
        """
        if key not in j:
            return (data, False)
        if isinstance(j[key], dict):
            if 'url' not in j[key]:
                return (data, False)
            if not hash:
                if 'hash' in j[key]:
                    hash = j[key]['hash']
            return self.check_urls(j[key]['url'], key, data, get, hash, desc)
        return self.check_urls(j[key], key, data, get, hash, desc)

    def run(self, args=None):
        """Process every *.json manifest in each directory in `args`
        (default: command-line arguments, falling back to '.').

        Changed manifests are backed up to <name>.json.bak before being
        rewritten.
        """
        if not args:
            args = sys.argv[1:]
        if not args:
            args = ['.']
        for arg in args:
            mask = arg + '/*.json'
            print("%s:" % mask)
            parser = JsonComment(json)
            for file in glob.glob(mask):
                self.file = file
                with io.open(file, 'r', encoding='utf-8') as f:
                    data = f.read()
                orig_data = data
                with io.open(file, 'r', encoding='utf-8') as f:
                    j = parser.load(f)
                # Only the 'license' field is rewritten at the moment; the
                # previous commented-out experiments for url/checkver/
                # architecture handling have been removed.
                (data, changed) = self.process(j, 'license', data)
                self.last = file
                if data != orig_data:
                    print("Updating %s" % file)
                    os.rename(file, file + '.bak')
                    with io.open(file, 'w', encoding='utf-8') as f:
                        f.write(data)
checker.run()
sys.exit(0)
| Python | 0.000003 | |
def bills_needed(money):
    """Determine optimal numbers of each bill denomination for amount.

    Args:
        money, int: Amount of money to figure bills for

    Returns:
        (bill_count, cash): total number of bills used, and a dict
        mapping each denomination to how many of that bill are needed.
        Non-positive amounts yield (0, {}).
    """
    denominations = (100, 50, 20, 10, 5, 2, 1)
    cash = {}
    bill_count = 0
    remaining = money
    if money > 0:
        for denomination in denominations:
            count, remaining = divmod(remaining, denomination)
            if count:
                cash[denomination] = count
                bill_count += count
    return bill_count, cash
def test_bills_needed():
    """Print the bill breakdown for a handful of sample amounts."""
    amounts = (1, 2, 42, 51, 123, 222, 500)
    for amount in amounts:
        count, cash = bills_needed(amount)
        print('Money: {}, Bills: {} - {}'.format(amount, count, cash))
def main():
    """Entry point: run the demo."""
    test_bills_needed()


if __name__ == '__main__':
    # BUG FIX: was an unconditional main() call, which also ran the demo
    # whenever this module was imported.
    main()
| Python | 0.000015 | |
9bcb2566afa8191e24fb4f66b3fb882724ba4083 | Test ragged getitem | thinc/tests/test_indexing.py | thinc/tests/test_indexing.py | import pytest
import numpy
from numpy.testing import assert_allclose
from thinc.types import Ragged
@pytest.fixture
def ragged():
    # A small Ragged fixture: a 20x4 float matrix split into 5 variable-
    # length sequences of lengths [4, 2, 8, 1, 4].
    # NOTE(review): the lengths sum to 19 while `data` has 20 rows, so the
    # last row is unreferenced — confirm this is intentional.
    data = numpy.zeros((20, 4), dtype="f")
    lengths = numpy.array([4, 2, 8, 1, 4], dtype="i")
    # Give the first six rows distinct values so the indexing tests can
    # tell sub-sequences apart.
    data[0] = 0
    data[1] = 1
    data[2] = 2
    data[3] = 3
    data[4] = 4
    data[5] = 5
    return Ragged(data, lengths)
def test_ragged_starts_ends(ragged):
    # starts/ends are the cumulative offsets implied by the lengths
    # [4, 2, 8, 1, 4]: sequence i occupies data[starts[i]:ends[i]].
    starts = ragged._get_starts()
    ends = ragged._get_ends()
    assert list(starts) == [0, 4, 6, 14, 15]
    assert list(ends) == [4, 6, 14, 15, 19]
def test_ragged_simple_index(ragged, i=1):
    # Indexing with a single int selects that sequence: sequence 1 starts
    # at row 4 and has length 2, hence rows 4:6 of the data.
    # NOTE(review): `i` is a defaulted parameter, not a pytest fixture, so
    # pytest always runs this with i == 1.
    r = ragged[i]
    assert_allclose(r.data, ragged.data[4:6])
    assert_allclose(r.lengths, ragged.lengths[i:i+1])
def test_ragged_slice_index(ragged, start=0, end=2):
    # Slicing keeps the first end-start sequences; the row count of the
    # sliced data must equal the sum of the kept lengths.
    # NOTE(review): the second tuple element compares r.data.shape[1] to
    # itself, so only the row count is actually constrained here.
    r = ragged[start:end]
    size = ragged.lengths[start:end].sum()
    assert r.data.shape == (size, r.data.shape[1])
    assert_allclose(r.lengths, ragged.lengths[start:end])
def test_ragged_array_index(ragged):
    """Indexing with an integer array selects those sequences; the result's
    data row count equals the sum of the selected lengths (8 + 2 + 4)."""
    arr = numpy.array([2, 1, 4], dtype="i")
    # BUG FIX: removed a leftover debug print(arr).
    r = ragged[arr]
    assert r.data.shape[0] == ragged.lengths[arr].sum()
| Python | 0 | |
f433cdb41f33a7b9daeaf276bf19d2617534e781 | Add Tensor Flow | python/src/fft/fourier_nd.py | python/src/fft/fourier_nd.py | import numpy as np
import pylab as plt
import pandas as pd
from numpy import fft
import src.mylib.mfile as mfile
def bandpass_filter(x, freq, frequency_of_signal=0, band=0.1):
    """Pass `x` through when |freq| lies strictly inside the band around
    frequency_of_signal; suppress it (return 0) otherwise."""
    lo = frequency_of_signal - band
    hi = frequency_of_signal + band
    return x if lo < abs(freq) < hi else 0
# Load two closing-price series from the local forex SQLite DB and
# inner-join them on their shared index.
# NOTE(review): `mfile.loadClose` is a project-local helper — confirm it
# returns pandas objects indexed so the inner join aligns dates.
d1 = mfile.loadClose('JPY=X', '../db/forex.db')
d2 = mfile.loadClose('GBP=X', '../db/forex.db')
ds = pd.concat([d1, d2], axis=1, join='inner')
x = ds.values
# NOTE(review): range(1) runs exactly once with i == 0, so x = x[i:] is a
# no-op — looks like a leftover from experimenting with several start
# offsets; confirm before simplifying.
for i in range(1):
    x = x[i:]
    N = len(x)
    # N-dimensional FFT over the joined price matrix.
    spectrum = fft.fftn(x)
    feq = fft.fftfreq(N) # frequencies
    ampli = np.absolute(spectrum) # amplitude
    phase = np.angle(spectrum) # phase
    #print(phase)
    # Sort frequencies by descending amplitude, per column.
    index = np.argsort(-ampli, axis = 0)
    sfreq = feq[index]
    sampl = ampli[index]
    #print(sampl[1:10])
    #sfreq = np.where(sfreq > 0)
    #big = list(zip(*sfreq))
    # feq * N converts fftfreq's cycles-per-sample into cycles over the
    # whole N-sample window; show the dominant components (index 0 is the
    # large DC term, hence the [1:10] slice).
    print(sfreq[1:10] * N)
    plt.plot(sfreq * N, 'o')
    #F_filtered = np.asanyarray([bandpass_filter(x, freq) for x, freq in zip(spectrum, feq)])
    #filtered_signal = np.fft.ifft(F_filtered)
    #plt.semilogy(feq[1:], ampli[1:]), 'o') #zero feq is very large
    #plt.semilogy(ampli[1:])
    plt.legend()
    plt.show()
| Python | 0.000019 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.