commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
175b36b0eb1e84378e350ddc31da3ef7fcae32c2 | Add test. | test/test.py | test/test.py | #!/usr/bin/env python
# Test PyCharlockHolmes
#
from charlockholmes import detect
TEST_FILES = {
"py": [
"file/test.py",
{'confidence': 34, 'type': 'text', 'language': 'en', 'encoding': 'ISO-8859-1'}
],
"txt": [
"file/test.txt",
{'confidence': 16, 'type': 'text', 'language': 'en', 'encoding': 'ISO-8859-1'}
],
"c": [
"file/test.c",
{'confidence': 50, 'type': 'text', 'language': 'en', 'encoding': 'ISO-8859-1'}
],
"sh": [
"file/test.sh",
{'confidence': 21, 'type': 'text', 'language': 'en', 'encoding': 'ISO-8859-1'}
],
"elf": [
"file/test",
{'confidence': 100, 'type': 'text'}
],
"bz2": [
"file/test.tar.bz2",
{'confidence': 100, 'type': 'text'}
],
"gz": [
"file/test.tar.gz",
{'confidence': 100, 'type': 'text'}
],
}
for test in TEST_FILES:
file_path = TEST_FILES[test][0]
file_result = TEST_FILES[test][1]
content = open(file_path).read()
test_result = detect(content)
if test_result == file_result:
print file_path + ": OK"
else:
print file_path + ": ERROR"
| Python | 0.000001 | |
829defd825d5e311ad187569ba61381ecb40dd08 | Add q1 2019 | 2019/q1.py | 2019/q1.py | """
BIO 2019 Q1: Palindromes
This ended up being surprisingly difficult; for whatever reason I found it hard
to reason about.
I found it easier to think about how, given a palindrome, I would calculate the following
palindrome. There are ~2 cases:
Odd number of digits: [left][middle][right = reversed(right)]
Even number of digits: [left][right = reversed(right)]
In the first case, we can (hopefully) obviously generate the next palindrome by adding one to the
middle digit, and carrying the one into the left hand side as if you were doing regular addition,
and then reflecting the new value to produce a new palindrome.
In the second case, we can basically do the same thing, but without the middle digit.
And then if we are still 'carrying' anything by the time we get to the end, this becomes a new
left-most digit, and the right most digit becomes the new middle digit.
"""
class Palindrome:
    """A mutable palindrome, stored as a left half plus an optional middle digit.

    ``left`` holds the digits of the left half (most significant first);
    ``middle`` is the centre digit for odd-length palindromes, or None for
    even-length ones.  The full value is left + [middle] + reversed(left).
    """

    def __init__(self, left, middle):
        # middle must be a single digit 0-9, or absent.
        assert middle is None or middle < 10 and middle >= 0
        self.left = list(int(x) for x in str(left))
        self.middle = middle

    def add_one_left(self, carry):
        """Add one to the left half, rippling a carry right-to-left.

        If the carry survives past the most significant digit, the palindrome
        grows by one digit: even-length grows to odd (e.g. 99 -> 101 shape)
        and odd-length grows to even (e.g. 999 -> 1001 shape).
        """
        for i in range(len(self.left)):
            # Walk the left half from least-significant (rightmost) digit.
            ix = -(i + 1)
            if self.left[ix] == 9:
                self.left[ix] = 0
                carry = True
            else:
                self.left[ix] += 1
                carry = False
                break
        if carry and self.middle is None:
            # Even -> odd: old last left digit becomes the middle, and a
            # leading 1 appears.
            self.middle = self.left[-1]
            self.left = [1] + self.left[:-1]
        elif carry and self.middle is not None:
            # Odd -> even: drop the middle and prepend a leading 1.
            self.left = [1] + self.left
            self.middle = None

    def next_palindrome(self):
        """Advance in place to the next larger palindrome."""
        if self.middle is not None:
            if self.middle == 9:
                # Middle wraps to 0 and the carry moves into the left half.
                self.middle = 0
                self.add_one_left(carry = True)
            else:
                self.middle += 1
        else:
            self.add_one_left(carry = False)

    def as_int(self):
        """Return the full palindrome as an int (left + middle + mirror)."""
        if self.middle is None:
            l = self.left + list(reversed(self.left))
        else:
            l = self.left + [self.middle] + list(reversed(self.left))
        return int("".join(str(x) for x in l))

    @staticmethod
    def of_int(i):
        """Build a Palindrome from the left half (and middle digit) of ``i``.

        Note: this mirrors the left half of ``i``; the result is not
        necessarily equal to ``i`` unless ``i`` is itself a palindrome.
        """
        s = str(i)
        if len(s) % 2 == 0:
            left = [int(x) for x in s[:len(s) //2]]
            middle = None
        else:
            left = [int(x) for x in s[:len(s) //2]]
            middle = int(s[len(left)])
        return Palindrome("".join(str(x) for x in left), middle)

    def __str__(self):
        return str(self.as_int())
# Read a number from stdin and print the smallest palindrome strictly
# greater than it.  Mirroring the input's left half gives the smallest
# palindrome sharing that half; if it is not already larger than the input,
# step to the next palindrome.
i = input()
in_int = int(i)
p = Palindrome.of_int(i)
p_int = p.as_int()
if p_int > in_int:
    print(p_int)
else:
    p.next_palindrome()
    print(p)
| Python | 0.000002 | |
a5ff7dfacfb151297636bcdc1a4b45400bf27085 | Add script to extract features from TPNModel. | src/tpn/recurrent_extract_features.py | src/tpn/recurrent_extract_features.py | #!/usr/bin/env python
import os
import os.path as osp
import numpy as np
import tensorflow as tf
from model import TPNModel
import argparse
import glog as log
import glob
from data_io import tpn_test_iterator
import cPickle
def bbox_transform_inv(boxes, deltas):
if boxes.shape[0] == 0:
return np.zeros((0, deltas.shape[1]), dtype=deltas.dtype)
boxes = boxes.astype(deltas.dtype, copy=False)
widths = boxes[:, 2] - boxes[:, 0] + 1.0
heights = boxes[:, 3] - boxes[:, 1] + 1.0
ctr_x = boxes[:, 0] + 0.5 * widths
ctr_y = boxes[:, 1] + 0.5 * heights
dx = deltas[:, 0::4]
dy = deltas[:, 1::4]
dw = deltas[:, 2::4]
dh = deltas[:, 3::4]
pred_ctr_x = dx * widths[:, np.newaxis] + ctr_x[:, np.newaxis]
pred_ctr_y = dy * heights[:, np.newaxis] + ctr_y[:, np.newaxis]
pred_w = np.exp(dw) * widths[:, np.newaxis]
pred_h = np.exp(dh) * heights[:, np.newaxis]
pred_boxes = np.zeros(deltas.shape, dtype=deltas.dtype)
# x1
pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w
# y1
pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h
# x2
pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w
# y2
pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h
return pred_boxes
logging = tf.logging
def test_vid(session, m, vid_file, verbose=True):
    """Run the TPN model over every track of one video file.

    session:  an active tf.Session with the model variables restored.
    m:        a TPNModel built with batch_size == 1.
    vid_file: path passed to tpn_test_iterator().
    Returns a list of per-track result dicts; also prints the static vs
    LSTM classification accuracy for the video.
    """
    assert m.batch_size == 1
    tracks = tpn_test_iterator(vid_file)
    # import pdb
    # pdb.set_trace()
    cum_acc_static = 0.
    cum_acc_lstm = 0.
    print vid_file
    vid_res = []
    for ind, track in enumerate(tracks, start=1):
        # Pad the track's features to the model's fixed num_steps length.
        track_length = track['feature'].shape[0]
        expend_feat = np.zeros((m.num_steps,) + track['feature'].shape[1:])
        expend_feat[:track_length] = track['feature']
        # Run the recurrent model starting from a fresh initial state.
        state = session.run([m.initial_state])
        cls_scores, bbox_deltas, end_probs, state = session.run(
            [m.cls_scores, m.bbox_pred, m.end_probs, m.final_state],
            {m.input_data: expend_feat[np.newaxis,:,:],
             m.initial_state: state[0]})
        # Trim outputs back to the true (unpadded) track length.
        cls_labels = track['class_label']
        gt_len = cls_labels.shape[0]
        bbox_pred = bbox_transform_inv(track['roi'], bbox_deltas[:gt_len,:])
        cls_pred_lstm = np.argmax(cls_scores, axis=1)[:gt_len]
        end_probs = end_probs[:gt_len]
        # Compare LSTM predictions with the precomputed static scores.
        cls_pred_static = np.argmax(track['scores'], axis=1)[:gt_len]
        cum_acc_lstm += np.mean((cls_labels == cls_pred_lstm))
        cum_acc_static += np.mean((cls_labels == cls_pred_static))
        # Collect per-track results (original fields plus LSTM outputs).
        track_res = {}
        for key in ['roi', 'frame', 'bbox', 'scores', 'anchor']:
            track_res[key] = track[key]
        track_res['scores_lstm'] = cls_scores[:gt_len,:]
        track_res['end_lstm'] = end_probs
        track_res['bbox_lstm'] = bbox_pred.reshape((gt_len, -1, 4))
        vid_res.append(track_res)
    cum_acc_lstm /= len(tracks)
    cum_acc_static /= len(tracks)
    print "Accuracy (Static): {:.03f} Accuracy (LSTM): {:.03f}".format(cum_acc_static, cum_acc_lstm)
    return vid_res
class TestConfig(object):
    """Default config."""
    # Initialization / optimization hyper-parameters.
    init_scale = 0.01
    learning_rate = 0.001
    momentum = 0.9
    max_grad_norm = 1.5
    # Sequence/model dimensions.
    num_steps = 20
    input_size = 1024
    hidden_size = 1024
    # Training schedule (unused at test time but required by TPNModel).
    max_epoch = 5
    iter_epoch = 2000
    keep_prob = 1.0
    lr_decay = 0.5
    # batch_size must be 1 for test_vid().
    batch_size = 1
    num_classes = 31
    # Loss weights; bbox regression loss is disabled here.
    cls_weight = 1.0
    bbox_weight = 0.0
    ending_weight = 1.0
    vid_per_batch = 4
    # Optional paths to pretrained classifier / bbox weights (empty = none).
    cls_init = ''
    bbox_init = ''
def main(args):
    """Restore a trained TPNModel and dump per-track features for every video.

    Results are written as one pickle per track under
    ``<save_dir>/<vid_name>/<track_id>.pkl``.
    """
    if not args.data_path:
        raise ValueError("Must set --data_path to TPN data directory")
    log.info("Processing data...")
    # raw_data = tpn_raw_data(args.data_path)
    # train_data, valid_data = raw_data

    config = TestConfig()
    config.num_layers = args.num_layers
    #tf.set_random_seed(1017)
    vids = glob.glob(osp.join(args.data_path, '*'))

    with tf.Graph().as_default(), tf.Session() as session:
        # NOTE(review): ``initializer`` is built but variable_scope is given
        # initializer=None, so it is never used - confirm this is intended.
        initializer = tf.random_uniform_initializer(-config.init_scale,
                                                    config.init_scale, seed=1017)
        with tf.variable_scope("model", reuse=None, initializer=None):
            m = TPNModel(is_training=False, config=config)

        # Restore trained weights before inference.
        saver = tf.train.Saver()
        print "Retoring from {}".format(args.model_path)
        saver.restore(session, args.model_path)

        for vid_file in vids:
            vid_name = osp.split(vid_file)[-1]
            save_dir = osp.join(args.save_dir, vid_name)
            if not osp.isdir(save_dir):
                os.makedirs(save_dir)
            outputs = test_vid(session, m, vid_file, verbose=True)
            # One pickle per track, zero-padded so files sort naturally.
            for track_id, track in enumerate(outputs):
                with open(osp.join(save_dir, '{:06d}.pkl'.format(track_id)), 'wb') as f:
                    cPickle.dump(track, f, cPickle.HIGHEST_PROTOCOL)
if __name__ == '__main__':
    # Positional CLI: data_path save_dir model_path num_layers.
    parser = argparse.ArgumentParser('Extracting recurrent features.')
    parser.add_argument('data_path',
                        help='Data path')
    parser.add_argument('save_dir',
                        help='Result directory')
    parser.add_argument('model_path', help='model_stored path')
    parser.add_argument('num_layers', type=int,
                        help='Number of layers')
    args = parser.parse_args()
    main(args)
| Python | 0 | |
77cb3a711170d0f37a0d5d5cf9744ffc8f7242fd | Add test for comparing performance on virtual machines | henrste/test-vm.py | henrste/test-vm.py | #!/usr/bin/env python3
from framework.test_framework import Testbed, TestEnv, require_on_aqm_node
from framework.test_utils import *
import time
def test_compose():
    """Compose and run the VM comparison test matrix.

    Builds branch generators (shared via closure state) and hands them to
    ``run_test`` as a pipeline of test dimensions.
    """
    # Shared between branch_udp_rate (writer) and my_test (reader).
    udp_rate = 0

    def branch_udp_rate(rate_list, title='UDP-rate: %d Mb/s'):
        # Branch factory: yields one test variant per UDP rate and records
        # the active rate in the enclosing ``udp_rate``.
        def branch(testdef):
            nonlocal udp_rate
            for rate in rate_list:
                udp_rate = rate
                yield {
                    'tag': 'udp-rate-%d' % rate,
                    'title': title % rate,
                    'titlelabel': 'UDP Rate [Mb/s]',
                }
        return branch

    def branch_titles(titles):
        # Branch factory: purely cosmetic variants (one per (tag, title)).
        def branch(testdef):
            for tag, title in titles:
                yield {
                    'tag': 'title-%s' % tag,
                    'title': title,
                    'titlelabel': '',
                }
        return branch

    def branch_cc(testdef):
        # Placeholder; congestion control is configured on the testbed below.
        pass

    def my_test(testcase):
        # 15 greedy flows per node, plus one UDP flow when a rate is set.
        for i in range(15):
            testcase.run_greedy(node='a', tag='node-a')
            testcase.run_greedy(node='b', tag='node-b')
        if udp_rate > 0:
            time.sleep(1)
            testcase.run_udp(node='a', bitrate=udp_rate * MBIT, ect='nonect', tag='udp-rate')

    testbed = Testbed()

    testbed.ta_samples = 30
    testbed.ta_idle = 5
    testbed.ta_delay = 500

    # Node a: CUBIC (ECN allowed); node b: DCTCP variant (ECN initiated).
    testbed.cc('a', 'cubic', testbed.ECN_ALLOW)
    testbed.cc('b', 'dctcp-drop', testbed.ECN_INITIATE)

    run_test(
        folder='results/vm-test-1',
        title='Testing VM',
        subtitle='Using 15 flows of CUBIC, 15 flows of DCTCP (with ECN) and 1 flow UDP',
        testenv=TestEnv(testbed, retest=False),
        steps=(
            branch_titles([
                ('dqa', 'dqa'),
                ('dqa1', 'dqa1'),
                ('dqa2', 'dqa2'),
                ('dqa3', 'dqa3'),
                ('dqa4', 'dqa4'),
                ('dqa5', 'dqa5'),
                ('x250', 'x250'),
            ]),
            plot_swap(),
            branch_sched([
                ('pi2',
                 'PI2: dualq target 15ms tupdate 15ms alpha 5 beta 50 sojourn k 2 t\\\\_shift 30ms l\\\\_drop 100',
                 lambda testbed: testbed.aqm_pi2(params='dualq target 15ms tupdate 15ms alpha 5 beta 50 sojourn k 2 t_shift 30ms l_drop 100')),
                ('pie', 'PIE', lambda testbed: testbed.aqm_pie('ecn target 15ms tupdate 15ms alpha 1 beta 10 ecndrop 25')),
                #('pfifo', 'pfifo', lambda testbed: testbed.aqm_pfifo()),
            ]),
            plot_swap(),
            branch_rtt([10]),
            plot_swap(),
            branch_bitrate([100,250,500]),
            plot_swap(),
            branch_udp_rate([50]),
            plot_swap(),
            branch_runif([
                #('config-3', lambda testenv: False, '8 GiB / 6 vCPU'),
                #('config-6144-1', lambda testenv: False, '6 GiB / 1 vCPU'),
                #('config-512-6', lambda testenv: False, '512 MiB / 6 vCPU'),
                #('config-4', lambda testenv: False, '512 MiB / 1 vCPU'),
                #('config-3072-2', lambda testenv: False, '3 GiB / 2 vCPU'),
                ('config-3072-2', lambda testenv: False, '-'),
                #('config-1', lambda testenv: False, '2 GiB / 1 vCPU'),
                #('config-2', lambda testenv: False, '1 GiB / 1 vCPU'),
            ]),
            #branch_repeat(2),
            branch_repeat(10),
            my_test,
        ),
    )
if __name__ == '__main__':
    # This test must run on the AQM node of the testbed.
    require_on_aqm_node()
    test_compose()
| Python | 0 | |
5a1518bc2bd8b509bc5c00850ba1da59989147f8 | Add basic tests | test_main.py | test_main.py | #!/usr/bin/env python
import sys
from io import StringIO
from jproperties import Properties
def _test_deserialize(*data):
    """Each argument is a (properties_text, expected_items) pair; parse the
    text with jproperties and assert the parsed items match."""
    for text, expected in data:
        parsed = Properties()
        parsed.load(StringIO(text))
        assert list(parsed.items()) == expected
def test_eq_separator():
    """'=' separator with optional surrounding whitespace."""
    cases = [
        ("a=b", [("a", "b")]),
        ("a= b", [("a", "b")]),
        ("a = b", [("a", "b")]),
        ("a =b", [("a", "b")]),
    ]
    _test_deserialize(*cases)


def test_colon_separator():
    """':' separator with optional surrounding whitespace."""
    cases = [
        ("a:b", [("a", "b")]),
        ("a: b", [("a", "b")]),
        ("a : b", [("a", "b")]),
        ("a :b", [("a", "b")]),
    ]
    _test_deserialize(*cases)


def test_space_separator():
    """Bare whitespace acts as a key/value separator."""
    cases = [
        ("a b", [("a", "b")]),
        ("a  b", [("a", "b")]),
        ("a        b", [("a", "b")]),
    ]
    _test_deserialize(*cases)


def test_space_in_key():
    """Backslash-escaped spaces are part of the key, not separators."""
    cases = [
        ("key\ with\ spaces = b", [("key with spaces", "b")]),
        ("key\ with\ spaces b", [("key with spaces", "b")]),
        ("key\ with\ spaces : b", [("key with spaces", "b")]),
        ("key\ with\ spaces\ : b", [("key with spaces ", "b")]),
    ]
    _test_deserialize(*cases)
def main():
    """Run every module-level callable whose name starts with ``test_``."""
    tests = [obj for name, obj in globals().items()
             if name.startswith("test_") and callable(obj)]
    for test in tests:
        test()


if __name__ == "__main__":
    main()
| Python | 0.000004 | |
4249c6456ca21ad6bbec0eccdf66aef629deb511 | Add basic tag testing script | test_tags.py | test_tags.py | import sys
import requests

from wikibugs import Wikibugs2
from channelfilter import ChannelFilter
import configfetcher

# Build the wikibugs parser and the tag -> IRC-channel mapping.
conf = configfetcher.ConfigFetcher()
w = Wikibugs2(conf)
c = ChannelFilter()

print("\n\n\n\n\n\n\n\n")
# Fetch the Phabricator page given on the command line, extract its project
# tags and show which channel(s) each tag would be routed to.
page = requests.get(sys.argv[1]).text
tags = w.get_tags(page)
for tag in tags:
    print(tag, c.channels_for([tag]))
| Python | 0.000006 | |
377f44ea05d8fc550be5916a1ca6c085df8f8cdc | add mysql database backup script | backupmysql.py | backupmysql.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#Author: Andrew McDonald andrew@mcdee.com.au http://mcdee.com.au
# Example: config file
#[client]
#host = localhost
#user = root
#password = root-pass
from datetime import datetime
import sys, os, subprocess, tarfile
import zipfile, glob, logging
# Candidate strftime formats; ``default_date_format`` indexes into the list.
# NOTE(review): neither name appears to be referenced elsewhere in this file -
# confirm before removing.
date_format = [
    "%A %d.%m.%Y",
    "%Y%m%d"
]
default_date_format = 1
def print_usage(script):
    # Print CLI usage for ``script`` and abort with a non-zero exit code.
    print 'Usage:', script, '--cnf <config file>', '--todir <directory>'
    sys.exit(1)
def usage(args):
if not len(args) == 5:
print_usage(args[0])
else:
req_args = ['--cnf', '--todir']
for a in req_args:
if not a in req_args:
print_usage()
if not os.path.exists(args[args.index(a)+1]):
print 'Error: Path not found:', args[args.index(a)+1]
print_usage()
cnf = args[args.index('--cnf')+1]
dir = args[args.index('--todir')+1]
return cnf, dir
def mysql_dblist(cnf):
    """Return the list of user databases, querying mysql via ``cnf``.

    System schemas (and the ``show databases`` header row) are filtered out.
    Exits the process if the mysql client fails.
    """
    no_backup = ['Database', 'information_schema', 'performance_schema', 'test']
    cmd = ['mysql', '--defaults-extra-file='+cnf, '-e', 'show databases']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if p.returncode > 0:
        print 'MySQL Error:'
        print stderr
        sys.exit(1)
    dblist = stdout.strip().split('\n')
    for item in no_backup:
        # Each excluded name appears at most once; ignore it if absent.
        try:
            dblist.remove(item)
        except ValueError:
            continue
    # NOTE(review): warning triggers when exactly one database remains,
    # which looks like it was meant to be ``== 0`` - confirm.
    if len(dblist) == 1:
        print "Doesn't appear to be any user databases found"
    return dblist
def mysql_backup(dblist, dir, cnf):
    """mysqldump each database in ``dblist`` into ``dir`` and gzip the dump.

    Dump files are named ``<db>_<YYYYmmddHHMM>.sql`` and compressed via
    backup_compress(); failures are reported but do not stop the loop.
    """
    for db in dblist:
        bdate = datetime.now().strftime('%Y%m%d%H%M')
        bfile = db+'_'+bdate+'.sql'
        dumpfile = open(os.path.join(dir, bfile), 'w')
        if db == 'mysql':
            # The mysql system schema also needs its scheduled events dumped.
            cmd = ['mysqldump', '--defaults-extra-file='+cnf, '--events', db]
        else:
            cmd = ['mysqldump', '--defaults-extra-file='+cnf, db]
        p = subprocess.Popen(cmd, stdout=dumpfile)
        retcode = p.wait()
        dumpfile.close()
        if retcode > 0:
            print 'Error:', db, 'backup error'
        # Compress even after an error; the (possibly partial) dump is kept.
        backup_compress(dir, bfile)
def backup_compress(dir, bfile):
    """Gzip ``bfile`` (inside ``dir``) into ``<bfile>.tar.gz`` and remove
    the uncompressed original.
    """
    src = os.path.join(dir, bfile)
    # ``with`` guarantees the archive is flushed/closed even if tar.add
    # raises, so a failure cannot leak an open, truncated .tar.gz handle.
    with tarfile.open(src + '.tar.gz', 'w:gz') as tar:
        tar.add(src, arcname=bfile)
    os.remove(src)
def main():
    # Parse/validate CLI args, list user databases, back each one up.
    cnf, dir = usage(sys.argv)
    dblist = mysql_dblist(cnf)
    mysql_backup(dblist, dir, cnf)

if __name__ == '__main__':
    main()
| Python | 0.000001 | |
07c5ed48d107c7ec88a990698647a70187d277a1 | Update cms_helper.py | cms_helper.py | cms_helper.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from tempfile import mkdtemp
# Identity stub so makemessages can find translatable strings at import time.
gettext = lambda s: s

# Settings injected into djangocms-helper's generated Django settings.
HELPER_SETTINGS = dict(
    NOSE_ARGS=[
        '-s',
    ],
    ROOT_URLCONF='tests.test_utils.urls',
    INSTALLED_APPS=[
        'admin_enhancer',
        'filer',
        'parler',
        'taggit',
        'meta',
        'meta_mixin',
        'easy_thumbnails',
        'djangocms_text_ckeditor',
        'cmsplugin_filer_image',
        'taggit_autosuggest',
    ],
    LANGUAGE_CODE='en',
    LANGUAGES=(
        ('en', gettext('English')),
        ('fr', gettext('French')),
        ('it', gettext('Italiano')),
    ),
    # Per-site django CMS language config (site 1 trilingual, site 2 en-only).
    CMS_LANGUAGES={
        1: [
            {
                'code': 'en',
                'name': gettext('English'),
                'public': True,
            },
            {
                'code': 'it',
                'name': gettext('Italiano'),
                'public': True,
            },
            {
                'code': 'fr',
                'name': gettext('French'),
                'public': True,
            },
        ],
        2: [
            {
                'code': 'en',
                'name': gettext('English'),
                'public': True,
            },
        ],
        'default': {
            'hide_untranslated': False,
        },
    },
    # django-parler mirrors the CMS language setup.
    PARLER_LANGUAGES={
        1: (
            {'code': 'en'},
            {'code': 'it'},
            {'code': 'fr'},
        ),
        2: (
            {'code': 'en'},
        ),
        'default': {
            'fallback': 'en',
            'hide_untranslated': False,
        }
    },
    MIGRATION_MODULES={
        'cmsplugin_filer_image': 'cmsplugin_filer_image.migrations_django',
    },
    META_SITE_PROTOCOL='http',
    META_SITE_DOMAIN='example.com',
    META_USE_OG_PROPERTIES=True,
    META_USE_TWITTER_PROPERTIES=True,
    META_USE_GOOGLEPLUS_PROPERTIES=True,
    THUMBNAIL_PROCESSORS=(
        'easy_thumbnails.processors.colorspace',
        'easy_thumbnails.processors.autocrop',
        'filer.thumbnail_processors.scale_and_crop_with_subject_location',
        'easy_thumbnails.processors.filters',
    ),
    # Isolate uploads per run; mkdtemp() is evaluated once at import.
    FILE_UPLOAD_TEMP_DIR=mkdtemp(),
    SITE_ID=1
)

# Enable the nose runner when testing (or when run with no subcommand).
if 'test' in sys.argv or len(sys.argv) == 1:
    HELPER_SETTINGS['INSTALLED_APPS'].append('django_nose')


def run():
    # Entry point: delegate to djangocms-helper's runner for this package.
    from djangocms_helper import runner
    if 'test' in sys.argv or len(sys.argv) == 1:
        sys.argv.append('--nose-runner')
    runner.cms('djangocms_blog')

if __name__ == "__main__":
    run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from tempfile import mkdtemp
# Identity stub so makemessages can find translatable strings at import time.
gettext = lambda s: s

# Settings injected into djangocms-helper's generated Django settings
# (older revision of this file: plain dict literal, test-only nose hook).
HELPER_SETTINGS = {
    'NOSE_ARGS': [
        '-s',
    ],
    'ROOT_URLCONF': 'tests.test_utils.urls',
    'INSTALLED_APPS': [
        'admin_enhancer',
        'filer',
        'parler',
        'taggit',
        'meta',
        'meta_mixin',
        'easy_thumbnails',
        'djangocms_text_ckeditor',
        'cmsplugin_filer_image',
        'taggit_autosuggest',
    ],
    'LANGUAGE_CODE': 'en',
    'LANGUAGES': (
        ('en', gettext('English')),
        ('fr', gettext('French')),
        ('it', gettext('Italiano')),
    ),
    # Per-site django CMS language config (site 1 trilingual, site 2 en-only).
    'CMS_LANGUAGES': {
        1: [
            {
                'code': 'en',
                'name': gettext('English'),
                'public': True,
            },
            {
                'code': 'it',
                'name': gettext('Italiano'),
                'public': True,
            },
            {
                'code': 'fr',
                'name': gettext('French'),
                'public': True,
            },
        ],
        2: [
            {
                'code': 'en',
                'name': gettext('English'),
                'public': True,
            },
        ],
        'default': {
            'hide_untranslated': False,
        },
    },
    # django-parler mirrors the CMS language setup.
    'PARLER_LANGUAGES': {
        1: (
            {'code': 'en'},
            {'code': 'it'},
            {'code': 'fr'},
        ),
        2: (
            {'code': 'en'},
        ),
        'default': {
            'fallback': 'en',
            'hide_untranslated': False,
        }
    },
    'MIGRATION_MODULES': {
        'cmsplugin_filer_image': 'cmsplugin_filer_image.migrations_django',
    },
    'META_SITE_PROTOCOL': 'http',
    'META_SITE_DOMAIN': 'example.com',
    'META_USE_OG_PROPERTIES': True,
    'META_USE_TWITTER_PROPERTIES': True,
    'META_USE_GOOGLEPLUS_PROPERTIES': True,
    'THUMBNAIL_PROCESSORS': (
        'easy_thumbnails.processors.colorspace',
        'easy_thumbnails.processors.autocrop',
        'filer.thumbnail_processors.scale_and_crop_with_subject_location',
        'easy_thumbnails.processors.filters',
    ),
    # Isolate uploads per run; mkdtemp() is evaluated once at import.
    'FILE_UPLOAD_TEMP_DIR': mkdtemp(),
    'SITE_ID': 1
}

# Enable the nose runner only when running tests.
if 'test' in sys.argv:
    HELPER_SETTINGS['INSTALLED_APPS'].append('django_nose')


def run():
    # Entry point: delegate to djangocms-helper's runner for this package.
    from djangocms_helper import runner
    sys.argv.append('--nose-runner')
    runner.cms('djangocms_blog')

if __name__ == "__main__":
run() | Python | 0.000001 |
a3a2f645d3154334e8ae6af93fe56a3f2368c4c7 | Add multiprocessing pool example | multiprocessing_pool.py | multiprocessing_pool.py | from multiprocessing.pool import ThreadPool as Pool
from multiprocessing import Queue as PQueue
import Queue
# Toy work items: maps each "source" URL to a "destination" URL.
my_dict = {
    'url1': 'url2',
    'url3': 'url4',
}

# Shared multiprocessing queue that the pool workers write results into.
my_q = PQueue()
q, url = uq[0], uq[1]
q.put(url, False)
def main():
global my_dict
global my_q
print "Going to process (%d)" % len(my_dict.keys() + my_dict.values())
p = Pool(processes=8)
print p.map(test_p, [(my_q, url) for url in my_dict.keys() + my_dict.values()])
its = []
while True:
# If we go more than 30 seconds without something, die
try:
print "Waiting for item from queue for up to 5 seconds"
i = my_q.get(True, 5)
print "found %s from the queue !!" % i
its.append(i)
except Queue.Empty:
print "Caught queue empty exception, done"
break
print "processed %d items, completion successful" % len(its)
p.close()
p.join()
if __name__ == '__main__':
main()
| Python | 0 | |
b117fbc82de4fb6acd8a044651c95e2425d9e71c | Create preprocess_MS_dataset_utils_test.py | preprocess_MS_dataset_utils_test.py | preprocess_MS_dataset_utils_test.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Testing for preprocess_MS_dataset_utils.py."""
import unittest
from unittest import TestCase
from preprocess_MS_dataset_utils import process_row
class PreprocessMSDatasetUtilsTest(TestCase):
    """Tests for process_row(): parsing of '|||'-delimited MS dataset rows."""

    def test_process_row_without_excluded_sample(self):
        # Both summaries have acceptable ratings, so none are excluded.
        row = ["PlaceHolder ||| PlaceHolder ||| OriginalSentence ||| "
               "Summary1 ||| 6 ||| 6 ||| 6 ||| Most important meaning Flawless language "
               "||| Summary2 ||| 7 ||| 7 ||| 7 ||| Most important meaning Minor errors"]
        output_original_sentence, output_shortened_sentences_list, \
            output_shortened_ratings_list, count_excluded = process_row(row)
        self.assertEqual(output_original_sentence, 'OriginalSentence')
        self.assertEqual(output_shortened_sentences_list, ['Summary1', 'Summary2'])
        self.assertEqual(output_shortened_ratings_list, [['6'], ['7']])
        self.assertEqual(count_excluded, 0)

    def test_process_row_with_excluded_sample(self):
        # The second summary is rated "Disfluent or incomprehensible" and
        # must be dropped, incrementing the exclusion counter.
        row = ["PlaceHolder ||| PlaceHolder ||| OriginalSentence ||| "
               "Summary1 ||| 7 ||| 7 ||| 7 ||| Most important meaning Minor errors "
               "||| Summary2 ||| 9 ||| 9 ||| 9 ||| Most important meaning Disfluent or incomprehensible"]
        output_original_sentence, output_shortened_sentences_list, \
            output_shortened_ratings_list, count_excluded = process_row(row)
        self.assertEqual(output_original_sentence, 'OriginalSentence')
        self.assertEqual(output_shortened_sentences_list, ['Summary1'])
        self.assertEqual(output_shortened_ratings_list, [['7']])
        self.assertEqual(count_excluded, 1)

if __name__ == '__main__':
    unittest.main()
| Python | 0.000004 | |
6bf43087967dee2bfb9f31a5de61c91ed0664586 | update get ids and columns in pecanstreet, much faster | proto/pylearn2/create_ev_dataset.py | proto/pylearn2/create_ev_dataset.py | import sys
import os.path
# Make the repo root importable (two directories up from this script).
sys.path.append(os.path.join(os.pardir,os.pardir))
import disaggregator as da
import disaggregator.PecanStreetDatasetAdapter as psda

# NOTE(review): placeholder credentials - must be replaced before running.
db_url = "postgresql://USERNAME:PASSWORD@db.wiki-energy.org:5432/postgres"
psda.set_url(db_url)

# Collect data ids and column names for every table in the shared schema.
schema = 'shared'
table_names = psda.get_table_names(schema)
all_ids = []
all_columns = []
for table_name in table_names:
    ids,columns = psda.get_table_dataids_and_column_names(schema,table_name)
    all_ids.append(ids)
    all_columns.append(columns)
print all_ids
print all_columns
| Python | 0.000001 | |
10524dd2c42ef499d36b3f64e31150885d45e51b | Add slot_usage command for checking cluster balance | streamparse/cli/slot_usage.py | streamparse/cli/slot_usage.py | """
Display slots used by every topology on the cluster
"""
from __future__ import absolute_import, print_function
from collections import Counter, defaultdict
from pkg_resources import parse_version
from prettytable import PrettyTable
from six import iteritems
from .common import add_environment
from ..util import get_ui_json, storm_lib_version
def subparser_hook(subparsers):
    """ Hook to add subparser for this command. """
    subparser = subparsers.add_parser('slot_usage',
                                      description=__doc__,
                                      help=main.__doc__)
    # Dispatch to this module's main() when 'slot_usage' is invoked.
    subparser.set_defaults(func=main)
    add_environment(subparser)
def display_slot_usage(env_name):
    """Print a per-host table of worker slots (and executors) per topology.

    Walks the Storm UI REST API: topology summary -> per-topology detail ->
    per-component executor stats.  One REST call per component, hence slow.
    """
    print('Querying Storm UI REST service for slot usage stats (this can take a while)...')
    topology_summary = '/api/v1/topology/summary'
    topology_detail = '/api/v1/topology/{topology}'
    component = '/api/v1/topology/{topology}/component/{component}'
    topo_summary_json = get_ui_json(env_name, topology_summary)
    topology_ids = [x['id'] for x in topo_summary_json['topologies']]
    # Keep track of the number of workers used by each topology on each machine
    topology_worker_ports = defaultdict(lambda: defaultdict(set))
    topology_executor_counts = defaultdict(Counter)
    topology_names = set()

    for topology in topology_ids:
        topology_detail_json = get_ui_json(env_name,
                                           topology_detail.format(topology=topology))
        spouts = [x['spoutId'] for x in topology_detail_json['spouts']]
        bolts = [x['boltId'] for x in topology_detail_json['bolts']]
        for comp in spouts + bolts:
            comp_detail = get_ui_json(env_name,
                                      component.format(topology=topology,
                                                       component=comp))
            for worker in comp_detail['executorStats']:
                # Distinct ports per host == worker slots; executors counted raw.
                topology_worker_ports[worker['host']][topology_detail_json['name']].add(worker['port'])
                topology_executor_counts[worker['host']][topology_detail_json['name']] += 1
        topology_names.add(topology_detail_json['name'])

    print("# Slot (and Executor) Counts by Topology")
    topology_names = sorted(topology_names)
    table = PrettyTable(["Host"] + topology_names)
    table.align = 'l'
    for host, host_dict in sorted(iteritems(topology_worker_ports)):
        # Cell format: "<slot count> (<executor count>)".
        row = [host] + ['{} ({})'.format(len(host_dict.get(topology, set())),
                                         topology_executor_counts[host][topology])
                        for topology in topology_names]
        table.add_row(row)
    print(table)
    print()
def main(args):
    """ Display slot usage per host for all topologies on the cluster. """
    # The /api/v1 UI REST endpoints only exist from Storm 0.9.2 onwards.
    storm_version = storm_lib_version()
    if storm_version >= parse_version('0.9.2-incubating'):
        display_slot_usage(args.environment)
    else:
        print("ERROR: Storm {0} does not support this command."
              .format(storm_version))
| Python | 0 | |
b69cc15467456a070333ff00f886f27ca391b85b | Add script for appending entries to .gitignore. | webrtc/build/extra_gitignore.py | webrtc/build/extra_gitignore.py | #!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
""" Adds extra patterns to the root .gitignore file.
Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entires are intended to be additional
ignoring patterns, or negating patterns to override existing entries (man
gitignore for more details).
"""
import os
import sys
# Marker line delimiting the auto-appended section in .gitignore.
MODIFY_STRING = '# The following added by %s\n'

def main(argv):
    """Append the contents of argv[1] to the root .gitignore.

    Any previously appended section (everything from the marker line on) is
    replaced rather than duplicated, so the script is idempotent.
    Returns 0 when there is nothing to do.
    """
    if len(argv) < 2 or not argv[1]:
        # Special case; do nothing.  (The original indexed argv[1]
        # unconditionally and raised IndexError when no argument was given.)
        return 0

    modify_string = (MODIFY_STRING % argv[0])
    # os.path.join instead of string '+': the original produced the absolute
    # path '/../.gitignore' when argv[0] had no directory component.
    gitignore_file = os.path.join(os.path.dirname(argv[0]), '..', '.gitignore')

    # Use context managers so no file handle is leaked on error.
    with open(gitignore_file, 'r') as f:
        lines = f.readlines()
    # Truncate at a previously appended marker, if present.
    for i, line in enumerate(lines):
        if line == modify_string:
            lines = lines[:i]
            break
    lines.append(modify_string)

    with open(argv[1], 'r') as extra:
        extra_contents = extra.read()
    with open(gitignore_file, 'w') as f:
        f.write(''.join(lines))
        f.write(extra_contents)
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
| Python | 0.000005 | |
5e07a21cce64e1845832641b6de1951182d41ea0 | add back module changed mixin | core/mixins.py | core/mixins.py | """
core.mixins - Mixins available to use with models
"""
from django.db.models.signals import post_save
def on_changed(sender, **kwargs):
    """post_save receiver: report the instance's dirty fields via
    ``model_changed``, then snapshot the saved state as the new baseline.
    """
    instance = kwargs.get("instance")
    instance.model_changed(
        instance.original_state,
        instance.get_dirty_fields(),
        kwargs.get("created"),
    )
    instance.original_state = instance.to_dict()
class ModelChangedMixin(object):
    """
    Mixin for detecting changes to a model

    On construction it snapshots the instance's non-relational fields and
    connects ``on_changed`` to post_save, so subclasses get their
    ``model_changed`` hook called after every save.
    """
    def __init__(self, *args, **kwargs):
        super(ModelChangedMixin, self).__init__(*args, **kwargs)
        self.original_state = self.to_dict()
        # dispatch_uid keyed on the class name keeps the receiver from being
        # connected more than once per model class.
        identifier = "{0}_model_changed".format(self.__class__.__name__)
        post_save.connect(
            on_changed, sender=self.__class__, dispatch_uid=identifier)

    def to_dict(self):
        """
        Returns the model as a dict
        """
        # Get all the field names that are not relations
        keys = (f.name for f in self._meta.local_fields if not f.rel)
        return {field: getattr(self, field) for field in keys}

    def get_dirty_fields(self):
        """
        Returns the fields dirty on the model

        A field is dirty when its current value differs from the snapshot
        taken at construction (or at the last post_save).
        """
        dirty_fields = {}
        current_state = self.to_dict()
        for key, value in current_state.items():
            if self.original_state[key] != value:
                dirty_fields[key] = value
        return dirty_fields

    def is_dirty(self):
        """
        Return whether the model is dirty

        An unsaved model is dirty when it has no primary key
        or has at least one dirty field.
        """
        if not self.pk:
            return True
        return {} != self.get_dirty_fields()

    def model_changed(self, old_fields, new_fields, is_new):
        """
        Post-hook for all fields that have been changed.

        Subclasses must override; called by ``on_changed`` after each save.
        """
        raise NotImplementedError("Missing method `model_changed`")
| Python | 0 | |
87413a50fa61761f8e669eda641635a0ab7bede3 | Create migration for message | API/chat/migrations/0005_auto_20160511_1921.py | API/chat/migrations/0005_auto_20160511_1921.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename Message.text to message_content and add a message_type field."""

    dependencies = [
        ('chat', '0004_auto_20150905_1700'),
    ]

    operations = [
        migrations.RenameField(
            model_name='message',
            old_name='text',
            new_name='message_content',
        ),
        # message_type distinguishes plain text from image messages;
        # existing rows default to 'text'.
        migrations.AddField(
            model_name='message',
            name='message_type',
            field=models.CharField(default=b'text', max_length=10, choices=[(b'text', b'text'), (b'image', b'image')]),
        ),
    ]
| Python | 0 | |
b38527cccf970e069f55c531a4490cdb6eb7042b | Add a widget. | python/pyqt/pyqt5/hello_as_class.py | python/pyqt/pyqt5/hello_as_class.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
class Window(QMainWindow):
    """Minimal main window: 250x150 pixels, titled 'Hello', shown on creation."""

    def __init__(self):
        super().__init__()
        self.resize(250, 150)
        self.setWindowTitle('Hello')
        self.show()
app = QApplication(sys.argv)
window = Window()

# Start the Qt event loop; exec_() blocks until the last window closes.
# (The trailing underscore exists because ``exec`` was a Python 2 keyword.)
exit_code = app.exec_()

# sys.exit() ensures a clean exit, forwarding Qt's status code to the OS.
sys.exit(exit_code)
| Python | 0 | |
b41b2edde5ac7c786b5ce23adec116fe8311d5d7 | Add tests for createaccount command | tests/test_account_service_account.py | tests/test_account_service_account.py | from unittest.mock import ANY, Mock
import requests
from django.core.management import call_command
from saleor.account.models import ServiceAccount
from saleor.core.permissions import get_permissions
def test_createaccount_command_creates_service_account():
name = "SA name"
permissions = ["account.manage_users", "order.manage_orders"]
call_command("createserviceaccount", name, permission=permissions)
sa_accounts = ServiceAccount.objects.filter(name=name)
assert len(sa_accounts) == 1
sa_account = sa_accounts[0]
tokens = sa_account.tokens.all()
assert len(tokens) == 1
def test_createaccount_command_service_account_has_all_required_permissions():
    """The created service account carries exactly the requested permissions."""
    account_name = "SA name"
    requested = ["account.manage_users", "order.manage_orders"]
    expected_permission = get_permissions(requested)
    call_command("createserviceaccount", account_name, permission=requested)
    matches = ServiceAccount.objects.filter(name=account_name)
    assert len(matches) == 1
    assert set(matches[0].permissions.all()) == set(expected_permission)
def test_createaccount_command_sends_data_to_target_url(monkeypatch):
    """Passing --target-url makes the command POST the new account's data there."""
    fake_response = Mock(status_code=200)
    fake_post = Mock(return_value=fake_response)
    monkeypatch.setattr(requests, "post", fake_post)
    name = "SA name"
    target_url = "https://ss.shop.com/register"
    call_command(
        "createserviceaccount",
        name,
        permission=["account.manage_users"],
        target_url=target_url,
    )
    # The freshly created account's auth token must appear in the payload.
    service_account = ServiceAccount.objects.filter(name=name)[0]
    token = service_account.tokens.all()[0].auth_token
    fake_post.assert_called_once_with(
        target_url,
        headers={"x-saleor-domain": "mirumee.com"},
        json={
            "auth_token": token,
            "name": "SA name",
            "permissions": ["account.manage_users"],
        },
        timeout=ANY,
    )
| Python | 0.000001 | |
1390de93f8f9703416dc465fc546a8883e96bada | add a header generator | EMControllerManagerHeaderGenerator.py | EMControllerManagerHeaderGenerator.py | #!/usr/bin/env python
#coding:utf8
import getopt
import json
import sys
def generate_definition(input_file, output_path, prefix):
    """Read a JSON mapping of controller names and write an Objective-C header.

    Each top-level key of the JSON object becomes a line of the form
    `#define [<prefix>_]<name> @"<name>"` in the generated header.

    input_file  -- path to the JSON configuration file
    output_path -- path of the header file to (over)write
    prefix      -- optional macro-name prefix; None means no prefix
    """
    with open(input_file, 'r') as json_file:
        config_dict = json.loads(json_file.read())
    if not isinstance(config_dict, dict):
        # The top-level JSON value must be an object mapping names to configs.
        sys.stderr.write('configuration file does not contain a JSON object\n')
        sys.exit(-1)
    with open(output_path, 'w') as o:
        o.write('/* Generated by EMControllerManagerHeaderGenerator, do not edit it manually. */\n\n\n')
        for controller_name in config_dict:
            if prefix is None:
                def_name = controller_name
            else:
                def_name = "%s_%s" % (prefix, controller_name)
            o.write('#define %s @"%s"\n' % (def_name, controller_name))
def main():
    """Parse -i/-o/-p command-line options and generate the header.

    -i  input JSON configuration file (required)
    -o  output header path (required)
    -p  optional macro-name prefix
    """
    try:
        options, args = getopt.getopt(sys.argv[1:], 'i:o:p:')
    except getopt.GetoptError as e:
        # getopt raises GetoptError on unknown/incomplete options; report it
        # and re-raise without resetting the traceback (bare raise).
        print(str(e))
        raise
    input_file = None
    output_path = None
    prefix = None
    for o, a in options:
        if o == '-i':
            input_file = a
        elif o == '-o':
            output_path = a
        elif o == '-p':
            prefix = a
    # Both the input and output paths are mandatory.
    if input_file is None or output_path is None:
        print("input error")
        sys.exit(-1)
    generate_definition(input_file, output_path, prefix)

if __name__ == '__main__':
    main()
| Python | 0 | |
10b8043463b6bcc89d4ce559548fa113f3d26190 | drop tables no longer needed by application | gem/migrations/0044_remove_deprecated_tables.py | gem/migrations/0044_remove_deprecated_tables.py | # Generated by Django 2.2.15 on 2020-08-14 11:23
from django.db import migrations
# Tables created by the removed surveys/yourwords/polls apps that this
# migration drops from the database.
TABLES = [
    'surveys_articletagrule',
    'surveys_combinationrule',
    'surveys_groupmembershiprule',
    'surveys_molosurveyformfield',
    'surveys_molosurveypage',
    'surveys_molosurveypage_translated_pages',
    'surveys_molosurveypageview',
    'surveys_molosurveysubmission',
    'surveys_personalisablesurvey',
    'surveys_personalisablesurveyformfield',
    'surveys_segmentusergroup',
    'surveys_segmentusergroup_users',
    'surveys_surveyresponserule',
    'surveys_surveysindexpage',
    'surveys_surveysubmissiondatarule',
    'surveys_surveytermsconditions',
    'surveys_termsandconditionsindexpage',
    'surveys_termsandconditionsindexpage_translated_pages',
    'yourwords_termsandconditions',
    'yourwords_thankyou',
    'yourwords_yourwordscompetition',
    'yourwords_yourwordscompetition_translated_pages',
    'yourwords_yourwordscompetitionentry',
    'yourwords_yourwordscompetitionindexpage',
    'polls_choice',
    'polls_choice_choice_votes',
    'polls_choice_translated_pages',
    'polls_choicevote',
    'polls_choicevote_choice',
    'polls_freetextquestion',
    'polls_freetextvote',
    'polls_pollsindexpage',
    'polls_question',
    'polls_question_translated_pages',
]

def remove_tables(apps, schema_editor):
    """Drop all deprecated tables (if present) in a single statement.

    Bug fix: the original built a ``migrations.RunSQL(...)`` operation
    inside this RunPython callable, which only constructs an operation
    object and never executes it.  Raw SQL inside a RunPython function
    must go through ``schema_editor.execute``.
    """
    schema_editor.execute('DROP TABLE IF EXISTS {} CASCADE;'.format(','.join(TABLES)))
class Migration(migrations.Migration):
    # Destructive cleanup: drops tables left behind by removed apps.
    # NOTE(review): RunPython is given no reverse_code, so this migration
    # cannot be reversed — presumably intentional for a table drop; confirm.
    dependencies = [
        ('gem', '0043_invite_site'),
    ]
    operations = [
        migrations.RunPython(remove_tables)
    ]
| Python | 0 | |
9a678f5e856a5fcba82a1a9017dfbc841a660686 | Create ompotdar.py | Python/ompotdar.py | Python/ompotdar.py | print("Hello World!")
| Python | 0.000002 | |
0f23004da949b974a071a788ff084c2cb685b95d | use a similar `repair_wheel.py` script as cmake | scripts/repair_wheel.py | scripts/repair_wheel.py | import argparse
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
from convert_to_generic_platform_wheel import convert_to_generic_platform_wheel
def main():
    """Repair a built wheel with the platform-specific tool, then convert it
    to a generic (py2.py3, ABI-independent) wheel in the output directory."""
    # Map the interpreter platform to the repair tool we will invoke below.
    if sys.platform.startswith("linux"):
        os_ = "linux"
    elif sys.platform == "darwin":
        os_ = "macos"
    elif sys.platform == "win32":
        os_ = "windows"
    else:
        raise NotImplementedError(f"sys.platform '{sys.platform}' is not supported yet.")
    p = argparse.ArgumentParser(description="Convert wheel to be independent of python implementation and ABI")
    # Show the invoking script's name in help output.
    p.set_defaults(prog=Path(sys.argv[0]).name)
    p.add_argument("WHEEL_FILE", help="Path to wheel file.")
    p.add_argument(
        "-w",
        "--wheel-dir",
        dest="WHEEL_DIR",
        help=('Directory to store delocated wheels (default: "wheelhouse/")'),
        default="wheelhouse/",
    )
    args = p.parse_args()
    # strict=True makes a missing input wheel fail fast here.
    file = Path(args.WHEEL_FILE).resolve(strict=True)
    wheelhouse = Path(args.WHEEL_DIR).resolve()
    wheelhouse.mkdir(parents=True, exist_ok=True)
    with tempfile.TemporaryDirectory() as tmpdir_:
        tmpdir = Path(tmpdir_)
        # use the platform specific repair tool first
        # (auditwheel on Linux, delocate on macOS; check=True aborts on failure)
        if os_ == "linux":
            subprocess.run(["auditwheel", "repair", "-w", str(tmpdir), str(file)], check=True, stdout=subprocess.PIPE)
        elif os_ == "macos":
            subprocess.run(
                ["delocate-wheel", "--require-archs", "x86_64", "-w", str(tmpdir), str(file)],
                check=True,
                stdout=subprocess.PIPE,
            )
        elif os_ == "windows":
            # no specific tool, just copy
            shutil.copyfile(file, tmpdir / file.name)
        # The repair step is expected to emit exactly one wheel into tmpdir.
        files = list(tmpdir.glob("*.whl"))
        assert len(files) == 1, files
        file = files[0]
        # make this a py2.py3 wheel
        convert_to_generic_platform_wheel(
            str(file),
            out_dir=str(wheelhouse),
            py2_py3=True,
        )

if __name__ == "__main__":
    main()
| Python | 0.000002 | |
cafb83befb2cee459d44a1332e5fc7e57edf81a6 | Add script to update cvsanaly databases | updateGit.py | updateGit.py | from jiradb import *
if __name__ == "__main__":
    # `log`, `getArguments`, `JIRADB` all come from jiradb's star import.
    # Capture everything at the logger; each handler below filters by level.
    log.setLevel(logging.DEBUG)
    # Add console log handler
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    ch.setFormatter(logging.Formatter('%(message)s'))
    log.addHandler(ch)
    # Add file log handler
    fh = logging.FileHandler('updateGit.log')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(logging.Formatter('[%(levelname)s @ %(asctime)s]: %(message)s'))
    log.addHandler(fh)
    # Add error file log handler
    efh = logging.FileHandler('updateGitErrors.log')
    efh.setLevel(logging.ERROR)
    efh.setFormatter(logging.Formatter('[%(levelname)s @ %(asctime)s]: %(message)s'))
    log.addHandler(efh)
    # Refresh the cvsanaly git database for every project named in the args.
    args = getArguments()
    jiradb = JIRADB(**args)
    projectList = args['projects']
    for project in projectList:
        jiradb.getGitDB(project).update()
| Python | 0 | |
23c65cc59f1cdf595090a7f25e80c03828aaba68 | add `examples/references` | src/openbandparams/examples/references.py | src/openbandparams/examples/references.py | #
# Copyright (c) 2013-2015, Scott J Maddox
#
# This file is part of openbandparams.
#
# openbandparams is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# openbandparams is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
# Make sure we import the local openbandparams version
import os
import sys
sys.path.insert(0,
os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
from openbandparams import *
# Print all references used to calculate a parameter.
# InAsSb.Eg is presumably the band-gap parameter of the InAsSb alloy and
# get_references() its literature sources — confirm against openbandparams.
parameter = InAsSb.Eg
for ref in parameter.get_references():
    print ref | Python | 0.000001 |
574659044cb501a2ac61006ddc1c389622172207 | add script to calculate cv from energy intervals | compute_cv.py | compute_cv.py | import argparse
import numpy as np
def compute_Z(energies, T, K):
beta = 1./T
N = len(energies)
Z = 0.
U = 0.
U2 = 0.
Cv = 0.
Emin = energies[-1]
Ediff = energies - Emin
for n in xrange(1, len(energies)-2):
# Z += (np.exp(-float(n-1) / K) - np.exp(-float(n+1) / K)) * np.exp(-beta * energies[n])
E = Ediff[n]
Zpref = np.exp(-float(n-1) / K - beta * E) * (1. - np.exp(-2. / K))
Z += Zpref
U += Zpref * (E + Emin)
U2 += Zpref * (E + Emin)**2
U /= Z
U2 /= Z
Cv = (U2 - U**2) * beta**2
return Z, Cv, U, U2
if __name__ == "__main__":
    # CLI: read an energy-interval file and write the heat-capacity table.
    parser = argparse.ArgumentParser(description="load energy intervals and compute cv")
    # parser.add_argument("--db", type=str, nargs=1, help="database filename",
    #                     default="otp.db")
    parser.add_argument("K", type=int, help="number of replicas")
    parser.add_argument("fname", type=str, help="filenames with energies")
    args = parser.parse_args()
    print args.fname
    energies = np.genfromtxt(args.fname)
    # Temperature grid: nT evenly spaced points starting at Tmin (Python 2
    # script — the print statements above/below are py2 syntax).
    Tmin = .02
    Tmax = .5
    nT = 100
    dT = (Tmax-Tmin) / nT
    T = np.array([Tmin + dT*i for i in range(nT)])
    Z, Cv, U, U2 = compute_Z(energies, T, args.K)
    print Z, Cv
    # One output row per temperature: T, heat capacity, <E>, <E**2>.
    with open("cv", "w") as fout:
        fout.write("#T Cv <E> <E**2>\n")
        for vals in zip(T, Cv, U, U2):
            fout.write("%g %g %g %g\n" % vals)
| Python | 0 | |
df6aa6962dd9e265786e7337de69964f2fadfb1d | Create professor.py | djspace/registration/form_models/professor.py | djspace/registration/form_models/professor.py | # -*- coding: utf-8 -*-
from django.conf import settings
from django.db import models, connection
from django.contrib.auth.models import User
from djspace.registration.base_models import *
from djtools.fields import BINARY_CHOICES, SALUTATION_TITLES, STATE_CHOICES
from djtools.fields import GENDER_CHOICES
# Placeholder employer choices. Django `choices=` requires an iterable of
# (value, label) 2-tuples; the original was missing the trailing comma, so
# EMPLOYER evaluated to a single 2-tuple of strings instead of a 1-tuple
# containing one pair.
EMPLOYER = (
    (':\(','OH NO, WE NEED TO FILL THESE OUT'),
)
# (value, label) choices for the primary/secondary interest fields below.
INTEREST = (
    ('aeronauticalaerospace','Aeronautical/Aerospace'),
    ('agricultural','Agricultural'),
    ('anthropology','Anthropology'),
    ('architectural','Architectural'),
    ('architecture','Architecture'),
    ('art','Art'),
    ('astronomy','Astronomy'),
    ('astrophysics','Astrophysics'),
    ('atmosphericscience','Atmospheric Science'),
    ('biochemistry','Biochemistry'),
    ('bioengineering','Bioengineering'),
    ('biology','Biology'),
    ('botany','Botany'),
    ('chemical','Chemistry'),
    ('civil','Civil'),
    ('climatologymeteorology','Climatology/Meteorology'),
    ('computer','Computer'),
    ('computerscience','Computer Science'),
    ('earthsciences','Earth Sciences'),
    ('economics','Economics'),
    ('educationelementaryschool','Elementary Education School'),
    ('educationhighschool','Education High School'),
    ('educationk12','Education K12'),
    ('educationk12administration','Education K12 Administration'),
    ('educationmiddleschool','Education Middle School'),
    ('electricalelectronic','Electrical/Electronic'),
    ('engineering','Engineering'),
    ('engineeringmechanics','Engineering Mechanics'),
    ('engineeringscience','Engineering Science'),
    ('environmental','Environmental'),
    ('environmentalscience','Environmental Science'),
    ('environmentalscienceandglobalclimatechange','Environmental and Global Climate Change'),
    ('generalpublic','General Public'),
    ('geography','Geography'),
    ('geology','Geology'),
    ('geophysics','Geophysics'),
    ('healthsciencenutrition','Health Science/Nutrition'),
    ('history','History'),
    ('industrial','Industrial'),
    ('lifesciences','Life Sciences'),
    ('materialsscienceengineering','Materials Science'),
    ('mathematics','Mathematics'),
    ('mechanical','Mechanical'),
    ('medicinemedicalresearch','Medicine/Medical Research'),
    ('miningpetroleumnaturalgas','Mining/Petroleum and Natural Gas'),
    ('molecularcellbiology','Molecular/Cell Biology'),
    ('nuclear','Nuclear'),
    ('oceanography','Oceanography'),
    ('other','Other'),
    ('philosophy','Philosophy'),
    ('physicalscience','Physical Science'),
    ('physics','Physics'),
    ('planetarygeosciences','Planetary GeoSciences'),
    ('planetarysciences','Planetary Sciences'),
    ('politicalsciencepublicpolicy','Political Science/Public Policy'),
    ('psychology','Psychology'),
    ('socialsciences','Social Sciences'),
    ('sociology','Sociology'),
    ('zoology','Zoology')
)
# (value, label) choices for the race field below.
RACE = (
    ('americanindianalaskanative','American Indian/Alaska Native'),
    ('asian','Asian'),
    ('blackafricanamerican','Black/African American'),
    ('caucasian','Caucasian'),
    ('hispanic','Hispanic'),
    ('nativehawaiianotherpacificislander','Native Hawaiian/Other Pacific Islander'),
    ('otherrace','Other race')
)
class ProfessorInformation(BasePersonalInformation,BaseEmployerInformation):
    """Registration data for a professor.

    Mixes the personal- and employer-information base models from
    djspace.registration.base_models (star-imported) and adds name,
    contact, interest and demographic fields.
    """
    # --- name fields ---
    first = models.CharField(
        "First name",
        max_length=20
    )
    middle = models.CharField(
        "Middle name",
        max_length=20
    )
    last = models.CharField(
        "Last name",
        max_length=20
    )
    citizen = models.BooleanField(
        "US Citizen"
    )
    rocket_comp = models.BooleanField(
        "Tribal or AISES Rocket Competition"
    )
    maiden = models.CharField(
        "Maiden name",
        max_length=20
    )
    additional = models.CharField(
        "Additional name",
        max_length=20
    )
    title_department = models.CharField(
        "Title or Department",
        max_length=20
    )
    webpage = models.CharField(
        "Web page",
        max_length=20
    )
    # --- contact / address fields ---
    street = models.CharField(
        "Street",
        max_length=20
    )
    city = models.CharField(
        "City",
        max_length=20
    )
    state = models.CharField(
        "State",
        max_length=2,
        choices=STATE_CHOICES
    )
    ZIP = models.CharField(
        "Zip code",
        max_length=9
    )
    phone = models.CharField(
        "Phone number",
        max_length=16
    )
    # --- research interests; "other" fields are free-text companions ---
    primary = models.CharField(
        "Primary interest",
        max_length=35,
        choices=INTEREST
    )
    primary_other = models.CharField(
        "Other",
        max_length=35
    )
    secondary = models.CharField(
        "Secondary interest",
        max_length=35,
        choices=INTEREST
    )
    secondary_other = models.CharField(
        "Other",
        max_length=35
    )
    # --- demographics ---
    birthdate = models.DateField(
        "Birthdate",
        auto_now=False
    )
    gender = models.CharField(
        "Gender",
        max_length=8,
        choices=GENDER_CHOICES
    )
    disability = models.BooleanField(
        "Disability"
    )
    race = models.CharField(
        "Race",
        max_length=25,
        choices=RACE
    )
    tribe = models.CharField(
        "Tribe",
        max_length=20
    )
| Python | 0.000001 | |
998acbd4b490ef3807d79c245c27700d3e44d5da | Add a dummy pavement file. | tools/win32/build_scripts/pavement.py | tools/win32/build_scripts/pavement.py | options(
setup=Bunch(
name = "scipy-superpack",
)
)
# Paver task (py2 script): `task` and `options` are injected into the
# pavement namespace by paver at execution time.
@task
def setup():
    # Relies on options.name defined by the options(...) call above.
    print "Setting up package %s" % options.name
| Python | 0.999859 | |
4f6b1a4dae7701cc79a523e96fe812efaa54745b | Add optimizers tests | tests/auto/test_optimizers.py | tests/auto/test_optimizers.py | from __future__ import print_function
import numpy as np
np.random.seed(1337)
from keras.utils.test_utils import get_test_data
from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.utils.np_utils import to_categorical
import unittest
# Small synthetic 2-class dataset (10 features, 1000 train / 200 test);
# labels are one-hot encoded for categorical_crossentropy.
(X_train, y_train), (X_test, y_test) = get_test_data(nb_train=1000, nb_test=200, input_shape=(10,),
                                                     classification=True, nb_class=2)
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
def get_model(input_dim, nb_hidden, output_dim):
    # Two-layer MLP: ReLU hidden layer, softmax output.
    # NOTE(review): Dense(in, out) positional signature is the old Keras 0.x
    # API; modern Keras takes only the output size — confirm pinned version.
    model = Sequential()
    model.add(Dense(input_dim, nb_hidden))
    model.add(Activation('relu'))
    model.add(Dense(nb_hidden, output_dim))
    model.add(Activation('softmax'))
    return model
def test_optimizer(optimizer, target=0.9):
    # Train the small classifier with the given optimizer for 12 epochs and
    # report whether the final validation accuracy clears `target`.
    model = get_model(X_train.shape[1], 10, y_train.shape[1])
    model.compile(loss='categorical_crossentropy', optimizer=optimizer)
    history = model.fit(X_train, y_train, nb_epoch=12, batch_size=16, validation_data=(X_test, y_test), show_accuracy=True, verbose=2)
    return history.history['val_acc'][-1] > target
class TestOptimizers(unittest.TestCase):
    """One smoke test per optimizer: each must reach the default 0.9
    validation accuracy on the synthetic dataset."""
    def test_sgd(self):
        print('test SGD')
        sgd = SGD(lr=0.01, momentum=0.9, nesterov=True)
        self.assertTrue(test_optimizer(sgd))
    def test_rmsprop(self):
        print('test RMSprop')
        self.assertTrue(test_optimizer(RMSprop()))
    def test_adagrad(self):
        print('test Adagrad')
        self.assertTrue(test_optimizer(Adagrad()))
    def test_adadelta(self):
        print('test Adadelta')
        self.assertTrue(test_optimizer(Adadelta()))
    def test_adam(self):
        print('test Adam')
        self.assertTrue(test_optimizer(Adam()))
# Allow running this module directly as a script.
if __name__ == '__main__':
    print('Test optimizers')
    unittest.main()
| Python | 0 | |
751b596482cdb473b1a7f9172501e25d00f15724 | Use default loop on TCP benchmark | tests/benchmark-tcp.py | tests/benchmark-tcp.py |
import sys
sys.path.insert(0, '../')
import signal
import threading
import pyuv
# Canned HTTP response sent to every client that writes a non-empty request.
RESPONSE = "HTTP/1.1 200 OK\r\n" \
           "Content-Type: text/plain\r\n" \
           "Content-Length: 12\r\n" \
           "\r\n" \
           "hello world\n"
def on_client_shutdown(client):
    # Write side shut down: release the handle and forget the client.
    client.close()
    clients.remove(client)
def on_read(client, data):
    # data is None signals EOF: the peer disconnected.
    if data is None:
        client.close()
        clients.remove(client)
        return
    data = data.strip()
    if not data:
        # Ignore empty/whitespace-only reads.
        return
    # Answer any non-empty request with the canned HTTP response, then shut
    # the write side down; on_client_shutdown performs the cleanup.
    client.write(RESPONSE)
    client.shutdown(on_client_shutdown)
def on_connection(server):
    # Accept the incoming client, track it, and start reading from it.
    client = server.accept()
    clients.append(client)
    client.start_read(on_read)
def async_exit(async, data):
    # Runs on the loop thread: close every handle so loop.run() can return.
    # (py2 script: `async` is a valid identifier here but a py3 keyword.)
    [c.close() for c in clients]
    async.close()
    signal_h.close()
    server.close()
def signal_cb(sig, frame):
    # SIGINT handler on the main thread; wake the loop thread to shut down.
    async.send(async_exit)
print "PyUV version %s" % pyuv.__version__
loop = pyuv.Loop.default_loop()
async = pyuv.Async(loop)
clients = []
server = pyuv.TCP(loop)
server.bind(("0.0.0.0", 1234))
server.listen(on_connection)
signal_h = pyuv.Signal(loop)
signal_h.start()
t = threading.Thread(target=loop.run)
t.start()
signal.signal(signal.SIGINT, signal_cb)
signal.pause()
t.join()
print "Stopped!"
|
import sys
sys.path.insert(0, '../')
import signal
import threading
import pyuv
# Canned HTTP response sent to every client that writes a non-empty request.
RESPONSE = "HTTP/1.1 200 OK\r\n" \
           "Content-Type: text/plain\r\n" \
           "Content-Length: 12\r\n" \
           "\r\n" \
           "hello world\n"
def on_client_shutdown(client):
    # Write side shut down: release the handle and forget the client.
    client.close()
    clients.remove(client)
def on_read(client, data):
    # data is None signals EOF: the peer disconnected.
    if data is None:
        client.close()
        clients.remove(client)
        return
    data = data.strip()
    if not data:
        # Ignore empty/whitespace-only reads.
        return
    # Answer any non-empty request with the canned HTTP response, then shut
    # the write side down; on_client_shutdown performs the cleanup.
    client.write(RESPONSE)
    client.shutdown(on_client_shutdown)
def on_connection(server):
    # Accept the incoming client, track it, and start reading from it.
    client = server.accept()
    clients.append(client)
    client.start_read(on_read)
def async_exit(async, data):
    # Runs on the loop thread: close every handle so loop.run() can return.
    # (py2 script: `async` is a valid identifier here but a py3 keyword.)
    [c.close() for c in clients]
    async.close()
    signal_h.close()
    server.close()
def signal_cb(sig, frame):
    # SIGINT handler on the main thread; wake the loop thread to shut down.
    async.send(async_exit)
print "PyUV version %s" % pyuv.__version__
loop = pyuv.Loop()
async = pyuv.Async(loop)
clients = []
server = pyuv.TCP(loop)
server.bind(("0.0.0.0", 1234))
server.listen(on_connection)
signal_h = pyuv.Signal(loop)
signal_h.start()
t = threading.Thread(target=loop.run)
t.start()
signal.signal(signal.SIGINT, signal_cb)
signal.pause()
t.join()
print "Stopped!"
| Python | 0 |
fb29bf9d1fdc1dc3cebb3d6034cb177479bef8c5 | Add some tests for virtool.db.iface.Collection | tests/db/test_iface.py | tests/db/test_iface.py | import pymongo.results
import pytest
from aiohttp.test_utils import make_mocked_coro
import virtool.db.iface
import virtool.utils
@pytest.fixture
async def create_test_collection(mocker, test_motor):
    # Factory fixture: builds a virtool Collection wired to the test Motor
    # database, with a mocked dispatch coroutine and a spied-on processor
    # (real base_processor behaviour, call-recording via mocker.Mock).
    def func(name="samples", projection=None, silent=False) -> virtool.db.iface.Collection:
        processor = mocker.Mock(side_effect=virtool.utils.base_processor)
        return virtool.db.iface.Collection(
            name,
            test_motor[name],
            make_mocked_coro(),
            processor,
            projection,
            silent
        )
    return func
class TestCollection:
    """Tests for virtool.db.iface.Collection: projection application,
    conditional dispatching, and delete operations."""
    @pytest.mark.parametrize("projection", [None, ["name"]], ids=["projection", "no projection"])
    def test_apply_projection(self, projection, create_test_collection):
        """
        Test that :meth:`Collection.apply_projection` returns a projected version of the passed document when
        :attr:`Collection.projection` is defined and returns the document untouched when no projection is defined.

        """
        collection = create_test_collection(projection=projection)
        document = {
            "_id": "foo",
            "name": "Foo",
            "tags": [
                "bar",
                "baz"
            ]
        }
        projected = collection.apply_projection(document)
        if projection:
            # Only _id and name survive the ["name"] projection.
            assert projected == {
                "_id": "foo",
                "name": "Foo"
            }
            return
        assert projected == document
    @pytest.mark.parametrize("condition", [None, "param_silent", "attr_silent"])
    async def test_dispatch_conditionally(self, condition, create_test_collection):
        """
        Test that `dispatch_conditionally` dispatches a message when not suppressed by the `silent` parameter or
        :attr:`Collection.silent`.

        """
        collection = create_test_collection(silent=(condition == "attr_silent"))
        document = {
            "_id": "foo",
            "name": "Foo",
            "tags": [
                "bar",
                "baz"
            ]
        }
        await collection.dispatch_conditionally(document, "update", silent=(condition == "param_silent"))
        if condition is None:
            # Neither suppression path active: the processed document
            # (with _id renamed to id) must be dispatched.
            collection.dispatch.assert_called_with("samples", "update", {
                "id": "foo",
                "name": "Foo",
                "tags": [
                    "bar",
                    "baz"
                ]
            })
            collection.processor.assert_called_with(document)
            return
        assert collection.dispatch.called is False
    @pytest.mark.parametrize("attr_silent", [True, False])
    @pytest.mark.parametrize("param_silent", [True, False])
    async def test_delete_many(self, attr_silent, param_silent, test_motor, create_test_collection):
        # Deleting by query removes all matches and dispatches the removed
        # ids unless either silent flag suppresses the message.
        collection = create_test_collection(silent=attr_silent)
        await test_motor.samples.insert_many([
            {"_id": "foo", "tag": 1},
            {"_id": "bar", "tag": 2},
            {"_id": "baz", "tag": 1}
        ])
        delete_result = await collection.delete_many({"tag": 1}, silent=param_silent)
        assert isinstance(delete_result, pymongo.results.DeleteResult)
        assert delete_result.deleted_count == 2
        if not (attr_silent or param_silent):
            collection.dispatch.assert_called_with("samples", "delete", ["foo", "baz"])
        assert await test_motor.samples.find().to_list(None) == [
            {"_id": "bar", "tag": 2}
        ]
    @pytest.mark.parametrize("attr_silent", [True, False])
    @pytest.mark.parametrize("param_silent", [True, False])
    async def test_delete_one(self, attr_silent, param_silent, test_motor, create_test_collection):
        # Same as test_delete_many but only the first match is removed.
        collection = create_test_collection(silent=attr_silent)
        await test_motor.samples.insert_many([
            {"_id": "foo", "tag": 1},
            {"_id": "bar", "tag": 2},
            {"_id": "baz", "tag": 1}
        ])
        delete_result = await collection.delete_one({"tag": 1}, silent=param_silent)
        assert isinstance(delete_result, pymongo.results.DeleteResult)
        assert delete_result.deleted_count == 1
        if not (attr_silent or param_silent):
            collection.dispatch.assert_called_with("samples", "delete", ["foo"])
        assert await test_motor.samples.find().to_list(None) == [
            {"_id": "bar", "tag": 2},
            {"_id": "baz", "tag": 1}
        ]
| Python | 0 | |
53ca4755b2bb9dbe3bc0bcdc15e9b4d07a13952f | Add tests for disklabel type selection and partition weight. | tests/platform_test.py | tests/platform_test.py | from collections import namedtuple
import unittest
from unittest import mock
from blivet import platform
# Expected partition weight for each (fstype, mountpoint) pair on a given
# platform class; consumed by test_partition_weight below.
Weighted = namedtuple("Weighted", ["fstype", "mountpoint", "cls", "weight"])
weighted = [Weighted(fstype=None, mountpoint="/", cls=platform.Platform, weight=0),
            Weighted(fstype=None, mountpoint="/boot", cls=platform.Platform, weight=2000),
            Weighted(fstype="biosboot", mountpoint=None, cls=platform.X86, weight=5000),
            Weighted(fstype="efi", mountpoint="/boot/efi", cls=platform.EFI, weight=5000),
            Weighted(fstype="efi", mountpoint="/boot/efi", cls=platform.MacEFI, weight=5000),
            Weighted(fstype="efi", mountpoint="/boot/efi", cls=platform.Aarch64EFI, weight=5000),
            Weighted(fstype="prepboot", mountpoint=None, cls=platform.IPSeriesPPC, weight=5000),
            Weighted(fstype="appleboot", mountpoint=None, cls=platform.NewWorldPPC, weight=5000),
            Weighted(fstype="vfat", mountpoint="/boot/uboot", cls=platform.omapARM, weight=6000),
            Weighted(fstype=None, mountpoint="/", cls=platform.ARM, weight=-100),
            Weighted(fstype=None, mountpoint="/", cls=platform.omapARM, weight=-100)]
class PlatformTestCase(unittest.TestCase):
    """Tests for blivet.platform: default disklabel selection, best-label
    heuristics, and partition weighting."""
    def test_default_disklabel_type(self):
        # Iterate every Platform subclass exposed by the module; non-class
        # attributes and non-Platform classes are skipped.
        for name in dir(platform):
            cls = getattr(platform, name)
            try:
                if not issubclass(cls, platform.Platform):
                    # not a platform class instance
                    continue
            except TypeError:
                # not a class
                continue
            if not cls._disklabel_types:
                continue
            obj = cls()
            # The default must be the first entry of _disklabel_types, and
            # set_default_disklabel_type must be able to override it.
            type_one = obj.__class__._disklabel_types[0]
            self.assertEqual(obj.default_disklabel_type, type_one)
            if len(obj._disklabel_types) > 1:
                new_default = obj.__class__._disklabel_types[-1]
                obj.set_default_disklabel_type(new_default)
                self.assertEqual(obj.default_disklabel_type, new_default)
    def test_get_best_disklabel_type(self):
        def fresh_disk(device, ty): # pylint: disable=unused-argument
            """ Return fake parted.Disk w/ maxPartitionStartSector values suitable for testing. """
            max_start = 1001
            if ty == "gpt":
                max_start = 5001
            return mock.Mock(maxPartitionStartSector=max_start)
        for name in dir(platform):
            cls = getattr(platform, name)
            try:
                if not issubclass(cls, platform.Platform):
                    # not a platform class instance
                    continue
            except TypeError:
                # not a class
                continue
            if not cls._disklabel_types:
                continue
            obj = cls()
            """
                1. is always in _disklabel_types
                2. is the default unless the device is too long for the default
                3. is msdos for fba dasd on S390
                4. is dasd for non-fba dasd on S390
            """
            length = 1000
            blivetdev = mock.Mock()
            blivetdev.name = "testdev1"
            parteddev = mock.Mock()
            parteddev.length = length
            # Patch out parted and s390 blockdev access so no real hardware
            # or libparted calls happen.
            with mock.patch("blivet.platform.parted") as _parted:
                _parted.freshDisk.return_value = mock.Mock(maxPartitionStartSector=length + 1)
                _parted.Device.return_value = parteddev
                with mock.patch("blivet.platform.blockdev.s390") as _s390:
                    if name == "S390":
                        _s390.dasd_is_fba.return_value = False
                        parteddev.type = platform.parted.DEVICE_DASD
                        self.assertEqual(obj.best_disklabel_type(blivetdev), "dasd")
                        _s390.dasd_is_fba.return_value = True
                        self.assertEqual(obj.best_disklabel_type(blivetdev), "msdos")
                        _s390.dasd_is_fba.return_value = False
                        parteddev.type = platform.parted.DEVICE_SCSI
                    best_label_type = obj.best_disklabel_type(blivetdev)
                    self.assertEqual(best_label_type, obj.default_disklabel_type)
                    if cls._disklabel_types != ["msdos", "gpt"]:
                        continue
                    # Now just make sure that we suggest gpt for devices longer than the msdos
                    # disklabel maximum.
                    _parted.freshDisk.return_value = mock.Mock()
                    _parted.freshDisk.side_effect = fresh_disk
                    parteddev.length = 4000
                    best_label_type = obj.best_disklabel_type(blivetdev)
                    self.assertEqual(obj.default_disklabel_type, "msdos")
                    self.assertEqual(best_label_type, "gpt")
    def test_partition_weight(self):
        # Every (fstype, mountpoint) pair must yield the expected weight.
        for spec in weighted:
            pl = spec.cls()
            with self.subTest(spec=spec):
                self.assertEqual(pl.weight(fstype=spec.fstype, mountpoint=spec.mountpoint), spec.weight)
| Python | 0 | |
6dbd81fb4b59e7394318cbd0b0f0fdb31fcd6dd2 | Add unit test to ensure we don't diff bare repos | tests/unit/states/test_git.py | tests/unit/states/test_git.py | # -*- coding: utf-8 -*-
'''
:codeauthor: Erik Johnson <erik@saltstack.com>
'''
# Import Python libs
from __future__ import absolute_import
import logging
import os
# Import Salt Testing Libs
from tests.support.helpers import with_tempdir
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
patch,
DEFAULT,
NO_MOCK,
NO_MOCK_REASON,
)
# Import Salt Libs
import salt.states.git as git_state # Don't potentially shadow GitPython
log = logging.getLogger(__name__)
@skipIf(NO_MOCK, NO_MOCK_REASON)
class GitTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.states.git
    '''
    def setup_loader_modules(self):
        # Minimal dunder environment for the git state module under test.
        return {
            git_state: {
                '__env__': 'base',
                '__opts__': {'test': False},
                '__salt__': {},
            }
        }
    @with_tempdir()
    def test_latest_no_diff_for_bare_repo(self, target):
        '''
        This test ensures that we don't attempt to diff when cloning a repo
        using either bare=True or mirror=True.
        '''
        name = 'https://foo.com/bar/baz.git'
        gitdir = os.path.join(target, 'refs')
        # os.path.isdir behaves normally except for the fake bare repo's
        # refs dir, which must report as existing.
        isdir_mock = MagicMock(
            side_effect=lambda path: DEFAULT if path != gitdir else True)
        branches = ['foo', 'bar', 'baz']
        tags = ['v1.1.0', 'v.1.1.1', 'v1.2.0']
        local_head = 'b9ef06ab6b7524eb7c27d740dbbd5109c6d75ee4'
        remote_head = 'eef672c1ec9b8e613905dbcd22a4612e31162807'
        git_diff = Mock()
        # Stub out every git execution-module call the state relies on.
        dunder_salt = {
            'git.current_branch': MagicMock(return_value=branches[0]),
            'git.diff': git_diff,
            'git.fetch': MagicMock(return_value={}),
            'git.is_worktree': MagicMock(return_value=False),
            'git.list_branches': MagicMock(return_value=branches),
            'git.list_tags': MagicMock(return_value=tags),
            'git.remote_refs': MagicMock(return_value={'HEAD': remote_head}),
            'git.remotes': MagicMock(return_value={
                'origin': {'fetch': name, 'push': name},
            }),
            'git.rev_parse': MagicMock(side_effect=git_state.CommandExecutionError()),
            'git.revision': MagicMock(return_value=local_head),
            'git.version': MagicMock(return_value='1.8.3.1'),
        }
        with patch('os.path.isdir', isdir_mock), \
                patch.dict(git_state.__salt__, dunder_salt):
            result = git_state.latest(
                name=name,
                target=target,
                mirror=True, # mirror=True implies bare=True
            )
            assert result['result'] is True, result
            # The crucial assertion: no diff is attempted for a bare clone.
            git_diff.assert_not_called()
| Python | 0 | |
469eedab89d22a1051e9d3f6f7f6c94ba946fb37 | Add server tests for JOIN. | irctest/server_tests/test_channel_operations.py | irctest/server_tests/test_channel_operations.py | """
Section 3.2 of RFC 2812
<https://tools.ietf.org/html/rfc2812#section-3.2>
"""
from irctest import cases
from irctest.irc_utils.message_parser import Message
class JoinTestCase(cases.BaseServerTestCase):
    """Server-side tests for the JOIN command (RFC 1459 / RFC 2812)."""
    def testJoin(self):
        """“If a JOIN is successful, the user receives a JOIN message as
        confirmation and is then sent the channel's topic (using RPL_TOPIC) and
        the list of users who are on the channel (using RPL_NAMREPLY), which
        MUST include the user joining.”
        -- <https://tools.ietf.org/html/rfc2812#section-3.2.1>
        “If a JOIN is successful, the user is then sent the channel's topic
        (using RPL_TOPIC) and the list of users who are on the channel (using
        RPL_NAMREPLY), which must include the user joining.”
        -- <https://tools.ietf.org/html/rfc1459#section-4.2.1>
        """
        self.connectClient('foo')
        self.sendLine(1, 'JOIN #chan')
        m = self.getMessage(1)
        self.assertMessageEqual(m, command='JOIN', params=['#chan'])
        m = self.getMessage(1)
        got_topic = False
        # The server may send the topic either before or after the name
        # reply; both orderings are accepted below.
        if m.command in ('331', '332'): # RPL_NOTOPIC, RPL_TOPIC
            got_topic = True
            m = self.getMessage(1)
            m = self.assertMessageEqual(m, command='353') # RPL_NAMREPLY
            m = self.getMessage(1)
            m = self.assertMessageEqual(m, command='366') # RPL_ENDOFNAMES
        else:
            m = self.assertMessageEqual(m, command='353') # RPL_NAMREPLY
            m = self.getMessage(1)
            m = self.assertMessageEqual(m, command='366') # RPL_ENDOFNAMES
            m = self.getMessage(1)
            self.assertIn(m.command, ('331', '332'), m) # RPL_NOTOPIC, RPL_TOPIC
    def testJoinTwice(self):
        # Joining the same channel twice: only checks the first JOIN; the
        # expected server reaction to the second is deliberately left open.
        self.connectClient('foo')
        self.sendLine(1, 'JOIN #chan')
        m = self.getMessage(1)
        self.assertMessageEqual(m, command='JOIN', params=['#chan'])
        self.sendLine(1, 'JOIN #chan')
        # What should we do now?
| Python | 0 | |
ede8282eed8c198fc728985515e886e5a67ba3e0 | To create appropriate Dir structure | MROCPdjangoForm/ocpipeline/createDirStruct.py | MROCPdjangoForm/ocpipeline/createDirStruct.py | import os
import argparse
from shutil import move, rmtree # For moving files
'''
Module creates a directory structure as defined by a string userDefProjectDir & moves files in
tuple args to the userDefProjectDir
'''
def createDirStruct(userDefProjectDir, uploadDirPath, endingDir, tempDirPath, moveFileNames):
    '''
    Create the data-product folders for a project and move the first three
    uploaded files out of their temporary location.

    userDefProjectDir - the user defined project directory structure
    uploadDirPath - the location of the files to be placed in userDefProjectDir
    endingDir - the directory where the files in the temp location should be moved to
    tempDirPath - temp directory holding files we are concerned with (currently unused)
    moveFileNames - tuple of file names in temporary location uploadDirPath
    '''
    dataProds = ['derivatives/', 'rawdata/', 'graphs/', 'graphInvariants/']
    for folder in dataProds:
        # os.path.join copes with a project path given without a trailing
        # slash (plain concatenation would silently mangle the path).
        folderPath = os.path.join(userDefProjectDir, folder)
        if not os.path.exists(folderPath):
            os.makedirs(folderPath)
        else:
            print("\n Folder does exist!")

    ''' Move files to appropriate location '''
    for fileName in moveFileNames[:3]:
        sourcePath = os.path.join(uploadDirPath, fileName)
        # If it's already there... leave it alone & use the old one.
        if not os.path.exists(os.path.join(endingDir, fileName)):
            move(sourcePath, endingDir)  # Where to save derivatives
        else:
            print('\n File does exist!')

    ''' Delete project in temp folder'''
    rmtree(uploadDirPath)
def main():
    """Command-line entry point: parse the five positional arguments and
    forward them to createDirStruct."""
    parser = argparse.ArgumentParser(description='Create appropriate dir structure for project & move files that are in temp folder')
    parser.add_argument('userDefProjectDir', action="store")
    parser.add_argument('uploadDirPath', action="store")
    parser.add_argument('endingDir', action="store")
    parser.add_argument('tempDirPath', action="store")
    parser.add_argument('moveFileNames', action="store")

    result = parser.parse_args()
    # Bug fix: the previous call used attributes that were never defined
    # (result.dirName / result.zipOutName) and passed only two of the five
    # required arguments, so the script could never run.
    # NOTE(review): moveFileNames arrives as a single string — presumably a
    # delimited list of names; confirm the expected delimiter with callers.
    createDirStruct(result.userDefProjectDir, result.uploadDirPath,
                    result.endingDir, result.tempDirPath, result.moveFileNames)
if __name__ == '__main__':
main() | Python | 0.998603 | |
21742da132aeb9b834b128f7a7d01b7a2173137a | Add a tcp_server which simulates graphite-relay | tcp_server.py | tcp_server.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
"""
A very simple TCP server for simulating a graphite relay, copied-paste from
Python documentation. Few things were adjusted to make pylint happy and print
incoming data.
"""
import asyncio
class EchoServerClientProtocol(asyncio.Protocol):
    """Connection handler that logs peers and prints whatever they send.

    One instance is created per client connection; ``peername`` and
    ``transport`` are populated once the connection is established.
    """

    def __init__(self):
        # Not connected yet; both attributes are set in connection_made().
        self.peername = None
        self.transport = None

    def connection_made(self, transport):
        peer = transport.get_extra_info('peername')
        self.peername = peer
        print('Connection from {}'.format(peer))
        self.transport = transport

    def data_received(self, data):
        # Decode the raw bytes (UTF-8 by default) and log the payload.
        print(data.decode())

    def connection_lost(self, exc):
        print('client {} closed connection {}'.format(self.peername, exc))
def main():
    """Run the echo server on 127.0.0.1:39991 until interrupted."""
    event_loop = asyncio.get_event_loop()
    # A fresh EchoServerClientProtocol is instantiated for every client.
    server_coro = event_loop.create_server(
        EchoServerClientProtocol, '127.0.0.1', 39991)
    server = event_loop.run_until_complete(server_coro)

    print('Serving on {}'.format(server.sockets[0].getsockname()))
    try:
        # Block here, serving requests, until Ctrl+C is pressed.
        event_loop.run_forever()
    except KeyboardInterrupt:
        pass

    # Shut down cleanly: stop accepting, drain, then dispose of the loop.
    server.close()
    event_loop.run_until_complete(server.wait_closed())
    event_loop.close()
# This is the standard boilerplate that calls the main() function.
if __name__ == '__main__':
main()
| Python | 0.000005 | |
ef52b314eb5e15c34d8b034d7e6f7bdd727b6586 | Add sp500_extractor_v1 version that does not use BeautifulSoup. | Code/sp500_extractor_v1_no_bs.py | Code/sp500_extractor_v1_no_bs.py | import csv
from lxml import html
import time
import requests
"""
Make it work, make it right, make it fast
Extract the tickers from the S&P 500 table on Wikipedia, process them into
a list and save them into a CSV file.
# Retrieve HTML from URL with requests
http://docs.python-requests.org/en/master/user/quickstart/
# HTML table structure
http://www.w3schools.com/html/html_tables.asp
# Python HTML scraping
http://docs.python-guide.org/en/latest/scenarios/scrape/
# HTML table parsing with xpath
http://www.w3schools.com/xml/xpath_syntax.asp
# Save to CSV
http://gis.stackexchange.com/a/72476
"""
url = 'https://en.wikipedia.org/wiki/List_of_S%26P_500_companies'
csv_output = 'sp500_tickers.csv'

start_time = time.time()

# Download the S&P 500 page from Wikipedia and parse the raw HTML into an
# lxml element tree.
raw_html = requests.get(url).content
html_string = html.fromstring(raw_html)

ticker_list = []

# The constituents table is the first <table> on the page.  (Loop variables
# renamed for accuracy: the outer loop yields the table element itself and
# the inner loop yields its <tr> rows — the old names suggested otherwise.)
for html_table in html_string.xpath('//table[1]'):
    for table_row in html_table.xpath('.//tr'):
        # Text of every <td> cell in this row; header rows (<th>) yield [].
        cell_texts = [item.text_content() for item in table_row.xpath('.//td')]
        # Only process table rows that have values
        if cell_texts:
            # Tickers are in the first column in the row (first list element)
            ticker = cell_texts[0].strip()
            ticker_list.append(ticker)

# Alphabetize ticker list
ticker_list.sort()
print(ticker_list)

# Save the ticker list to a csv file, one ticker per row.  The file handle
# no longer shadows the built-in ``file``.
with open(csv_output, 'w', newline='') as csv_file:
    writer = csv.writer(csv_file)
    for ticker in ticker_list:
        writer.writerow([ticker])

end_time = time.time()
run_time = round(end_time - start_time, 2)
print('Finished extracting the S&P 500 ticker list in %s seconds' % run_time)
| Python | 0 | |
def fibonacci(n):
    """Return the n-th Fibonacci number (fibonacci(0) = 0, fibonacci(1) = 1).

    Rewritten iteratively: the previous double-recursive version recomputed
    the same subproblems and took exponential time (and recursed forever for
    negative n); this runs in O(n) and returns 0 for n <= 0.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a

for n in range(0, 10):
    print(fibonacci(n))
| Python | 0.00001 | |
fefb13108a151c5cbfe8c6acd5b94a480dac98ec | Add test for NPairLossScheme | tests/test_datasets.py | tests/test_datasets.py | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 21 20:30:26 2017
@author: sakurai
"""
import unittest
import numpy as np
from ..datasets.data_provider import NPairLossScheme
class TestNPairLossScheme(unittest.TestCase):
    """Unit tests for NPairLossScheme's batch-index generation."""

    def test_pairs_of_indexes(self):
        """Each batch must be anchor indexes followed by positive indexes
        with matching, mutually distinct class labels."""
        batch_size = 20
        # 100 examples: ten classes with ten examples each.
        labels = sum([[i]*10 for i in range(10)], [])
        scheme = NPairLossScheme(labels, batch_size)
        it = scheme.get_request_iterator()
        # Bug fix: use floor division — under Python 3 ``batch_size / 2`` is
        # a float, and slicing with a float index raises TypeError.
        half = batch_size // 2
        for i in range(5):
            indexes = next(it)
            a_indexes = indexes[:half]
            p_indexes = indexes[half:]
            a_labels = np.array(labels)[a_indexes]
            p_labels = np.array(labels)[p_indexes]
            # Anchors and positives pair up class-wise...
            np.testing.assert_array_equal(a_labels, p_labels)
            # ...and no class appears twice within the anchor half.
            np.testing.assert_equal(len(a_labels), len(np.unique(a_labels)))
if __name__ == '__main__':
unittest.main()
| Python | 0 | |
33375a9333852eafa1bf262fb30f5d827c4534f7 | Create networkx.py | networkx.py | networkx.py | import networkx
| Python | 0.000007 | |
1c2330d9e45b9e87ed70848fd0ce192b0d06c904 | Update build_status.py | infra/auto-setup/build_status.py | infra/auto-setup/build_status.py | #!/usr/bin/env python
import codecs
import datetime
import os
import subprocess
import sys
import traceback
import jenkins
import jinja2
from jinja2 import Environment, FileSystemLoader
JENKINS_SERVER = ('localhost', 8080)
LOGS_BUCKET = 'oss-fuzz-build-logs'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
class Result(object):
  """Build outcome for one project: its name plus the console log text."""

  def __init__(self, name, output):
    self.output = output
    self.name = name
def get_build_results(server):
  """Scan Jenkins jobs under 'projects/' and split them by build outcome.

  Returns (successes, failures): two lists of Result objects, each holding a
  project name and the console output of its last completed build.
  """
  successes = []
  failures = []

  for job in server.get_jobs(1):
    try:
      name = job['fullname']
      if not name.startswith('projects/'):
        continue
      # Py3-compatibility fix: print as a function (identical output for a
      # single argument under Python 2, valid syntax under Python 3).
      print(name)
      project = name[len('projects/'):]

      info = server.get_job_info(name)
      last_build_number = info['lastCompletedBuild']['number']
      last_failed_builder_number = info['lastFailedBuild']['number']

      # If the last completed build is also the last failed one, the project
      # is currently broken.
      if last_build_number == last_failed_builder_number:
        failures.append(Result(
            project,
            server.get_build_console_output(name, last_build_number)))
      else:
        successes.append(Result(
            project,
            server.get_build_console_output(name, last_build_number)))
    except Exception:
      # Keep scanning on malformed/racy job data, but log the traceback.
      # (Removed the unused ``as e`` binding.)
      traceback.print_exc()

  return successes, failures
def upload_status(successes, failures):
  """Render the main status page and copy it into the logs bucket."""
  template_dir = os.path.join(SCRIPT_DIR, 'templates')
  env = Environment(loader=FileSystemLoader(template_dir))

  with open('status.html', 'w') as f:
    # Render inside the ``with`` so the file is opened first, exactly as
    # before.
    page = env.get_template('status_template.html').render(
        failures=failures, successes=successes,
        last_updated=datetime.datetime.utcnow().ctime())
    f.write(page)

  # Push the rendered page to GCS.
  subprocess.check_output(['gsutil', 'cp', 'status.html', 'gs://' +
                           LOGS_BUCKET], stderr=subprocess.STDOUT)
def upload_build_logs(successes, failures):
  """Copy every project's console log into its build_logs/ GCS folder."""
  # Failures first, then successes (same ordering as before).
  all_results = failures + successes
  for result in all_results:
    # Stage the log locally, then ship it with gsutil.
    with codecs.open('latest.txt', 'w', encoding='utf-8') as log_file:
      log_file.write(result.output)
    destination = 'gs://%s/build_logs/%s/' % (LOGS_BUCKET, result.name)
    subprocess.check_output(['gsutil', 'cp', 'latest.txt', destination],
                            stderr=subprocess.STDOUT)
def main():
  """Poll Jenkins once and publish the status page plus per-project logs."""
  username, password = get_jenkins_login()
  server = jenkins.Jenkins('http://%s:%d' % JENKINS_SERVER,
                           username=username, password=password)

  successes, failures = get_build_results(server)
  upload_status(successes, failures)
  upload_build_logs(successes, failures)
def get_jenkins_login():
  """Read Jenkins credentials from the environment.

  Returns a (username, password) tuple; either element is None when the
  corresponding variable (JENKINS_USER / JENKINS_PASS) is unset.
  """
  return os.getenv('JENKINS_USER'), os.getenv('JENKINS_PASS')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import codecs
import datetime
import os
import subprocess
import sys
import jenkins
import jinja2
from jinja2 import Environment, FileSystemLoader
JENKINS_SERVER = ('localhost', 8080)
LOGS_BUCKET = 'oss-fuzz-build-logs'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
class Result(object):
"""Result."""
def __init__(self, name, output):
self.name = name
self.output = output
def get_build_results(server):
"""Return successes, failures."""
successes = []
failures = []
for job in server.get_jobs(1):
try:
name = job['fullname']
if not name.startswith('projects/'):
continue
print name
project = name[len('projects/'):]
info = server.get_job_info(name)
last_build_number = info['lastCompletedBuild']['number']
last_failed_builder_number = info['lastFailedBuild']['number']
if last_build_number == last_failed_builder_number:
failures.append(Result(
project,
server.get_build_console_output(name, last_build_number)))
else:
successes.append(Result(
project,
server.get_build_console_output(name, last_build_number)))
except Exception as e:
print >>sys.stderr, e
return successes, failures
def upload_status(successes, failures):
"""Upload main status page."""
env = Environment(loader=FileSystemLoader(os.path.join(SCRIPT_DIR,
'templates')))
with open('status.html', 'w') as f:
f.write(
env.get_template('status_template.html').render(
failures=failures, successes=successes,
last_updated=datetime.datetime.utcnow().ctime()))
subprocess.check_output(['gsutil', 'cp', 'status.html', 'gs://' +
LOGS_BUCKET], stderr=subprocess.STDOUT)
def upload_build_logs(successes, failures):
"""Upload individual build logs."""
for result in failures + successes:
with codecs.open('latest.txt', 'w', encoding='utf-8') as f:
f.write(result.output)
subprocess.check_output(['gsutil', 'cp', 'latest.txt',
'gs://%s/build_logs/%s/' %
(LOGS_BUCKET, result.name)],
stderr=subprocess.STDOUT)
def main():
jenkins_login = get_jenkins_login()
server = jenkins.Jenkins('http://%s:%d' % JENKINS_SERVER,
username=jenkins_login[0], password=jenkins_login[1])
successes, failures = get_build_results(server)
upload_status(successes, failures)
upload_build_logs(successes, failures)
def get_jenkins_login():
"""Returns (username, password) for jenkins."""
username = os.getenv('JENKINS_USER')
password = os.getenv('JENKINS_PASS')
return username, password
if __name__ == '__main__':
main()
| Python | 0.000002 |
98a053f945e2c7cc01d8fbdec374ab90305bc11f | Create new files. | ospathex.py | ospathex.py | #!/usr/bin/python
#_*_coding:utf-8_*_
import os
# Pick a usable temp directory: 'temp' relative to the cwd, else the Windows
# system temp; fall back to '' (skip the demo) when neither exists.
for tmpdir in ('temp', r'c:\windows\temp'):
    if os.path.isdir(tmpdir):
        break
else:
    # for/else: runs only when the loop finished without break.
    print('no temp directory available')
    tmpdir = ''

# The whole demo is skipped when no temp directory was found.  All prints
# below were converted from Python-2-only ``print x`` statements to the
# function form, which behaves identically for a single argument on Python 2
# and also runs on Python 3.
if tmpdir:
    os.chdir(tmpdir)
    cwd = os.getcwd()
    print('*** current temporary directory')
    print(cwd)

    print('*** creating temporary directory...')
    os.mkdir('example')
    os.chdir('example')
    cwd = os.getcwd()
    print('*** new working directory: ')
    print(cwd)
    print('*** original directory listing: ')
    print(os.listdir(cwd))

    print('*** creating test file...')
    fobj = open('test', 'w')
    fobj.write('foo\n')
    fobj.write('bar\n')
    fobj.close()
    print('*** updated directory listing: ')
    print(os.listdir(cwd))

    # NOTE(review): the message says 'filetest.txt' but the file is actually
    # renamed to 'filename.txt' — confirm which name was intended.
    print("*** renameing 'test' to 'filetest.txt'")
    os.rename('test', 'filename.txt')
    print('*** updated directory listing: ')
    print(os.listdir(cwd))

    path = os.path.join(cwd, os.listdir(cwd)[0])
    print('*** full file pathname')
    print(path)
    print('*** (pathname, basename)== ')
    print(os.path.split(path))
    print('*** (filename, extension) == ')
    print(os.path.splitext(os.path.basename(path)))

    print('*** displaying file contents: ')
    fobj = open(path)
    for eachLine in fobj:
        print(eachLine)
    fobj.close()

    print('*** deleting test file')
    os.remove(path)
    print('*** updated directory listing: ')
    print(os.listdir(cwd))

    os.chdir(os.pardir)
    print('*** deleting test directory')
    os.rmdir('example')
print '*** DONE!' | Python | 0 | |
65717865460c725e033d3aa81695c64a92e31a5b | Add test scripts. | test/sniff.py | test/sniff.py | # Test transmitting data after exiting sniff mode
import time
import bluetool
from bluetool.core import HCICoordinator, HCIWorker, HCIWorkerProxy, HCITask, BREDRHelper
from bluetool.bluez import ba2str
from bluetool.error import HCICommandError, TestError, HCITimeoutError
import bluetool.bluez as bluez
import bluetool.command as btcmd
import bluetool.event as btevt
from bluetool.data import HCIACLData
CONN_TIMEOUT_MS = 10000
HCI_ACL_MAX_SIZE = 27
NUM_ACL_DATA = 1600
class LegacyMaster(HCIWorker):
    """Master-role worker: initiates the BR/EDR connection, coordinates the
    sniff-mode enter/exit handshake with the slave, then receives and acks
    the ACL test data."""
    def __init__(self, hci_sock, coord, pipe, peer_addr=None):
        super(LegacyMaster, self).__init__(hci_sock, coord, pipe)
        # BD_ADDR of the peer (slave); wired up by the coordinator.
        self.peer_addr = peer_addr
    def create_test_acl_data(self, num_acl_data=NUM_ACL_DATA):
        """Build num_acl_data ACL packets; each payload is a run of
        HCI_ACL_MAX_SIZE consecutive byte values starting one higher (mod
        256) than the previous packet's, so drops/reorders are detectable."""
        data = [None]*num_acl_data
        data_i = 0
        for i in xrange(0, num_acl_data):
            data[i] = HCIACLData(self.conn_handle, 0x1, 0x0,
                    ''.join(chr(c & 0xff) for c in xrange(data_i, data_i + HCI_ACL_MAX_SIZE)))
            data_i = (data_i + 1) % 256
        return data
    def main(self):
        """Worker entry point; each wait()/send() below pairs with a
        recv()/signal() in LegacyTester.main."""
        helper = BREDRHelper(self.sock)
        try:
            helper.reset()
        except HCICommandError as err:
            self.log.warning('cannot reset', exc_info=True)
            return
        # Connect to the slave and remember the connection handle.
        helper.create_connection_by_peer_addr(self.peer_addr)
        evt = helper.wait_connection_complete()
        if evt.status != 0:
            raise TestError('connection fail: status: 0x{:02x}'.format(evt.status))
        self.conn_handle = evt.conn_handle
        self.log.info('connect to %s', ba2str(evt.bd_addr))
        self.wait() # Wait slave to connect
        # Allow the link-policy modes (0x5) needed for the sniff test below.
        helper.write_link_policy(self.conn_handle, 0x5)
        self.send(True)
        self.wait()
        helper.wait_hci_evt(lambda evt: evt.code == bluez.EVT_MODE_CHANGE) # Wait to enter LPS (sniff mode)
        helper.wait_hci_evt(lambda evt: evt.code == bluez.EVT_MODE_CHANGE) # Wait to exit LPS (active mode)
        self.send(True) # tell exiting sniff mode
        # Receive loop: the coordinator tells us how many ACL packets to
        # expect; zero (or less) means the test is over.
        while True:
            num_acl_data = self.recv()
            if num_acl_data <= 0:
                break
            try:
                i = 0
                while i < num_acl_data:
                    pkt_type, pkt = self.recv_hci_pkt()
                    if pkt_type == bluez.HCI_ACLDATA_PKT:
                        print i, pkt
                        i = i + 1
                        self.send(True) # tell that ACL data are successfully received
                    else:
                        # Non-ACL traffic (e.g. events) is just logged.
                        self.log.info('ptype: {}, {}'.format(pkt_type, pkt))
            except (HCICommandError, HCITimeoutError):
                self.log.warning('fail to connect to initiator', exc_info=True)
        helper.disconnect(self.conn_handle, 0x13)
        helper.wait_disconnection_complete(self.conn_handle)
class LegacySlave(HCIWorker):
    """Slave-role worker: accepts the connection, drives the sniff-mode
    enter/exit sequence, then transmits the generated ACL test data."""
    def __init__(self, hci_sock, coord, pipe, peer_addr=None):
        super(LegacySlave, self).__init__(hci_sock, coord, pipe)
        self.peer_addr = peer_addr
        # Reserved for host-to-controller flow control (currently unused;
        # see the commented-out setup_h2c_flow_control call below).
        self.num_acl_tx_not_acked = 0
    def create_test_acl_data(self, num_acl_data=NUM_ACL_DATA):
        """Build num_acl_data ACL packets with rolling byte-pattern payloads
        (same generator as LegacyMaster.create_test_acl_data)."""
        data = [None]*num_acl_data
        data_i = 0
        for i in xrange(0, num_acl_data):
            data[i] = HCIACLData(self.conn_handle, 0x1, 0x0,
                    ''.join(chr(c & 0xff) for c in xrange(data_i, data_i + HCI_ACL_MAX_SIZE)))
            data_i = (data_i + 1) % 256
        return data
    def main(self):
        """Worker entry point; each wait()/send() below pairs with a
        recv()/signal() in LegacyTester.main."""
        helper = BREDRHelper(self.sock)
        try:
            helper.reset()
            #self.setup_h2c_flow_control()
        except HCICommandError:
            self.log.warning('cannot reset', exc_info=True)
            return
        helper.accept_connection()
        evt = helper.wait_connection_complete()
        if evt.status != 0:
            raise TestError('connection fail: status: 0x{:02x}'.format(evt.status))
        self.conn_handle = evt.conn_handle
        self.log.info('connect to %s', ba2str(evt.bd_addr))
        self.send(True) # trigger master to send data
        helper.write_link_policy(self.conn_handle, 0x5)
        self.send(True)
        self.wait()
        # Request sniff mode (max/min interval 996 slots, attempt 10,
        # timeout 10) and wait for the controller's mode-change event.
        helper.sniff_mode(self.conn_handle, 996, 996, 10, 10)
        helper.wait_hci_evt(lambda evt: evt.code == bluez.EVT_MODE_CHANGE)
        time.sleep(20) # Wait to enter LPS
        helper.exit_sniff_mode(self.conn_handle)
        helper.wait_hci_evt(lambda evt: evt.code == bluez.EVT_MODE_CHANGE)
        self.wait() # Wait for remote device to exit sniff mode
        #time.sleep(1)
        self.send(True) # Signal to send data
        # Transmit loop: send the requested number of ACL packets, pausing
        # after each one until the coordinator confirms receipt.
        while True:
            num_acl_data = self.recv()
            if num_acl_data <= 0:
                break
            try:
                data = self.create_test_acl_data(num_acl_data)
                for d in data:
                    self.send_acl_data(d)
                    self.wait() # Wait for remote device to receive data
            except (HCICommandError, TestError):
                self.log.warning('fail to create connection by white list', exc_info=True)
        helper.wait_disconnection_complete(self.conn_handle, CONN_TIMEOUT_MS)
class LegacyTester(HCICoordinator):
    """Coordinator: spawns one master and one slave worker and sequences the
    hand-shakes between them over the worker pipes."""
    def __init__(self):
        super(LegacyTester, self).__init__()
        self.worker.append(HCIWorkerProxy(0, self, LegacyMaster))
        self.worker.append(HCIWorkerProxy(1, self, LegacySlave))
        # Cross-wire the peer addresses so each side knows the other.
        self.worker[0].worker.peer_addr = self.worker[1].bd_addr
        self.worker[1].worker.peer_addr = self.worker[0].bd_addr
    def main(self):
        """Drives the protocol: each recv()/signal() pairs with a
        send()/wait() in the corresponding worker's main()."""
        print 'master[{}], slave[{}]'.format(ba2str(self.worker[0].bd_addr), ba2str(self.worker[1].bd_addr))
        # Wait connection establishment
        self.worker[1].recv()
        self.worker[0].signal()
        # Wait write link policy working
        self.worker[0].recv()
        self.worker[1].recv()
        self.worker[0].signal()
        self.worker[1].signal()
        # Wait for remote device to exit sniff mode
        self.worker[0].recv()
        self.worker[1].signal()
        # Wait to send data
        self.worker[1].recv()
        # Run one batch of 100 ACL packets, acking each transfer in lockstep.
        num_acl_data = 100
        self.worker[0].send(num_acl_data)
        self.worker[1].send(num_acl_data)
        for i in xrange(0, num_acl_data):
            self.worker[0].recv()
            self.worker[1].signal()
        # Tell both workers the test is finished (0 packets).
        self.worker[0].send(0)
        self.worker[1].send(0)
if __name__ == "__main__":
bluetool.log_to_stream()
tester = LegacyTester()
tester.run()
| Python | 0 | |
81ade3168faa68ef43456cc35a122b9ef493a23e | Add script to plot MS flag rate and acq fail rate | plot_ms_flag_acq_fails.py | plot_ms_flag_acq_fails.py | from __future__ import division
import matplotlib.pyplot as plt
from astropy.table import Table
import numpy as np
from Ska.DBI import DBI
from chandra_aca import star_probs
db = DBI(dbi='sybase', server='sybase', user='aca_read')
stats = db.fetchall('SELECT * from trak_stats_data '
'WHERE kalman_datestart > "2014:180" '
'AND aoacmag_median is not NULL')
stats = Table(stats)
mags = stats['aoacmag_median']
ok = (mags > 9) & (mags < 11)
stats = stats[ok]
mags = mags[ok]
stats['frac_ms'] = stats['mult_star_samples'] / stats['n_samples']
stats['mag_bin'] = np.round(mags / 0.2) * 0.2
sg = stats.group_by('mag_bin')
sgm = sg.groups.aggregate(np.mean)
plt.figure(1, figsize=(6, 4))
plt.clf()
randx = np.random.uniform(-0.05, 0.05, size=len(stats))
plt.plot(mags + randx, stats['frac_ms'], '.', alpha=0.5,
label='MS flag rate per obsid')
plt.plot(sgm['mag_bin'], sgm['frac_ms'], 'r', linewidth=5, alpha=0.7,
label='MS flag rate (0.2 mag bins)')
p_acqs = star_probs.acq_success_prob('2016:001', t_ccd=-15.0, mag=sgm['mag_bin'])
plt.plot(sgm['mag_bin'], 1 - p_acqs, 'g', linewidth=5,
label='Acq fail rate (model 2016:001, T=-15C)')
plt.legend(loc='upper left', fontsize='small')
plt.xlabel('Magnitude')
plt.title('Acq fail rate compared to MS flag rate')
plt.grid()
plt.tight_layout()
plt.savefig('ms_flag_acq_fails.png')
| Python | 0 | |
f2413f05bc64818297541112f42e2a8d5ae72cbe | Create test_setup.py | test_setup.py | test_setup.py | import wget
import os
# Destination for downloaded test fixtures (data, images, videos, etc.).
test_files_path = os.getcwd() + '/image-analysis/test/test_data/'
# Robustness fix: wget.download fails when the destination directory does
# not exist yet, so create it first (no-op when already present).
if not os.path.isdir(test_files_path):
    os.makedirs(test_files_path)

# Test files will be fetched from here.
test_files = ["https://s3.amazonaws.com/testcodas/test_video.mp4"]

for file_path in test_files:
    wget.download(file_path, test_files_path)
| Python | 0 | |
c54c948531cd73b0c0dd78b6bc8a1c5245886c97 | add visualise.py | visualize.py | visualize.py | #!/usr/bin/env python
import json
import math
import numpy
import os
import re
import sys
if __name__ == '__main__':
    if len(sys.argv) < 3:
        print('usage: %s [result dir] [output html]' % sys.argv[0])
        sys.exit()

    # result[0..3] collect per-file [x, y, count, label] rows for the
    # mean / median / min / max summaries respectively.
    result = [[], [], [], []]
    for filename in os.listdir(sys.argv[1]):
        # Result files are named "<average>_<size>.result".
        match = re.match('([0-9]+)_([0-9]+).result', filename)
        if not match:
            continue
        average, size = map(int, match.groups())
        name = 'Average: %d, Size: %d' % (average, size)
        # First row and first column are headers; the rest is numeric.
        matrix = numpy.loadtxt(os.path.join(sys.argv[1], filename), dtype = str)
        data = matrix[1:,1:].astype(int)
        result[0].append([numpy.mean(data[:,3]), numpy.mean(data[:,4]), len(data), name])
        result[1].append([numpy.median(data[:,3]), numpy.median(data[:,4]), len(data), name])
        result[2].append([numpy.amin(data[:,3]), numpy.amin(data[:,4]), len(data), name])
        result[3].append([numpy.amax(data[:,3]), numpy.amax(data[:,4]), len(data), name])

    path = os.path.join(os.path.dirname(__file__), 'html')
    # Renamed ``input``/``output``/``format`` locals to stop shadowing
    # builtins.
    with open(os.path.join(path, 'template.html')) as template_file:
        with open(sys.argv[2], 'w') as out_file:
            relpath = os.path.relpath(path, os.path.dirname(sys.argv[2]))
            html = template_file.read()
            # Bug fix: under Python 3 ``map`` returns an iterator and
            # ``list + iterator`` raises TypeError — materialize it first.
            fields = [relpath] * 5 + list(map(json.dumps, result))
            out_file.write(html % tuple(fields))
| Python | 0.000024 | |
6c61c2d367e698861657d4cfc9bba0ba3789f197 | add naive bayes | nb.py | nb.py | import numpy as np
class NaiveBayes:
    """Multinomial naive Bayes for binary classification with add-one
    (Laplace) smoothing, trained on feature-count matrices."""
    def __init__(self):
        # Learned parameters; populated by train().
        self._prior = None  # 2x1 matrix of log class priors [P(y=1); P(y=0)]
        self._mat = None    # 2xV matrix of log feature likelihoods per class
    def train(self, X, y):
        """Fit on counts X (n_samples x n_features) and 0/1 labels y.
        Returns (p, pri): the log-likelihood matrix and the unlogged priors."""
        y = np.matrix(y)
        # Per-feature count totals for the positive (p1) and negative (p2)
        # classes, computed as row-vector * matrix products.
        p1 = y*X
        p2 = (1-y)*X
        # Add-one smoothed log P(feature | class), one row per class.
        p = np.vstack([
            np.log(p1+1) - np.log(p1.sum() + p1.shape[1]),
            np.log(p2+1) - np.log(p2.sum() + p2.shape[1])])
        # Class priors P(y=1) and P(y=0) from label frequencies.
        pri = np.matrix([[float(y.sum())/y.shape[1]], [1 - float(y.sum())/y.shape[1] ]])
        self._prior = np.log(pri)
        self._mat = p
        return p, pri
    def predict_many(self, mat):
        """Predict a 0/1 label for every row of ``mat``.
        NOTE(review): exact log-posterior ties map to 0.5 via the sign trick
        below — confirm whether that is intended."""
        # Unnormalized log-posteriors; row 0 = class 1, row 1 = class 0.
        logp = self._mat*mat.T + self._prior
        # sign(+) -> 1, sign(-) -> 0 after the (s+1)/2 transform.
        ans = (np.sign(logp[0] - logp[1]) + 1)/2
        return ans.A1
    def validate(self, mat, real_y):
        """Return the number of rows of ``mat`` classified correctly."""
        predict_y = self.predict_many(mat)
        return (predict_y == real_y).sum()
if __name__ == '__main__':
import loader
from sklearn.feature_extraction.text import HashingVectorizer
d = loader.DataLoader()
g = d.alldata()
def iter_data(n, y, cat):
c = 0
for business in g:
if c % 1000 == 0:
print c, '/', n
if c<n:
if cat.decode('utf-8') in business.categories:
y[c] = 1
else:
y[c] = 0
yield "".join(business.reviews)
else:
return
c += 1
# f = open('data/yelp.csv')
# def iter_data(n, y, cat):
# c = 0
# for line in f:
# if c % 1000 == 0:
# print c, '/', n
# if c < n:
# b_id, categories, review = line.split('\t')
# categories = categories.split(',')
# if cat in categories:
# y[c] = 1
# else:
# y[c] = 0
# yield review
# else:
# return
# c += 1
n = 4000
y = np.zeros(n)
v = HashingVectorizer(stop_words='english', non_negative=True, norm=None)
mat = v.transform(iter_data(n, y, 'Restaurants'))
print 'data readed', mat.shape, y.shape
nt = 1000
yt = np.zeros(nt)
mt = v.transform(iter_data(nt, yt, 'Restaurants'))
#print yt
print 'our code',
mm = NaiveBayes()
mm.train(mat, y)
print float(mm.validate(mt, yt))/nt
from sklearn.naive_bayes import MultinomialNB
model = MultinomialNB()
clf = model.fit(mat, y)
print 'model trained'
s = model.score(mt, yt)
print s
| Python | 0.999993 | |
9a33761f33c4f49a27d72944c231cb447353d81e | Add problem 10 | 010.py | 010.py | #!/usr/bin/env python3
# Author: Severin Kaderli <severin.kaderli@gmail.com>
#
# Project Euler - Problem 10:
# Find the sum of all the primes below two million.
def get_prime_numbers(n):
    """Return a list of all primes strictly below ``n``.

    Implemented as a sieve of Eratosthenes over [0, n).
    """
    is_candidate = [True] * n
    primes = []
    for value in range(2, n):
        if not is_candidate[value]:
            continue
        primes.append(value)
        # Strike out multiples starting at value*value; smaller multiples
        # were already crossed off by smaller prime factors.
        for multiple in range(value * value, n, value):
            is_candidate[multiple] = False
    return primes
def get_prime_sum(n = 2000000):
    """Return the sum of every prime below ``n`` (Problem 10 default:
    two million)."""
    total = sum(get_prime_numbers(n))
    return total
if __name__ == "__main__":
print(get_prime_sum(2000000))
| Python | 0 | |
d075d188d541090ad8d3a5c4cf583ba10063aa88 | Move timing to right location for staging. | project/project/timing.py | project/project/timing.py | import time
from django.utils.deprecation import MiddlewareMixin
class TimingMiddleware(object):
    """Times a request and adds timing information to the content.
    Adds an attribute, `_timing`, onto the request, and uses this at the end
    of the rendering chain to find the time difference. It replaces a token in
    the HTML, "<!-- RENDER_TIME -->", with the rendered time.
    """
    # Keep these out here so they can be modified in Django settings.
    REQUEST_ANNOTATION_KEY = "_timing"
    # Bytes literals: response.content is a bytestring, so the token and its
    # replacement must be bytes too.
    REPLACE = b"<!-- RENDER_TIME -->"
    REPLACE_TEMPLATE = b"<span>Handsomely rendered in %ims.</span>"
    def __init__(self, get_response):
        # Standard new-style middleware: store the next callable in the chain.
        self.get_response = get_response
        # One-time configuration and initialization.
    def __call__(self, request):
        # Stamp the request with the wall-clock start time.
        setattr(request, self.REQUEST_ANNOTATION_KEY, time.time())
        response = self.get_response(request)
        then = getattr(request, self.REQUEST_ANNOTATION_KEY, None)
        # Only patch responses that expose .content (e.g. not streaming).
        if then and hasattr(response, 'content'):
            now = time.time()
            # Elapsed milliseconds formatted into the replacement snippet.
            msg = self.REPLACE_TEMPLATE % (int((now - then) * 1000))
            response.content = response.content.replace(self.REPLACE, msg)
        return response | Python | 0 |
2e503a58a1f9893d25cf2dbb2c885bc9834faebf | Create urls.py | tests/urls.py | tests/urls.py | from django.conf.urls import url, include
from webhook.base import WebhookBase
class WebhookView(WebhookBase):
    """Test-only webhook endpoint: accepts deliveries and discards them."""
    def process_webhook(self, data, meta):
        # Intentionally a no-op; the URL-routing test only needs a valid view.
        pass
urlpatterns = [
url(r'^webhook-receiver', WebhookView.as_view(), name='web_hook'),
]
| Python | 0.000017 | |
0b3bfeb06a4594a2c188e623835c3a54262cca5d | Write initial Bible book HTML parser | utilities/book_parser.py | utilities/book_parser.py | # utilities.book_parser
# coding=utf-8
from __future__ import unicode_literals
import yvs.shared as shared
from HTMLParser import HTMLParser
class BookParser(HTMLParser):

    # Clears all parser state (implicitly invoked on instantiation)
    def reset(self):
        HTMLParser.reset(self)
        self.depth = 0
        self.in_book = False
        self.book_depth = 0
        self.books = []
        self.book_name_parts = []

    # Records the start of a book link and remembers its nesting depth
    def handle_starttag(self, tag, attrs):
        self.depth += 1
        attrs = dict(attrs)
        if 'data-book' in attrs:
            self.in_book = True
            self.book_depth = self.depth
            self.books.append({'id': attrs['data-book']})

    # On leaving the book link, finalizes the accumulated name
    def handle_endtag(self, tag):
        if self.in_book and self.depth == self.book_depth:
            self.in_book = False
            name = ''.join(self.book_name_parts).strip()
            self.books[-1]['name'] = name
            # Reuse the same list object for the next book's name parts
            del self.book_name_parts[:]
        self.depth -= 1

    # Accumulates text found inside the current book link
    def handle_data(self, content):
        if self.in_book:
            self.book_name_parts.append(content)

    # Resolves numeric character references inside the book name
    def handle_charref(self, name):
        if self.in_book:
            self.book_name_parts.append(shared.eval_html_charref(name))
| Python | 0 | |
7b2d3aedbc2f78119974c9e724b37b2b336297d1 | Add device_hive_api.py | devicehive/device_hive_api.py | devicehive/device_hive_api.py | from devicehive.handler import Handler
from devicehive.device_hive import DeviceHive
class ApiCallHandler(Handler):
    """Api call handler class.

    One-shot handler: on connect it invokes a single api method by name,
    stores the result and immediately disconnects.
    """

    def __init__(self, api, call, *call_args, **call_kwargs):
        super(ApiCallHandler, self).__init__(api)
        # Name of the api method to invoke, plus its positional/keyword args.
        self._call = call
        self._call_args = call_args
        self._call_kwargs = call_kwargs
        self._call_result = None

    @property
    def call_result(self):
        """Result of the api call; None until handle_connect has run."""
        return self._call_result

    def handle_connect(self):
        # Resolve the method by name, invoke it, then drop the connection so
        # the blocking connect() in DeviceHiveApi._call returns.
        self._call_result = getattr(self.api, self._call)(*self._call_args,
                                                          **self._call_kwargs)
        self.api.disconnect()
class DeviceHiveApi(object):
    """Device hive api class.

    Synchronous facade: each public method opens a fresh transport
    connection, performs exactly one api call via ApiCallHandler and returns
    its result.
    """

    def __init__(self, transport_url, **options):
        self._transport_url = transport_url
        self._options = options

    def _call(self, call, *call_args, **call_kwargs):
        """Run one named api call over a fresh DeviceHive connection."""
        device_hive = DeviceHive(ApiCallHandler, call, *call_args,
                                 **call_kwargs)
        device_hive.connect(self._transport_url, **self._options)
        # NOTE(review): reaches through two .handler levels to find our
        # ApiCallHandler instance — presumably the transport wraps the
        # user handler; confirm against the devicehive library internals.
        return device_hive.transport.handler.handler.call_result

    def get_info(self):
        """Return server info."""
        return self._call('get_info')

    def get_cluster_info(self):
        """Return cluster info."""
        return self._call('get_cluster_info')
| Python | 0.000029 | |
1d4e462188e95b1270d45f95112c2458cbeb7b2f | Add definitions.py | definitions.py | definitions.py |
def API_launch():
    """Build and return an authenticated tweepy API client.

    NOTE(review): relies on module-level ``app_config`` and ``tweepy`` being
    defined elsewhere — no import appears in this file, and the ``global``
    statements below are no-ops for read access; confirm how these names are
    injected before relying on this function.
    """
    global app_config
    global tweepy
    # Twitter API configuration
    consumer_key = app_config.twitter["consumer_key"]
    consumer_secret = app_config.twitter["consumer_secret"]
    access_token = app_config.twitter["access_token"]
    access_token_secret = app_config.twitter["access_token_secret"]
    # Start
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    return api
def followers_list(number_followers=200):
    """Return the screen names of up to ``number_followers`` followers.

    NOTE(review): reads a module-level ``api`` (tweepy client) that is not
    created in this file — presumably the result of API_launch() assigned by
    the caller; confirm.
    """
    global api
    followers = api.followers(count=number_followers)
    followers_name = []
    for follower in followers:
        followers_name.append(str(follower.screen_name))
    return followers_name
def create_db(database_name='bot_detection.db'):
    """Create (or open) the SQLite database file at ``database_name``.

    Fixes: imports sqlite3 locally — the previous ``global sqlite3``
    referenced a module that is never imported in this file, so the call
    raised NameError — and closes the connection instead of leaking it.
    """
    import sqlite3
    conn = sqlite3.connect(database_name)
    conn.close()
def create_table(database_name='bot_detection.db'):
    """Create the TWEETS table in ``database_name`` (raises if it exists).

    Fix: imports sqlite3 locally — the previous ``global sqlite3``
    referenced a module never imported in this file (NameError); also
    commits explicitly before closing.
    """
    import sqlite3
    conn = sqlite3.connect(database_name)
    conn.execute('''CREATE TABLE TWEETS
         (ID INT PRIMARY KEY     NOT NULL,
         NAME           TEXT    NOT NULL,
         DATE            TEXT     NOT NULL,
         TEXT        TEXT NOT NULL,
         MENTIONS         TEXT    NOT NULL);''')
    conn.commit()
    conn.close()
def feed_table(ID, NAME, DATE, TEXT, MENTIONS, database_name='bot_detection.db'):
    """Insert one tweet row into the TWEETS table of ``database_name``.

    Fix: imports sqlite3 locally — the previous ``global sqlite3``
    referenced a module never imported in this file (NameError).
    """
    import sqlite3
    conn = sqlite3.connect(database_name)
    # Parameterized query: values are bound, never string-interpolated.
    conn.execute("INSERT INTO TWEETS (ID,NAME,DATE,TEXT,MENTIONS) VALUES (?,?,?,?,?)",
                 (ID, NAME, DATE, TEXT, MENTIONS))
    conn.commit()
    conn.close()
def tweet_info(follower, tweets_number=100):
    """Fetch up to ``tweets_number`` recent tweets of ``follower`` and store
    each one in the TWEETS table via feed_table().

    NOTE(review): relies on module-level ``api`` (tweepy client), ``json``
    and ``unicodedata`` existing; no imports appear in this file — confirm.
    NOTE(review): only the LAST @mention of a tweet is recorded, a tweet
    without mentions reuses the previous tweet's mention value, and the very
    first tweet without mentions passes a list to unicodedata.normalize
    (TypeError) — presumably all unintended; confirm desired behaviour.
    """
    global api
    global json
    global unicodedata
    user_info = api.user_timeline(screen_name = follower,count = tweets_number)
    tweet = {}
    name_mentions = []
    for i,status in enumerate(user_info):
        tweet = status._json
        text = tweet['text']
        date = tweet['created_at']
        entities = tweet['entities']
        user_mentions = entities['user_mentions']
        for mention in user_mentions:
            dict_mentions = mention
            name_mentions = dict_mentions['screen_name']
        ID_string = i
        name_string = follower
        # Strip non-ASCII characters so the DB stores plain ASCII bytes.
        text_string = unicodedata.normalize('NFKD', text).encode('ascii','ignore')
        date_string = unicodedata.normalize('NFKD', date).encode('ascii','ignore')
        name_mentions_string = unicodedata.normalize('NFKD', name_mentions).encode('ascii','ignore')
        # Bug fix: DATE and TEXT were previously passed in swapped order —
        # feed_table's signature is (ID, NAME, DATE, TEXT, MENTIONS).
        feed_table(ID_string,
                   name_string,
                   date_string,
                   text_string,
                   name_mentions_string)
| Python | 0 | |
32067112c7e0d681b84975b0e9b2fe974f1440ac | Add IINet module | i3pystatus/iinet.py | i3pystatus/iinet.py | import requests
from i3pystatus import IntervalModule
from i3pystatus.core.color import ColorRangeModule
__author__ = 'facetoe'
class IINet(IntervalModule, ColorRangeModule):
    """
    Check IINet Internet usage.
    Requires `requests` and `colour`

    Formatters:

    * `{percent_used}` — percentage of your quota that is used
    * `{percent_available}` — percentage of your quota that is available
    """
    # i3pystatus settings spec: bare names or (name, description) pairs.
    settings = (
        "format",
        ("username", "Username for IINet"),
        ("password", "Password for IINet"),
        ("start_color", "Beginning color for color range"),
        ("end_color", "End color for color range")
    )

    # Defaults; username/password must be supplied in the user's config.
    format = '{percent_used}'
    start_color = "#00FF00"
    end_color = "#FF0000"

    username = None
    password = None
    keyring_backend = None

    def init(self):
        # Auth tokens are fetched lazily on the first run() call.
        self.token = None
        self.service_token = None
        # 100-step start->end colour gradient indexed by percentage used.
        self.colors = self.get_hex_color_range(self.start_color, self.end_color, 100)

    def set_tokens(self):
        """Fetch and cache the session/service tokens (no-op if both set)."""
        if not self.token or not self.service_token:
            # NOTE(review): credentials travel as URL query parameters
            # (visible in logs/proxies); confirm the API offers no
            # alternative.
            response = requests.get('https://toolbox.iinet.net.au/cgi-bin/api.cgi?'
                                    '_USERNAME=%(username)s&'
                                    '_PASSWORD=%(password)s'
                                    % self.__dict__).json()
            if self.valid_response(response):
                self.token = response['token']
                self.service_token = self.get_service_token(response['response']['service_list'])
            else:
                raise Exception("Failed to retrieve token for user: %s" % self.username)

    def get_service_token(self, service_list):
        """Return the token of the service whose key matches our username."""
        for service in service_list:
            if service['pk_v'] == self.username:
                return service['s_token']
        raise Exception("Failed to retrieve service token for user: %s" % self.username)

    def valid_response(self, response):
        # The API signals success with a top-level {"success": 1}.
        return "success" in response and response['success'] == 1

    def run(self):
        """Periodic entry point: refresh usage and publish i3bar output."""
        self.set_tokens()
        usage = self.get_usage()
        allocation = usage['allocation']
        used = usage['used']

        percent_used = self.percentage(used, allocation)
        # NOTE: local name misspelled ('avaliable'); harmless but worth a
        # cleanup in a behaviour-changing pass.
        percent_avaliable = self.percentage(allocation - used, allocation)
        color = self.get_gradient(percent_used, self.colors)

        usage['percent_used'] = '{0:.2f}%'.format(percent_used)
        usage['percent_available'] = '{0:.2f}%'.format(percent_avaliable)

        self.output = {
            "full_text": self.format.format(**usage),
            "color": color
        }

    def get_usage(self):
        """Return the 'anytime' traffic-type dict from the usage endpoint.

        NOTE(review): returns None implicitly when the response is valid but
        contains no 'anytime' entry — confirm that is intended."""
        response = requests.get('https://toolbox.iinet.net.au/cgi-bin/api.cgi?Usage&'
                                '_TOKEN=%(token)s&'
                                '_SERVICE=%(service_token)s' % self.__dict__).json()
        if self.valid_response(response):
            for traffic_type in response['response']['usage']['traffic_types']:
                if traffic_type['name'] == 'anytime':
                    return traffic_type
        else:
            raise Exception("Failed to retrieve usage information for: %s" % self.username)
| Python | 0 | |
36d7960e5899b6b85c311fcf0b47f6adb93b702d | put gui in class | config/gui.py | config/gui.py | from IPython import display
from ipywidgets import widgets
import cv2
def video(core):
import cv2
cv2.namedWindow('Video')
core.startContinuousSequenceAcquisition(1)
while True:
img = core.getLastImage()
if core.getRemainingImageCount() > 0:
# img = core.popNextImage()
img = core.getLastImage()
cv2.imshow('Video', img)
cv2.waitkey(0)
else:
print('No frame')
if cv2.waitKey(20) >= 0:
break
core.stopSequenceAcquisition()
cv2.destroyAllWindows()
def stage_control(XY, Z):
    """Build an ipywidgets control panel for an XY stage and a Z axis.

    Returns a VBox with absolute-position text boxes plus jog buttons
    (arrows), home buttons and step-size sliders for XY and Z.  Every
    callback closes over the passed-in XY and Z controller objects.
    """
    # icons are from "font-awesome"
    x_minus = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-arrow-left',
        width = '50px')
    def xminus(b):
        # Relative move; step size is read from the XY slider at click time.
        XY.r_xy(-xy_slider.value,0)
        display.clear_output()
        print(XY.where_xy())
    x_minus.on_click(xminus)
    x_plus = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-arrow-right',
        width = '50px')
    def xplus(b):
        XY.r_xy(xy_slider.value,0)
        display.clear_output()
        print(XY.where_xy())
    x_plus.on_click(xplus)
    y_minus = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon='fa-arrow-up',
        width = '50px')
    def yminus(b):
        XY.r_xy(0, -xy_slider.value)
        display.clear_output()
        print(XY.where_xy())
    y_minus.on_click(yminus)
    y_plus = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-arrow-down',
        width = '50px')
    def yplus(b):
        XY.r_xy(0, xy_slider.value)
        display.clear_output()
        print(XY.where_xy())
    y_plus.on_click(yplus)
    xy_home = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-home',
        width = '50px')
    def xyhome(b):
        # Home Z before XY, presumably to avoid dragging the objective across
        # the sample -- confirm this ordering is required by the hardware.
        Z.home()
        XY.move_xy(0,0)
        display.clear_output()
        print(XY.where_xy())
    xy_home.on_click(xyhome)
    xy_slider = widgets.FloatSlider(description='[mm]', min=.05, max=10,step=.05, orientation='vertical', height='150px')
    def xystep(change):
        xy_step = change['new']
    xy_slider.observe(xystep, names='value')
    # Arrow pad + home button + step-size slider for the XY stage.
    xy_cluster = widgets.HBox([ xy_slider, widgets.VBox([ widgets.HBox([x_minus,x_plus,xy_home]), widgets.HBox([y_minus, y_plus]) ]) ])
    z_minus = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-arrow-up')
    def zminus(b):
        Z.move_relative(-z_slider.value)
        display.clear_output()
        print(Z.where())
    z_minus.on_click(zminus)
    z_plus = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-arrow-down')
    def zplus(b):
        Z.move_relative(z_slider.value)
        display.clear_output()
        print(Z.where())
    z_plus.on_click(zplus)
    z_home = widgets.Button(
        description='',
        disabled=False,
        button_style='primary',
        icon = 'fa-home',
        width = '50px')
    def zhome(b):
        Z.home()
        display.clear_output()
        print(Z.where())
    z_home.on_click(zhome)
    z_slider = widgets.FloatSlider(description='[mm]', min=.05, max=10,step=.05, orientation='vertical', height='150px')
    def zstep(change):
        z_step = change['new']
    z_slider.observe(zstep, names='value')
    z_cluster = widgets.VBox([ widgets.HBox([ z_slider, widgets.VBox([z_minus, z_plus]), z_home]) ])
    # Absolute-position entry boxes; a move is issued on <Enter>.
    x_pos = widgets.Text(
        value='0',
        placeholder='Type something',
        description='X:',
        disabled=False,
        width='150px')
    def xpos(sender):
        xcurr,ycurr = XY.where_xy()
        XY.move_xy(sender.value,ycurr)
    x_pos.on_submit(xpos)
    y_pos = widgets.Text(
        value='0',
        placeholder='Type something',
        description='Y:',
        disabled=False,
        width='150px')
    def ypos(sender):
        xcurr,ycurr = XY.where_xy()
        XY.move_xy(xcurr, sender.value)
    y_pos.on_submit(ypos)
    z_pos = widgets.Text(
        value='0',
        placeholder='Type something',
        description='Z:',
        disabled=False,
        width='150px')
    def zpos(sender):
        Z.move(float(sender.value))
    z_pos.on_submit(zpos)
    line = widgets.Label(value="$---------------------------------------$",disabled=False)
    return widgets.VBox([ widgets.HBox([x_pos, y_pos, z_pos]), line, widgets.HBox([xy_cluster, z_cluster]) ])
| Python | 0 | |
045a10457cd87e37ef5862de55e344db5e9228cf | Add configfile.py | configfile.py | configfile.py | # vim: set et ts=4 sw=4 fdm=marker
"""
MIT License
Copyright (c) 2016 Jesse Hogan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# TODO Write Tests
from entities import *
from accounts import *
import yaml
class configfile(entity):
    """Singleton wrapper around a YAML configuration file of account entries.

    Assigning to ``file`` immediately (re)loads the configuration into
    ``accounts``.
    """

    _instance = None

    @classmethod
    def getinstance(cls):
        """Return the shared configfile instance, creating it on first use."""
        # `is None` instead of `== None`: identity test is the correct
        # (and PEP 8 mandated) comparison for the sentinel.
        if cls._instance is None:
            cls._instance = configfile()
        return cls._instance

    @property
    def file(self):
        """Path to the YAML configuration file."""
        return self._file

    @file.setter
    def file(self, v):
        # Setting a new path immediately reloads the configuration.
        self._file = v
        self.load()

    @property
    def accounts(self):
        """Collection of account objects parsed from the config file."""
        return self._accounts

    @accounts.setter
    def accounts(self, v):
        self._accounts = v

    def clear(self):
        """Discard any previously loaded accounts."""
        self._accounts = accounts()

    def load(self):
        """Parse self.file and populate self.accounts."""
        self.clear()
        with open(self.file, 'r') as stream:
            # safe_load refuses to instantiate arbitrary Python objects from
            # YAML tags; yaml.load without an explicit Loader is unsafe and
            # deprecated.
            cfg = yaml.safe_load(stream)
        for acct in cfg['accounts']:
            self.accounts += account.create(acct)
| Python | 0.000003 | |
2275ae52e336bd2e07e32fa3a2559926734c3567 | add pyunit for PUBDEV-1480 | h2o-py/tests/testdir_jira/pyunit_NOPASS_INTERNAL_pubdev_1480_medium.py | h2o-py/tests/testdir_jira/pyunit_NOPASS_INTERNAL_pubdev_1480_medium.py | import sys, os
sys.path.insert(1, "../../")
import h2o, tests
def pubdev_1480():
    """Regression test for PUBDEV-1480: train a GBM on an HDFS dataset and
    export its predictions to CSV without error."""
    # Guard: only meaningful inside H2O's internal network with HDFS access.
    # NOTE(review): `raise(EnvironmentError, "...")` is the Python 2 tuple
    # form; under Python 3 this would raise a TypeError instead.
    if not tests.hadoop_namenode_is_accessible(): raise(EnvironmentError, "Not running on H2O internal network. No access to HDFS.")
    train = h2o.import_file("hdfs://mr-0xd6/datasets/kaggle/sf.crime.train.gz")
    test = h2o.import_file("hdfs://mr-0xd6/datasets/kaggle/sf.crime.test.gz")
    # Columns 2-8 are predictors; column 1 is the response.
    model = h2o.gbm(x=train[range(2,9)], y=train[1])
    predictions = model.predict(test)
    results_dir = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)),"..","results"))
    h2o.download_csv(predictions, os.path.join(results_dir,"predictions.csv"))
# Allow running this test standalone as well as via the suite runner.
if __name__ == "__main__":
    tests.run_test(sys.argv, pubdev_1480)
| Python | 0 | |
b16f6ea8a723fa064a78e014ab767be1a797e613 | Create cab.py | cab.py | cab.py | """
Work with *.cab files
"""
from ctypes import pythonapi
from ctypes import cdll
from ctypes import cast
import ctypes as _ctypes
# Bug fix: ctypes.util is a submodule that is NOT imported by
# "import ctypes" alone; the original calls to _ctypes.util.find_library
# raised AttributeError on a fresh interpreter.
import ctypes.util

libc = cdll[_ctypes.util.find_library('c')]
# cabinet.dll only exists on Windows; find_library returns None elsewhere.
libcab = cdll[_ctypes.util.find_library('cabinet')]

# PyMem_Malloc(size) -> address of a CPython-heap allocation.
PyMem_Malloc = pythonapi.PyMem_Malloc
PyMem_Malloc.restype = _ctypes.c_size_t
PyMem_Malloc.argtypes = [_ctypes.c_size_t]

# char *strncpy(char *dest, const char *src, size_t n)
strncpy = libc.strncpy
strncpy.restype = _ctypes.c_char_p
strncpy.argtypes = [_ctypes.c_char_p, _ctypes.c_char_p, _ctypes.c_size_t]

# Callback prototype: char *(*hook)(void *, void *, char *)
HOOKFUNC = _ctypes.CFUNCTYPE(_ctypes.c_char_p, _ctypes.c_void_p, _ctypes.c_void_p, _ctypes.c_char_p)

# typedef struct {
#     DWORD cbStruct;
#     DWORD dwReserved1;
#     DWORD dwReserved2;
#     DWORD dwFileVersionMS;
#     DWORD dwFileVersionLS;
# } CABINETDLLVERSIONINFO, *PCABINETDLLVERSIONINFO;
class CABINETDLLVERSIONINFO(_ctypes.Structure):
    # NOTE(review): DWORD is a 32-bit unsigned integer; mapping it to
    # c_double looks wrong (c_uint32 is probably intended) -- confirm
    # against the Windows SDK before relying on this struct's layout.
    _fields_ = [('cbStruct', _ctypes.c_double),
                ('dwReserved1', _ctypes.c_double),
                ('dwReserved2', _ctypes.c_double),
                ('dwFileVersionMS', _ctypes.c_double),
                ('dwFileVersionLS', _ctypes.c_double)]

libcab.DllGetVersion.restype = CABINETDLLVERSIONINFO
| Python | 0.000006 | |
e6abd6a44de8687d88672ba80388afbe0cdb029a | 习题 1: 第一个程序 | ex1.py | ex1.py | # --coding:utf8--
# Exercise: basic print() calls with single and double quotes, embedded
# quotes, and non-ASCII (Chinese) text.
print("Hello world")
print("Hello Again")
print("I like type this.")
print("This is fun")
print('Yay! Printing')
# Double quotes let the string contain an apostrophe unescaped...
print("I'd much rather you 'not'.")
# ...and single quotes let it contain double quotes unescaped.
print('I "said" do not touch this')
print('妳好,我是Python!')
print('這是我的練習題')
print('下面要加上#的註釋')
# This is pound
# This is hash
# This is mesh
| Python | 0.997795 | |
e17adde73c146ded7ed5a1a347f104a5e7a09f62 | Add bzl macro. | tools/testing/python/py23.bzl | tools/testing/python/py23.bzl | """Macro to generate python 2 and 3 binaries."""
def py23_binary(name, **kwargs):
    """Generates python 2 and 3 binaries. Accepts any py_binary arguments.

    Args:
      name: base target name; "<name>2" (PY2) and "<name>3" (PY3) targets
        are created.
      **kwargs: forwarded unchanged to both native.py_binary rules.
    """
    native.py_binary(
        name = name + "2",
        python_version = "PY2",
        **kwargs
    )
    native.py_binary(
        name = name + "3",
        python_version = "PY3",
        **kwargs
    )
| Python | 0.000066 | |
596f432eb7d4b3fa5d1bf5dec33cc882546e8233 | Add a script to convert a GRLevelX colortable file to a dict data structure (and optionally boundaries for norm) for use with Matplotlib. | trunk/metpy/vis/util/gr2_to_mpl_colortable.py | trunk/metpy/vis/util/gr2_to_mpl_colortable.py | #!/usr/bin/env python
# This script is used to convert colortables from GRLevelX to data for a
# matplotlib colormap
import sys
from optparse import OptionParser
#Set up command line options
opt_parser = OptionParser(usage="usage: %prog [options] colortablefile")
opt_parser.add_option("-s", "--scale", action="store_true", dest="scale",
help="Scale size of colortable entries by thresholds in file.")
opts,args = opt_parser.parse_args()
if not args:
print "You must pass the colortable file as the commandline argument."
opt_parser.print_help()
sys.exit(-1)
fname = args[0]
scaleTable = opts.scale
colors = []
thresholds = []
#Initial color should end up not used by MPL
prev = [0., 0., 0.]
for line in open(fname, 'r'):
if line.startswith("Color:"):
# This ignores the word "Color:" and the threshold
# and converts the rest to float
parts = line.split()
thresholds.append(float(parts[1]))
color_info = [float(x)/255. for x in parts[2:]]
if not prev:
prev = info[:3]
colors.append(zip(prev, color_info[:3]))
prev = color_info[3:]
# Add the last half of the last line, if necessary
if prev:
colors.append(zip(prev,prev))
colordict = dict(red=[], green=[], blue=[])
num_entries = float(len(colors) - 1)
offset = min(thresholds)
scale = 1. / (max(thresholds) - offset)
for i,(t,(r,g,b)) in enumerate(zip(thresholds, colors)):
if scaleTable:
norm = (t - offset) * scale
else:
norm = i / num_entries
colordict['red'].append((norm,) + r)
colordict['green'].append((norm,) + g)
colordict['blue'].append((norm,) + b)
# Output as code that can be copied and pasted into a python script. Repr()
# would work here, but wouldn't be as human-readable.
print '{'
num_colors = len(colordict.keys())
for n,k in enumerate(sorted(colordict.keys())):
print "'%s' :" % k
num = len(colordict[k])
for i,line in enumerate(colordict[k]):
if i == 0:
print ' [%s,' % repr(line)
elif i == num - 1:
if n == num_colors - 1:
print ' %s]' % repr(line)
else:
print ' %s],' % repr(line)
else:
print " %s," % repr(line)
print '}'
if not scaleTable:
print repr(thresholds)
| Python | 0.000006 | |
a041c683475f78d6101fe1741a561a6c00492007 | add pautils, to host various utility functions like loading the P2TH keys into the local or remote node over JSON-RPC. | pypeerassets/pautils.py | pypeerassets/pautils.py |
'''miscellaneous utilities.'''
def testnet_or_mainnet(node):
'''check if local node is configured to testnet or mainnet'''
q = node.getinfo()
if q["testnet"] is True:
return "testnet"
else:
return "mainnet"
def load_p2th_privkeys_into_node(node):
    '''Import the production P2TH private key into the node wallet and verify it.

    Returns an error dict on failure, None on success.
    '''
    # Bug fixes: string equality must use `==`, not the identity operator
    # `is`; and the original asserted the address was present BEFORE
    # importing the key (a stray duplicate of the in-try assert).
    if testnet_or_mainnet(node) == "testnet":
        try:
            node.importprivkey(testnet_PAPROD)
            assert testnet_PAPROD_addr in node.getaddressbyaccount()
        except Exception:
            return {"error": "Loading P2TH privkey failed."}
    else:
        try:
            node.importprivkey(mainnet_PAPROD)
            assert mainnet_PAPROD_addr in node.getaddressbyaccount()
        except Exception:
            return {"error": "Loading P2TH privkey failed."}
def load_test_p2th_privkeys_into_node(node):
    '''Import the test P2TH private key into the node wallet and verify it.

    Returns an error dict on failure, None on success.
    '''
    # Bug fix: string equality must use `==`, not the identity operator `is`.
    if testnet_or_mainnet(node) == "testnet":
        try:
            node.importprivkey(testnet_PATEST)
            assert testnet_PATEST_addr in node.getaddressbyaccount()
        except Exception:
            return {"error": "Loading P2TH privkey failed."}
    else:
        try:
            node.importprivkey(mainnet_PATEST)
            assert mainnet_PATEST_addr in node.getaddressbyaccount()
        except Exception:
            return {"error": "Loading P2TH privkey failed."}
| Python | 0 | |
7012a90cd1468da95c8939a0f0c1193766595ae8 | Add event spooler module | pytest_watch/spooler.py | pytest_watch/spooler.py | # -*- coding: utf-8
from multiprocessing import Queue, Process, Event
class Timer(Process):
    """One-shot timer: run `function` in a child process after `interval`
    seconds unless cancel() is called first (a multiprocessing analogue of
    threading.Timer)."""

    def __init__(self, interval, function, args=[], kwargs={}):
        # NOTE(review): mutable default arguments are shared between
        # instances; harmless only as long as callers never mutate them.
        super(Timer, self).__init__()
        self.interval = interval
        self.function = function
        self.args = args
        self.kwargs = kwargs
        self.finished = Event()

    def cancel(self):
        # Setting the event wakes run() early and suppresses the callback.
        self.finished.set()

    def run(self):
        self.finished.wait(self.interval)
        if not self.finished.is_set():
            self.function(*self.args, **self.kwargs)
        self.finished.set()
class EventSpooler(object):
    """Debounce bursts of events: `callback` fires once with the whole batch
    after `cooldown` seconds of quiet."""

    def __init__(self, cooldown, callback):
        self.cooldown = cooldown
        self.callback = callback
        self.inbox = Queue()
        self.outbox = Queue()

    def enqueue(self, event):
        # Every event starts its own cooldown timer; only the timer started
        # by the LAST event of a burst finds the inbox empty in process().
        self.inbox.put(event)
        Timer(self.cooldown, self.process).start()

    def process(self):
        self.outbox.put(self.inbox.get())
        # No newer event arrived during the cooldown -> flush the batch.
        if self.inbox.empty():
            self.callback([self.outbox.get() for _ in range(self.outbox.qsize())])
| Python | 0 | |
0ff9373de6e11d7040b6b289cb3239a9ee9a924d | Fix haproxy agent unit test to be runnable alone by tox | neutron/tests/unit/services/loadbalancer/drivers/haproxy/test_agent.py | neutron/tests/unit/services/loadbalancer/drivers/haproxy/test_agent.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import contextlib
import mock
from oslo.config import cfg
from neutron.services.loadbalancer.drivers.haproxy import agent
from neutron.tests import base
class TestLbaasService(base.BaseTestCase):
    """Unit tests for the HAProxy LBaaS agent service wrapper."""

    def setUp(self):
        super(TestLbaasService, self).setUp()
        # Undo any config mutations a test makes.
        self.addCleanup(cfg.CONF.reset)

    def test_start(self):
        # Starting the agent service must delegate to the RPC base service.
        with mock.patch.object(
            agent.rpc_service.Service, 'start'
        ) as mock_start:
            mgr = mock.Mock()
            agent_service = agent.LbaasAgentService('host', 'topic', mgr)
            agent_service.start()
            self.assertTrue(mock_start.called)

    def test_main(self):
        # main() must monkey-patch eventlet and launch exactly one service;
        # all collaborators (logging, launcher, manager, argv) are mocked so
        # nothing real starts.
        logging_str = 'neutron.agent.common.config.setup_logging'
        with contextlib.nested(
            mock.patch(logging_str),
            mock.patch.object(agent.service, 'launch'),
            mock.patch.object(agent, 'eventlet'),
            mock.patch('sys.argv'),
            mock.patch.object(agent.manager, 'LbaasAgentManager'),
            mock.patch.object(cfg.CONF, 'register_opts')
        ) as (mock_logging, mock_launch, mock_eventlet, sys_argv, mgr_cls, ro):
            agent.main()
            self.assertTrue(mock_eventlet.monkey_patch.called)
            mock_launch.assert_called_once_with(mock.ANY)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import contextlib
import mock
from oslo.config import cfg
from neutron.services.loadbalancer.drivers.haproxy import agent
from neutron.tests import base
class TestLbaasService(base.BaseTestCase):
    """Unit tests for the HAProxy LBaaS agent service wrapper."""

    def setUp(self):
        super(TestLbaasService, self).setUp()
        # Undo any config mutations a test makes.
        self.addCleanup(cfg.CONF.reset)
        # NOTE(review): registering agent.OPTS here couples the whole class
        # to global config state; consider mocking register_opts per test.
        cfg.CONF.register_opts(agent.OPTS)

    def test_start(self):
        # Starting the agent service must delegate to the RPC base service.
        with mock.patch.object(
            agent.rpc_service.Service, 'start'
        ) as mock_start:
            mgr = mock.Mock()
            agent_service = agent.LbaasAgentService('host', 'topic', mgr)
            agent_service.start()
            self.assertTrue(mock_start.called)

    def test_main(self):
        # main() must monkey-patch eventlet and launch exactly one service;
        # collaborators are mocked so nothing real starts.
        logging_str = 'neutron.agent.common.config.setup_logging'
        with contextlib.nested(
            mock.patch(logging_str),
            mock.patch.object(agent.service, 'launch'),
            mock.patch.object(agent, 'eventlet'),
            mock.patch('sys.argv'),
            mock.patch.object(agent.manager, 'LbaasAgentManager')
        ) as (mock_logging, mock_launch, mock_eventlet, sys_argv, mgr_cls):
            agent.main()
            self.assertTrue(mock_eventlet.monkey_patch.called)
            mock_launch.assert_called_once_with(mock.ANY)
| Python | 0.000001 |
e4ef868660878e1ad1749be915b88ab6fea929b5 | Add asyncio example | examples/async.py | examples/async.py | """
w1thermsensor
~~~~~~~~~~~~~
A Python package and CLI tool to work with w1 temperature sensors.
:copyright: (c) 2020 by Timo Furrer <tuxtimo@gmail.com>
:license: MIT, see LICENSE for more details.
"""
import asyncio
from w1thermsensor import AsyncW1ThermSensor
async def main():
    """Poll the first available 1-wire sensor and print readings forever."""
    # initialize sensor with first available sensor
    sensor = AsyncW1ThermSensor()
    # continuously read temperature from sensor
    # NOTE: loops forever; terminate with Ctrl-C.
    while True:
        temperature = await sensor.get_temperature()
        print(f"Temperature: {temperature:.3f}")
        await asyncio.sleep(1)


# Script entry point: asyncio.run creates and closes the event loop.
if __name__ == "__main__":
    asyncio.run(main())
| Python | 0.000001 | |
ef4aeb1e16245c76e7d10091b6fc8b0b289d635f | Split IP validation to a module | validateIp.py | validateIp.py | #!/usr/bin/env python
import socket
def parse(ip):
    """Return "valid" if ip is a well-formed IPv4 or IPv6 address, else None."""
    try:
        socket.inet_pton(socket.AF_INET, ip)
        return "valid"
    except socket.error:
        # Not IPv4 -- fall through and try IPv6.
        pass
    try:
        socket.inet_pton(socket.AF_INET6, ip)
        return "valid"
    except socket.error as e:
        # Bug fix: the original used a bare `except:` here (swallowing every
        # exception type) and printed the *IPv4* error from the outer scope.
        # `except ... as e` is valid on Python 2.6+ and Python 3.
        print("ERROR: %s" % e)
| Python | 0 | |
d0ce887da3043106da1b875a46b6fe1bc0ce7145 | Create 0018_auto_20201109_0655.py | herders/migrations/0018_auto_20201109_0655.py | herders/migrations/0018_auto_20201109_0655.py | # Generated by Django 2.2.17 on 2020-11-09 14:55
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django; widens ArtifactInstance.effects_value to an
    # array of up to 4 nullable floats (one bonus value per artifact effect).

    dependencies = [
        ('herders', '0017_auto_20200808_1642'),
    ]

    operations = [
        migrations.AlterField(
            model_name='artifactinstance',
            name='effects_value',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(blank=True, null=True), blank=True, default=list, help_text='Bonus value of this effect', size=4),
        ),
    ]
| Python | 0.000008 | |
f9db946f9b067495d2785d46efe447371e22eb26 | Add tex2pdf function | docstamp/pdflatex.py | docstamp/pdflatex.py | # coding=utf-8
# -------------------------------------------------------------------------------
# Author: Alexandre Manhaes Savio <alexsavio@gmail.com>
# Grupo de Inteligencia Computational <www.ehu.es/ccwintco>
# Universidad del Pais Vasco UPV/EHU
#
# 2015, Alexandre Manhaes Savio
# Use this at your own risk!
# -------------------------------------------------------------------------------
import os
import shutil
import os.path as op
from glob import glob
from .commands import call_command
from .filenames import remove_ext
def tex2pdf(tex_file, output_file=None, output_format='pdf'):
    """ Call PDFLatex to convert TeX files to PDF.

    Parameters
    ----------
    tex_file: str
        Path to the input LateX file.

    output_file: str
        Path to the output PDF file.
        If None, the result is left in the same directory as tex_file.

    output_format: str
        Output file format. Choices: 'pdf' or 'dvi'. Default: 'pdf'

    Returns
    -------
    return_value
        PDFLatex command call return value.
    """
    if not op.exists(tex_file):
        raise IOError('Could not find file {}.'.format(tex_file))

    if output_format not in ('pdf', 'dvi'):
        raise ValueError("Invalid output format given {}. Can only accept 'pdf' or 'dvi'.".format(output_format))

    cmd_name = 'pdflatex'
    # NOTE: splitting on whitespace below means paths containing spaces are
    # not supported.
    args_strings = ' -output-format={} '.format(output_format)
    if output_file is not None:
        args_strings += '-output-directory={} '.format(op.abspath(op.dirname(output_file)))
        result_dir = op.dirname(output_file)
    else:
        result_dir = op.dirname(tex_file)

    args_strings += tex_file
    ret = call_command(cmd_name, args_strings.split())

    result_file = op.join(result_dir, remove_ext(op.basename(tex_file)) + '.' + output_format)
    if not op.exists(result_file):
        raise IOError('Could not find PDFLatex result file.')

    # Bug fix: the original unconditionally called
    # shutil.move(result_file, output_file), which raises TypeError when
    # output_file is None; in that case the result stays where PDFLatex
    # produced it, matching the docstring.
    if output_file is not None and op.abspath(result_file) != op.abspath(output_file):
        shutil.move(result_file, output_file)

    # Clean up LaTeX auxiliary files (plain loops, not side-effect listcomps).
    for aux_file in glob(op.join(result_dir, '*.aux')):
        os.remove(aux_file)
    for log_file in glob(op.join(result_dir, '*.log')):
        os.remove(log_file)

    return ret
| Python | 0.000001 | |
96a9d00ea20dee3ffd9114b4a094868ed7ae2413 | add createmask.py | createMask.py | createMask.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
python createmask.py [voc-like dir]
'''
import os
import sys
import xml.etree.ElementTree as ET

import cv2
import numpy as np
def parsexml(xmlfile):
    """Parse a VOC-like annotation file and return its first object's ellipse.

    Returns (width, height, x, y, a, b, angle): image size, ellipse centre,
    semi-axes and rotation angle.  Zeros are returned for the ellipse when
    the file contains no <object>.
    """
    tree = ET.parse(xmlfile)
    width = int(tree.find('size').find('width').text)
    height = int(tree.find('size').find('height').text)
    objs = tree.findall('object')
    # Bug fix: the original assigned x0/y0 but returned x/y (NameError);
    # defaults also guard against annotation files with no objects.
    x = y = a = b = angle = 0
    for obj in objs:
        bbox = obj.find('bndbox')
        x = int(bbox.find('x').text)
        y = int(bbox.find('y').text)
        a = int(bbox.find('a').text)
        b = int(bbox.find('b').text)
        angle = int(bbox.find('angle').text)
        # Only the first annotated object is used.
        break
    return width, height, x, y, a, b, angle
def createmask(argv):
    """Render an elliptical binary mask for every annotation under argv[1].

    argv mirrors sys.argv: argv[1] is the VOC-like dataset root containing
    an 'Annotations' directory.  Each mask is shown until a key is pressed.
    """
    # Bug fix: argv[0] is the script name; the dataset directory is argv[1].
    rootdir = argv[1]
    annodir = os.path.join(rootdir, 'Annotations')
    maskdir = os.path.join(rootdir, 'JPEGImagesMask')
    if not os.path.exists(maskdir):
        os.makedirs(maskdir)
    # (The original sorted twice; once is enough.)
    annofiles = sorted(os.path.join(annodir, x)
                       for x in os.listdir(annodir) if x.endswith('.xml'))
    for xmlfile in annofiles:
        w, h, x, y, a, b, angle = parsexml(xmlfile)
        # uint8 single-channel canvas; masks are 0/255 images.
        img = np.zeros(shape=(h, w, 1), dtype=np.uint8)
        # Shrink the ellipse slightly so the mask stays inside the object.
        delta = 4
        # Bug fixes vs. original: 'anble' was an undefined name, and
        # cv2.imshow requires a window title as its first argument.
        cv2.ellipse(img, (x, y), (a - delta, b - delta), angle, 0, 360, 255, -1)
        cv2.imshow('mask', img)
        cv2.waitKey(0)
    # TODO(review): maskdir is created but never written to -- presumably the
    # masks should also be saved there with cv2.imwrite; confirm intent.
    return
def main():
    """CLI entry point: validate the argument count and run the generator."""
    # sys is already imported at module level; the local re-import was
    # redundant and has been dropped.
    if len(sys.argv) != 2:
        print(__doc__)
        return
    createmask(sys.argv)
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| Python | 0.000002 | |
8ba179518a5901a250fdc7f864f79667c319cf2a | Enhance test | utest/api/test_exposed_api.py | utest/api/test_exposed_api.py | import unittest
from os.path import join
from robot import api, model, parsing, reporting, result, running
from robot.api import parsing as api_parsing
from robot.utils.asserts import assert_equal, assert_true
class TestExposedApi(unittest.TestCase):
    """Guard the public robot.api / robot.api.parsing surface: every exposed
    name must be the exact same object as its internal implementation."""

    def test_execution_result(self):
        assert_equal(api.ExecutionResult, result.ExecutionResult)

    def test_test_suite(self):
        assert_equal(api.TestSuite, running.TestSuite)

    def test_result_writer(self):
        assert_equal(api.ResultWriter, reporting.ResultWriter)

    def test_visitors(self):
        assert_equal(api.SuiteVisitor, model.SuiteVisitor)
        assert_equal(api.ResultVisitor, result.ResultVisitor)

    def test_deprecated_parsing(self):
        # Old parsing entry points still exposed directly via robot.api.
        assert_equal(api.get_model, parsing.get_model)
        assert_equal(api.get_resource_model, parsing.get_resource_model)
        assert_equal(api.get_tokens, parsing.get_tokens)
        assert_equal(api.get_resource_tokens, parsing.get_resource_tokens)
        assert_equal(api.Token, parsing.Token)

    def test_parsing_getters(self):
        assert_equal(api_parsing.get_model, parsing.get_model)
        assert_equal(api_parsing.get_resource_model, parsing.get_resource_model)
        assert_equal(api_parsing.get_tokens, parsing.get_tokens)
        assert_equal(api_parsing.get_resource_tokens, parsing.get_resource_tokens)

    def test_parsing_token(self):
        assert_equal(api_parsing.Token, parsing.Token)

    def test_parsing_model_statements(self):
        # Every concrete statement class is exposed; the Statement base isn't.
        for cls in parsing.model.Statement._statement_handlers.values():
            assert_equal(getattr(api_parsing, cls.__name__), cls)
        assert_true(not hasattr(api_parsing, 'Statement'))

    def test_parsing_model_blocks(self):
        # Every concrete block class is exposed; the Block base isn't.
        for name in ('File', 'SettingSection', 'VariableSection', 'TestCaseSection',
                     'KeywordSection', 'CommentSection', 'TestCase', 'Keyword', 'For',
                     'If', 'Try', 'While'):
            assert_equal(getattr(api_parsing, name), getattr(parsing.model, name))
        assert_true(not hasattr(api_parsing, 'Block'))

    def test_parsing_visitors(self):
        assert_equal(api_parsing.ModelVisitor, parsing.ModelVisitor)
        assert_equal(api_parsing.ModelTransformer, parsing.ModelTransformer)
class TestModelObjects(unittest.TestCase):
    """These model objects are part of the public API.

    They are only seldom needed directly and thus not exposed via the robot.api
    package. Tests just validate they are not removed accidentally.
    """

    def test_running_objects(self):
        assert_true(running.TestSuite)
        assert_true(running.TestCase)
        assert_true(running.Keyword)

    def test_result_objects(self):
        assert_true(result.TestSuite)
        assert_true(result.TestCase)
        assert_true(result.Keyword)
class TestTestSuiteBuilder(unittest.TestCase):
    """Smoke tests for api.TestSuiteBuilder with one and several sources."""

    # This list has paths like `/path/file.py/../file.robot` on purpose.
    # They don't work unless normalized.
    sources = [join(__file__, '../../../atest/testdata/misc', name)
               for name in ('pass_and_fail.robot', 'normal.robot')]

    def test_create_with_datasources_as_list(self):
        suite = api.TestSuiteBuilder().build(*self.sources)
        assert_equal(suite.name, 'Pass And Fail & Normal')

    def test_create_with_datasource_as_string(self):
        suite = api.TestSuiteBuilder().build(self.sources[0])
        assert_equal(suite.name, 'Pass And Fail')


# Allow running this module standalone.
if __name__ == '__main__':
    unittest.main()
| import unittest
from os.path import join
from robot import api, model, parsing, reporting, result, running
from robot.api import parsing as api_parsing
from robot.utils.asserts import assert_equal, assert_true
class TestExposedApi(unittest.TestCase):
    """Guard the public robot.api / robot.api.parsing surface: every exposed
    name must be the exact same object as its internal implementation."""

    def test_execution_result(self):
        assert_equal(api.ExecutionResult, result.ExecutionResult)

    def test_test_suite(self):
        assert_equal(api.TestSuite, running.TestSuite)

    def test_result_writer(self):
        assert_equal(api.ResultWriter, reporting.ResultWriter)

    def test_visitors(self):
        assert_equal(api.SuiteVisitor, model.SuiteVisitor)
        assert_equal(api.ResultVisitor, result.ResultVisitor)

    def test_deprecated_parsing(self):
        # Old parsing entry points still exposed directly via robot.api.
        assert_equal(api.get_model, parsing.get_model)
        assert_equal(api.get_resource_model, parsing.get_resource_model)
        assert_equal(api.get_tokens, parsing.get_tokens)
        assert_equal(api.get_resource_tokens, parsing.get_resource_tokens)
        assert_equal(api.Token, parsing.Token)

    def test_parsing_getters(self):
        assert_equal(api_parsing.get_model, parsing.get_model)
        assert_equal(api_parsing.get_resource_model, parsing.get_resource_model)
        assert_equal(api_parsing.get_tokens, parsing.get_tokens)
        assert_equal(api_parsing.get_resource_tokens, parsing.get_resource_tokens)

    def test_parsing_token(self):
        assert_equal(api_parsing.Token, parsing.Token)

    def test_parsing_model_statements(self):
        # Every concrete statement class is exposed; the Statement base isn't.
        for cls in parsing.model.Statement._statement_handlers.values():
            assert_equal(getattr(api_parsing, cls.__name__), cls)
        assert_true(not hasattr(api_parsing, 'Statement'))

    def test_parsing_model_blocks(self):
        # Every concrete block class is exposed; the Block base isn't.
        for name in ('File', 'SettingSection', 'VariableSection', 'TestCaseSection',
                     'KeywordSection', 'CommentSection', 'TestCase', 'Keyword', 'For',
                     'If'):
            assert_equal(getattr(api_parsing, name), getattr(parsing.model, name))
        assert_true(not hasattr(api_parsing, 'Block'))

    def test_parsing_visitors(self):
        assert_equal(api_parsing.ModelVisitor, parsing.ModelVisitor)
        assert_equal(api_parsing.ModelTransformer, parsing.ModelTransformer)
class TestModelObjects(unittest.TestCase):
    """These model objects are part of the public API.

    They are only seldom needed directly and thus not exposed via the robot.api
    package. Tests just validate they are not removed accidentally.
    """

    def test_running_objects(self):
        assert_true(running.TestSuite)
        assert_true(running.TestCase)
        assert_true(running.Keyword)

    def test_result_objects(self):
        assert_true(result.TestSuite)
        assert_true(result.TestCase)
        assert_true(result.Keyword)
class TestTestSuiteBuilder(unittest.TestCase):
    """Smoke tests for api.TestSuiteBuilder with one and several sources."""

    # This list has paths like `/path/file.py/../file.robot` on purpose.
    # They don't work unless normalized.
    sources = [join(__file__, '../../../atest/testdata/misc', name)
               for name in ('pass_and_fail.robot', 'normal.robot')]

    def test_create_with_datasources_as_list(self):
        suite = api.TestSuiteBuilder().build(*self.sources)
        assert_equal(suite.name, 'Pass And Fail & Normal')

    def test_create_with_datasource_as_string(self):
        suite = api.TestSuiteBuilder().build(self.sources[0])
        assert_equal(suite.name, 'Pass And Fail')


# Allow running this module standalone.
if __name__ == '__main__':
    unittest.main()
| Python | 0.000001 |
45bc2562d3afd3674929e56425b597b54e74ba24 | Create Legends.py | Legends.py | Legends.py | #Draws Legends, Titles and Labels using matplotlib
import matplotlib.pyplot as plt
# Two data series sharing the same x range.
x = [1, 2, 3]
y = [5, 7, 4]
x1 = [1, 2, 3]
y1 = [10, 14, 12]
# The label= text is what plt.legend() displays for each line.
plt.plot(x, y, label='First Line')
plt.plot(x1, y1, label='Second Line')
# Axis labels and the figure title.
plt.xlabel('X Axis')
plt.ylabel('Y Axis')
plt.title('This is a Title')
plt.legend()
plt.show()
| Python | 0 | |
3adbcb8bc7fb4c805e7933a362b62f70873d4f9f | Add emergency_scale module | paasta_tools/paasta_cli/cmds/emergency_scale.py | paasta_tools/paasta_cli/cmds/emergency_scale.py | #!/usr/bin/env python
# Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from service_configuration_lib import DEFAULT_SOA_DIR
from paasta_tools.paasta_cli.utils import execute_paasta_serviceinit_on_remote_master
from paasta_tools.paasta_cli.utils import figure_out_service_name
from paasta_tools.paasta_cli.utils import lazy_choices_completer
from paasta_tools.paasta_cli.utils import list_services
from paasta_tools.paasta_cli.utils import list_instances
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import list_clusters
def add_subparser(subparsers):
    """Register the 'emergency-scale' sub-command and its arguments.

    Wires tab-completion for service/instance/cluster choices and binds the
    command to paasta_emergency_scale.
    """
    status_parser = subparsers.add_parser(
        'emergency-scale',
        description="Scale a PaaSTA service instance",
        help=("Scale a PaaSTA service instance by scaling it up or down to n instances (for Marathon apps)"))
    status_parser.add_argument(
        '-s', '--service',
        help="Service that you want to scale. Like 'example_service'.",
    ).completer = lazy_choices_completer(list_services)
    status_parser.add_argument(
        '-i', '--instance',
        help="Instance of the service that you want to scale. Like 'main' or 'canary'.",
        required=True,
    ).completer = lazy_choices_completer(list_instances)
    status_parser.add_argument(
        '-c', '--cluster',
        help="The PaaSTA cluster that has the service instance you want to scale. Like 'norcal-prod'.",
        required=True,
    ).completer = lazy_choices_completer(list_clusters)
    status_parser.add_argument(
        '-a', '--appid',
        # NOTE(review): help text appears to be missing its closing quote
        # around the example appid -- cosmetic only.
        help="The complete marathon appid to scale. Like 'example-service.main.gitf0cfd3a0.config7a2a00b7",
        required=False,
    )
    status_parser.add_argument(
        '-y', '--yelpsoa-config-root',
        default=DEFAULT_SOA_DIR,
        required=False,
        help="Path to root of yelpsoa-configs checkout",
    )
    status_parser.add_argument(
        '--delta',
        default=0,
        required=True,
        help="Number of instances you want to scale up (positive number) or down (negative number)",
    )
    status_parser.set_defaults(command=paasta_emergency_scale)
def paasta_emergency_scale(args):
    """Performs an emergency scale on a given service instance on a given cluster

    Warning: This command does not permanently scale the service. The next time the service is updated
    (config change, deploy, bounce, etc.), those settings will override the emergency scale.

    If you want this scale to be permanant, adjust the relevant config file to reflect that.
    For example, this can be done for Marathon apps by setting 'instances: n'
    """
    # CAUTION: the last 7 lines of this docstring are printed to the user
    # below -- editing the docstring changes the command's output.
    service = figure_out_service_name(args, soa_dir=args.yelpsoa_config_root)
    print "Performing an emergency scale on %s..." % compose_job_id(service, args.instance)
    # The scale itself runs via serviceinit on the remote cluster master.
    output = execute_paasta_serviceinit_on_remote_master('scale', args.cluster, service, args.instance,
                                                         app_id=args.appid, delta=args.delta)
    print "Output: %s" % output
    print "%s" % "\n".join(paasta_emergency_scale.__doc__.splitlines()[-7:])
| Python | 0 | |
29090add692e6c32a75e123be6cd201949efd6ce | Add elasticsearch-administer | scripts/elasticsearch-administer.py | scripts/elasticsearch-administer.py | """
Utilities for administering elasticsearch
"""
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from collections import namedtuple
import json
import sys
from elasticsearch import Elasticsearch
from elasticsearch.client import ClusterClient, NodesClient, CatClient
def pprint(data):
    # Pretty-print *data* as indented JSON (name shadows the stdlib pprint
    # module, which this script does not import).
    print json.dumps(data, indent=4)
def confirm(msg):
    # Interactive guard: exit the whole script unless the user answers 'y'.
    if raw_input(msg + "\n(y/n)") != 'y':
        sys.exit()
# Lightweight record for one cluster member: display name, internal node id,
# and the per-node document stats dict reported by the nodes-stats API.
Node = namedtuple("Node", "name node_id docs")
def get_nodes_info(es):
    # Return a Node for every member of the cluster, with its doc counts.
    nc = NodesClient(es)
    stats = nc.stats(metric="indices", index_metric="docs")
    return [Node(info['name'], node_id, info['indices']['docs'])
            for node_id, info in stats['nodes'].items()]
def cluster_status(es):
    # Human-readable overview: health, pending tasks, per-node doc counts,
    # and shard allocation (Python 2 print statements throughout).
    cluster = ClusterClient(es)
    print "\nCLUSTER HEALTH"
    pprint(cluster.health())
    print "\nPENDING TASKS"
    pprint(cluster.pending_tasks())
    print "\nNODES"
    for node in get_nodes_info(es):
        print node.name, node.docs
    print "\nSHARD ALLOCATION"
    cat = CatClient(es)
    print cat.allocation(v=True)
def shard_status(es):
    # Dump the _cat/shards table (one row per shard, with headers).
    cat = CatClient(es)
    print cat.shards(v=True)
def cluster_settings(es):
    # Pretty-print the cluster's persistent + transient settings.
    cluster = ClusterClient(es)
    pprint(cluster.get_settings())
def decommission_node(es):
    # Interactively pick a node and exclude it from shard allocation so ES
    # drains its shards onto the remaining nodes.
    cluster = ClusterClient(es)
    print "The nodes are:"
    nodes = get_nodes_info(es)
    for node in nodes:
        print node.name, node.docs
    confirm("Are you sure you want to decommission a node?")
    node_name = raw_input("Which one would you like to decommission?\nname:")
    names = [node.name for node in nodes]
    if node_name not in names:
        print "You must enter one of {}".format(", ".join(names))
        return
    confirm("This will remove all shards from {}, okay?".format(node_name))
    # Transient setting: lost on full-cluster restart; the exclusion stays in
    # effect until cleared, so the node can be shut down once drained.
    cmd = {"transient": {"cluster.routing.allocation.exclude._name": node_name}}
    pprint(cluster.put_settings(cmd))
    print "The node is now being decommissioned."
# Dispatch table: CLI sub-command name -> handler taking an Elasticsearch client.
commands = {
    'cluster_status': cluster_status,
    'cluster_settings': cluster_settings,
    'decommission_node': decommission_node,
    'shard_status': shard_status,
}
def main():
    # CLI entry point: <host_url> <command>; dispatches via the commands table.
    parser = ArgumentParser(description=__doc__, formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('host_url')
    parser.add_argument('command', choices=commands.keys())
    args = parser.parse_args()
    # NOTE(review): 'host_url' is passed as the 'host' field, so it is
    # presumably a bare hostname, not a full URL -- confirm with callers.
    es = Elasticsearch([{'host': args.host_url, 'port': 9200}])
    commands[args.command](es)
if __name__ == "__main__":
    main()
| Python | 0 | |
eee700f46e1edee1133722ee94992abda1ad6a4c | Add GYP build for zlib | deps/zlib.gyp | deps/zlib.gyp | {
'target_defaults': {
'conditions': [
['OS != "win"', {
'defines': [
'_LARGEFILE_SOURCE',
'_FILE_OFFSET_BITS=64',
'_GNU_SOURCE',
'HAVE_SYS_TYPES_H',
'HAVE_STDINT_H',
'HAVE_STDDEF_H',
],
},
{ # windows
'defines': [
'_CRT_SECURE_NO_DEPRECATE',
'_CRT_NONSTDC_NO_DEPRECATE',
],
},
],
],
},
'targets': [
{
'target_name': 'zlib',
'type': 'static_library',
'sources': [
'zlib/adler32.c',
'zlib/compress.c',
'zlib/crc32.c',
'zlib/deflate.c',
'zlib/gzclose.c',
'zlib/gzlib.c',
'zlib/gzread.c',
'zlib/gzwrite.c',
'zlib/inflate.c',
'zlib/infback.c',
'zlib/inftrees.c',
'zlib/inffast.c',
'zlib/trees.c',
'zlib/uncompr.c',
'zlib/zutil.c',
'zlib/win32/zlib1.rc'
],
'include_dirs': [
'zlib',
],
'direct_dependent_settings': {
'include_dirs': [
'zlib',
],
},
}
],
}
| Python | 0 | |
c46962f8055dc1c9d45a35b21afaac363ec3eb46 | add home view | simple_media_service/views/pages.py | simple_media_service/views/pages.py | #
# Copyright (c) Elliot Peele <elliot@bentlogic.net>
#
# This program is distributed under the terms of the MIT License as found
# in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/mit-license.php.
#
# This program is distributed in the hope that it will be useful, but
# without any warrenty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the MIT License for full details.
#
from pyramid.response import Response
from prism_core.views import lift
from prism_core.views import BaseView
from prism_core.views import view_defaults
@lift()
@view_defaults(route_name='home', renderer='text')
class Home(BaseView):
    """Placeholder view for the 'home' route; renders as plain text."""
    def _get(self):
        # Stub body until the real UI is implemented.
        return Response('UI goes here')
| Python | 0 | |
597e9d6f3d5804d403e3cd58a380ea882cbd5267 | Add tracker init support | home/iot/tracker.py | home/iot/tracker.py | import functools
from flask import abort, request
from flask_login import login_required
from flask_socketio import join_room, emit
from home.core.models import get_device
from home.settings import DEBUG
from home.web.utils import api_auth_required
from home.web.web import socketio, app
class TrackedDevice:
    """A remote device reachable over the '/tracker' Socket.IO namespace."""
    def __init__(self, id_: str):
        # Device identifier; sid is filled in when the device's websocket
        # session registers (see ws_android_auth below).
        self.id = id_
        self.sid = None
    def cmd(self, cmd: str):
        # NOTE(review): room is hard-coded to "asdf" rather than self.sid --
        # looks like a development placeholder; confirm before relying on it.
        socketio.emit('cmd', {'cmd': cmd}, namespace='/tracker', room="asdf")
    def register(self):
        # Ask the device to list itself ('ls') over the tracker namespace.
        socketio.emit('cmd', {'cmd': 'ls'}, namespace='/tracker', room="asdf")
@app.route('/api/tracker', methods=['POST'])
@api_auth_required
def commands(client):
    # HTTP entry point: forward an 'exec' command from an API client to the
    # tracked device over Socket.IO. Always responds 204 (no body).
    command = request.form.get('command')
    if command == 'exec':
        socketio.emit('cmd', {'cmd': request.form.get('cmd')}, namespace='/tracker', room="asdf")
    return '', 204
def ws_android_auth(f):
    # Decorator for Socket.IO handlers: records the connecting session id on
    # the 'android' device, then only runs the handler in DEBUG builds.
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        t = get_device('android')
        t.dev.sid = request.sid
        # NOTE(review): outside DEBUG every event is rejected with 403 --
        # real authentication appears to be unimplemented here.
        if DEBUG:
            return f(*args, **kwargs)
        abort(403)
    return wrapped
@socketio.on('register', namespace='/tracker')
@ws_android_auth
def register(data):
    # Device announces itself; join the shared room and acknowledge.
    print(data['id'], "tried to register")
    join_room("asdf")
    emit('registered', 'registered')
| Python | 0 | |
860f6b612c39bb5b569b0fae8279134bca264e70 | Add 2017-faust/toilet | 2017-faust/toilet/toilet.py | 2017-faust/toilet/toilet.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import re
import dateutil.parser
from pwn import *
context(arch='amd64', os='linux')
def get_latest_shas(io):
    # Drive the remote 'toilet' service (pwntools tube *io*): menu option 8
    # dumps a log; scrape the 64-char SHA entries and their timestamps.
    io.sendline('8')
    io.recvuntil('#################################################################################################')
    logs = io.recvuntil('#################################################################################################')
    # [1:] drops the first match, presumably a header entry -- TODO confirm.
    shas = re.findall('#==== (.{64}) ====', logs)[1:]
    # filter out shas older than 15 minutes
    times = [dateutil.parser.parse(time) for time in re.findall('==== (........) ====', logs)[1:]]
    youngest_time = times[0]
    # Python 2 tuple-unpacking lambda; returns (sha, time) pairs.
    return filter(lambda (_, time): (youngest_time - time).seconds <= (15 * 60), zip(shas, times))
# First session: harvest the recent SHA tokens from the service log.
with process('./toilet') as io:
    latest_shas = get_latest_shas(io)
# One fresh session per candidate SHA: create an entry (1), redeem the SHA
# via option 5, then read back the name (7/4) hoping it contains the flag.
for sha, _ in latest_shas:
    with process('./toilet') as io:
        io.sendline('1')
        io.sendline(fit(length=64))
        io.sendline('5')
        io.send('\n')
        io.sendline(sha)
        io.sendline('7')
        io.sendline('4')
        io.recvuntil('Name: ', timeout=3)
        flag = io.recvregex(r'FAUST_[A-Za-z0-9/\+]{32}', exact=True, timeout=3)
        if flag:
            print flag
            break
| Python | 0.000001 | |
571334df8e26333f34873a3dcb84441946e6c64c | Bump version number to 0.12.2 | flask/__init__.py | flask/__init__.py | # -*- coding: utf-8 -*-
"""
flask
~~~~~
A microframework based on Werkzeug. It's extensively documented
and follows best practice patterns.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.12.2'
# utilities we import from Werkzeug and Jinja2 that are unused
# in the module but are exported as public interface.
from werkzeug.exceptions import abort
from werkzeug.utils import redirect
from jinja2 import Markup, escape
from .app import Flask, Request, Response
from .config import Config
from .helpers import url_for, flash, send_file, send_from_directory, \
get_flashed_messages, get_template_attribute, make_response, safe_join, \
stream_with_context
from .globals import current_app, g, request, session, _request_ctx_stack, \
_app_ctx_stack
from .ctx import has_request_context, has_app_context, \
after_this_request, copy_current_request_context
from .blueprints import Blueprint
from .templating import render_template, render_template_string
# the signals
from .signals import signals_available, template_rendered, request_started, \
request_finished, got_request_exception, request_tearing_down, \
appcontext_tearing_down, appcontext_pushed, \
appcontext_popped, message_flashed, before_render_template
# We're not exposing the actual json module but a convenient wrapper around
# it.
from . import json
# This was the only thing that Flask used to export at one point and it had
# a more generic name.
jsonify = json.jsonify
# backwards compat, goes away in 1.0
from .sessions import SecureCookieSession as Session
json_available = True
| # -*- coding: utf-8 -*-
"""
flask
~~~~~
A microframework based on Werkzeug. It's extensively documented
and follows best practice patterns.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.12.2-dev'
# utilities we import from Werkzeug and Jinja2 that are unused
# in the module but are exported as public interface.
from werkzeug.exceptions import abort
from werkzeug.utils import redirect
from jinja2 import Markup, escape
from .app import Flask, Request, Response
from .config import Config
from .helpers import url_for, flash, send_file, send_from_directory, \
get_flashed_messages, get_template_attribute, make_response, safe_join, \
stream_with_context
from .globals import current_app, g, request, session, _request_ctx_stack, \
_app_ctx_stack
from .ctx import has_request_context, has_app_context, \
after_this_request, copy_current_request_context
from .blueprints import Blueprint
from .templating import render_template, render_template_string
# the signals
from .signals import signals_available, template_rendered, request_started, \
request_finished, got_request_exception, request_tearing_down, \
appcontext_tearing_down, appcontext_pushed, \
appcontext_popped, message_flashed, before_render_template
# We're not exposing the actual json module but a convenient wrapper around
# it.
from . import json
# This was the only thing that Flask used to export at one point and it had
# a more generic name.
jsonify = json.jsonify
# backwards compat, goes away in 1.0
from .sessions import SecureCookieSession as Session
json_available = True
| Python | 0.000153 |
a5188b4a172e17ac755ba4ce8d8890c7b211eb74 | Create ex11.py | learningpythonthehardway/ex11.py | learningpythonthehardway/ex11.py | print "How old are you brother ?"
age = raw_input() # will get some text ;def
print "How tall are you ?"
height = raw_input()
print "do you eat enough ?"
eat = raw_input()
print "So, you're a %r years old and %r tall guy that says : '%r' to the food, right ?" % (age, height, eat)
# Nb: to get a number from the return stuff, 'x = int(raw_input())'
| Python | 0.000001 | |
e652ef42e5645671cde6522254c2b86d4a71114c | Add a new migration to rename key and secret columns. | allaccess/migrations/0004_rename_key_secret.py | allaccess/migrations/0004_rename_key_secret.py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from ..compat import get_user_model, AUTH_USER_MODEL
User = get_user_model()
class Migration(SchemaMigration):
def forwards(self, orm):
# Rename field 'Provider.secret'
db.rename_column('allaccess_provider', 'secret', 'consumer_secret')
# Rename field 'Provider.key'
db.rename_column('allaccess_provider', 'key', 'consumer_key')
def backwards(self, orm):
# Rename field 'Provider.secret'
db.rename_column('allaccess_provider', 'consumer_secret', 'secret')
# Rename field 'Provider.key'
db.rename_column('allaccess_provider', 'consumer_key', 'key')
models = {
'allaccess.accountaccess': {
'Meta': {'unique_together': "((u'identifier', u'provider'),)", 'object_name': 'AccountAccess'},
'access_token': ('allaccess.fields.EncryptedField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['allaccess.Provider']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % AUTH_USER_MODEL, 'null': 'True', 'blank': 'True'})
},
'allaccess.provider': {
'Meta': {'object_name': 'Provider'},
'access_token_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'authorization_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'consumer_key': ('allaccess.fields.EncryptedField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'consumer_secret': ('allaccess.fields.EncryptedField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'profile_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'request_token_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': User.__name__, "db_table": "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['allaccess'] | Python | 0 | |
e4bc9684c10a360ad8df32b2c6bfb8f013ea4b77 | Add Composite.py | Python/Composite/Composite.py | Python/Composite/Composite.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
'''
Composite Pattern
Author: Kei Nakata
Data: Oct.10.2014
'''
import abc
import exceptions
class Component(object):
    """Abstract base of the Composite pattern (Python 2 ABC via __metaclass__).

    Concrete subclasses are Composite (inner node) and Leaf (terminal).
    """
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def __init__(self, name):
        pass
    @abc.abstractmethod
    def add(self, child):
        pass
    @abc.abstractmethod
    def remove(self, index):
        pass
    @abc.abstractmethod
    def getChild(self, index):
        pass
    @abc.abstractmethod
    def show(self):
        pass
class Composite(Component):
    """Inner node: holds an ordered list of child Components."""
    def __init__(self, name):
        # NOTE(review): 'name' is accepted (per the abstract interface) but
        # never stored -- possibly an oversight; confirm before fixing.
        self.children = []
    def add(self, child):
        self.children.append(child)
    def remove(self, index):
        del self.children[index]
    def getChild(self, index):
        return self.children[index]
    def show(self):
        # Recursively display every child (depth-first, in insertion order).
        for child in self.children:
            child.show()
class Leaf(Component):
count = 0
def __init__(self, name):
self.name = name
Leaf.count = Leaf.count + 1
self.number = Leaf.count
def add(self):
raise exceptions.RuntimeError("can not add item to leaf")
def remove(self):
raise exceptions.RuntimeError("can not remove item through leaf class")
def getChild(self):
raise exceptions.RuntimeError("leaf does not have child")
def show(self):
print self.number, self.name
if __name__ == '__main__':
    # Demo: a box containing three fruits plus a small box of three meats;
    # show the tree, remove the second entry ('orange'), show again.
    container = Composite('box')
    small_container = Composite('small box')
    small_container.add(Leaf('chicken'))
    small_container.add(Leaf('beaf'))
    small_container.add(Leaf('pork'))
    container.add(Leaf('apple'))
    container.add(Leaf('orange'))
    container.add(Leaf('pear'))
    container.add(small_container)
    container.show()
    print
    container.remove(1)
    container.show()
| Python | 0.000001 | |
ac3b5be9a6f71afb402db2f293e1198bce973440 | Create the login server using Flask | flask/login.py | flask/login.py | from abc import ABCMeta, ABC, abstractmethod, abstractproperty
from flask import Flask, app
import flask
from flask_login import LoginManager
class User(ABC):
    """Base user model implementing the flask-login user protocol.

    Class-level attributes supply defaults for a logged-out user;
    subclasses or instances override them to reflect real session state.
    """

    # Defaults: not authenticated, inactive, not anonymous, no identifier.
    authenticated = False
    active = False
    anonymous = False
    id = None

    def get_id(self):
        """Return this user's unique identifier (None by default)."""
        return self.id

    def is_authenticated(self):
        """Return whether the user has provided valid credentials."""
        return self.authenticated

    def is_active(self):
        """Return whether the account is active (enabled)."""
        return self.active

    def is_anonymous(self):
        """Return whether this is an anonymous (guest) user."""
        return self.anonymous
login_manager = LoginManager()
@login_manager.user_loader
def load_user(user_id):
    # flask-login callback: reload a User from its session-stored id.
    # Returning None (implicitly, via pass) treats every session as invalid.
    pass #TODO: unimplemented for the moment
@app.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
login_user(user)
flask.flash('Logged in successfully.')
next = flask.request.args.get('next')
if not is_safe_url(next): #TODO: unimplemented
return flask.abort(400)
return flask.redirect(next or flask.url_for('index'))
return flask.render_template('htdoc/login.html', form=form) | Python | 0.000001 | |
e15f59f29907d740d0a0f8dab46d77aa833ef802 | fix "peru -v" | peru/main.py | peru/main.py | #! /usr/bin/env python3
import os
import sys
from . import runtime
from . import module
def main():
    """Entry point: parse the local peru file and build the requested target.

    Verbosity flags are stripped from argv so that "peru -v target" still
    treats "target" as the first positional argument.
    """
    config_name = os.getenv("PERU_FILE_NAME") or "peru.yaml"
    if not os.path.isfile(config_name):
        print(config_name + " not found.")
        sys.exit(1)
    rt = runtime.Runtime()
    mod = module.parse(rt, config_name)
    # Drop the verbosity flags; whatever remains after argv[0] is the target.
    positional = [a for a in sys.argv if a not in ("-v", "--verbose")]
    target = positional[1].split('.') if len(positional) > 1 else []
    mod.build(rt, target)
| #! /usr/bin/env python3
import os
import sys
from . import runtime
from . import module
def main():
    # Entry point (pre "-v" fix): note that flags in sys.argv are NOT
    # filtered here, so "peru -v" would treat "-v" as the build target.
    peru_file_name = os.getenv("PERU_FILE_NAME") or "peru.yaml"
    if not os.path.isfile(peru_file_name):
        print(peru_file_name + " not found.")
        sys.exit(1)
    r = runtime.Runtime()
    m = module.parse(r, peru_file_name)
    # Dotted target path, e.g. "module.rule"; empty list means "build all".
    if len(sys.argv) > 1:
        target = sys.argv[1].split('.')
    else:
        target = []
    m.build(r, target)
| Python | 0 |
0d2c04790fb6c97b37f6e0700bb0162796e3dc4c | Add unit tests for AmountTaxScale serialization | tests/web_api/test_scale_serialization.py | tests/web_api/test_scale_serialization.py | # -*- coding: utf-8 -*-
from openfisca_web_api.loader.parameters import walk_node
from openfisca_core.parameters import ParameterNode, Scale
def test_amount_scale():
    # Build a parameter tree holding one amount-based Scale (threshold 1 ->
    # amount 0 as of 2014-01-01) and check walk_node serializes it with the
    # brackets flattened to {date: {threshold: amount}}.
    parameters = []
    metadata = {'location':'foo', 'version':'1', 'repository_url':'foo'}
    root_node = ParameterNode(data = {})
    amount_scale_data = {'brackets':[{'amount':{'2014-01-01':{'value':0}},'threshold':{'2014-01-01':{'value':1}}}]}
    scale = Scale('scale', amount_scale_data, 'foo')
    root_node.children['scale'] = scale
    walk_node(root_node, parameters, [], metadata)
    assert parameters == [{'description': None, 'id': 'scale', 'metadata': {}, 'source': 'foo/blob/1', 'brackets': {'2014-01-01': {1: 0}}}]
| Python | 0 | |
b0f8064d0d6a747aac5b45bc44c3c4abda7873ad | Add unit test | apps/sam/python/src/i2p/test/test_select.py | apps/sam/python/src/i2p/test/test_select.py |
# -----------------------------------------------------
# test_select.py: Unit tests for select.py.
# -----------------------------------------------------
# Make sure we can import i2p
import sys; sys.path += ['../../']
import time
import traceback, sys
from i2p import socket, select
import i2p.socket
import socket as pysocket
def minitest_select(rans, wans, eans, timeout,
                    f1=None, f4=None, c1=None, c4=None):
    """Mini-unit test for select (Python and I2P sockets).

    Creates six sockets (S1-S3 regular Python, S4-S6 I2P), optionally
    prepares S1 with f1() and S4 with f4(), then calls i2p.select.select()
    with timeout 'timeout'.  rans, wans, and eans are the expected
    readable/writable/error results, given as lists of socket indexes
    1...6.  Finally calls c1() on S1 and c4() on S4 if provided."""
    S1 = pysocket.socket(pysocket.AF_INET, pysocket.SOCK_STREAM)
    S2 = pysocket.socket(pysocket.AF_INET, pysocket.SOCK_DGRAM)
    S3 = pysocket.socket(pysocket.AF_INET, pysocket.SOCK_RAW)
    kw = {'in_depth':0, 'out_depth':0}
    S4 = socket.socket('Fella', socket.SOCK_STREAM, **kw)
    S5 = socket.socket('Boar', socket.SOCK_DGRAM, **kw)
    S6 = socket.socket('Gehka', socket.SOCK_RAW, **kw)
    if f1: f1(S1)
    if f4: f4(S4)
    L = [S1, S2, S3, S4, S5, S6]
    # Select on all three sets at once, then each set individually.
    start = time.time()
    ans = select.select(L, L, L, timeout)
    ans1 = select.select(L, [], [], timeout)
    ans2 = select.select([], L, [], timeout)
    ans3 = select.select([], [], L, timeout)
    end = time.time()
    T = end - start
    # Map the returned socket objects back to 1-based indexes into L.
    ans = [[L.index(x) + 1 for x in ans [i]] for i in range(3)]
    ans1 = [[L.index(x) + 1 for x in ans1[i]] for i in range(3)]
    ans2 = [[L.index(x) + 1 for x in ans2[i]] for i in range(3)]
    ans3 = [[L.index(x) + 1 for x in ans3[i]] for i in range(3)]
    print ans1[0], rans
    assert ans1[0] == rans
    print ans2[1], wans
    assert ans2[1] == wans
    print ans3[2], eans
    assert ans3[2] == eans
    print ans, [rans, wans, eans]
    assert ans == [rans, wans, eans]
    # Four selects ran back to back, so allow 4x the timeout plus slack.
    assert T < 4 * timeout + 0.1
    if c1: c1(S1)
    if c4: c4(S4)
def test_select():
    """Unit test for select (Python and I2P sockets)."""
    # Helpers passed into minitest_select to put S1/S4 into various states.
    def connect1(S):
        """Connect regular Python socket to Google."""
        ip = pysocket.gethostbyname('www.google.com')
        S.connect((ip, 80))
    def connect4(S):
        """Connect I2P Python socket to duck.i2p."""
        S.connect('duck.i2p')
    def full1(S):
        """Connect regular Python socket to Google, and send."""
        connect1(S)
        S.sendall('GET / HTTP/1.0\r\n\r\n')
        print S.recv(1)
    def full4(S):
        """Connect I2P Python socket to duck.i2p, and send."""
        connect4(S)
        S.sendall('GET / HTTP/1.0\r\n\r\n')
        S.recv(1)
        # Peek twice (make sure peek code isn't causing problems).
        S.recv(1, i2p.socket.MSG_PEEK | i2p.socket.MSG_DONTWAIT)
        S.recv(1, i2p.socket.MSG_PEEK | i2p.socket.MSG_DONTWAIT)
    def check(S):
        """Verify that three chars recv()d are 'TTP'."""
        assert S.recv(3) == 'TTP'
    # NOTE(review): bare except below swallows every failure mode and exits;
    # requires live network access (Google + duck.i2p) to pass.
    try:
        for t in [0.0, 1.0]:
            minitest_select([], [2, 3, 5, 6], [], t)
            minitest_select([], [1, 2, 3, 4, 5, 6], [], t,
                            f1=connect1, f4=connect4)
            minitest_select([], [1, 2, 3, 5, 6], [], t,
                            f1=connect1)
            minitest_select([], [2, 3, 4, 5, 6], [], t,
                            f4=connect4)
            minitest_select([1, 4], [1, 2, 3, 4, 5, 6], [], t,
                            f1=full1, f4=full4, c1=check, c4=check)
    except:
        print 'Unit test failed for i2p.select.select().'
        traceback.print_exc(); sys.exit()
    print 'i2p.select.select(): OK'
if __name__ == '__main__':
test_select()
| Python | 0.000001 | |
a04d5745257c16e127711fbded6899f8f226aeba | add html generator using pdoc3 | doc/py/gen.py | doc/py/gen.py | import os
import pdoc
import clingo
import clingo.ast
import re
# Build pdoc modules for clingo and clingo.ast in one shared context so that
# cross-module links resolve; register ast as a submodule of clingo.
ctx = pdoc.Context()
cmod = pdoc.Module(clingo, context=ctx)
amod = pdoc.Module(clingo.ast, supermodule=cmod, context=ctx)
cmod.doc["ast"] = amod
pdoc.link_inheritance(ctx)
def replace(s):
    """Rewrite pdoc's generated HTML for local, directory-style hosting.

    Inter-module hrefs pointing at *.html files are redirected to
    directory paths, and any cdnjs asset URL is swapped for a relative
    'css/<file>' or 'js/<file>' reference.
    """
    link_fixes = (
        ('href="clingo.html', 'href="clingo/'),
        ('href="../clingo.html', 'href="../'),
        ('href="clingo/ast.html', 'href="ast/'),
    )
    for old, new in link_fixes:
        s = s.replace(old, new)
    cdn_pattern = r"['\"]https://cdnjs\.cloudflare\.com/.*/([^/'\"]+\.(css|js))['\"]"
    return re.sub(cdn_pattern, r"'\2/\1'", s)
# Render both modules and write them as index.html files under clingo/ and
# clingo/ast/, rewriting links/assets via replace() on the way out.
os.makedirs("clingo/ast", exist_ok=True)
open("clingo/index.html", "w").write(replace(cmod.html(external_links=True)))
open("clingo/ast/index.html", "w").write(replace(amod.html(external_links=True)))
| Python | 0 | |
e4efaa947533e6d63eb7518306e31386ec688c73 | write testing test | bioinformatics/tests/test_frequent_words.py | bioinformatics/tests/test_frequent_words.py | def test_sanity_check_pass():
assert True
def test_sanity_check_fail():
assert False
def test_sanity_check_error():
assert 0/0
| Python | 0.000095 | |
42f614e7f22dfa93c07c09e6e2fedb5546f8d236 | read pwscf occupations and evals | qe_reader.py | qe_reader.py | import numpy as np
from mmap import mmap
def retrieve_occupations(nscf_outfile, max_nbnd_lines=10):
    """ read the eigenvalues and occupations of DFT orbitals at every available kpoint in an non-scf output produced by pwscf

    Returns a list of dicts, one per k-point: {'ik', 'kpt', 'eval', 'occ'}.
    Python 2 code: relies on map() returning a list. """
    fhandle = open(nscf_outfile,'r+')
    mm = mmap(fhandle.fileno(),0)
    # read number of k points
    nk_prefix = "number of k points="
    idx = mm.find(nk_prefix)
    mm.seek(idx)
    nk_line = mm.readline()
    # NOTE(review): str.strip() takes a character set, not a prefix -- this
    # works here only because digits are not in nk_prefix; confirm if reused.
    nk = int( nk_line.strip(nk_prefix).split()[0] )
    # skip to the end of band structure calculation
    idx = mm.find('End of band structure calculation')
    mm.seek(idx)
    # read the eigenvalues and occupations at each kpoint
    kpt_prefix = "k ="
    data = []
    for ik in range(nk):
        idx = mm.find(kpt_prefix)
        mm.seek(idx)
        kpt_line = mm.readline()
        kpt = map(float,kpt_line.strip(kpt_prefix).split()[:3])
        mm.readline() # skip empty line
        eval_arr = np.array([])
        # Eigenvalues wrap over several lines; a blank line ends the list.
        for iline in range(max_nbnd_lines):
            tokens = mm.readline().split()
            if len(tokens)==0:
                break
            # end if
            eval_arr = np.append(eval_arr, map(float,tokens))
        # end for iline
        idx = mm.find('occupation numbers')
        mm.seek(idx)
        mm.readline() # skip current line
        occ_arr = np.array([])
        # Occupations assumed to span at most 4 lines -- TODO confirm.
        for iline in range(4):
            tokens = mm.readline().split()
            if len(tokens)==0:
                break
            # end if
            occ_arr = np.append(occ_arr, map(float,tokens))
        # end for iline
        entry = {'ik':ik,'kpt':list(kpt),'eval':list(eval_arr),'occ':list(occ_arr)}
        data.append(entry)
    # end for
    mm.close()
    fhandle.close()
    return data
# end def
import os
import h5py
def retrieve_psig(h5_file,only_occupied=False,occupations=None):
    """ return a list dictionaries of DFT orbital coefficients in PW basis by reading an hdf5 file written by pw2qmcpack. If only_occupied=True and a database of occupied orbitals are given, then only read orbitals that are occupied.

    Each entry: {'ik', 'iband', 'eval', 'psig'}.  'occupations' is presumably
    a pandas DataFrame built from retrieve_occupations -- TODO confirm. """
    if only_occupied and (occupations is None):
        raise NotImplementedError("no occupation database is given")
    # end if
    ha = 27.21138602 # ev from 2014 CODATA
    orbitals = []
    h5handle = h5py.File(h5_file)
    electron = h5handle['electrons']
    # Collect all 'kpoint_*' groups under /electrons.
    kpt_labels = []
    for key in electron.keys():
        if key.startswith('kpoint'):
            kpt_labels.append(key)
        # end if
    # end for key
    nk = electron['number_of_kpoints'].value
    assert nk==len(kpt_labels)
    for label in kpt_labels:
        # get kpoint index
        kpt_idx = int( label.split('_')[-1] )
        # get plane wave wave numbers
        if kpt_idx == 0:
            # NOTE(review): gvecs is read only at the gamma point and never
            # used below -- possibly for a caller/debugger; confirm.
            mypath = os.path.join(label,'gvectors')
            gvecs = electron[mypath].value
        # end if
        # verify eigenstates at this kpoint
        kpt_ptr = electron[os.path.join(label,'spin_0')]
        nbnd = kpt_ptr['number_of_states'].value
        evals = kpt_ptr['eigenvalues'].value
        # compare to nscf output (eigenvalues and occupation)
        if occupations is not None:
            mydf = occupations[occupations['ik']==kpt_idx]
            myval= mydf['eval'].values[0]
            myocc= mydf['occ'].values[0]
            assert nbnd == len(myval), "expect %d bands, nscf has %d bands" % (nbnd,len(myval))
            # Eigenvalues in the h5 file are Hartree; nscf output is eV.
            assert np.allclose(evals*ha,myval,atol=1e-4), str(evals*ha-myval)
        # end if
        for iband in range(nbnd):
            # Short-circuit keeps myocc unused unless only_occupied is set,
            # in which case occupations (and thus myocc) is guaranteed above.
            if only_occupied and (np.isclose(myocc[iband],0.0)):
                continue
            # end if
            psig = kpt_ptr['state_%d/psi_g'%iband].value
            entry = {'ik':kpt_idx,'iband':iband,'eval':evals[iband],'psig':psig}
            orbitals.append(entry)
        # end for iband
    # end for label
    h5handle.close()
    return orbitals
# end def retrieve_psig
| Python | 0 | |
aee6afe48bf4d2992c39a22d9e492377dcec527c | Add migrations | dash/orgs/migrations/0029_auto_20211025_1504.py | dash/orgs/migrations/0029_auto_20211025_1504.py | # Generated by Django 3.2.6 on 2021-10-25 15:04
import functools
from django.db import migrations, models
import dash.utils
class Migration(migrations.Migration):
dependencies = [
("orgs", "0028_alter_org_config"),
]
operations = [
migrations.AlterField(
model_name="org",
name="logo",
field=models.ImageField(
blank=True,
help_text="The logo that should be used for this organization",
null=True,
upload_to=functools.partial(dash.utils.generate_file_path, *("logos",), **{}),
),
),
migrations.AlterField(
model_name="orgbackground",
name="image",
field=models.ImageField(
help_text="The image file",
upload_to=functools.partial(dash.utils.generate_file_path, *("org_bgs",), **{}),
),
),
]
| Python | 0.000001 | |
6b9933cce4cac3131d603880969e1d9b78b1e4f0 | Remove party_affiliation table | alembic/versions/138c92cb2218_feed.py | alembic/versions/138c92cb2218_feed.py | """Remove PartyAffiliation
Revision ID: 138c92cb2218
Revises: 3aecd12384ee
Create Date: 2013-09-28 16:34:40.128374
"""
# revision identifiers, used by Alembic.
revision = '138c92cb2218'
down_revision = '3aecd12384ee'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table(u'party_affiliation')
def downgrade():
op.create_table(u'party_affiliation',
sa.Column(u'id', sa.INTEGER(), server_default="nextval('party_affiliation_id_seq'::regclass)", nullable=False),
sa.Column(u'person_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'party_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'start_date', sa.CHAR(length=8), autoincrement=False, nullable=True),
sa.Column(u'end_date', sa.CHAR(length=8), autoincrement=False, nullable=True),
sa.Column(u'is_current_member', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['party_id'], [u'party.id'], name=u'party_affiliation_party_id_fkey'),
sa.ForeignKeyConstraint(['person_id'], [u'person.id'], name=u'party_affiliation_person_id_fkey'),
sa.PrimaryKeyConstraint(u'id', name=u'party_affiliation_pkey')
)
| Python | 0.000002 | |
6f7afea4aed4dd77cd06e8dce66e9ed1e6390a00 | Add a dummy label printer server. | dummyprint.py | dummyprint.py | #!/usr/bin/env python3
# It does work with Python 2.7, too.
from __future__ import print_function
from __future__ import unicode_literals
try:
from SocketServer import TCPServer, BaseRequestHandler
except ImportError: # Python 3
from socketserver import TCPServer, BaseRequestHandler
class DummyHandler(BaseRequestHandler):
""" Simply write everything to stdout. """
def handle(self):
print("-----------------------------------------------------")
print("New connection from {}:".format(self.client_address))
buffer = b''
while True:
data = self.request.recv(1024)
if data:
buffer += data
else:
break
print(buffer)
print("-----------------------------------------------------")
if __name__ == "__main__":
listen_config = ("127.0.0.1", 9100)
print("Listening at {}...".format(listen_config))
server = TCPServer(listen_config, DummyHandler)
server.serve_forever()
| Python | 0 | |
d173374a2bb0b3336a44c204f250ee1fa928051f | Add CLI mechanics stub. | grafcli/cli.py | grafcli/cli.py |
from grafcli.config import config
from grafcli.elastic import Elastic
from grafcli.filesystem import FileSystem
ROOT_PATH = "/"
PROMPT = "> "


class GrafCLI(object):
    """Interactive grafcli shell: reads commands in a loop until EOF/Ctrl-C.

    Command dispatch is not implemented yet (mechanics stub).
    """

    def __init__(self):
        self._elastic = Elastic()
        self._filesystem = FileSystem()
        self._current_path = ROOT_PATH

    def run(self):
        """Run the read loop until the user interrupts or sends EOF."""
        while True:
            try:
                # input(prompt) writes and flushes the prompt itself; the
                # previous print(..., end='') could leave it buffered and
                # invisible before blocking for input.
                user_input = input(self._format_prompt())
            except (KeyboardInterrupt, EOFError):
                break
            # TODO: dispatch user_input to a command handler; it is
            # currently ignored (stub).

    def _format_prompt(self):
        """Return the prompt string, e.g. '[/some/path]> '."""
        return "[{path}]{prompt}".format(path=self._current_path,
                                         prompt=PROMPT)
| Python | 0 | |
e937e461e7e130dc80e1a4403b0a810db0e04b29 | Create an environment based on a config file. | wmtexe/env.py | wmtexe/env.py | from os import path, pathsep
import subprocess
def find_babel_libs():
    """Return the Babel library directory reported by cca-spec-babel-config.

    Returns None when the tool is missing or exits with an error.
    """
    command = ['cca-spec-babel-config', '--var', 'CCASPEC_BABEL_LIBS']
    try:
        output = subprocess.check_output(command)
    except (OSError, subprocess.CalledProcessError):
        return None
    return output.strip()
def python_version(python):
    """Return 'pythonX.Y' for the interpreter at *python*.

    Runs the given interpreter so this works for interpreters other than
    the one executing this module.
    """
    version = subprocess.check_output(
        [python, '-c', 'import sys; print(sys.version[:3])'])
    # check_output returns bytes on Python 3; decode so the %-format below
    # yields 'python2.7' rather than "pythonb'2.7'".  On Python 2 this
    # simply yields a unicode string.
    return 'python%s' % version.decode('ascii').strip()
def env_from_config_path(path_to_cfg):
    """Build a WMT execution environment from the config file at *path_to_cfg*.

    The file must contain a ``[wmt]`` section with ``python_prefix``,
    ``cca_prefix``, ``wmt_prefix``, ``components_prefix`` plus the ``curl``,
    ``tail`` and ``bash`` command paths.  Returns a dict of environment
    variables (PATH, PYTHONPATH, LD_LIBRARY_PATH, ...).
    """
    try:
        import configparser as config_parser  # Python 3
    except ImportError:
        import ConfigParser as config_parser  # Python 2 (original only supported this)
    config = config_parser.RawConfigParser()
    config.read(path_to_cfg)

    python_prefix = config.get('wmt', 'python_prefix')
    cca_prefix = config.get('wmt', 'cca_prefix')
    wmt_prefix = config.get('wmt', 'wmt_prefix')
    components_prefix = config.get('wmt', 'components_prefix')

    ver = python_version(path.join(python_prefix, 'bin', 'python'))

    python_path = [
        path.join(python_prefix, 'lib', ver, 'site-packages'),
        path.join(python_prefix, 'lib', ver),
        path.join(components_prefix, 'lib', ver, 'site-packages'),
        path.join(cca_prefix, 'lib', ver, 'site-packages'),
    ]
    # find_babel_libs() returns None when cca-spec-babel-config is not
    # installed; the original code passed that straight to path.join and
    # crashed.  Skip the babel entry in that case instead.
    babel_libs = find_babel_libs()
    if babel_libs is not None:
        python_path.append(path.join(babel_libs, ver, 'site-packages'))

    environ = {
        'CURL': config.get('wmt', 'curl'),
        'TAIL': config.get('wmt', 'tail'),
        'BASH': config.get('wmt', 'bash'),
        'PYTHONPATH': pathsep.join(python_path),
        'LD_LIBRARY_PATH': pathsep.join([
            path.join(python_prefix, 'lib'),
            path.join(components_prefix, 'lib'),
            path.join(wmt_prefix, 'lib'),
            path.join(cca_prefix, 'lib'),
        ]),
        'PATH': pathsep.join([
            path.join(python_prefix, 'bin'),
            '/usr/local/bin',
            '/usr/bin',
            '/bin',
        ]),
        'CLASSPATH': pathsep.join([
            path.join(components_prefix, 'lib', 'java'),
        ]),
        # SIDL_DLL_PATH uses ';' as its separator regardless of platform.
        'SIDL_DLL_PATH': ';'.join([
            path.join(components_prefix, 'share', 'cca'),
        ]),
    }
    # Mirror the library path into LD_RUN_PATH for link-time use.
    environ['LD_RUN_PATH'] = environ['LD_LIBRARY_PATH']

    return environ
def _is_executable(program):
from os import access, X_OK
return path.isfile(program) and access(program, X_OK)
def audit(environ):
    """Return a newline-separated report of problems found in *environ*.

    Verifies that the configured commands are executable and that every
    entry of the path-style variables is an existing directory.  An empty
    string means the environment looks sane.
    """
    from os import linesep

    problems = []
    for command in ('TAIL', 'CURL', 'BASH'):
        if not _is_executable(environ[command]):
            problems.append('%s: file is not executable' % command)

    def _check_dirs(variables, separator):
        for variable in variables:
            for entry in environ[variable].split(separator):
                if not path.isdir(entry):
                    problems.append('%s: not a directory' % entry)

    _check_dirs(('PYTHONPATH', 'LD_LIBRARY_PATH', 'PATH', 'CLASSPATH'), pathsep)
    # SIDL_DLL_PATH is ';'-separated on every platform.
    _check_dirs(('SIDL_DLL_PATH',), ';')

    return linesep.join(problems)
def main():
    """Print the computed WMT environment as shell ``export`` lines,
    followed by the audit report (empty when everything checks out)."""
    environ = env_from_config_path('wmt.cfg')
    for name, value in environ.items():
        print('export %s=%s' % (name, value))
    print(audit(environ))


if __name__ == '__main__':
    main()
| Python | 0 | |
590f9b896be367ded589c90ac5eacd4d3006ebc8 | Create Combinations_001.py | leetcode/077-Combinations/Combinations_001.py | leetcode/077-Combinations/Combinations_001.py | class Solution:
# @param {integer} n
# @param {integer} k
# @return {integer[][]}
def combine(self, n, k):
if k < 1 or k > n:
return []
if k == 1:
return [[i] for i in range(1, n+1)]
res = self.combine(n - 1, k -1)
[i.append(n) for i in res ]
second = self.combine(n - 1, k)
res.extend(second)
return res
| Python | 0.000004 | |
e515b4000c42f1947519118772e22c09e692289d | Manage schemas on ACI MultiSite (#47758) | lib/ansible/modules/network/aci/msc_schema.py | lib/ansible/modules/network/aci/msc_schema.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: msc_schema
short_description: Manage schemas
description:
- Manage schemas on Cisco ACI Multi-Site.
author:
- Dag Wieers (@dagwieers)
version_added: '2.8'
options:
schema_id:
description:
- The ID of the schema.
type: str
required: yes
schema:
description:
- The name of the schema.
type: str
required: yes
aliases: [ name, schema_name ]
templates:
description:
- A list of templates for this schema.
type: list
sites:
description:
- A list of sites mapped to templates in this schema.
type: list
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: msc
'''
EXAMPLES = r'''
- name: Add a new schema
msc_schema:
host: msc_host
username: admin
password: SomeSecretPassword
schema: Schema 1
state: present
templates:
- name: Template1
displayName: Template 1
tenantId: north_europe
anps:
<...>
- name: Template2
displayName: Template 2
      tenantId: north_europe
anps:
<...>
delegate_to: localhost
- name: Remove schemas
msc_schema:
host: msc_host
username: admin
password: SomeSecretPassword
schema: Schema 1
state: absent
delegate_to: localhost
- name: Query a schema
msc_schema:
host: msc_host
username: admin
password: SomeSecretPassword
schema: Schema 1
state: query
delegate_to: localhost
register: query_result
- name: Query all schemas
msc_schema:
host: msc_host
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.msc import MSCModule, msc_argument_spec, issubset
def main():
    """Ansible entry point: create, delete or query an ACI Multi-Site schema."""
    argument_spec = msc_argument_spec()
    argument_spec.update(
        schema=dict(type='str', required=False, aliases=['name', 'schema_name']),
        schema_id=dict(type='str', required=False),
        templates=dict(type='list'),
        sites=dict(type='list'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['schema']],
            ['state', 'present', ['schema']],
        ],
    )

    schema = module.params['schema']
    schema_id = module.params['schema_id']
    templates = module.params['templates']
    sites = module.params['sites']
    state = module.params['state']

    msc = MSCModule(module)

    path = 'schemas'

    # Resolve the existing object from whichever identifiers were provided.
    if schema_id is None and schema is None:
        # Neither id nor name given: query all schemas.
        msc.existing = msc.query_objs(path)
    elif schema_id is None:
        # Only the name given: look it up and remember its id for later.
        msc.existing = msc.get_obj(path, displayName=schema)
        if msc.existing:
            schema_id = msc.existing['id']
    elif schema is None:
        # Only the id given.
        msc.existing = msc.get_obj(path, id=schema_id)
    else:
        # Both given: they must refer to the same object.
        msc.existing = msc.get_obj(path, id=schema_id)
        existing_by_name = msc.get_obj(path, displayName=schema)
        if existing_by_name and schema_id != existing_by_name['id']:
            # The format indices were previously {1}-{3} with only three
            # arguments, which raised IndexError instead of failing cleanly.
            msc.fail_json(msg="Provided schema '{0}' with id '{1}' does not match existing id '{2}'.".format(schema, schema_id, existing_by_name['id']))

    if schema_id:
        path = 'schemas/{id}'.format(id=schema_id)

    if state == 'query':
        # Nothing to change; msc.existing already holds the query result.
        pass

    elif state == 'absent':
        msc.previous = msc.existing
        if msc.existing:
            if module.check_mode:
                msc.existing = {}
            else:
                msc.existing = msc.request(path, method='DELETE')

    elif state == 'present':
        msc.previous = msc.existing

        payload = dict(
            id=schema_id,
            displayName=schema,
            templates=templates,
            sites=sites,
        )

        msc.sanitize(payload, collate=True)

        if msc.existing:
            # Only PUT when the desired state differs from what exists.
            if not issubset(msc.sent, msc.existing):
                if module.check_mode:
                    msc.existing = msc.proposed
                else:
                    msc.existing = msc.request(path, method='PUT', data=msc.sent)
        else:
            if module.check_mode:
                msc.existing = msc.proposed
            else:
                msc.existing = msc.request(path, method='POST', data=msc.sent)

    msc.exit_json()


if __name__ == "__main__":
    main()
| Python | 0 | |
0f0116be7870490447bbfa794c118205e8eca120 | Add an adapter for pecan. | wsme/pecan.py | wsme/pecan.py | import inspect
import sys
import json
import xml.etree.ElementTree as et
import wsme
import wsme.protocols.commons
import wsme.protocols.restjson
import wsme.protocols.restxml
pecan = sys.modules['pecan']
class JSonRenderer(object):
    """Pecan renderer that serializes a WSME result to a JSON document."""

    def __init__(self, path, extra_vars):
        # Pecan passes the template search path and extra vars; neither
        # is needed here.
        pass

    def render(self, template_path, namespace):
        payload = wsme.protocols.restjson.tojson(
            namespace['datatype'], namespace['result'])
        return json.dumps(payload)
class XMLRenderer(object):
    """Pecan renderer that serializes a WSME result to an XML document."""

    def __init__(self, path, extra_vars):
        pass

    def render(self, template_path, namespace):
        element = wsme.protocols.restxml.toxml(
            namespace['datatype'], 'result', namespace['result'])
        return et.tostring(element)
# Register the renderers with Pecan's built-in renderer registry; the
# 'wsmejson:'/'wsmexml:' template prefixes used by wsexpose select them.
pecan.templating._builtin_renderers['wsmejson'] = JSonRenderer
pecan.templating._builtin_renderers['wsmexml'] = XMLRenderer
def wsexpose(*args, **kwargs):
    """Expose a controller method through WSME with JSON and XML rendering.

    Arguments follow ``wsme.sig``: the return type first, then the argument
    types.  The wrapped method is registered with Pecan twice, once per
    content type, and its raw request arguments are converted to the
    declared WSME types before the original function runs.
    """
    pecan_json_decorate = pecan.expose(
        template='wsmejson:',
        content_type='application/json',
        generic=False)
    pecan_xml_decorate = pecan.expose(
        template='wsmexml:',
        content_type='application/xml',
        generic=False
    )
    sig = wsme.sig(*args, **kwargs)

    def decorate(f):
        # Attach the WSME signature to the function and fetch its definition.
        sig(f)
        funcdef = wsme.api.FunctionDefinition.get(f)
        def callfunction(self, *args, **kwargs):
            # Convert raw request values into the declared WSME types.
            args, kwargs = wsme.protocols.commons.get_args(
                funcdef, args, kwargs
            )
            result = f(self, *args, **kwargs)
            # The renderers registered above read 'datatype' and 'result'
            # from this namespace to serialize the response.
            return dict(
                datatype=funcdef.return_type,
                result=result
            )
        pecan_json_decorate(callfunction)
        pecan_xml_decorate(callfunction)
        # Preserve the original argspec so Pecan routing sees the real
        # signature rather than (*args, **kwargs).
        pecan.util._cfg(callfunction)['argspec'] = inspect.getargspec(f)
        return callfunction
    return decorate
| Python | 0 | |
9a1bf2ed13ed5b719472ba87fca7697b0612924e | Implement class for managing a set of libraries | bundle.py | bundle.py | import os
import shutil
import sqlite3
class Package:
    """Description of an installable package.

    Attributes mirror the constructor arguments; the three ``files_*``
    lists name the files belonging to the release, debug and development
    categories respectively.  ``deps`` lists names of required packages.
    """

    def __init__(self, name, version, deps, files_rel, files_dbg=None, files_dev=None):
        self.name = name
        self.version = version
        self.deps = deps
        self.files_rel = files_rel
        # The original used mutable default arguments ([]), which are
        # shared between every instance created without those arguments;
        # use None sentinels and allocate a fresh list per instance.
        self.files_dbg = [] if files_dbg is None else files_dbg
        self.files_dev = [] if files_dev is None else files_dev
class LibBundle:
    """A directory of installed libraries tracked by a MANIFEST.db sqlite file."""

    def __init__(self):
        self.is_setup = False       # True once a manifest is loaded/created
        self.path = None            # bundle root directory
        self.platform = None
        self.toolchain = None
        self.arch = None
        self._manifest_path = None  # <path>/MANIFEST.db

    def load(self, path):
        """Attach to an existing bundle directory, reading its manifest."""
        self.path = path
        self._manifest_path = os.path.join(path, "MANIFEST.db")
        if os.path.exists(self._manifest_path):
            #TODO: error handling
            connection = sqlite3.connect(self._manifest_path)
            cursor = connection.cursor()
            r = cursor.execute("SELECT platform, toolchain, arch FROM info").fetchone()
            # BUG FIX: the original assigned 'self.patform' (typo), so the
            # platform attribute declared in __init__ was never populated.
            self.platform = r[0]
            self.toolchain = r[1]
            self.arch = r[2]
            self.is_setup = True
            connection.close()

    def create(self, path, platform, toolchain, arch):
        """Create a new, empty bundle directory and its manifest schema.

        Raises ValueError when *path* already exists.
        """
        self.path = path
        if os.path.exists(self.path):
            # BUG FIX: message previously read "exsits".
            raise ValueError("Directory already exists: " + self.path)
        self.platform = platform
        self.toolchain = toolchain
        self.arch = arch
        self._manifest_path = os.path.join(self.path, "MANIFEST.db")

        os.mkdir(self.path)
        connection = sqlite3.connect(self._manifest_path)
        cursor = connection.cursor()
        cursor.execute("CREATE TABLE installed"
                       "(id INTEGER PRIMARY KEY AUTOINCREMENT,"
                       "name TEXT UNIQUE, version TEXT)")
        cursor.execute("CREATE TABLE files (id INT, name TEXT, category TEXT)")
        cursor.execute("CREATE TABLE dep_graph (name TEXT, deps TEXT)")
        cursor.execute("CREATE TABLE info (platform TEXT, toolchain TEXT, arch TEXT)")
        cursor.execute("INSERT INTO info VALUES (?,?,?)", (platform, toolchain, arch))
        connection.commit()
        connection.close()
        self.is_setup = True

    def is_installed(self, package_name):
        """Return True if *package_name* is recorded in the manifest."""
        connection = sqlite3.connect(self._manifest_path)
        cursor = connection.cursor()
        # BUG FIX: the original queried with the undefined name 'name'
        # instead of the 'package_name' parameter (NameError at runtime).
        row = cursor.execute("SELECT * FROM installed WHERE name = ?",
                             (package_name,)).fetchone()
        connection.close()
        return row is not None

    def install(self, package):
        """Record *package* (a Package-like object) in the manifest.

        Does nothing when the bundle is not set up or the package is
        already installed.
        """
        if self.is_setup and not self.is_installed(package.name):
            connection = sqlite3.connect(self._manifest_path)
            cursor = connection.cursor()
            query = "INSERT INTO installed (name, version) VALUES (?,?)"
            cursor.execute(query, (package.name, package.version))
            query = "SELECT id FROM installed WHERE name = ?"
            lib_id = cursor.execute(query, (package.name,)).fetchone()[0]
            query = "INSERT INTO files VALUES (?,?,?)"
            #TODO: make sure the three categories are mutually exclusive
            for name in package.files_rel:
                cursor.execute(query, (lib_id, name, "rel"))
            for name in package.files_dbg:
                cursor.execute(query, (lib_id, name, "dbg"))
            for name in package.files_dev:
                cursor.execute(query, (lib_id, name, "dev"))
            for dep_name in package.deps:
                cursor.execute("INSERT INTO dep_graph VALUES (?,?)", (package.name, dep_name))
            connection.commit()
            connection.close()

    def uninstall(self, package_name):
        """Remove *package_name*'s manifest rows and delete its files on disk."""
        # BUG FIX: the original tested the undefined name 'name' here.
        if not self.is_installed(package_name):
            #raise LibPackError(name + " is not installed")
            return
        files_delete = self.list_files(package_name)
        connection = sqlite3.connect(self._manifest_path)
        cursor = connection.cursor()
        query = "SELECT id FROM installed WHERE name = ?"
        lib_id = cursor.execute(query, (package_name,)).fetchone()[0]
        cursor.execute("DELETE FROM files WHERE id = ?", (lib_id,))
        cursor.execute("DELETE FROM installed WHERE id = ?", (lib_id,))
        # Also drop the package's dependency edges (previously leaked).
        cursor.execute("DELETE FROM dep_graph WHERE name = ?", (package_name,))
        connection.commit()
        connection.close()
        for item in files_delete:
            path = os.path.join(self.path, item[0])
            if os.path.isfile(path):
                os.remove(path)
            elif os.path.isdir(path):
                shutil.rmtree(path, ignore_errors=True)

    def list_installed(self):
        """Return the installed package names as a list of 1-tuples."""
        if not self.is_setup:
            return []
        # BUG FIX: the original used the undefined name 'conn' and passed
        # a parameter to a parameterless query.
        connection = sqlite3.connect(self._manifest_path)
        cursor = connection.cursor()
        result = cursor.execute("SELECT name FROM installed").fetchall()
        connection.close()
        return result

    def list_files(self, package_name, category=""):
        """Return the file names recorded for *package_name* as 1-tuples.

        When *category* ("rel", "dbg" or "dev") is given, only that
        category is returned.  An uninstalled package yields [].
        """
        if not self.is_installed(package_name):
            return []
        # BUG FIX: the original used the undefined name 'conn'.
        connection = sqlite3.connect(self._manifest_path)
        cursor = connection.cursor()
        query = "SELECT id FROM installed WHERE name = ?"
        lib_id = cursor.execute(query, (package_name,)).fetchone()[0]
        if category:
            query = "SELECT name FROM files WHERE id = ? AND category = ?"
            files = cursor.execute(query, (lib_id, category)).fetchall()
        else:
            query = "SELECT name FROM files WHERE id = ?"
            files = cursor.execute(query, (lib_id,)).fetchall()
        connection.close()
        return files

    def list_missing_files(self):
        # Not implemented yet.
        pass

    def list_untracked_files(self):
        # Not implemented yet.
        pass

    def delete_files(self, package_name, files):
        # Not implemented yet.
        pass
| Python | 0.000002 | |
b113689db8b845471728a336b0fae30b45333022 | Create hilightresponses.py | HexChat/hilightresponses.py | HexChat/hilightresponses.py | import hexchat
__module_name__ = 'Hilight Responses'
__module_version__ = '0.0.1'
__module_description__ = 'Highlights messages after yours'
__module_author__ = 'Vlek'
# Per-channel state: channel name -> nick of the last user who spoke there.
_lastresponder = {}
def check_for_highlight(word, word_to_eol, userdata):
    """Re-emit an incoming channel message as a highlight when the previous
    speaker in this channel was ourselves; otherwise record the speaker."""
    channel = hexchat.get_context().get_info('channel')
    we_spoke_last = (channel in _lastresponder
                     and _lastresponder[channel] == hexchat.get_info('nick'))
    if not we_spoke_last:
        update_responder(word, word_to_eol, userdata)
        return hexchat.EAT_NONE
    if len(word) == 2:
        word.append('')  # pad the optional third field so word[2] exists
    hexchat.emit_print('Channel Msg Hilight', word[0], word[1], word[2])
    return hexchat.EAT_ALL
def update_responder(word, word_to_eol, userdata):
    """Record word[0] as the most recent speaker in the current channel."""
    current_channel = hexchat.get_context().get_info('channel')
    _lastresponder[current_channel] = word[0]
    return hexchat.EAT_NONE
# Registered at low priority so other hooks see the events first.  'Your
# Message' and the re-emitted hilights also update the last-responder map.
hexchat.hook_print('Channel Message', check_for_highlight, priority=hexchat.PRI_LOW)
hexchat.hook_print('Your Message', update_responder, priority=hexchat.PRI_LOW)
hexchat.hook_print('Channel Msg Hilight', update_responder, priority=hexchat.PRI_LOW)
| Python | 0.000002 | |
8f3f9d79d8ce1960ad225e236ca3e11c72de28e0 | Add test for dials.report on integrated data | test/command_line/test_report.py | test/command_line/test_report.py | from __future__ import absolute_import, division, print_function
import os
import procrunner
def test_report_integrated_data(dials_regression, run_in_tmpdir):
    """Simple test to check that dials.report completes on integrated data"""
    # NOTE(review): the original docstring said "dials.symmetry", but the
    # command exercised below is dials.report.
    result = procrunner.run(
        [
            "dials.report",
            os.path.join(dials_regression, "xia2-28", "20_integrated_experiments.json"),
            os.path.join(dials_regression, "xia2-28", "20_integrated.pickle"),
        ]
    )
    assert result["exitcode"] == 0
    assert result["stderr"] == ""
    # The report lands in the current working directory -- presumably a
    # fresh one provided by the run_in_tmpdir fixture; confirm.
    assert os.path.exists("dials-report.html")
| Python | 0 | |
74329cd397e9dc4593333591700923e0ba7453a1 | Create __init__.py (#148) | robosuite/environments/manipulation/__init__.py | robosuite/environments/manipulation/__init__.py | Python | 0.000006 | ||
6167ef40df491985749102bd4ca3f3f656f71f6c | Add migrations | mainapp/migrations/0030_auto_20210125_1431.py | mainapp/migrations/0030_auto_20210125_1431.py | # Generated by Django 3.1.5 on 2021-01-25 13:31
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (3.1.5): adds a boolean
    # 'manually_deleted' flag, defaulting to False, to the 'file' model
    # and to 'historicalfile'.
    dependencies = [
        ('mainapp', '0029_auto_20201206_2026'),
    ]
    operations = [
        migrations.AddField(
            model_name='file',
            name='manually_deleted',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='historicalfile',
            name='manually_deleted',
            field=models.BooleanField(default=False),
        ),
    ]
| Python | 0.000001 | |
7f94b7fa328583c7b0bf617c6c69c06af78b49d8 | Add files via upload | src/getCAPMemberInfo.py | src/getCAPMemberInfo.py | #!/usr/bin/env /usr/bin/python3
#
# Find a member or members and print all contacts
#
# Input: CAPID or first letters of last name to search for,
# plus optional first name.
#
# History:
# 18Aug19 MEG Search by CAPID, better agg pipeline handling.
# 17Aug19 MEG Made parseable for data extraction by other scripts
# 15Apr19 MEG Added expiration date.
# 14May18 MEG Created.
#
import os, sys
from bson.regex import Regex
from bson.son import SON
from pymongo import MongoClient
from query_creds import *
from query_conf import *
# Aggregation pipeline
# The pipeline is assembled in stages: a $match built from argv, then a
# $sort plus two $lookup stages that join in contact and address records.
pipeline = []
try:
    pat = sys.argv[1]
except IndexError:
    # No search term at all: print usage text and exit non-zero.
    print( 'Usage:', sys.argv[0], 'CAPID|[lastname', '[firstname]]' )
    print( 'Look-up a member by CAPID or lastname and optional firstname')
    print( "\tCAPID - CAPID number" )
    print( "\tlastname - first letters, partial string, case insensitive" )
    print( "\tfirstname - first letters, partial string, case insensitive" )
    sys.exit( 1 )
# either we go a capid or a lastname
# If the argument parses as an integer it is a CAPID; otherwise treat it
# as a case-insensitive, anchored prefix regex on the last name.
try:
    pipeline.append( {'$match': {u'CAPID': int( pat ) }} )
except ValueError:
    pat = u'^' + pat
    pipeline.append( { u"$match": { u"NameLast": { u"$regex": Regex( pat, u"i") }}} )
# Optional second argument: first-name prefix filter.
# NOTE(review): this stage is appended even when the first argument was a
# CAPID; presumably harmless, but confirm that is intended.
try:
    pat2 = u'^' + sys.argv[2]
    pipeline.append( { u"$match":{ u'NameFirst': { u"$regex": Regex( pat2, u"i" ) }}} )
except IndexError:
    pass
# Append additional operations to the pipeline
# Sort
# SON keeps the sort-key ordering stable, as MongoDB requires.
pipeline.append( { u"$sort": SON( [ (u"CAPID", 1 ) ] ) } )
# Lookup phone and email contacts
pipeline.append( { u"$lookup": {
    u"from": u"MbrContact",
    u"localField": u"CAPID",
    u"foreignField": u"CAPID",
    u"as": u"Contacts"
}} )
# Lookup postal addresses
pipeline.append( { u"$lookup": {
    u"from": u"MbrAddresses",
    u"localField": u"CAPID",
    u"foreignField": u"CAPID",
    u"as": u"Addresses"
}} )
#print( len( pipeline ))
#for i in pipeline:
#    print( i )
#exit(1)
# setup db connection
client = MongoClient( host=Q_HOST, port=Q_PORT,
                      username=USER, password=PASS,
                      authSource=Q_DB)
DB = client[ Q_DB ]
# Workaround for pymongo >= 3.6: MongoClient no longer raises connection
# errors eagerly, so force a round-trip with a cheap command and fail fast
# when the server is unreachable or authentication fails.
import pymongo.errors  # BUG FIX: only MongoClient was imported, so the
                       # except clause below raised NameError whenever an
                       # error actually occurred.
try:
    client.admin.command( 'ismaster' )
except (pymongo.errors.OperationFailure,
        pymongo.errors.ConnectionFailure) as e:
    print( 'MongoDB error:', e )
    sys.exit( 1 )
# print format templates
heading = '{0}: {1}, {2} {3} {4}'   # Member: Last, First Middle Suffix
f2 = "\t\t{0}: {1} Priority: {2}"   # contact entry
f3 = "\t\t{0}: {1}"                 # address field
f4 = '\t\tGoogle account: {0}'
f5 = "\t{0}: {1}"                   # simple labeled field
# run the aggregation query to find member contacts
cur = DB.Member.aggregate( pipeline, allowDiskUse = False )
# unwind it all
# One report section per matching member.
for m in cur:
    print( heading.format( 'Member', m['NameLast'], m['NameFirst'],
                           m['NameMiddle'], m['NameSuffix'] ))
    print( f5.format( 'CAPID', m['CAPID'] ))
    print( f5.format( 'Type', m['Type'] ))
    print( f5.format( 'Status', m['MbrStatus'] ))
    print( f5.format( "Rank", m['Rank'] ))
    # Resolve the unit number to its squadron name.
    # NOTE(review): find_one returns None for an unknown unit, which would
    # raise on u['SquadName'] below -- confirm units are always present.
    u = DB.Squadrons.find_one( { 'Unit' : int( m['Unit'] ) } )
    print( f5.format( "Unit", m['Unit'] + " " +u['SquadName'] ))
    print( f5.format( "Expiration", m['Expiration'] ))
    print( "\tMember Contacts:" )
    # The member's Google account is matched via the CAPID stored in
    # externalIds.value.
    g = DB.Google.find_one( {'externalIds.value' : m['CAPID']} )
    if g :
        print( f4.format( g[ 'primaryEmail' ] ) )
    else:
        print( f4.format( "NONE" ))
    for j in m['Contacts']:
        print( f2.format(j['Type'], j['Contact'], j['Priority']))
    print( "\tMember Addresses:" )
    for k in m['Addresses']:
        print( f3.format( k['Type'], k['Priority'] ))
        print( f3.format( 'Addr1', k['Addr1'] ))
        print( f3.format( 'Addr2', k['Addr2'] ))
        print( f3.format( 'City', k['City'] ))
        print( f3.format( 'State', k['State'] ))
        print( f3.format( 'Zipcode', k['Zip'] ))
# NOTE(review): Database.logout() is deprecated/removed in newer PyMongo;
# client.close() alone is sufficient there.
DB.logout()
client.close()
sys.exit( 0 )
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.