commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
02371d2ace7c366f0b0b6332010323d478bc7652 | Add new package nlopt (#6499) | tmerrick1/spack,matthiasdiener/spack,LLNL/spack,mfherbst/spack,iulian787/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,mfherbst/spack,iulian787/spack,krafczyk/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,matthiasdiener/spack,EmreAtes/spack,EmreAtes/spack,EmreAtes/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,krafczyk/spack,matthiasdiener/spack,LLNL/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/nlopt/package.py | var/spack/repos/builtin/packages/nlopt/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Nlopt(CMakePackage):
    """NLopt is a free/open-source library for nonlinear optimization,
    providing a common interface for a number of different free optimization
    routines available online as well as original implementations of various
    other algorithms."""

    homepage = "https://nlopt.readthedocs.io"
    url = "https://github.com/stevengj/nlopt/releases/download/nlopt-2.4.2/nlopt-2.4.2.tar.gz"

    # 'develop' tracks upstream master; 2.4.2 is the latest release tarball
    # (the second argument is its md5 checksum).
    version('develop', git='https://github.com/stevengj/nlopt.git', branch='master')
    version('2.4.2', 'd0b8f139a4acf29b76dbae69ade8ac54')

    # Optional features, toggled on the spack command line, e.g.
    #   spack install nlopt +guile ~octave +cxx
    variant('shared', default=True, description='Enables the build of shared libraries')
    variant('python', default=True, description='Build python wrappers')
    variant('guile', default=False, description='Enable Guile support')
    variant('octave', default=False, description='Enable GNU Octave support')
    variant('cxx', default=False, description='Build the C++ routines')

    # Note: matlab is licenced - spack does not download automatically
    variant("matlab", default=False, description="Build the Matlab bindings.")

    # CMake is a build-only dependency, constrained to @develop per the
    # when= clause below.
    depends_on('cmake@3.0:', type='build', when='@develop')
    depends_on('python', when='+python')
    depends_on('py-numpy', when='+python', type=('build', 'run'))
    depends_on('swig', when='+python')
    depends_on('guile', when='+guile')
    depends_on('octave', when='+octave')
    depends_on('matlab', when='+matlab')

    def cmake_args(self):
        """Return extra CMake arguments derived from the active variants.

        CMAKE_INSTALL_PREFIX and CMAKE_BUILD_TYPE are added by the
        CMakePackage base class and are intentionally not set here.
        """
        spec = self.spec
        args = []
        # Specify on command line to alter defaults:
        # eg: spack install nlopt@develop +guile -octave +cxx
        # Spack should locate python by default - but to point to a build
        if '+python' in spec:
            args.append("-DPYTHON_EXECUTABLE=%s" % spec['python'].command.path)
        # Shared libraries are the build default; only pass the flag when
        # the variant is disabled.
        if '-shared' in spec:
            args.append('-DBUILD_SHARED_LIBS:Bool=OFF')
        if '+cxx' in spec:
            args.append('-DNLOPT_CXX:BOOL=ON')
        if '+matlab' in spec:
            args.append("-DMatlab_ROOT_DIR=%s" % spec['matlab'].command.path)
        return args
| lgpl-2.1 | Python | |
152bf235721c5b6c8ba61da4d8521733a2842885 | Send script | alexfalcucc/crawler_mapsofworld | extract_norcal_table.py | extract_norcal_table.py | import urllib2
from bs4 import BeautifulSoup
url = "http://www.mapsofworld.com/usa/states/california/map-of-northern-
california.html"
page = urllib2.urlopen(url)
soup = BeautifulSoup(page)
tables = soup.findAll("table")
tables[3].find_all('td')
for td in tables[3].find_all('td'):
print td.text | mit | Python | |
32c95175538b4324f1cf6b21a2c3bd5d2cb29413 | Add product type test | rackerlabs/django-DefectDojo,OWASP/django-DefectDojo,rackerlabs/django-DefectDojo,OWASP/django-DefectDojo,OWASP/django-DefectDojo,rackerlabs/django-DefectDojo,OWASP/django-DefectDojo,OWASP/django-DefectDojo,rackerlabs/django-DefectDojo | tests/Product_type_unit_test.py | tests/Product_type_unit_test.py | from selenium import webdriver
from selenium.webdriver.support.ui import Select
import unittest
import re
import sys
class ProductTest(unittest.TestCase):
    """Selenium UI tests for the DefectDojo product-type CRUD pages.

    The tests assume a server on http://localhost:8000/ with an
    admin/admin account, and are meant to run in create -> edit ->
    delete order since each relies on the state left by the previous one.
    """

    def setUp(self):
        # change path of chromedriver according to which directory you have chromedriver.
        self.driver = webdriver.Chrome('/home/dr3dd/gsoc/chromedriver')
        self.driver.implicitly_wait(30)
        self.base_url = "http://localhost:8000/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def login_page(self):
        """Log in as admin/admin and return the driver for chaining."""
        driver = self.driver
        driver.get(self.base_url + "login")
        driver.find_element_by_id("id_username").clear()
        driver.find_element_by_id("id_username").send_keys('admin')
        driver.find_element_by_id("id_password").clear()
        driver.find_element_by_id("id_password").send_keys('admin')
        driver.find_element_by_css_selector("button.btn.btn-success").click()
        return driver

    def test_create_product_type(self):
        driver = self.login_page()
        driver.get(self.base_url + "product/type")
        driver.find_element_by_id("dropdownMenu1").click()
        driver.find_element_by_link_text("Add Product Type").click()
        driver.find_element_by_id("id_name").clear()
        driver.find_element_by_id("id_name").send_keys("product test type")
        driver.find_element_by_id("id_critical_product").click()
        driver.find_element_by_css_selector("input.btn.btn-primary").click()
        # The flash message rendered in the page body confirms success.
        productTxt = driver.find_element_by_tag_name("BODY").text
        self.assertTrue(re.search(r'Product type added successfully.', productTxt))

    def test_edit_product_type(self):
        driver = self.login_page()
        driver.get(self.base_url + "product/type")
        driver.find_element_by_link_text("Edit Product Type").click()
        driver.find_element_by_id("id_name").clear()
        driver.find_element_by_id("id_name").send_keys("Edited product test type")
        driver.find_element_by_css_selector("input.btn.btn-primary").click()
        productTxt = driver.find_element_by_tag_name("BODY").text
        self.assertTrue(re.search(r'Product type updated successfully.', productTxt))

    def test_delete_product_type(self):
        driver = self.login_page()
        driver.get(self.base_url + "product/type")
        driver.find_element_by_link_text("Edit Product Type").click()
        # The edit page also hosts the delete button.
        driver.find_element_by_css_selector("input.btn.btn-danger").click()
        productTxt = driver.find_element_by_tag_name("BODY").text
        self.assertTrue(re.search(r'Product type Deleted successfully.', productTxt))

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)
def suite():
    # Run the CRUD tests in create -> edit -> delete order, since each
    # test depends on the state left behind by the previous one.
    suite = unittest.TestSuite()
    suite.addTest(ProductTest('test_create_product_type'))
    suite.addTest(ProductTest('test_edit_product_type'))
    suite.addTest(ProductTest('test_delete_product_type'))
    return suite


if __name__ == "__main__":
    # failfast stops at the first failure; the exit code reflects success.
    runner = unittest.TextTestRunner(descriptions=True, failfast=True)
    ret = not runner.run(suite()).wasSuccessful()
    sys.exit(ret)
| bsd-3-clause | Python | |
b80e52ecf09f96e84625eb6fff9aa7a20059c0f8 | Add new top level script to ease running of individual unittests. | tskisner/pytoast,tskisner/pytoast | test_single.py | test_single.py |
# Run a single toast unit-test module under the MPI-aware runner, e.g.:
#   python test_single.py test_foo.py
import sys
import unittest

from toast.mpirunner import MPITestRunner

# First CLI argument is the filename pattern to discover under tests/.
file = sys.argv[1]
loader = unittest.TestLoader()
runner = MPITestRunner(verbosity=2)
suite = loader.discover('tests', pattern='{}'.format(file), top_level_dir='.')
runner.run(suite)
| bsd-2-clause | Python | |
2ae235215d33555b077fbd9e2f0c42d52ccce8c4 | add listener | omgapuppy/le-dyn-postback | dyn-listener.py | dyn-listener.py | #!/usr/bin/env python
from logentries import LogentriesHandler
import logging
from flask import Flask, jsonify, request
listener = Flask(__name__)

# Configure the port your postback URL will listen on and provide your
# LOGENTRIES_TOKEN
PORT = 5000
LOGENTRIES_TOKEN = "your-log-token-here"

# Forward everything logged through the 'logentries' logger to the hosted
# Logentries service via its handler.
log = logging.getLogger('logentries')
log.setLevel(logging.INFO)
dyn = LogentriesHandler(LOGENTRIES_TOKEN)
log.addHandler(dyn)
# Enter the following for the bounce postback URL:
# SCRIPT_HOST_IP:PORT/bounce?e=@email&r=@bouncerule&t=@bouncetype&dc=@diagnostic&s=@status
@listener.route('/bounce', methods=['GET'])
def bounce():
e = request.args.get('e')
r = request.args.get('r')
t = request.args.get('t')
dc = request.args.get('dc')
s = request.args.get('s')
log.info("BOUNCE: email='{}' rule='{}' type='{}' diagnostic='{}' \
status='{}'".format(e, r, t, dc, s))
return jsonify(result={"status": 200})
# Enter the following for the complaint postback URL:
# SCRIPT_HOST_IP:PORT/complaint?e=@email
@listener.route('/complaint', methods=['GET'])
def complaint():
    """Log a complaint postback for the address in the 'e' query parameter."""
    address = request.args.get('e')
    log.info("COMPLAINT: email='{}'".format(address))
    return jsonify(result={"status": 200})
if __name__ == '__main__':
    # Bind on all interfaces so the postback URL is reachable externally.
    listener.run(host='0.0.0.0',
                 port=PORT,
                 debug=False)
| mit | Python | |
abc32403d85c536f38a2072941f1864418c55b4f | Create editdistance.py | vikramraman/algorithms,vikramraman/algorithms | editdistance.py | editdistance.py | # Author: Vikram Raman
# Date: 09-12-2015
import time
# edit distance between two strings
# e(i,j) = min (1 + e(i-1,j) | 1 + e(i,j-1) | diff(i,j) + e(i-1,j-1))
def editdistance(s1, s2):
    """Levenshtein edit distance between s1 and s2 via dynamic programming.

    Recurrence:
        e(i, j) = min(1 + e(i-1, j), 1 + e(i, j-1), cost(i, j) + e(i-1, j-1))
    where cost is 0 when the characters match and 1 otherwise.
    None is treated as the empty string.
    """
    m = len(s1) if s1 is not None else 0
    n = len(s2) if s2 is not None else 0
    # Transforming to or from an empty string costs the other's length.
    if m == 0:
        return n
    if n == 0:
        return m
    # table[i][j] holds the distance between the prefixes s1[:i] and s2[:j].
    table = [list(range(n + 1))]
    for i in range(1, m + 1):
        row = [i]
        for j in range(1, n + 1):
            cost = 0 if s1[i - 1] == s2[j - 1] else 1
            row.append(min(1 + table[i - 1][j],
                           1 + row[j - 1],
                           cost + table[i - 1][j - 1]))
        table.append(row)
    return table[m][n]
def diff(s1, s2, i, j):
    """Substitution cost for 1-based positions i, j: False/0 when the
    characters match, True/1 when they differ."""
    return not s1[i - 1] == s2[j - 1]
# Demo: measure the edit distance on the classic textbook example.
s1 = "exponential"
s2 = "polynomial"
print "s1=%s, s2=%s" % (s1,s2)
# time.clock() reports processor time here (deprecated since Python 3.3).
start_time = time.clock()
distance=editdistance(s1, s2)
print "distance=%d" % (distance)
print("--- %s seconds ---" % (time.clock() - start_time))
print editdistance("foo", "bar")
| mit | Python | |
f79e0782235943e0ace543db754cca232682f6ad | Add some basic tests | tamasgal/km3pipe,tamasgal/km3pipe | km3pipe/io/tests/test_aanet.py | km3pipe/io/tests/test_aanet.py | # Filename: test_aanet.py
# pylint: disable=locally-disabled,C0111,R0904,C0301,C0103,W0212
from km3pipe.testing import TestCase, patch, Mock
from km3pipe.io.aanet import AanetPump
import sys
sys.modules['ROOT'] = Mock()
sys.modules['aa'] = Mock()
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2018, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "tgal@km3net.de"
__status__ = "Development"
class TestAanetPump(TestCase):
    """Unit tests for AanetPump construction."""

    def test_init_raises_valueerror_if_no_filename_given(self):
        with self.assertRaises(ValueError):
            AanetPump()

    def test_init_with_filename(self):
        filename = 'a'
        p = AanetPump(filename=filename)
        assert filename in p.filenames

    @patch("ROOT.gSystem")
    def test_init_with_custom_aanet_lib(self, root_gsystem_mock):
        # A custom aanet shared library must be loaded through ROOT.gSystem.
        filename = 'a'
        custom_aalib = 'an_aalib'
        p = AanetPump(filename=filename, aa_lib=custom_aalib)
        assert filename in p.filenames
        root_gsystem_mock.Load.assert_called_once_with(custom_aalib)

    def test_init_with_indexed_filenames(self):
        # '[index]' in the filename expands to one filename per index.
        filename = 'a[index]b'
        indices = [1, 2, 3]
        p = AanetPump(filename=filename, indices=indices)
        for index in indices:
            assert "a"+str(index)+"b" in p.filenames
| mit | Python | |
9f39ed48b6f745a96b5874bc87e306c01d3f016f | add 0.py | bm5w/pychal | 0.py | 0.py | if __name__ == "__main__":
print 2**38
| mit | Python | |
0575be4316e930de71dce8c92d7be428d4565470 | Add c.py | tanacasino/test | c.py | c.py |
class C(object):
    """Minimal demo class."""

    def c(self):
        # Prints the literal string "c".
        print("c")


# Module import runs the demo once.
C().c()
| apache-2.0 | Python | |
61cfa59b7881f8658a8eab13ba4bc50ac17ba6ce | Add sample plugin used by functional tests | ptthiem/nose2,ezigman/nose2,leth/nose2,little-dude/nose2,ojengwa/nose2,leth/nose2,little-dude/nose2,ezigman/nose2,ojengwa/nose2,ptthiem/nose2 | nose2/tests/functional/support/lib/plugin_a.py | nose2/tests/functional/support/lib/plugin_a.py | from nose2 import events
class PluginA(events.Plugin):
    """Sample nose2 plugin used by the functional tests."""

    # Configuration is read from the [a] section of the loaded config.
    configSection = 'a'

    def __init__(self):
        # 'a' defaults to 0 when the config section does not define it.
        self.a = self.config.as_int('a', 0)
| bsd-2-clause | Python | |
2e6c7235c555799cc9dbb9d1fa7faeab4557ac13 | Add stubby saved roll class | foxscotch/foxrollbot | db.py | db.py | import sqlite3
connection = sqlite3.connect('data.db')
class SavedRoll:
    """Persistence stubs for per-user saved dice rolls (not implemented yet).

    All methods are placeholders; the module-level sqlite connection is
    expected to back them eventually.
    """

    @staticmethod
    def save(user, name, args):
        # TODO: insert (user, name, args) into the database.
        pass

    @staticmethod
    def get(user, name):
        # TODO: fetch the saved roll called `name` for `user`.
        pass

    @staticmethod
    def delete(user, name):
        # TODO: remove the saved roll called `name` for `user`.
        pass
| mit | Python | |
85044ad914029d9b421b3492e828ad89a85b62a3 | Create ept.py | mthbernardes/EternalProxyTor | ept.py | ept.py | # -*- coding: utf-8 -*-
from TorCtl import TorCtl
import requests,json
proxies = {'http': 'socks5://127.0.0.1:9050','https': 'socks5://127.0.0.1:9050'}
class TorProxy(object):
    """Helpers for issuing HTTP requests through a local Tor SOCKS proxy
    and rotating the exit circuit via the Tor control port."""

    def __init__(self,):
        pass

    def connect(self, url, method):
        """Issue an HTTP request through the Tor proxies.

        `method` is the name of a `requests` function ('get', 'post', ...).
        Returns the `requests` response object.
        """
        r = getattr(requests, method)(url,proxies=proxies)
        return r

    def new_ip(self,):
        """Ask Tor for a fresh circuit (NEWNYM), yielding a new exit IP."""
        # NOTE(review): control-port password is hard-coded here.
        self.conn = TorCtl.connect(controlAddr="127.0.0.1", controlPort=9051, passphrase="RTFM_FODAO")
        self.conn.send_signal("NEWNYM")
        self.conn.close()

    def check_ip(self,):
        """Return the IP address the outside world currently sees for us."""
        self.url = "http://ipinfo.io"
        self.r = requests.get(self.url,proxies=proxies)
        try:
            return json.loads(self.r.content)['ip']
        except:
            # NOTE(review): bare except hides real failures (network errors,
            # malformed JSON); (ValueError, KeyError) would be safer.
            return 'Error to get your IP'
if __name__ == '__main__':
    # Demo: show the current exit IP, rotate the circuit, show the new one.
    tor = TorProxy()
    print tor.check_ip()
    print tor.new_ip()  # new_ip() returns None, so this line prints "None"
    print tor.check_ip()
| mit | Python | |
062473c20e59f259d38edcd79e22d0d215b8f52f | Add file to store API Access keys | sgregg85/QuoteBot | key.py | key.py | consumer_key = '' # Enter your values here
consumer_secret = '' # Enter your values here
access_token = '' # Enter your values here
access_token_secret = '' # Enter your values here
| unlicense | Python | |
49b8f4b50ea1ff8c62977699c8e568a6d8d14887 | Create obs.py | spmls/pydelft,spmls/pydelft | obs.py | obs.py | import numpy as np
from pydelft.read_griddep import grd, dep
from PyQt4 import QtGui
import mpl_toolkits.basemap.pyproj as pyproj
import mpl_toolkits.basemap as Basemap
#------------------------------------------------------------------------------
# OBS SAVE FILE DIALOG
class SaveObsFileDialog(QtGui.QMainWindow):
    """File dialog window used to pick a path for saving a Delft3D .obs file.

    After construction the chosen path is available as `self.fname`
    (empty/None if the user cancelled).
    """

    def __init__(self):
        super(SaveObsFileDialog, self).__init__()
        # BUG FIX: the previous `fname = []` only created a dead local;
        # initialise the attribute so it always exists.
        self.fname = None  # set by savefileDialog()
        self.initUI()

    def initUI(self):
        self.setGeometry(300, 300, 350, 300)
        self.setWindowTitle('Save obs file')
        self.savefileDialog()

    def savefileDialog(self):
        # BUG FIX: `os` is used here but never imported at module level;
        # import it locally so the dialog does not raise NameError.
        import os
        fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file', os.getcwd(), "OBS (*.obs)")
        self.fname = fname
#------------------------------------------------------------------------------
# OBSERVATIONS DATA CLASS
class obs():
    '''Read or write a Delft3d obs file.'''

    def __init__(self, fname=None):
        self.names = ''
        self.m = ''
        self.n = ''
        self.num_obs = ''
        self.filename = fname
        if self.filename:
            self.read_obs(self.filename)

    def coords2mn(self, grid, station_names, station_x, station_y,
                  grid_epsg=4326, station_epsg=4326):
        '''Calculate nearest m, n indices on a grid for an array of type
        (['name', x, y]) where x and y are coordinates (longitude/latitude or
        easting/northing etc.).  If the two are different coordinate systems,
        the station coordinates are converted to the grid coordinate system
        (using EPSG codes, default is 4326 = WGS84).'''
        def find_nearest(grid, query):
            # Row index (m) from the nearest grid y, column index (n) from
            # the nearest grid x.
            m = np.unravel_index(np.abs(grid.y - query[1]).argmin(), np.shape(grid.y))[0]
            n = np.unravel_index(np.abs(grid.x - query[0]).argmin(), np.shape(grid.x))[1]
            return [m, n]

        grid_proj = pyproj.Proj("+init=EPSG:%i" % grid_epsg)
        station_proj = pyproj.Proj("+init=EPSG:%i" % station_epsg)
        if grid_epsg != station_epsg:
            # BUG FIX: pyproj.transform returns the converted coordinates;
            # the previous code called it and discarded the result.
            station_x, station_y = pyproj.transform(station_proj, grid_proj,
                                                    station_x, station_y)
        # BUG FIX: range(..., np.size(station_names) - 1) silently dropped
        # the last station.
        obs_idx = [find_nearest(grid, [station_x[i], station_y[i]])
                   for i in range(np.size(station_names))]
        self.names = station_names
        self.m = [i[0] for i in obs_idx]
        self.n = [i[1] for i in obs_idx]
        self.num_obs = np.shape(obs_idx)[0]

    def write(self, fname=None):
        '''Write the observation points to `fname`; when no name is given,
        ask for one via a Qt save-file dialog.'''
        if not fname:
            # BUG FIX: `sys` is used here but never imported at module level.
            import sys
            app = QtGui.QApplication(sys.argv)
            filedialog = SaveObsFileDialog()
            fname = filedialog.fname
        self.filename = fname
        f = open(fname, 'w')
        # BUG FIX: range(0, self.num_obs - 1) skipped the last observation.
        for i in range(self.num_obs):
            name = self.names[i].ljust(20)
            line = str('%s\t%s\t%s\n' % (name, int(self.m[i]), int(self.n[i])))
            # Delft3D records may not exceed 132 characters.
            if len(line) > 132:
                print('ERROR: RECORD LENGTH TOO LONG, MAX 132\n@ %s' % line)
                break
            f.write(line)
        f.close()
        print('obs file written: %s' % self.filename)
| mit | Python | |
7f3411268e153c47edc77c681e14aef5747639de | use the subdir /httplib2, follow up for 10273 | TridevGuha/pywikibot-core,jayvdb/pywikibot-core,VcamX/pywikibot-core,darthbhyrava/pywikibot-local,hasteur/g13bot_tools_new,h4ck3rm1k3/pywikibot-core,npdoty/pywikibot,magul/pywikibot-core,h4ck3rm1k3/pywikibot-core,PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core,npdoty/pywikibot,Darkdadaah/pywikibot-core,magul/pywikibot-core,Darkdadaah/pywikibot-core,smalyshev/pywikibot-core,trishnaguha/pywikibot-core,happy5214/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,xZise/pywikibot-core,emijrp/pywikibot-core,happy5214/pywikibot-core,hasteur/g13bot_tools_new,icyflame/batman,valhallasw/pywikibot-core,wikimedia/pywikibot-core | pwb.py | pwb.py | import sys,os
sys.path.append('.')
sys.path.append('externals/httplib2')
sys.path.append('pywikibot/compat')
if "PYWIKIBOT2_DIR" not in os.environ:
os.environ["PYWIKIBOT2_DIR"] = os.path.split(__file__)[0]
sys.argv.pop(0)
if len(sys.argv) > 0:
if not os.path.exists(sys.argv[0]):
testpath = os.path.join(os.path.split(__file__)[0], 'scripts', sys.argv[0])
if os.path.exists(testpath):
sys.argv[0] = testpath
else:
testpath = testpath + '.py'
if os.path.exists(testpath):
sys.argv[0] = testpath
else:
raise Exception("%s not found!" % sys.argv[0])
sys.path.append(os.path.split(sys.argv[0])[0])
execfile(sys.argv[0])
else:
sys.argv.append('')
import sys,os
sys.path.append('.')
# Bundled third-party libraries and the old compat layer.
sys.path.append('externals')
sys.path.append('pywikibot/compat')
# Default the config directory to the directory containing this wrapper.
if "PYWIKIBOT2_DIR" not in os.environ:
    os.environ["PYWIKIBOT2_DIR"] = os.path.split(__file__)[0]
# Drop our own name so sys.argv looks as if the wrapped script was run directly.
sys.argv.pop(0)
if len(sys.argv) > 0:
    # Allow bare script names by resolving them inside scripts/, with or
    # without the .py suffix.
    if not os.path.exists(sys.argv[0]):
        testpath = os.path.join(os.path.split(__file__)[0], 'scripts', sys.argv[0])
        if os.path.exists(testpath):
            sys.argv[0] = testpath
        else:
            testpath = testpath + '.py'
            if os.path.exists(testpath):
                sys.argv[0] = testpath
            else:
                raise Exception("%s not found!" % sys.argv[0])
    sys.path.append(os.path.split(sys.argv[0])[0])
    # execfile is Python 2 only.
    execfile(sys.argv[0])
else:
    sys.argv.append('')
| mit | Python |
81c722316d75e929d120f4d7139c499052a4e2fb | add cli program | TakeshiTseng/Dragon-Knight,TakeshiTseng/ryu-dynamic-loader,John-Lin/ryu-dynamic-loader,Ryu-Dragon-Knight/Dragon-Knight,pichuang/Dragon-Knight | cli.py | cli.py | #!/usr/bin/env python
# -*- codeing: utf-8 -*-
import socket
import logging
import json
LOG = logging.getLogger('DynamicLoadCmd')
def main():
sc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sc.connect(('127.0.0.1', 10807))
while True:
line = raw_input('(ryu) ')
if line == 'exit':
break
elif line == 'list':
msg = json.dumps({'cmd': 'list'})
sc.sendall(msg)
buf = sc.recv(2048)
print buf
app_list = json.loads(buf)
app_id = 0
for app_info in app_list:
print '[%02d]%s' % (app_id, app_info['name']),
if app_info['installed']:
print '[\033[92minstalled\033[0m]'
else:
print ''
app_id += 1
elif 'install' in line:
argv = line.split(' ')
if len(argv) < 2:
print 'install [app_id]'
continue
app_id = int(argv[1])
msg = json.dumps({'cmd':'install', 'app_id': app_id})
sc.sendall(msg)
if __name__ == '__main__':
main()
| mit | Python | |
4f08f057c7e4cc8230a996d853892ab3eef36065 | Add simple terminal-based version of rock-paper-scissors. | kubkon/ee106-additional-material | rps.py | rps.py | from random import choice
class RPSGame:
shapes = ['rock', 'paper', 'scissors']
draws = [('rock', 'rock'), ('paper', 'paper'), ('scissors', 'scissors')]
first_wins = [('rock', 'scissors'), ('scissors', 'paper'), ('paper', 'rock')]
def _evaluate(self, player_move, computer_move):
if (player_move, computer_move) in RPSGame.draws:
return "Draw!"
elif (player_move, computer_move) in RPSGame.first_wins:
return "Player wins!"
else:
return "Computer wins!"
def play(self, rounds=1):
for i in range(rounds):
player_move = input("[rock,paper,scissors]: ")
computer_move = choice(RPSGame.shapes)
winner = self._evaluate(player_move, computer_move)
print(20 * "-")
print("You played: %s" % player_move)
print("Computer played: %s" % computer_move)
print(winner)
print(20 * "-")
if __name__ == '__main__':
    # Ten interactive rounds when run as a script.
    game = RPSGame()
    game.play(rounds=10)
| mit | Python | |
024b9dbfb3e34b5ff092ad86a1bec1e82ccfb9f9 | Convert tests/test_elsewhere_twitter.py to use Harness & TestClient. | eXcomm/gratipay.com,studio666/gratipay.com,bountysource/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com,bountysource/www.gittip.com,bountysource/www.gittip.com,mccolgst/www.gittip.com,gratipay/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com | tests/test_elsewhere_twitter.py | tests/test_elsewhere_twitter.py | from gittip.elsewhere import twitter
from gittip.models import Elsewhere
from gittip.testing import Harness
class TestElsewhereTwitter(Harness):

    def test_twitter_resolve_resolves(self):
        # Wire up alice with a twitter "elsewhere" account, then check that
        # resolve() maps her twitter screen_name back to her participant id.
        alice = self.make_participant('alice')
        alice_on_twitter = Elsewhere(platform='twitter', user_id="1",
                                     user_info={'screen_name': 'alice'})
        alice.accounts_elsewhere.append(alice_on_twitter)
        self.session.commit()

        expected = 'alice'
        actual = twitter.resolve(u'alice')
        assert actual == expected, actual
| from gittip.testing import tip_graph
from gittip.elsewhere import twitter
def test_twitter_resolve_resolves():
    # Legacy version: builds the fixture through the tip_graph context
    # manager instead of the Harness helpers.
    with tip_graph(('alice', 'bob', 1, True, False, False, "twitter", "2345")):
        expected = 'alice'
        actual = twitter.resolve(u'alice')
        assert actual == expected, actual
| cc0-1.0 | Python |
3739819ed85a03520ad3152a569ad6cfb3dd7fb5 | Add a used test. | kbrose/article-tagging,chicago-justice-project/article-tagging,chicago-justice-project/article-tagging,kbrose/article-tagging | lib/tagnews/tests/test_crimetype_tag.py | lib/tagnews/tests/test_crimetype_tag.py | import tagnews
class TestCrimetype():
@classmethod
def setup_method(cls):
cls.model = tagnews.CrimeTags()
def test_tagtext(self):
self.model.tagtext('This is example article text')
def test_tagtext_proba(self):
article = 'Murder afoul, someone has been shot!'
probs = self.model.tagtext_proba(article)
max_prob = probs.max()
max_type = probs.idxmax()
tags = self.model.tagtext(article,
prob_thresh=max_prob-0.001)
assert max_type in tags
| mit | Python | |
edc335e68d44c6a0c99499bc4416c55a6072232e | add proper test for govobj stuff | thelazier/sentinel,thelazier/sentinel,dashpay/sentinel,ivansib/sentinel,ivansib/sentinel,dashpay/sentinel | test/test_governance_methods.py | test/test_governance_methods.py | import pytest
import os
os.environ['SENTINEL_ENV'] = 'test'
import sys
sys.path.append( os.path.join( os.path.dirname(__file__), '..', 'lib' ) )
# NGM/TODO: setup both Proposal and Superblock, and insert related rows,
# including Events
def setup():
    # Module-level pytest setup hook; fixtures are not created here yet.
    pass


def teardown():
    # Module-level pytest teardown hook; nothing to clean up yet.
    pass
# pw_event = PeeWeeEvent.get(
# (PeeWeeEvent.start_time < misc.get_epoch() ) &
# (PeeWeeEvent.error_time == 0) &
# (PeeWeeEvent.prepare_time == 0)
# )
#
# if pw_event:
# govobj = GovernanceObject()
# govobj.load(pw_event.governance_object_id)
# setup/teardown?
# Event model
#govobj.get_prepare_command
# GovernanceObject model
@pytest.fixture
def governance_object():
    """Provide a GovernanceObject instance for the tests below."""
    from models import PeeWeeGovernanceObject  # noqa: imported for side effects / future use
    from governance import GovernanceObject

    govobj = GovernanceObject()
    # BUG FIX: the fixture previously ended with a bare `return`, handing
    # every dependent test None instead of the constructed object.
    return govobj
def test_prepare_command(governance_object):
    # get_dict() must return a plain dict exposing exactly the fields that
    # make up a governance object's prepare command.
    d = governance_object.get_dict()
    assert type(d) == type({})
    fields = [ 'parent_id', 'object_creation_time', 'object_hash',
        'object_parent_hash', 'object_name', 'object_type', 'object_revision',
        'object_data', 'object_fee_tx' ]
    # Compare as sorted lists so field order is irrelevant.
    fields.sort()
    sorted_keys = d.keys()
    sorted_keys.sort()
    assert sorted_keys == fields
| mit | Python | |
8c49123ccaf16a4513f8096475dd2b865cfee66f | Revert of Re-enable mobile memory tests. (https://codereview.chromium.org/414473002/) | markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,Chilledheart/chromium,markYoungH/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,M4sse/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,ltilve/chromium,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,ondra-novak/chromium.src,littlstar/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,jaruba/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,ondra-novak/ch
romium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,axinging
/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,dushu1203/chromium.src,ltilve/chromium,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,Chilledheart/chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,Jonekee/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,Just-D/chromium-1,littlstar/chromium.src,dushu1203/chromium.src,Pluto-tv/chr
omium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,dednal/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk | tools/perf/benchmarks/memory.py | tools/perf/benchmarks/memory.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import memory
import page_sets
from telemetry import benchmark
@benchmark.Disabled('android')  # crbug.com/370977
class MemoryMobile(benchmark.Benchmark):
    """Memory measurement over the mobile page set."""
    test = memory.Memory
    page_set = page_sets.MobileMemoryPageSet


@benchmark.Disabled('android')
class MemoryTop25(benchmark.Benchmark):
    """Memory measurement over the top-25 page set."""
    test = memory.Memory
    page_set = page_sets.Top25PageSet


@benchmark.Disabled('android')
class Reload2012Q3(benchmark.Benchmark):
    """Reload-tagged memory measurement over the 2012 Q3 page set."""
    tag = 'reload'
    test = memory.Memory
    page_set = page_sets.Top2012Q3PageSet


@benchmark.Disabled('android')  # crbug.com/371153
class MemoryToughDomMemoryCases(benchmark.Benchmark):
    """Memory measurement over the tough DOM-memory page set."""
    test = memory.Memory
    page_set = page_sets.ToughDomMemoryCasesPageSet
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import memory
import page_sets
from telemetry import benchmark
# Telemetry benchmark definitions (pre-change copy: MemoryMobile is not yet
# disabled on Android here).  Each class binds the Memory measurement to one
# page set.
class MemoryMobile(benchmark.Benchmark):
  # Memory measurement over the mobile-specific page set.
  test = memory.Memory
  page_set = page_sets.MobileMemoryPageSet
@benchmark.Disabled('android')
class MemoryTop25(benchmark.Benchmark):
  # Memory measurement over the top-25 desktop sites.
  test = memory.Memory
  page_set = page_sets.Top25PageSet
@benchmark.Disabled('android')
class Reload2012Q3(benchmark.Benchmark):
  # Reload variant; `tag` keeps its results separate from the plain run.
  tag = 'reload'
  test = memory.Memory
  page_set = page_sets.Top2012Q3PageSet
@benchmark.Disabled('android')  # crbug.com/371153
class MemoryToughDomMemoryCases(benchmark.Benchmark):
  # Memory measurement over DOM-heavy stress pages.
  test = memory.Memory
  page_set = page_sets.ToughDomMemoryCasesPageSet
| bsd-3-clause | Python |
baff0200dfbe5ac33949f2fa3cddca72912b3b09 | add results.py | neurospin/pylearn-epac,neurospin/pylearn-epac | epac/results.py | epac/results.py | # -*- coding: utf-8 -*-
"""
Created on Fri May 17 16:37:54 2013
@author: edouard.duchesnay@cea.fr
"""
class Results(dict):
    """Nested result container keyed by a fold/step identifier.

    Each top-level entry maps a key to a flat dict whose keys are built as
    ``"<quantity>_<suffix>"`` (e.g. ``"score_te"``), so train (``"tr"``)
    and test (``"te"``) quantities can coexist in the same sub-dict.
    """

    TRAIN = "tr"    # suffix for training-set quantities
    TEST = "te"     # suffix for test-set quantities
    SCORE = "score"
    PRED = "pred"
    TRUE = "true"
    SEP = "_"       # joins a quantity name and a suffix

    def __init__(self, **kwargs):
        """Optionally forward keyword arguments straight to :meth:`add`."""
        if kwargs:
            self.add(**kwargs)

    def _c(self, k1, k2):
        """Concatenate a quantity name and a suffix with ``SEP``."""
        return k1 + self.SEP + k2

    def add(self, key2, suffix, score=None, pred=(), true=()):
        """Record score/prediction/truth values under ``key2``.

        Parameters
        ----------
        key2 : hashable
            Top-level key (e.g. a fold identifier).  Existing entries are
            updated in place; new keys create a fresh sub-dict.
        suffix : str
            Quantity suffix, typically ``Results.TRAIN`` or ``Results.TEST``.
        score : number, optional
            Stored whenever it is not None.  (Fix: the previous truthiness
            test ``if score:`` silently dropped a legitimate score of 0.)
        pred, true : sequence, optional
            Stored when non-empty.  Immutable tuple defaults replace the
            former mutable ``[]`` defaults; ``len()`` is used so sequences
            such as numpy arrays are still accepted.
        """
        tr = dict()
        if score is not None:
            tr[self._c(self.SCORE, suffix)] = score
        if len(pred):
            tr[self._c(self.PRED, suffix)] = pred
        if len(true):
            tr[self._c(self.TRUE, suffix)] = true
        if key2 in self:
            self[key2].update(tr)
        else:
            self[key2] = tr
553ba87b8858c11b2c2778d35a3c6e3694304278 | create the Spider of Turkey of McDonalds | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/mcdonalds_tr.py | locations/spiders/mcdonalds_tr.py | # -*- coding: utf-8 -*-
import scrapy
import json
import re
from locations.items import GeojsonPointItem
class McDonaldsTRSpider(scrapy.Spider):
    """Scrape every McDonald's Turkey restaurant from the chain's
    store-locator web service and yield one GeojsonPointItem per store."""

    name = 'mcdonalds_tr'
    allowed_domains = ['www.mcdonalds.com.tr']

    def start_requests(self):
        """POST an empty city/town filter so the service returns all stores."""
        url = 'https://www.mcdonalds.com.tr/Content/WebService/ClientSiteWebService.asmx/GetRestaurantsV5'
        # Key order is kept as-is because it determines the json.dumps() body.
        payload = {
            "cityId": "0",
            "townId": "0",
            "Services": ""
        }
        request_headers = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.9',
            'Content-Type': 'application/json',
            'Origin': 'https://www.mcdonalds.com.tr',
            'Host': 'www.mcdonalds.com.tr',
            'Referer': 'https://www.mcdonalds.com.tr/kurumsal/restoranlar',
            'X-Requested-With': 'XMLHttpRequest'
        }
        yield scrapy.http.Request(
            url,
            self.parse,
            method='POST',
            body=json.dumps(payload),
            headers=request_headers,
        )

    def normalize_time(self, time_str):
        """Return the first 'H:MM' found in `time_str` as zero-padded 24h
        text; hours below 13 are treated as PM and shifted by 12."""
        hour_txt, minute_txt = re.search(
            r'([0-9]{1,2}):([0-9]{1,2})', time_str).groups()
        hour = int(hour_txt)
        if hour < 13:
            hour += 12
        return '%02d:%02d' % (hour, int(minute_txt))

    def store_hours(self, hour):
        """Build an opening_hours string from the service's WorkingHours
        list; '24/7' for unnamed entries, None when the value is unknown."""
        entry = hour[0]
        if not entry['Name']:
            return '24/7'
        value = entry['Value'].strip()
        if value == "-":
            return None
        pieces = value.split("-")
        start = pieces[0].strip()
        end = self.normalize_time(pieces[1].strip())
        return 'Mo-Su ' + start + ':' + end

    def parse(self, response):
        """Turn each restaurant record of the JSON reply into a point item."""
        for record in json.loads(response.body_as_unicode())['d']:
            properties = {
                'city': record['City'],
                'ref': record['ID'],
                'phone': record['Phone'].strip(),
                'lon': record['Longitude'],
                'lat': record['Latitude'],
                'name': record['Name'],
                'addr_full': record['Address'],
                'state': record['Town']
            }
            opening_hours = self.store_hours(record['WorkingHours'])
            if opening_hours:
                properties['opening_hours'] = opening_hours
            yield GeojsonPointItem(**properties)
| mit | Python | |
0c100408bce925392ee1cae3b5b201ab4eb15112 | Add tests for VirHostNet processor | bgyori/indra,sorgerlab/belpy,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy | indra/tests/test_virhostnet.py | indra/tests/test_virhostnet.py | from indra.statements import Complex
from indra.sources import virhostnet
from indra.sources.virhostnet.api import data_columns
from indra.sources.virhostnet.processor import parse_psi_mi, parse_source_ids, \
parse_text_refs, get_agent_from_grounding, process_row
def test_get_agent_from_grounding():
    """UniProt IDs, UniProt chains and RefSeq IDs all resolve to Agents."""
    braf = get_agent_from_grounding('uniprotkb:P15056')
    assert braf.name == 'BRAF'
    assert braf.db_refs['UP'] == 'P15056', braf.db_refs
    chain = get_agent_from_grounding('uniprotkb:P15056-PRO_0000085665')
    # This is the name of the chain in UniProt, it will have to be uncommented
    # once normalization for chains is merged
    # assert chain.name == 'Serine/threonine-protein kinase B-raf'
    assert chain.name == 'BRAF'
    assert chain.db_refs['UP'] == 'P15056'
    assert chain.db_refs['UPPRO'] == 'PRO_0000085665'
    refseq_agent = get_agent_from_grounding('refseq:NP_828867')
    assert refseq_agent.db_refs['REFSEQ_PROT'] == 'NP_828867'
def test_parse_text_refs():
    """Plain PMIDs and DOI-style pubmed references are both recognized."""
    pmid_refs = parse_text_refs('pubmed:22046132')
    assert pmid_refs['PMID'] == '22046132'
    doi_refs = parse_text_refs(
        'pubmed:https(//doi.org/10.1101/2020.03.22.002386)')
    assert doi_refs['DOI'] == '10.1101/2020.03.22.002386'
def test_parse_source_ids():
    """The pipe-separated source ID string splits into a key/value dict."""
    parsed = parse_source_ids('virhostnet-rid:2564|virhostnet-nrid:2199')
    assert parsed == {'virhostnet-rid': '2564', 'virhostnet-nrid': '2199'}
def test_parse_psi_mi():
    """A PSI-MI term splits into its identifier and human-readable label."""
    parsed = parse_psi_mi('psi-mi:"MI:0915"(physical association)')
    assert len(parsed) == 2, parsed
    assert parsed[0] == 'MI:0915', parsed
    assert parsed[1] == 'physical association'
def test_process_row():
    # End-to-end check of process_row on one realistic VirHostNet row.
    # The fixture below is a single tab-separated record whose fields are
    # zipped against api.data_columns to build the row dict.
    test_row_str = ('uniprotkb:Q6P5R6	uniprotkb:Q1K9H5	'
                    'uniprotkb:RL22L_HUMAN	uniprotkb:Q1K9H5_I33A0	'
                    'uniprotkb:RL22L_HUMAN	uniprotkb:Q1K9H5_I33A0	'
                    'psi-mi:"MI:0004"(affinity chromatography technology)	'
                    '-	pubmed:26651948	taxid:9606	taxid:381518	'
                    'psi-mi:"MI:0915"(physical association)	'
                    'psi-mi:"MI:1114"(virhostnet)	'
                    'virhostnet-rid:19809|virhostnet-nrid:18603	'
                    'virhostnet-miscore:0.32715574')
    row = {k: v for k, v in zip(data_columns, test_row_str.split('\t'))}
    stmt = process_row(row)
    # process_row should yield a host-virus Complex with one evidence.
    assert isinstance(stmt, Complex)
    host_ag = stmt.members[0]
    assert host_ag.name == 'RPL22L1'
    vir_ag = stmt.members[1]
    # This is unreviewed so we currently can't get its name
    assert vir_ag.name == 'Q1K9H5'
    assert len(stmt.evidence) == 1
    ev = stmt.evidence[0]
    # Provenance fields should be copied straight out of the row.
    assert ev.source_api == 'virhostnet'
    assert ev.source_id == '19809'
    assert ev.pmid == '26651948'
    assert ev.text_refs == {'PMID': '26651948'}
    # Annotations carry taxonomy, score and PSI-MI term metadata.
    assert ev.annotations['host_tax'] == '9606'
    assert ev.annotations['vir_tax'] == '381518'
    assert ev.annotations['score'] == 0.32715574
    assert ev.annotations['int_type'] == {'id': 'MI:0915',
                                          'name': 'physical association'}
    assert ev.annotations['virhostnet-rid'] == '19809'
    assert ev.annotations['virhostnet-nrid'] == '18603'
    assert ev.annotations['exp_method'] == {'id': 'MI:0004',
                                            'name': ('affinity chromatography '
                                                     'technology')}
| bsd-2-clause | Python | |
fab191fa1c490e8fb494417ba33e8f41c8ae4fec | Add a slice viewer widget class. | berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop | ui/widgets/SliceViewerWidget.py | ui/widgets/SliceViewerWidget.py | """
SliceViewerWidget
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkRenderer
from vtk import vtkInteractorStyleUser
from vtk import vtkImagePlaneWidget
from vtk import vtkCellPicker
from PySide.QtGui import QGridLayout
from PySide.QtGui import QWidget
from PySide.QtCore import Signal
from ui.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
from ui.Interactor import Interactor
from core.vtkDrawing import CreateCircle
class SliceViewerWidget(QWidget, Interactor):
    """
    SliceViewerWidget shows slices that you can scroll through. Slicing happens
    in world coordinates. It can be synced to another slicer widget.

    Two vtk renderers are stacked: layer 0 holds the image slice, layer 1 is a
    non-interactive overlay whose camera is kept in sync with layer 0 so the
    locator circle always lines up with the slice.
    """

    # Emitted with this widget after the slice index changes via mouse wheel.
    slicePositionChanged = Signal(object)
    # Emitted with the picked world position on every mouse move.
    mouseMoved = Signal(object)

    def __init__(self):
        super(SliceViewerWidget, self).__init__()

        # Main renderer (layer 0) with a dark gradient background.
        self.renderer = vtkRenderer()
        self.renderer.SetBackground2(0.4, 0.4, 0.4)
        self.renderer.SetBackground(0.1, 0.1, 0.1)
        self.renderer.SetGradientBackground(True)
        self.renderer.SetLayer(0)

        # Overlay renderer which is synced with the default renderer
        self.rendererOverlay = vtkRenderer()
        self.rendererOverlay.SetLayer(1)
        self.rendererOverlay.SetInteractive(0)
        # Keep the overlay camera a copy of the main camera at all times.
        self.renderer.GetActiveCamera().AddObserver("ModifiedEvent", self._syncCameras)

        # Embed the vtk render window in this Qt widget.
        self.rwi = QVTKRenderWindowInteractor(parent=self)
        self.rwi.SetInteractorStyle(vtkInteractorStyleUser())
        self.rwi.GetRenderWindow().AddRenderer(self.renderer)
        self.rwi.GetRenderWindow().AddRenderer(self.rendererOverlay)
        self.rwi.GetRenderWindow().SetNumberOfLayers(2)

        # Set camera to parallel
        camera = self.renderer.GetActiveCamera()
        camera.SetParallelProjection(1)

        # Add new observers for mouse wheel
        self.AddObserver(self.rwi, "CharEvent", self.charTyped)
        self.AddObserver(self.rwi, "MouseWheelBackwardEvent", self.mouseWheelChanged)
        self.AddObserver(self.rwi, "MouseWheelForwardEvent", self.mouseWheelChanged)
        self.AddObserver(self.rwi, "MouseMoveEvent", self.mouseMovedEvent, 1)

        # Cell picker used to turn 2D mouse positions into world coordinates.
        self.picker = vtkCellPicker()
        self.picker.SetTolerance(1e-6)

        # Known state of mouse (maybe can ask the event as well...)
        self.leftButtonPressed = False

        # Locator circle actor; created lazily in setImageData().
        self.circle = None

        layout = QGridLayout()
        layout.setSpacing(0)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(self.rwi)
        self.setLayout(layout)

    def _syncCameras(self, camera, ev):
        """
        Camera modified event callback. Copies the parameters of
        the renderer camera into the camera of the overlay so they
        stay synced at all times.
        """
        self.rendererOverlay.GetActiveCamera().ShallowCopy(camera)

    def charTyped(self, arg1, arg2):
        # Placeholder for keyboard handling; currently ignores all keys.
        # print arg1.GetKeyCode()
        pass

    def setLocatorPosition(self, position):
        """Move the locator circle to the given [x, y, z] world position."""
        self.circle.SetPosition(position[0], position[1], position[2])

    def setImageData(self, imageData):
        """Attach a vtkImageData volume and create the slicing plane widget.

        Also creates the locator circle (sized to 1/20 of the mean volume
        extent) the first time image data is supplied.
        """
        self.imagedata = imageData

        # Add a slicer widget that looks at camera
        self.slicer = vtkImagePlaneWidget()
        self.slicer.DisplayTextOn()
        self.slicer.SetInteractor(self.rwi)
        self.slicer.SetInputData(imageData)
        self.slicer.SetPlaneOrientation(2)
        self.slicer.SetRestrictPlaneToVolume(1)
        self.slicer.PlaceWidget()
        self.slicer.On()

        self.renderer.ResetCamera()
        camera = self.renderer.GetActiveCamera()
        # Wide clipping range so the whole volume stays visible while slicing.
        camera.SetClippingRange(0.1, 10000)

        if not self.circle:
            bounds = self.imagedata.GetBounds()
            size = [bounds[1] - bounds[0], bounds[3] - bounds[2], bounds[5] - bounds[4]]
            meanSize = sum(size) / len(size)
            self.circle = CreateCircle(meanSize / 20.0)
            self.rendererOverlay.AddViewProp(self.circle)

    def mouseWheelChanged(self, arg1, arg2):
        """Step the slice index by +/-1 depending on wheel direction,
        re-render and notify listeners via slicePositionChanged."""
        sign = 1 if arg2 == "MouseWheelForwardEvent" else -1
        index = self.slicer.GetSliceIndex()
        nextIndex = index + sign
        self.slicer.SetSliceIndex(nextIndex)
        self.slicer.UpdatePlacement()
        self.render()
        self.slicePositionChanged.emit(self)

    def mouseMovedEvent(self, arg1, arg2):
        """Pick the world position under the cursor and emit mouseMoved."""
        x, y = arg1.GetEventPosition()
        self.picker.Pick(x, y, 0, self.renderer)
        pos = self.picker.GetPickPosition()
        self.mouseMoved.emit(pos)

    def render(self):
        # NOTE(review): assumes setImageData() was called first, otherwise
        # self.slicer does not exist yet.
        self.slicer.UpdatePlacement()
        self.renderer.Render()
        self.rwi.GetRenderWindow().Render()
| mit | Python | |
0fef9ab4e7a70a5e53cf5e5ae91d7cc5fd8b91da | Create xml_grabber.py | agusmakmun/Some-Examples-of-Simple-Python-Script,agusmakmun/Some-Examples-of-Simple-Python-Script | grabbing/xml_grabber.py | grabbing/xml_grabber.py | """XML TYPE
<?xml version="1.0" encoding="utf-8"?>
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
<channel>
<title>Q Blog</title>
<link>http://agus.appdev.my.id/feed/</link>
<description>Latest Posts of Q</description>
<atom:link href="http://agus.appdev.my.id/feed/" rel="self"></atom:link>
<language>en-us</language>
<lastBuildDate>Mon, 29 Jun 2015 12:49:38 -0000</lastBuildDate>
<item>
<title>Sample Post Kapal Pesiar</title>
<link>http://agus.appdev.my.id/entry/sample-post-kapal-pesiar</link>
<description>Sample Post Kapal Pesiar</description>
<guid>http://agus.appdev.my.id/entry/sample-post-kapal-pesiar</guid>
</item>
<item>
<title>Test Post from user</title>
<link>http://agus.appdev.my.id/entry/test-post-user</link>
<description>Test Post from user</description>
<guid>http://agus.appdev.my.id/entry/test-post-user</guid>
</item>
</channel>
</rss>
"""
import urllib
from bs4 import BeautifulSoup as BS
# Feed URL that _getUrl_Image() downloads and parses.
url = 'http://agus.appdev.my.id/feed/'
# NOTE: the original `soup = BeautifulSoup(url)` raised a NameError at import
# time (the class is imported under the alias `BS`) and its result was never
# used, so the statement has been removed.
def _getUrl_Image(url):
    # Download the RSS feed at `url` and print one HTML anchor per <item>,
    # linking each entry's <link> to its <title>.
    # NOTE(review): the parsing below depends on the exact serialized tag
    # order inside <item> (title first, link second); a feed with a
    # different layout would produce wrong anchors.  (Python 2 code.)
    start = urllib.urlopen(url)
    soup = BS(start)
    all_link = soup.findAll('item', None)
    for i in all_link:
        item = str(i)+'\n'
        # Split the serialized item on '<'; fragment 2 is "title>...",
        # fragment 4 is "link>..." — [6:] strips those 6-character prefixes.
        split = item.split('<')
        title = split[2][6:]
        link = "<a href='"+split[4][6:]+"'>"+title+"</a>"
        print link
# Run immediately against the module-level feed URL.
_getUrl_Image(url)
"""RESULT
<a href='http://agus.appdev.my.id/entry/sample-post-kapal-pesiar'>Sample Post Kapal Pesiar</a>
<a href='http://agus.appdev.my.id/entry/test-post-user'>Test Post from user</a>
"""
| agpl-3.0 | Python | |
8fded9a735f40c4d4503ae01f1f5bb9592226bf6 | Add script to synchronize photos and poses from the 2019 porto IR dataset | fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop | python/fire_rs/neptus_mission_analysis.py | python/fire_rs/neptus_mission_analysis.py | # Copyright (c) 2019, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import argparse
import datetime
import logging
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pytz
import os
import os.path
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Obtain UAV pose list at picture timestamps')
    parser.add_argument(
        'estimated_state', nargs=1, type=argparse.FileType('r'),
        help='EstimatedState as csv file')
    parser.add_argument(
        'photo_paths', nargs='+', type=str,
        help='Photo files to process')
    parser.add_argument(
        'result_file', nargs=1, type=argparse.FileType('w'),
        help='CSV file where poses are stored to')
    args = parser.parse_args()

    logger = logging.getLogger(__name__)
    # BUG FIX: the original used `"..." % args.estimated_state` on a format
    # string without any placeholder, which raises TypeError at runtime.
    logger.info("Loading EstimatedState from: %s", args.estimated_state)

    # CSV timestamps are epoch seconds; parse them as timezone-aware UTC.
    dateparser = lambda dates: [pd.datetime.fromtimestamp(float(d), tz=pytz.utc)
                                for d in dates]
    es = pd.read_csv(args.estimated_state[0], index_col=0, skipinitialspace=True,
                     parse_dates=["timestamp (seconds since 01/01/1970)"],
                     date_parser=dateparser)

    logger.info("Analyze %s photos", len(args.photo_paths))
    # Each photo is named "<epoch seconds>.<ext>"; keep its timestamp both as
    # a pandas Timestamp (for time interpolation) and as the raw float (for
    # the output index).  The duplicated splitext/basename/float computation
    # of the original is hoisted into a single variable.
    new_ts = []
    new_ts_float = []
    for photo_path in args.photo_paths:
        stamp = float(os.path.splitext(os.path.basename(photo_path))[0])
        new_ts.append(pd.Timestamp(
            datetime.datetime.fromtimestamp(stamp, tz=pytz.utc)))
        new_ts_float.append(stamp)

    # Merge photo timestamps into the telemetry index, interpolate all pose
    # channels in time, then keep only the rows at the photo timestamps.
    es2 = es.reindex(es.index.append(pd.Index(new_ts)).sort_values())
    es3 = es2.interpolate(method="time")
    es4 = es3.reindex(pd.Index(new_ts))

    logger.info("Writing UAV poses to %s", args.result_file[0])
    es4.index = new_ts_float
    es4.to_csv(args.result_file[0], index_label="timestamp", index=True,
               columns=["lat (rad)", "lon (rad)", "height (m)", "phi (rad)",
                        "theta (rad)", "psi (rad)"])
    logger.info("End")
| bsd-2-clause | Python | |
6b6f7d225633e9c6bd406de695a1e52ce830a14e | Create feature_util.py | CSC591ADBI-TeamProjects/Product-Search-Relevance,CSC591ADBI-TeamProjects/Product-Search-Relevance | feature_util.py | feature_util.py | '''
Contains methods to extract features for training
'''
| mit | Python | |
88ff76fbc9275a327e016e9aef09d4ab2c3647e9 | test setup | nickmarton/Vivid | Classes/test_Classes/test_State.py | Classes/test_Classes/test_State.py | """Attribute System unit tests."""
import pytest
from ..State import State
| mit | Python | |
ae86eb3f7a3d7b2a8289f30c8d3d312c459710fb | update code laplacian article | Tulip4attoo/Tulip4attoo.github.io,Tulip4attoo/Tulip4attoo.github.io,Tulip4attoo/Tulip4attoo.github.io,Tulip4attoo/Tulip4attoo.github.io | assets/codes/laplacian_filter.py | assets/codes/laplacian_filter.py | import cv2
import numpy as np
from PIL import Image
image = cv2.imread("output.jpg")
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
laplacian0 = np.array(([0, 1, 0],
[1, -4, 1],
[0, 1, 0]), dtype="int")
laplacian1 = np.array(([1, 1, 1],
[1, -8, 1],
[1, 1, 1]), dtype="int")
laplacian2 = np.array(([1, 2, 1],
[2, -12, 2],
[1, 2, 1]), dtype="int")
laplacian3 = np.array(([1, 0, 1],
[4, -12, 4],
[1, 0, 1]), dtype="int")
laplacian4 = np.array(([1, 4, 1],
[0, -12, 0],
[1, 4, 1]), dtype="int")
kernelBank = (("laplacian0", laplacian0),
("laplacian1", laplacian1),
("laplacian2", laplacian2),
("laplacian3", laplacian3),
("laplacian4", laplacian4))
Image.fromarray(gray).show()
for (kernelName, kernel) in kernelBank:
opencvOutput = cv2.filter2D(gray, -1, kernel)
cv2.putText(opencvOutput,
kernelName,
(30,30),
cv2.FONT_HERSHEY_SIMPLEX,
1,
(255,0,255))
Image.fromarray(opencvOutput).show()
| mit | Python | |
d892914381a3067fdd04d6d0af0aceda0c092039 | test staff | jscott1989/happening,happeninghq/happening,jscott1989/happening,jscott1989/happening,jscott1989/happening,happeninghq/happening,happeninghq/happening,happeninghq/happening | staff/tests/test_staff.py | staff/tests/test_staff.py | """Test sending emails."""
from happening.tests import TestCase
from model_mommy import mommy
from django.conf import settings
class TestStaff(TestCase):
"""Test staff views."""
def setUp(self):
"""Set up users."""
self.user = mommy.make(settings.AUTH_USER_MODEL, is_staff=True)
self.user.set_password("password")
self.user.save()
self.non_staff_user = mommy.make(settings.AUTH_USER_MODEL)
self.non_staff_user.set_password("password")
self.non_staff_user.save()
def test_dashboard(self):
"""Test dashboard loads only for staff."""
self.client.login(username=self.user.username, password="password")
response = self.client.get("/staff/")
self.assertEquals(response.status_code, 200)
self.client.login(username=self.non_staff_user.username,
password="password")
response = self.client.get("/staff/")
self.assertEquals(response.status_code, 302)
| mit | Python | |
a193f1d9b1816f72661254bba69c2c4a1e2c1b30 | Add tests for google menu | EndPointCorp/appctl,EndPointCorp/appctl | tests/extensions/functional/tests/test_google_menu.py | tests/extensions/functional/tests/test_google_menu.py | """
Google Menu tests
"""
from base import BaseTouchscreenTest
import time
from base import MAPS_URL, ZOOMED_IN_MAPS_URL, Pose
from base import screenshot_on_error, make_screenshot
import re
class TestGoogleMenu(BaseTouchscreenTest):
@screenshot_on_error
def test_google_menu_is_visible(self):
self.browser.get(MAPS_URL)
morefun = self.browser.find_element_by_id('morefun')
assert morefun.is_displayed() is True
items = self.browser.find_element_by_id('morefun_items')
assert items.is_displayed() is False
@screenshot_on_error
def test_google_items_are_visible_on_click(self):
self.browser.get(MAPS_URL)
morefun = self.browser.find_element_by_id('morefun')
morefun.click()
assert morefun.is_displayed() is True
items = self.browser.find_element_by_id('morefun_items')
assert items.is_displayed() is True
@screenshot_on_error
def test_clicking_doodle_item(self):
"Clicking on the doodle item should change the url to the doodles page"
self.browser.get(ZOOMED_IN_MAPS_URL)
time.sleep(5)
morefun = self.browser.find_element_by_id('morefun')
morefun.click()
items = self.browser.find_element_by_id('morefun_items')
li_items = items.find_elements_by_tag_name('li')
assert len(li_items) == 2
doodle = li_items[1]
doodle.click()
assert re.match(r'chrome-extension:\/\/[a-z]+\/pages\/doodles.html',
self.browser.current_url)
| apache-2.0 | Python | |
26df96a0c772c70013cc7a027022e84383ccaee2 | Add a helper script for converting -print-before-all output into a file based equivelent | llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm | utils/chunk-print-before-all.py | utils/chunk-print-before-all.py | #!/usr/bin/env python
# Given a -print-before-all -print-module-scope log from an opt invocation,
# chunk it into a series of individual IR files, one for each pass invocation.
# If the log ends with an obvious stack trace, try to split off a separate
# "crashinfo.txt" file leaving only the valid input IR in the last chunk.
# Files are written to current working directory.
import sys
# Prefix for generated chunk files and the running counter that numbers them.
basename = "chunk-"
chunk_id = 0
def print_chunk(lines):
    # Write `lines` to the next numbered chunk file (<basename><id>.ll) in
    # the current working directory and advance the global counter.
    # (Python 2 script.)
    global chunk_id
    global basename
    fname = basename + str(chunk_id) + ".ll"
    chunk_id = chunk_id + 1
    print "writing chunk " + fname + " (" + str(len(lines)) + " lines)"
    with open(fname, "w") as f:
        f.writelines(lines)
# Split stdin at every "*** IR Dump Before" banner: each segment becomes its
# own chunk file, with the banner preserved as an IR comment line ("; ...").
# If a "Stack dump:" section appears, everything after it is treated as crash
# output rather than IR.
is_dump = False
cur = []
for line in sys.stdin:
    if line.startswith("*** IR Dump Before ") and len(cur) != 0:
        # A new pass banner ends the previous chunk.
        print_chunk(cur);
        cur = []
        cur.append("; " + line)
    elif line.startswith("Stack dump:"):
        # Flush the last valid IR chunk; collect the trace separately.
        print_chunk(cur);
        cur = []
        cur.append(line)
        is_dump = True
    else:
        cur.append(line)
if is_dump:
    # The trailing lines are a stack trace, not IR.
    print "writing crashinfo.txt (" + str(len(cur)) + " lines)"
    with open("crashinfo.txt", "w") as f:
        f.writelines(cur)
else:
    print_chunk(cur);
| apache-2.0 | Python | |
cd08fb72fea040d31394435bc6c1892bc208bcc0 | Add sumclip.py for WPA analysis | randomascii/tools,randomascii/tools,randomascii/tools | bin/sumclip.py | bin/sumclip.py | # Copyright 2016 Bruce Dawson. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Sum data from the clipboard. This script reads lines of data from the clipboard,
converts each line to a python float (double) and sums them, also printing other
statistics.
This is useful for summing columns of WPA (Windows Performance Analyzer) data
from ETW traces. Just select all data, control-clicking as needed to deselect
particular rows, then right-click in the column-> Copy Other-> Copy Column
Selection. Then run this tool. This tool assumes that commas can be discarded
and will fail in many non-English locales.
"""
from __future__ import print_function
import sys
import win32clipboard
def _summarize(lines):
    """Parse each line as a float (commas stripped) and accumulate stats.

    Returns ``(count, total, lo, hi, missed)`` where count/total/lo/hi
    describe the successfully parsed values and `missed` counts lines that
    could not be parsed.  Pulled out of main() so the numeric logic no
    longer shadows the builtins sum/min/max and is testable without a
    clipboard.
    """
    total = 0.0
    lo = 1e100  # sentinel larger than any realistic data value
    hi = 0
    count = 0
    missed = 0
    for line in lines:
        try:
            val = float(line.replace(',', ''))
        except ValueError:  # narrowed from the original bare except
            missed += 1
            continue
        count += 1
        total += val
        if val < lo:
            lo = val
        if val > hi:
            hi = val
    return count, total, lo, hi, missed


def main():
    """Read the clipboard, sum its numeric lines and print statistics."""
    win32clipboard.OpenClipboard()
    data = win32clipboard.GetClipboardData()
    win32clipboard.CloseClipboard()

    count, total, lo, hi, missed = _summarize(data.splitlines())
    if count > 0:
        print("Found %d values, sum is %1.3f, min %1.3f, avg %1.3f, max %1.3f." % (
            count, total, lo, total / count, hi))
    if missed > 0:
        print("Found %d non-numeric values" % missed)


if __name__ == '__main__':
    sys.exit(main())
| apache-2.0 | Python | |
34317172bc8b0cf6ec512181e7fac30bc4804cea | Create goingLoopyWithPython.py | AlexEaton1105/computerScience | goingLoopyWithPython.py | goingLoopyWithPython.py | # date: 11/09/15
# username: A1fus
# name: Alfie Bowman
# description: Going Loopy with Python
lines = 0 #defines variable
while lines <50: #causes Python to do anything indented until the condition is met
print("I will not mess about in Computer Science lessons") #prints the str
lines = lines + 1 #adds 1 to the variable
value = 1 #Aspire
while value < 101:
print(value)
value = value + 1
yourNumber = int(input("Pick a number: ")) #Aspire+
while yourNumber < 101:
print(yourNumber)
yourNumber = yourNumber + 1
| mit | Python | |
a90c05355c2735c0a8d2b87d12b143d91f801660 | make timeline of training output | deworrall92/groupConvolutions,deworrall92/harmonicConvolutions | bsd/epochizer.py | bsd/epochizer.py | '''Group ims'''
import os
import sys
import time
if __name | mit | Python | |
d3fa9df4c4f91ddb42954ea125ed69c2380ada62 | create python version of list_change_file_hashes | niyaton/kenja-java-parser,daiki1217/kenja,daiki1217/kenja,yum-kvn/kenja,niyaton/kenja,yum-kvn/kenja,niyaton/kenja | src/list_changed_file_hashes.py | src/list_changed_file_hashes.py | from git import Repo
import os
class CommitList:
def __init__(self, repo):
self.repo = repo
def print_all_blob_hashes(self):
hashes = set()
for commit in self.repo.iter_commits(self.repo.head):
for p in commit.parents:
diff = p.diff(commit)
for change in diff.iter_change_type("M"):
if change.b_blob.name.endswith(".java"):
hashes.add(change.b_blob.hexsha)
for change in diff.iter_change_type("A"):
if change.b_blob.name.endswith(".java"):
hashes.add(change.b_blob.hexsha)
for h in hashes:
print h
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Edit distance calculator')
parser.add_argument('org_git_dir')
args = parser.parse_args()
git_dir = args.org_git_dir
if not os.path.isdir(git_dir):
print "%s is not a directory" % (git_dir)
repo = Repo(git_dir)
cl = CommitList(repo)
cl.print_all_blob_hashes()
| mit | Python | |
c2d9801ada5f28267edfeaf090c3ce973a6197b4 | add breast_segment.py, implement threshold, initial documentation | olieidel/breast_segment | breast_segment/breast_segment.py | breast_segment/breast_segment.py | import numpy as np
from skimage.exposure import equalize_hist
from skimage.filters.rank import median
from skimage.measure import regionprops
from skimage.morphology import disk
from skimage.segmentation import felzenszwalb
from skimage.transform import rescale
from scipy.ndimage import binary_fill_holes
from scipy.misc import imresize
# TODO: implement threshold variable
def breast_segment(im, scale_factor=0.25, threshold=3900, felzenzwalb_scale=0.15):
"""
Fully automated breast segmentation in mammographies.
https://github.com/olieidel/breast_segment
:param im: Image
:param scale_factor: Scale Factor
:param threshold: Threshold
:param felzenzwalb_scale: Felzenzwalb Scale
:return: (im_mask, bbox) where im_mask is the segmentation mask and
bbox is the bounding box (rectangular) of the segmentation.
"""
# set threshold to remove artifacts around edges
im_thres = im.copy()
im_thres[im_thres > threshold] = 0
# determine breast side
col_sums_split = np.array_split(np.sum(im_thres, axis=0), 2)
left_col_sum = np.sum(col_sums_split[0])
right_col_sum = np.sum(col_sums_split[1])
if left_col_sum > right_col_sum:
breast_side = 'l'
else:
breast_side = 'r'
# rescale and filter aggressively, normalize
im_small = rescale(im_thres, scale_factor)
im_small_filt = median(im_small, disk(50))
# this might not be helping, actually sometimes it is
im_small_filt = equalize_hist(im_small_filt)
# run mr. felzenzwalb
segments = felzenszwalb(im_small_filt, scale=felzenzwalb_scale)
segments += 1 # otherwise, labels() would ignore segment with segment=0
props = regionprops(segments)
# Sort Props by area, descending
props_sorted = sorted(props, key=lambda x: x.area, reverse=True)
expected_bg_index = 0
bg_index = expected_bg_index
bg_region = props_sorted[bg_index]
minr, minc, maxr, maxc = bg_region.bbox
filled_mask = bg_region.filled_image
im_small_fill = np.zeros((im_small_filt.shape[0]+2, im_small_filt.shape[1]+1), dtype=int)
if breast_side == 'l':
# breast expected to be on left side,
# pad on right and bottom side
im_small_fill[minr+1:maxr+1, minc:maxc] = filled_mask
im_small_fill[0, :] = 1 # top
im_small_fill[-1, :] = 1 # bottom
im_small_fill[:, -1] = 1 # right
elif breast_side == 'r':
# breast expected to be on right side,
# pad on left and bottom side
im_small_fill[minr+1:maxr+1, minc+1:maxc+1] = filled_mask # shift mask to right side
im_small_fill[0, :] = 1 # top
im_small_fill[-1, :] = 1 # bottom
im_small_fill[:, 0] = 1 # left
im_small_fill = binary_fill_holes(im_small_fill)
im_small_mask = im_small_fill[1:-1, :-1] if breast_side == 'l' \
else im_small_fill[1:-1, 1:]
# rescale mask
im_mask = imresize(im_small_mask, im.shape).astype(bool)
# invert!
im_mask = ~im_mask
# determine side of breast in mask and compare
col_sums_split = np.array_split(np.sum(im_mask, axis=0), 2)
left_col_sum = np.sum(col_sums_split[0])
right_col_sum = np.sum(col_sums_split[1])
if left_col_sum > right_col_sum:
breast_side_mask = 'l'
else:
breast_side_mask = 'r'
if breast_side_mask != breast_side:
# breast mask is not on expected side
# we might have segmented bg instead of breast
# so invert again
print('breast and mask side mismatch. inverting!')
im_mask = ~im_mask
# exclude thresholded area (artifacts) in mask, too
im_mask[im > threshold] = False
# fill holes again, just in case there was a high-intensity region
# in the breast
im_mask = binary_fill_holes(im_mask)
# if no region found, abort early and return mask of complete image
if im_mask.ravel().sum() == 0:
all_mask = np.ones_like(im).astype(bool)
bbox = (0, 0, im.shape[0], im.shape[1])
print('Couldn\'t find any segment')
return all_mask, bbox
# get bbox
minr = np.argwhere(im_mask.any(axis=1)).ravel()[0]
maxr = np.argwhere(im_mask.any(axis=1)).ravel()[-1]
minc = np.argwhere(im_mask.any(axis=0)).ravel()[0]
maxc = np.argwhere(im_mask.any(axis=0)).ravel()[-1]
bbox = (minr, minc, maxr, maxc)
return im_mask, bbox
| mit | Python | |
5f3f2ce52569eb3ae57ab3e4a2eaff29fc0d6522 | add pyqt demo | cheenwe/cheenwe.github.io,cheenwe/cheenwe.github.io,cheenwe/cheenwe.github.io,cheenwe/cheenwe.github.io,cheenwe/cheenwe.github.io,cheenwe/cheenwe.github.io,cheenwe/cheenwe.github.io | study/python/pyqt/demo.py | study/python/pyqt/demo.py | from PyQt5.QtWidgets import QMainWindow, QPushButton , QWidget , QMessageBox, QApplication, QHBoxLayout
import sys, sqlite3
class WinForm(QMainWindow):
def __init__(self, parent=None):
super(WinForm, self).__init__(parent)
button1 = QPushButton('插入数据')
button2 = QPushButton('显示数据')
button1.clicked.connect(lambda: self.onButtonClick(1))
button2.clicked.connect(lambda: self.onButtonClick(2))
layout = QHBoxLayout()
layout.addWidget(button1)
layout.addWidget(button2)
main_frame = QWidget()
main_frame.setLayout(layout)
self.setCentralWidget(main_frame)
def onButtonClick(self, n):
if n == 1:
query = 'INSERT INTO users(name, phone, age, remark) VALUES(?, ?, ?, ?)'
curs.execute(query, ("test", "12312312312", 12, "text" ))
conn.commit()
print('Button {0} 被按下了'.format(n))
QMessageBox.information(self, "信息提示框", 'Button {0} clicked'.format(n))
if n == 2:
print('hhhh {0} 被按下了'.format(n))
QMessageBox.information(self, "信息提示框", 'Button {0} clicked'.format(n))
if __name__ == "__main__":
app = QApplication(sys.argv)
conn = sqlite3.connect("user.db")
curs = conn.cursor()
curs.execute('''
CREATE TABLE IF NOT EXISTS users(
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
phone TEXT,
age INTEGER,
remark TEXT
)
''')
conn.commit()
form = WinForm()
form.show()
sys.exit(app.exec_())
| mit | Python | |
bf8328ff9b020bd3b99268744f86f94db2924011 | Create process.py | elecabfer/Bowtie,elecabfer/Bowtie | process.py | process.py | rm 1_*
rm 21_*
head *error.txt >> info_gg_unpaired.txt
source /mnt/common/epfl/etc/bbcf_bashrc ### para llamar a todos los programas de bbcf
module add UHTS/Analysis/samtools/1.2;
python -c "from bbcflib import mapseq"
for i in {2..48}
do
add_nh_flag "$i"_16S_gg.sam "$i"_SE_gg.bam
samtools sort "$i"_SE_gg.bam "$i"_SE_gg_s
samtools view -F0x4 $i"_SE_gg_s.bam | cut -f 3 | uniq -c >> "$i"_counts.txt
done
| mit | Python | |
93589c7e139d3af4b0a949f107fc5e20ed69fee4 | add atop stats library | ronniedada/litmus,mikewied/cbagent,vmx/cbagent,couchbase/cbmonitor,ronniedada/litmus,couchbase/cbmonitor,pavel-paulau/cbagent,couchbase/cbagent | cbagent/collectors/libstats/atopstats.py | cbagent/collectors/libstats/atopstats.py | from uuid import uuid4
from fabric.api import run
from systemstats import SystemStats, multi_task, single_task
uhex = lambda: uuid4().hex
class AtopStats(SystemStats):
    """Remote per-process stats (CPU, VSIZE, RSS) sampled via atop logs.

    atop is started in logging mode on each host; readings are then pulled
    from the log for the current minute with ``atop -r``.
    """

    def __init__(self, hosts, user, password):
        super(AtopStats, self).__init__(hosts, user, password)
        # Random log file name so concurrent runs on one host don't collide.
        self.logfile = "/tmp/{0}.atop".format(uhex())
        self._base_cmd = \
            "d=`date +%H:%M` && atop -r {0} -b $d -e $d".format(self.logfile)
        # Column positions vary between atop builds, so discover them once.
        self._cpu_column = self._get_cpu_column()
        self._vsize_column = self._get_vsize_column()
        self._rss_column = self._get_rss_column()

    @multi_task
    def stop_atop(self):
        """Kill atop on every host and remove its log file."""
        run("killall -q atop")
        run("rm -rf {0}".format(self.logfile))

    @multi_task
    def start_atop(self):
        """Start atop in the background, sampling every 5 seconds."""
        run("nohup atop -a -w {0} 5 > /dev/null 2>&1 &".format(self.logfile))

    def is_atop_running(self):
        raise NotImplementedError

    def restart_atop(self):
        """Convenience: stop (cleaning the log) then start atop again."""
        self.stop_atop()
        self.start_atop()

    @single_task
    def _get_vsize_column(self):
        output = run("atop -m 1 1 | grep PID")
        return output.split().index("VSIZE")

    @single_task
    def _get_rss_column(self):
        output = run("atop -m 1 1 | grep PID")
        return output.split().index("RSIZE")

    @single_task
    def _get_cpu_column(self):
        # Consistency fix: first parameter was named ``ip``; renamed to
        # ``self`` to match the sibling column probes above.
        output = run("atop 1 1 | grep PID")
        return output.split().index("CPU")

    @multi_task
    def get_process_cpu(self, process):
        """Return the CPU column value for *process* from the current sample."""
        cmd = self._base_cmd + "| grep {0}".format(process)
        output = run(cmd)
        return output.split()[self._cpu_column]

    @multi_task
    def get_process_vzize(self, process):
        # NOTE: name misspells "vsize" but is kept for existing callers.
        cmd = self._base_cmd + " -m | grep {0}".format(process)
        output = run(cmd)
        return output.split()[self._vsize_column]

    @multi_task
    def get_process_rss(self, process):
        cmd = self._base_cmd + " -m | grep {0}".format(process)
        output = run(cmd)
        return output.split()[self._rss_column]
| apache-2.0 | Python | |
a6c03ff9bc850248999afa3f597f460ee3eadc26 | Add lexer | 9seconds/curly | curly/lexer.py | curly/lexer.py | # -*- coding: utf-8 -*-
import collections
import re
import textwrap
def make_regexp(pattern):
    """Compile *pattern* as a verbose, Unicode-aware regular expression.

    The pattern is dedented first, so subclass ``REGEXP`` attributes can
    be written as indented triple-quoted strings.
    """
    return re.compile(textwrap.dedent(pattern), re.UNICODE | re.VERBOSE)
class Token:
    """Base class for lexer tokens.

    Subclasses override ``REGEXP`` (pattern the raw string must match)
    and ``extract_contents`` (payload pulled out of the match).
    """

    # Fixed attribute set keeps the many token instances small.
    __slots__ = "contents", "raw_string"

    REGEXP = make_regexp(".+")

    def __init__(self, raw_string):
        matcher = self.REGEXP.match(raw_string)
        if matcher is None:
            raise ValueError(
                "String {0!r} is not valid for pattern {1!r}".format(
                    raw_string, self.REGEXP.pattern))
        self.contents = self.extract_contents(matcher)
        self.raw_string = raw_string

    def extract_contents(self, matcher):
        """Return this token's payload dict; base tokens carry none."""
        return {}

    def __str__(self):
        return ("<{0.__class__.__name__}(raw={0.raw_string!r}, "
                "contents={0.contents!r})>").format(self)

    def __repr__(self):
        return str(self)
class PrintToken(Token):
    """``{{ var }}`` — substitute a variable's value."""

    REGEXP = make_regexp(
        r"""
        \{\{                        # opening {{
        ([a-zA-Z0-9_ \t\n\r\f\v]+) # group 1, 'var' in {{ var }}
        \}\}                        # closing }}
        """
    )

    def extract_contents(self, matcher):
        return {"var": matcher.group(1).strip()}
class IfStartToken(Token):
    """``{? var ?}`` — open a conditional block."""

    REGEXP = make_regexp(
        r"""
        \{\?                        # opening {?
        ([a-zA-Z0-9_ \t\n\r\f\v]+) # group 1, 'var' in {? var ?}
        \?\}                        # closing ?}
        """
    )

    def extract_contents(self, matcher):
        return {"var": matcher.group(1).strip()}
class IfEndToken(Token):
    """``{?}`` — close a conditional block."""

    REGEXP = make_regexp(r"\{\?\}")
class LoopStartToken(Token):
    """``{% var %}`` — open a loop block."""

    REGEXP = make_regexp(
        r"""
        \{\%                        # opening {%
        ([a-zA-Z0-9_ \t\n\r\f\v]+) # group 1, 'var' in {% var %}
        \%\}                        # closing %}
        """
    )

    def extract_contents(self, matcher):
        return {"var": matcher.group(1).strip()}
class LoopEndToken(Token):
    """``{%}`` — close a loop block."""

    REGEXP = make_regexp(r"\{%\}")
class LiteralToken(Token):
    """Plain text between template constructs; built directly, no regexp."""

    def __init__(self, text):
        # Bypass Token.__init__: literals need no pattern validation.
        self.raw_string = text
        self.contents = {"text": text}
# Ordered registry of token types; the order fixes alternation priority
# in the combined tokenizer pattern below.
TOKENS = collections.OrderedDict()
TOKENS["print"] = PrintToken
TOKENS["if_start"] = IfStartToken
TOKENS["if_end"] = IfEndToken
TOKENS["loop_start"] = LoopStartToken
TOKENS["loop_end"] = LoopEndToken

# One big alternation with a named group per token type, so
# ``matcher.lastgroup`` reveals which token kind matched.
TOKENIZER_REGEXP = make_regexp(
    "|".join(
        "(?P<{0}>{1})".format(k, v.REGEXP.pattern) for k, v in TOKENS.items()
    )
)
def tokenize(text):
    """Tokenize *text* and return the token stream as a tuple.

    Thin eager wrapper around :func:`tokenize_iter`. The previous
    version also printed ``TOKENIZER_REGEXP.pattern`` to stdout on
    every call — a debug leftover, removed here.
    """
    return tuple(tokenize_iter(text))
def tokenize_iter(text):
    """Yield Token instances covering all of *text*, in order.

    Gaps between (and after) regexp matches are emitted as LiteralToken,
    so concatenating ``raw_string`` of all tokens reproduces *text*.
    """
    previous_end = 0
    for matcher in TOKENIZER_REGEXP.finditer(text):
        if matcher.start(0) != previous_end:
            yield LiteralToken(text[previous_end:matcher.start(0)])
        previous_end = matcher.end(0)
        match_groups = matcher.groupdict()
        # lastgroup names the token type that matched in the alternation.
        token_class = TOKENS[matcher.lastgroup]
        yield token_class(match_groups[matcher.lastgroup])
    leftover = text[previous_end:]
    if leftover:
        yield LiteralToken(leftover)
if __name__ == "__main__":
    # Ad-hoc demo: lex a template that exercises every token type,
    # including escaped braces and unbalanced constructs.
    text = """
Hello, world! This is {{ first_name }} {{ last_name }}
{? show_phone ?}
{{ phone }}
{?} {?
And here is the list of stuff I like:
{% like %}
- {{ item }} \{\{sdfsd {?verbose?}{{ tada }}!{?}
{%}
Thats all!
""".strip()
    print("--- TEXT:\n{0}\n---".format(text))
    print("--- HERE GO TOKENS\n")
    for tok in tokenize(text):
        print(tok)
8c65226b79ad0f7ac3487a117298498cff4b23be | Update cherry-pickup.py | kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015 | Python/cherry-pickup.py | Python/cherry-pickup.py | # Time: O(n^3)
# Space: O(n^2)
class Solution(object):
    def cherryPickup(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int
        """
        # dp holds the max # of cherries two k-length paths can pickup.
        # The two k-length paths arrive at (i, k - i) and (j, k - j),
        # respectively.
        n = len(grid)
        dp = [[-1 for _ in xrange(n)] for _ in xrange(n)]
        dp[0][0] = grid[0][0]
        max_len = 2 * (n-1)
        # Predecessor offsets: on the previous step each path came either
        # from the left or from above.
        directions = [(0, 0), (-1, 0), (0, -1), (-1, -1)]
        for k in xrange(1, max_len+1):
            # Reverse order lets dp be updated in place; j >= i dedupes
            # symmetric pairs of paths. Out-of-grid states are skipped by
            # the guard below.
            for i in reversed(xrange(max(0, k-n-1), min(k+1, n))):
                for j in reversed(xrange(i, min(k+1, n))):
                    if not (0 <= k-i < n and 0 <= k-j < n):
                        continue
                    # A thorn (-1) on either path invalidates this state.
                    if grid[i][k-i] == -1 or grid[j][k-j] == -1:
                        dp[i][j] = -1
                        continue
                    cnt = grid[i][k-i]
                    if i != j:
                        cnt += grid[j][k-j]
                    max_cnt = -1
                    for direction in directions:
                        ii, jj = i+direction[0], j+direction[1]
                        if ii >= 0 and jj >= 0 and dp[ii][jj] >= 0:
                            max_cnt = max(max_cnt, dp[ii][jj]+cnt)
                    dp[i][j] = max_cnt
        return max(dp[n-1][n-1], 0)
| # Time: O(n^3)
# Space: O(n^2)
class Solution(object):
    def cherryPickup(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int
        """
        # dp holds the max # of cherries two k-length paths can pickup.
        # The two k-length paths arrive at (i, k - i) and (j, k - j),
        # respectively.
        n = len(grid)
        dp = [[-1 for _ in xrange(n)] for _ in xrange(n)]
        dp[0][0] = grid[0][0]
        max_len = 2 * (n-1)
        # Predecessor offsets: on the previous step each path came either
        # from the left or from above.
        directions = [(0, 0), (-1, 0), (0, -1), (-1, -1)]
        for k in xrange(1, max_len+1):
            # Reverse order lets dp be updated in place; j >= i dedupes
            # symmetric pairs of paths. i values with k-i outside the grid
            # are skipped by the guard below.
            for i in reversed(xrange(min(k+1, n))):
                for j in reversed(xrange(i, min(k+1, n))):
                    if not (0 <= k-i < n and 0 <= k-j < n):
                        continue
                    # A thorn (-1) on either path invalidates this state.
                    if grid[i][k-i] == -1 or grid[j][k-j] == -1:
                        dp[i][j] = -1
                        continue
                    cnt = grid[i][k-i]
                    if i != j:
                        cnt += grid[j][k-j]
                    max_cnt = -1
                    for direction in directions:
                        ii, jj = i+direction[0], j+direction[1]
                        if ii >= 0 and jj >= 0 and dp[ii][jj] >= 0:
                            max_cnt = max(max_cnt, dp[ii][jj]+cnt)
                    dp[i][j] = max_cnt
        return max(dp[n-1][n-1], 0)
| mit | Python |
43f2accb8cd4f63d62f7515bb5633296a7d592f0 | Add setup.py to spm. | carolFrohlich/nipype,fprados/nipype,grlee77/nipype,FredLoney/nipype,carolFrohlich/nipype,carolFrohlich/nipype,glatard/nipype,wanderine/nipype,blakedewey/nipype,iglpdc/nipype,sgiavasis/nipype,carlohamalainen/nipype,blakedewey/nipype,iglpdc/nipype,pearsonlab/nipype,fprados/nipype,satra/NiPypeold,gerddie/nipype,mick-d/nipype,pearsonlab/nipype,FCP-INDI/nipype,sgiavasis/nipype,gerddie/nipype,mick-d/nipype_source,mick-d/nipype,grlee77/nipype,FredLoney/nipype,dmordom/nipype,Leoniela/nipype,mick-d/nipype,arokem/nipype,iglpdc/nipype,christianbrodbeck/nipype,rameshvs/nipype,FCP-INDI/nipype,grlee77/nipype,glatard/nipype,rameshvs/nipype,mick-d/nipype,carolFrohlich/nipype,glatard/nipype,FCP-INDI/nipype,JohnGriffiths/nipype,dgellis90/nipype,rameshvs/nipype,dgellis90/nipype,dmordom/nipype,blakedewey/nipype,Leoniela/nipype,FredLoney/nipype,carlohamalainen/nipype,FCP-INDI/nipype,wanderine/nipype,christianbrodbeck/nipype,Leoniela/nipype,JohnGriffiths/nipype,arokem/nipype,wanderine/nipype,sgiavasis/nipype,fprados/nipype,grlee77/nipype,iglpdc/nipype,sgiavasis/nipype,dmordom/nipype,dgellis90/nipype,arokem/nipype,JohnGriffiths/nipype,mick-d/nipype_source,gerddie/nipype,glatard/nipype,arokem/nipype,JohnGriffiths/nipype,rameshvs/nipype,satra/NiPypeold,pearsonlab/nipype,wanderine/nipype,blakedewey/nipype,gerddie/nipype,mick-d/nipype_source,carlohamalainen/nipype,dgellis90/nipype,pearsonlab/nipype | nipype/interfaces/spm/setup.py | nipype/interfaces/spm/setup.py | def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('spm', parent_package, top_path)
config.add_data_dir('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| bsd-3-clause | Python | |
4797918eec0c43ada3f6eb9a63ec2f275eced253 | Add spider for State Farm Agents; closes #519 | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/statefarm.py | locations/spiders/statefarm.py | import json
import re
import scrapy
from locations.items import GeojsonPointItem
class StateFarmSpider(scrapy.Spider):
    """Crawls statefarm.com's agent directory, emitting one point per agent."""

    name = "statefarm"
    allowed_domains = ["statefarm.com"]
    download_delay = 0.2  # throttle to be polite to the site

    start_urls = [
        'https://www.statefarm.com/agent/us',
    ]

    def parse_location(self, response):
        """Scrape one agent's office page into a GeojsonPointItem."""
        name = response.xpath('//*[@id="AgentNameLabelId"]//span[@itemprop="name"]/text()').extract_first()
        if name:
            name += ' - State Farm Insurance Agent'

        lat = response.xpath('//*[@id="agentOfficePrimaryLocLat"]/@value').extract_first()
        lon = response.xpath('//*[@id="agentOfficePrimaryLocLong"]/@value').extract_first()

        properties = {
            # Last three URL segments uniquely identify the agent page.
            'ref': "_".join(response.url.split('/')[-3:]),
            'name': name,
            'addr_full': response.xpath('normalize-space(//div[@itemtype="http://schema.org/PostalAddress"]//span[@id="locStreetContent_mainLocContent"]/text())').extract_first(),
            'city': response.xpath('//div[@itemtype="http://schema.org/PostalAddress"]/div[2]/span/span[1]/text()').extract_first().strip(', '),
            'state': response.xpath('//div[@itemtype="http://schema.org/PostalAddress"]/div[2]/span/span[2]/text()').extract_first(),
            'postcode': response.xpath('//div[@itemtype="http://schema.org/PostalAddress"]/div[2]/span/span[3]/text()').extract_first(),
            'phone': response.xpath('normalize-space(//span[@id="offNumber_mainLocContent"]/span/text())').extract_first(),
            'lat': float(lat) if lat else None,
            'lon': float(lon) if lon else None,
            'website': response.url,
        }

        yield GeojsonPointItem(**properties)

    def parse(self, response):
        """Walk the directory tree; pages listing agents link to offices."""
        agents = response.xpath('//div[contains(@id, "agent-details")]')
        # agent_sites = response.xpath('//a[contains(text(), "Visit agent site")]/@href').extract()

        if agents:
            for agent in agents:
                agent_site = agent.xpath('.//a[contains(text(), "Visit agent site")]/@href').extract_first()
                if not agent_site:
                    raise Exception('no agent site found')
                yield scrapy.Request(response.urljoin(agent_site), callback=self.parse_location)

        else:
            # Not a leaf page yet: recurse into sub-directory links.
            urls = response.xpath('//li/div/a/@href').extract()
            for url in urls:
                yield scrapy.Request(response.urljoin(url))
| mit | Python | |
ebec02461bd341d49a499572d56bdef4520a650e | Add a missing migration | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org | Instanssi/store/migrations/0007_storeitem_is_ticket.py | Instanssi/store/migrations/0007_storeitem_is_ticket.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-11 22:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds ``StoreItem.is_ticket`` (Django auto-generated migration)."""

    dependencies = [
        ('store', '0006_auto_20161209_0015'),
    ]

    operations = [
        migrations.AddField(
            model_name='storeitem',
            name='is_ticket',
            field=models.BooleanField(default=False, help_text='Tuote on lipputuote, ja sitä voi käyttää esim. kompomaatissa äänestysoikeuden hankkimiseen', verbose_name='Tuote on lipputuote'),
        ),
    ]
| mit | Python | |
cb01c58e0d11999331eb01e33bf970db8742f2f8 | Create VertexGlyphFilter.py | lorensen/VTKExamples,lorensen/VTKExamples,lorensen/VTKExamples,lorensen/VTKExamples,lorensen/VTKExamples,lorensen/VTKExamples,lorensen/VTKExamples | src/Python/Filtering/VertexGlyphFilter.py | src/Python/Filtering/VertexGlyphFilter.py | #!/usr/bin/env python
import vtk
def main():
    """Render three 3D points as visible vertices via vtkVertexGlyphFilter."""
    colors = vtk.vtkNamedColors()

    points = vtk.vtkPoints()
    points.InsertNextPoint(0,0,0)
    points.InsertNextPoint(1,1,1)
    points.InsertNextPoint(2,2,2)

    polydata = vtk.vtkPolyData()
    polydata.SetPoints(points)

    # Points alone are not renderable; the glyph filter wraps each point
    # in a vertex cell so the mapper can draw them.
    vertexGlyphFilter = vtk.vtkVertexGlyphFilter()
    vertexGlyphFilter.AddInputData(polydata)
    vertexGlyphFilter.Update()

    # Create a mapper and actor
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(vertexGlyphFilter.GetOutputPort())
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetPointSize(10)
    actor.GetProperty().SetColor(colors.GetColor3d("Yellow"))

    # Create a renderer, render window, and interactor
    renderer = vtk.vtkRenderer()
    renderWindow = vtk.vtkRenderWindow()
    renderWindow.AddRenderer(renderer)
    renderWindowInteractor = vtk.vtkRenderWindowInteractor()
    renderWindowInteractor.SetRenderWindow(renderWindow)

    # Add the actor to the scene
    renderer.AddActor(actor)
    renderer.SetBackground(colors.GetColor3d("Green")) # Background color green

    # Render and interact
    renderWindow.Render()
    renderWindowInteractor.Start()

if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
d2536ce3ded4fc2ea5648025f04efa093629b70f | test rapapasswords table | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint | test/rapapasswordstest.py | test/rapapasswordstest.py | #!/usr/bin/python2.4
#
# Copyright (c) 2005-2007 rPath, Inc.
#
import testsuite
testsuite.setup()
import os
import sys
import time
import tempfile
import fixtures
from mint import rapapasswords
class rAPAPasswordsTest(fixtures.FixturedUnitTest):
    """Tests storing/fetching rAPA credentials keyed by host and role."""

    @fixtures.fixture("Full")
    def testPasswords(self, db, data):
        """set/get round-trips, per-role isolation, and overwrite on re-set."""
        client = self.getClient('admin')
        # Unknown host: lookup must return a falsy value, not raise.
        self.failIf(client.getrAPAPassword('foo.bar.baz', 'role'),
                'Value returned when not present.')
        client.setrAPAPassword('blah.bar.baz', 'foo_bar_baz', 'passwd', 'role')
        client.setrAPAPassword('foo.bar.baz', 'foo_bar_baz', 'passwd', 'role')
        client.setrAPAPassword('foo.bar.baz', 'foo_bar_baz2', 'passwd2', 'role2')
        user, passwd = client.getrAPAPassword('foo.bar.baz', 'role')
        self.failIf(user != 'foo_bar_baz' or passwd != 'passwd', "Incorrect user returned")
        user, passwd = client.getrAPAPassword('foo.bar.baz', 'role2')
        self.failIf(user != 'foo_bar_baz2' or passwd != 'passwd2', "Incorrect user returned")
        # Re-setting with a new password must overwrite the stored value.
        client.setrAPAPassword('foo.bar.baz', 'foo_bar_baz', 'passwd_changed', 'role')
        user, passwd = client.getrAPAPassword('foo.bar.baz', 'role')
        self.failIf(user != 'foo_bar_baz' or passwd != 'passwd_changed', "Password not updated.")

if __name__ == "__main__":
    testsuite.main()
| apache-2.0 | Python | |
45e86e49e845ef25df6e1db3bcb336809ffb5f5f | Disable IPv6 on wireless (Extension Attribute for Casper) | killahquam/JAMF,killahquam/JAMF | ipv6_Checker.py | ipv6_Checker.py | #!/usr/bin/python
#Copyright 2014 Quam Sodji
import subprocess
def getinfo(hardware): # Return network info for the selected interface
    info = subprocess.check_output(["networksetup", "-getinfo", hardware])
    return info

# The two interface names macOS has used for wireless adapters.
wireless = ["Airport", "Wi-Fi"]
list_network = subprocess.check_output(["networksetup", "-listallnetworkservices"])
list_network = list_network.split('\n')
for device in wireless:
    if device in list_network:
        response = getinfo(device)
        response_check = response.split("\n")
        if "IPv6: Off" not in response_check:
            # IPv6 is enabled on this interface: disable it.
            check = subprocess.check_output(["networksetup", "-setv6off", device])
            Status = "Off"
        else:
            for setting in response_check:
                if setting.startswith("IPv6:"):
                    if setting != "IPv6: Off":
                        Status = setting
                    else:
                        Status = "Off"
    else:
        Status = "No wireless interfaces configured"
        continue
# Casper/JAMF Extension Attribute output format.
# NOTE(review): only the Status from the last loop iteration is reported.
print "<result>%s</result>"%Status
| mit | Python | |
c68872453a0c4a28e31d5ee38faf11d8a0486b62 | add drone_setup.py | dronesmith/Radiation-Detection-Example,dronesmith/Radiation-Detection-Example | drone_setup.py | drone_setup.py | import requests
import json
# Open user-account.json and create a json object containing
# user credential fields.
# Load stored credentials: user-account.json holds a one-element list of
# {email, api_key, drone_name} records.
with open('user-account.json', "r") as jsonFile:
    jsonUser = json.load(jsonFile)
    jsonFile.close()  # redundant inside ``with`` (closed on exit anyway)

# Assign user credentials to variables
USER_EMAIL = jsonUser[0]['email']
USER_API_KEY = jsonUser[0]['api_key']
DRONE_NAME = ""

# Create headers object for API requests.
# NOTE(review): the API key is sent over plain HTTP here.
headers = {
    'user-email': USER_EMAIL,
    'user-key': USER_API_KEY,
    'Content-Type': 'application/json'
}

# This request will create a virtual drone on your account with a random
# name. The server should respond with a JSON formatted Drone
# object that contains the name of the new drone.
print "\nCreate new virtual drone...\n"
response = requests.post('http://api.dronesmith.io/api/drone', headers=headers)
obj = json.loads(response.text)
print json.dumps(obj, indent=2, sort_keys=True)

# Update DRONE_NAME
DRONE_NAME = obj['name']

# Update drone_name field in jsonUser object
jsonUser[0]["drone_name"] = DRONE_NAME

# Write jsonUser object back to user-account.json
with open('user-account.json', "w") as jsonFile:
    jsonFile.write(json.dumps(jsonUser,indent=2, sort_keys=True))
    jsonFile.close()

# Add a sensor named radiation_sensor to drone and initialize its intensity field
print "\nAdd radiation sensor to drone...\n"
response = requests.post('http://api.dronesmith.io/api/drone/' + DRONE_NAME \
    + '/sensor/radiation_sensor', json={
        "intensity": 0
    }, headers=headers)
jsonText = json.loads(response.text)
print json.dumps(jsonText, indent=2, sort_keys=True)

# Get drone object to make sure radiation sensor was properly
# added. There should be a sensors field containing radiation_sensor object.
print "\nGet Drone object..\n"
response = requests.get('http://api.dronesmith.io/api/drone/' \
    + DRONE_NAME, headers=headers)
jsonText = json.loads(response.text)
print json.dumps(jsonText, indent=2, sort_keys=True)
| bsd-3-clause | Python | |
a5ff02a696c553dbd4038e1cb1c0fd0668b30006 | Create ets2look.py | NixillUmbreon/Scripts | FreePIE/ets2look.py | FreePIE/ets2look.py | hSen = 750
vSen = 200
if starting:
lastX = 0
lastY = 0
thisX = xbox360[0].rightStickX * hSen
thisY = xbox360[0].rightStickY * vSen
mouse.deltaX = thisX - lastX
mouse.deltaY = lastY - thisY
lastX = thisX
lastY = thisY
| mit | Python | |
7bea7133d8b069e784b8e35e045a6411cac8882c | add movielens (#1027) | intel-analytics/BigDL,intel-analytics/BigDL,yangw1234/BigDL,yangw1234/BigDL,intel-analytics/BigDL,yangw1234/BigDL,intel-analytics/BigDL,yangw1234/BigDL | python/dllib/src/bigdl/dllib/feature/dataset/movielens.py | python/dllib/src/bigdl/dllib/feature/dataset/movielens.py | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import zipfile
import numpy as np
from bigdl.dataset import base
SOURCE_URL = 'http://files.grouplens.org/datasets/movielens/'
def read_data_sets(data_dir):
    """
    Parse or download movielens 1m data if train_dir is empty.

    :param data_dir: The directory storing the movielens data
    :return: a 2D numpy array with user index and item index in each row
    """
    WHOLE_DATA = 'ml-1m.zip'
    local_file = base.maybe_download(WHOLE_DATA, data_dir, SOURCE_URL + WHOLE_DATA)
    zip_ref = zipfile.ZipFile(local_file, 'r')
    extracted_to = os.path.join(data_dir, "ml-1m")
    # Skip extraction when a previous run already unpacked the archive.
    # NOTE(review): zip_ref stays open when extraction is skipped.
    if not os.path.exists(extracted_to):
        print("Extracting %s to %s" % (local_file, data_dir))
        zip_ref.extractall(data_dir)
        zip_ref.close()
    rating_files = os.path.join(extracted_to,"ratings.dat")
    # Rows look like "user::item::rating::timestamp"; keep user and item.
    rating_list = [i.strip().split("::")[:2] for i in open(rating_files,"r").readlines()]
    movielens_data = np.array(rating_list).astype(int)
    return movielens_data

if __name__ == "__main__":
    movielens_data = read_data_sets("/tmp/movielens/")
| apache-2.0 | Python | |
46b7dd2c389d2bf020e2c413518e0f960fa28ba4 | Add test for current_locale expression | rmoorman/sqlalchemy-i18n,kvesteri/sqlalchemy-i18n | tests/test_expressions.py | tests/test_expressions.py | from sqlalchemy_i18n.expressions import current_locale
class TestCurrentLocaleExpression(object):
    """SQL rendering of the ``current_locale`` expression."""

    def test_render(self):
        # current_locale() should compile to a bound-parameter placeholder.
        assert str(current_locale()) == ':current_locale'
| bsd-3-clause | Python | |
3d33aeb24018943ffcc1fdc7a537d871f804a3ab | Add test file | blink1073/pexpect,blink1073/pexpect,blink1073/pexpect | tests/test_popen_spawn.py | tests/test_popen_spawn.py | #!/usr/bin/env python
'''
PEXPECT LICENSE
This license is approved by the OSI and FSF as GPL-compatible.
http://opensource.org/licenses/isc-license.txt
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
import unittest
import subprocess
import pexpect
from pexpect.popen_spawn import PopenSpawn
from . import PexpectTestCase
class ExpectTestCase (PexpectTestCase.PexpectTestCase):
    """Exercises PopenSpawn (pipe-based spawn) expect/expect_exact behavior."""

    def test_expect(self):
        """expect() reassembles ls output line by line, matching subprocess."""
        the_old_way = subprocess.Popen(args=['ls', '-l', '/bin'],
                stdout=subprocess.PIPE).communicate()[0].rstrip()
        p = PopenSpawn('ls -l /bin')
        the_new_way = b''
        while 1:
            i = p.expect([b'\n', pexpect.EOF])
            the_new_way = the_new_way + p.before
            if i == 1:
                break
            the_new_way += b'\n'
        the_new_way = the_new_way.rstrip()
        assert the_old_way == the_new_way, len(the_old_way) - len(the_new_way)

    def test_expect_exact(self):
        """expect_exact() matches literal strings, including regex metachars."""
        the_old_way = subprocess.Popen(args=['ls', '-l', '/bin'],
                stdout=subprocess.PIPE).communicate()[0].rstrip()
        p = PopenSpawn('ls -l /bin')
        the_new_way = b''
        while 1:
            i = p.expect_exact([b'\n', pexpect.EOF])
            the_new_way = the_new_way + p.before
            if i == 1:
                break
            the_new_way += b'\n'
        the_new_way = the_new_way.rstrip()
        assert the_old_way == the_new_way, len(the_old_way) - len(the_new_way)
        # '.?' must be matched literally here, not as a regex.
        p = PopenSpawn('echo hello.?world')
        i = p.expect_exact(b'.?')
        self.assertEqual(p.before, b'hello')
        self.assertEqual(p.after, b'.?')

    def test_expect_eof(self):
        """Expecting EOF slurps all output, like pexpect.run()."""
        the_old_way = subprocess.Popen(args=['/bin/ls', '-l', '/bin'],
                stdout=subprocess.PIPE).communicate()[0].rstrip()
        p = PopenSpawn('/bin/ls -l /bin')
        # This basically tells it to read everything. Same as pexpect.run()
        # function.
        p.expect(pexpect.EOF)
        the_new_way = p.before.rstrip()
        assert the_old_way == the_new_way, len(the_old_way) - len(the_new_way)

    def test_expect_timeout(self):
        """A never-matching expect hits TIMEOUT when it's in the pattern list."""
        p = PopenSpawn('cat', timeout=5)
        p.expect(pexpect.TIMEOUT) # This tells it to wait for timeout.
        self.assertEqual(p.after, pexpect.TIMEOUT)

    def test_unexpected_eof(self):
        """EOF before a match raises pexpect.EOF when not expected."""
        p = PopenSpawn('ls -l /bin')
        try:
            p.expect('_Z_XY_XZ') # Probably never see this in ls output.
        except pexpect.EOF:
            pass
        else:
            self.fail('Expected an EOF exception.')

    def test_bad_arg(self):
        """Non-string/bytes patterns are rejected with TypeError."""
        p = PopenSpawn('cat')
        with self.assertRaisesRegexp(TypeError, '.*must be one of'):
            p.expect(1)
        with self.assertRaisesRegexp(TypeError, '.*must be one of'):
            p.expect([1, b'2'])
        with self.assertRaisesRegexp(TypeError, '.*must be one of'):
            p.expect_exact(1)
        with self.assertRaisesRegexp(TypeError, '.*must be one of'):
            p.expect_exact([1, b'2'])

    def test_timeout_none(self):
        """timeout=None disables the timeout entirely."""
        p = PopenSpawn('echo abcdef', timeout=None)
        p.expect('abc')
        p.expect_exact('def')
        p.expect(pexpect.EOF)

if __name__ == '__main__':
    unittest.main()

# Module-level suite kept for pexpect's historical test-runner convention.
suite = unittest.makeSuite(ExpectTestCase, 'test')
| isc | Python | |
633f5ad8064395ec3805e38ea1f73a9aa7475878 | Use the caller's unpickler as well | dongguangming/jsonpickle,dongguangming/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle | jsonpickle/_handlers.py | jsonpickle/_handlers.py | import datetime
import jsonpickle
class DatetimeHandler(jsonpickle.handlers.BaseHandler):
    """
    Datetime objects use __reduce__, and they generate binary strings encoding
    the payload. This handler encodes that payload to reconstruct the
    object.
    """
    def flatten(self, obj, data):
        """Flatten obj via __reduce__, base64-encoding its binary state."""
        pickler = self._base
        if not pickler.unpicklable:
            return unicode(obj)
        cls, args = obj.__reduce__()
        # args[0] is the binary payload; trailing args (e.g. tzinfo) are
        # flattened recursively with the active pickler.
        args = [args[0].encode('base64')] + map(pickler.flatten, args[1:])
        data['__reduce__'] = (pickler.flatten(cls), args)
        return data

    def restore(self, obj):
        """Rebuild the datetime object from the stored reduce payload."""
        cls, args = obj['__reduce__']
        value = args[0].decode('base64')
        # Reuse the calling unpickler so restores share its context.
        unpickler = self._base
        cls = unpickler.restore(cls)
        params = map(unpickler.restore, args[1:])
        params = (value,) + tuple(params)
        return cls.__new__(cls, *params)
class SimpleReduceHandler(jsonpickle.handlers.BaseHandler):
    """
    Follow the __reduce__ protocol to pickle an object. As long as the factory
    and its arguments are pickleable, this should pickle any object that
    implements the reduce protocol.
    """
    def flatten(self, obj, data):
        pickler = self._base
        if not pickler.unpicklable:
            return unicode(obj)
        data['__reduce__'] = map(pickler.flatten, obj.__reduce__())
        return data

    def restore(self, obj):
        # Reuse the calling unpickler so restores share its context.
        unpickler = self._base
        cls, args = map(unpickler.restore, obj['__reduce__'])
        return cls.__new__(cls, *args)

# Register handlers for the datetime family; timedelta's reduce args are
# plain numbers, so the generic reduce handler suffices for it.
jsonpickle.handlers.registry.register(datetime.datetime, DatetimeHandler)
jsonpickle.handlers.registry.register(datetime.date, DatetimeHandler)
jsonpickle.handlers.registry.register(datetime.time, DatetimeHandler)
jsonpickle.handlers.registry.register(datetime.timedelta, SimpleReduceHandler)
| import datetime
import jsonpickle
class DatetimeHandler(jsonpickle.handlers.BaseHandler):
    """
    Datetime objects use __reduce__, and they generate binary strings encoding
    the payload. This handler encodes that payload to reconstruct the
    object.
    """
    def flatten(self, obj, data):
        """Flatten obj via __reduce__, base64-encoding its binary state."""
        pickler = self._base
        if not pickler.unpicklable:
            return unicode(obj)
        cls, args = obj.__reduce__()
        args = [args[0].encode('base64')] + map(pickler.flatten, args[1:])
        data['__reduce__'] = (pickler.flatten(cls), args)
        return data

    def restore(self, obj):
        """Rebuild the datetime object from the stored reduce payload."""
        cls, args = obj['__reduce__']
        value = args[0].decode('base64')
        # Fix: reuse the calling unpickler (self._base) instead of creating
        # a fresh jsonpickle.Unpickler(), so restores of nested values share
        # the caller's unpickling context, mirroring flatten's use of
        # self._base.
        unpickler = self._base
        cls = unpickler.restore(cls)
        params = map(unpickler.restore, args[1:])
        params = (value,) + tuple(params)
        return cls.__new__(cls, *params)
class SimpleReduceHandler(jsonpickle.handlers.BaseHandler):
    """
    Follow the __reduce__ protocol to pickle an object. As long as the factory
    and its arguments are pickleable, this should pickle any object that
    implements the reduce protocol.
    """
    def flatten(self, obj, data):
        pickler = self._base
        if not pickler.unpicklable:
            return unicode(obj)
        data['__reduce__'] = map(pickler.flatten, obj.__reduce__())
        return data

    def restore(self, obj):
        # Fix: reuse the calling unpickler (self._base) instead of creating
        # a fresh jsonpickle.Unpickler(), so restores share the caller's
        # unpickling context, mirroring flatten's use of self._base.
        unpickler = self._base
        cls, args = map(unpickler.restore, obj['__reduce__'])
        return cls.__new__(cls, *args)

# Register handlers for the datetime family; timedelta's reduce args are
# plain numbers, so the generic reduce handler suffices for it.
jsonpickle.handlers.registry.register(datetime.datetime, DatetimeHandler)
jsonpickle.handlers.registry.register(datetime.date, DatetimeHandler)
jsonpickle.handlers.registry.register(datetime.time, DatetimeHandler)
jsonpickle.handlers.registry.register(datetime.timedelta, SimpleReduceHandler)
| bsd-3-clause | Python |
f1f654d823ee8454b53f27372bbaab85f4d01631 | add analyzer | DBCDK/serviceprovider,DBCDK/serviceprovider | performancetest/rec-analyze.py | performancetest/rec-analyze.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division
import re
import sys
import os
import json
import numpy as np
def f(n):
    """Round *n* to the nearest whole number and return it as an int."""
    rounded = round(n)
    return int(rounded)
def report(name, seq):
    """Print summary statistics for *seq* (latencies in milliseconds)."""
    print '===', name, '[ms] ==='
    print "mean:", f(np.mean(seq)), "\tstd dev:", f(np.std(seq))
    print "95%:", f(np.percentile(seq, 95)), "\t99%:", f(np.percentile(seq, 99))
    print "min:", f(min(seq)), "\tmax:", f(max(seq))
if __name__ == "__main__":
    errors = dict()       # HTTP status -> count of non-200 responses
    total_times = []      # wall-clock time measured by curl [ms]
    service_times = []    # time reported by the service itself [ms]
    overhead_times = []   # difference: transport/front-end overhead [ms]
    response = None
    curltime_ms = None
    totaltime = None
    externaltime = None
    n = 0
    # Input interleaves JSON response lines and curl timing lines; a JSON
    # line sets externaltime for the CURL line that follows it.
    for line in sys.stdin:
        if line.startswith('{'):
            # assume json response
            response = json.loads(line)
            #totaltime = response["msecs"]
            externaltime = response["msecs"]
            continue
        match = re.search(r'CURL HTTPCODE=(\d+) SECS=(\d+[.,]\d+)', line)
        if match:
            n+=1
            print "n:", n
            (httpcode, time) = match.group(1,2)
            if not httpcode == "200":
                if not httpcode in errors:
                    errors[ httpcode ] = 1
                else:
                    errors[ httpcode ] += 1
            else:
                # Accept both '.' and ',' decimal separators (locales).
                time = time.replace(',','.')
                curltime_ms = float(time)*1000
                print "CURL:", curltime_ms
                print "TOTAL REC:", externaltime
                overhead = curltime_ms - externaltime
                print "TOTAL OVERHEAD:", overhead
                total_times.append(curltime_ms)
                overhead_times.append(overhead)
                service_times.append(externaltime)
            continue
    if errors:
        print "ERRORS:", errors
    else:
        print "[no errors]"
    print "samples:", len(total_times)
    report("total time", total_times)
    report("service time", service_times)
    report("overhead time", overhead_times)
e7809f307610e98cb8356110eec7e8c1f41e9d46 | Backup script. | couchbaselabs/ep-engine,teligent-ru/ep-engine,owendCB/ep-engine,sriganes/ep-engine,daverigby/kv_engine,jimwwalker/ep-engine,membase/ep-engine,zbase/ep-engine,zbase/ep-engine,membase/ep-engine,abhinavdangeti/ep-engine,couchbase/ep-engine,abhinavdangeti/ep-engine,jimwwalker/ep-engine,teligent-ru/ep-engine,daverigby/ep-engine,membase/ep-engine,zbase/ep-engine,jimwwalker/ep-engine,jimwwalker/ep-engine,sriganes/ep-engine,couchbaselabs/ep-engine,owendCB/ep-engine,daverigby/ep-engine,teligent-ru/ep-engine,abhinavdangeti/ep-engine,daverigby/kv_engine,daverigby/ep-engine,membase/ep-engine,hisundar/ep-engine,daverigby/kv_engine,couchbase/ep-engine,sriganes/ep-engine,couchbase/ep-engine,couchbaselabs/ep-engine,owendCB/ep-engine,hisundar/ep-engine,daverigby/ep-engine,couchbase/ep-engine,hisundar/ep-engine,zbase/ep-engine,abhinavdangeti/ep-engine,sriganes/ep-engine,owendCB/ep-engine,zbase/ep-engine,teligent-ru/ep-engine,couchbaselabs/ep-engine,daverigby/kv_engine,abhinavdangeti/ep-engine,hisundar/ep-engine,couchbaselabs/ep-engine | management/backup.py | management/backup.py | #!/usr/bin/env python
import sys
import os
import glob
import shutil
import mc_bin_client
def usage():
    """Print a usage banner to stderr and abort with exit status 1.

    Ported from the Python-2-only ``print >> sys.stderr`` statement to
    ``sys.stderr.write`` with byte-identical output (the trailing "\\n"
    reproduces the newline the print statement appended).
    """
    sys.stderr.write("""
Usage: %s <dest_dir>
""" % os.path.basename(sys.argv[0]) + "\n")
    sys.exit(1)
def main():
    """Copy the memcached persistence files to <dest_dir> with flushing paused."""
    if len(sys.argv) != 2:
        usage()
    cmd_dir = os.path.dirname(sys.argv[0])
    dest_dir = sys.argv[1]
    flushctl = os.path.join(cmd_dir, 'flushctl.py')

    mc = mc_bin_client.MemcachedClient('127.0.0.1')
    # The server reports its own db path; back up that file and siblings.
    db_path = mc.stats()['ep_dbname']
    db_files = glob.glob('%s*' % db_path)

    # Pause persistence so the files are not written to mid-copy.
    print 'Pausing persistence... ',
    os.system('"%s" 127.0.0.1:11211 stop' % flushctl)
    print 'paused.'

    try:
        for fn in db_files:
            dest_fn = os.path.join(dest_dir, os.path.basename(fn))
            print 'Copying %s to %s' % (fn, dest_fn)
            shutil.copyfile(fn, dest_fn)
    finally:
        # Always resume persistence, even if a copy failed.
        print 'Unpausing persistence.'
        os.system('"%s" 127.0.0.1:11211 start' % flushctl)

if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
2945b68d5a8b6505f1a8516dd8b5f7d4b85aac5a | Add tests for storeslice | sklam/numba,IntelLabs/numba,gmarkall/numba,pitrou/numba,stuartarchibald/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,stefanseefeld/numba,stonebig/numba,pombredanne/numba,pombredanne/numba,jriehl/numba,jriehl/numba,stonebig/numba,jriehl/numba,GaZ3ll3/numba,ssarangi/numba,GaZ3ll3/numba,pombredanne/numba,sklam/numba,numba/numba,stonebig/numba,sklam/numba,gdementen/numba,numba/numba,seibert/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,cpcloud/numba,numba/numba,GaZ3ll3/numba,stuartarchibald/numba,ssarangi/numba,pombredanne/numba,stefanseefeld/numba,gmarkall/numba,gdementen/numba,gmarkall/numba,stuartarchibald/numba,seibert/numba,jriehl/numba,pitrou/numba,seibert/numba,ssarangi/numba,gdementen/numba,cpcloud/numba,IntelLabs/numba,sklam/numba,pitrou/numba,numba/numba,stefanseefeld/numba,ssarangi/numba,GaZ3ll3/numba,numba/numba,cpcloud/numba,seibert/numba,stefanseefeld/numba,pitrou/numba,gmarkall/numba,gdementen/numba,IntelLabs/numba,stonebig/numba,ssarangi/numba,gdementen/numba,stefanseefeld/numba,pombredanne/numba,pitrou/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,cpcloud/numba,stonebig/numba | numba/tests/test_storeslice.py | numba/tests/test_storeslice.py | from __future__ import print_function
import numba.unittest_support as unittest
import numpy as np
from numba.compiler import compile_isolated, Flags
def usecase(obs, nPoints, B, sigB, A, sigA, M, sigM):
center = nPoints / 2
print(center)
obs[0:center] = np.arange(center)
obs[center] = 321
obs[(center + 1):] = np.arange(nPoints - center - 1)
class TestStoreSlice(unittest.TestCase):
def test_usecase(self):
n = 10
obs_got = np.zeros(n)
obs_expected = obs_got.copy()
flags = Flags()
flags.set("enable_pyobject")
cres = compile_isolated(usecase, (), flags=flags)
cres.entry_point(obs_got, n, 10.0, 1.0, 2.0, 3.0, 4.0, 5.0)
usecase(obs_expected, n, 10.0, 1.0, 2.0, 3.0, 4.0, 5.0)
print(obs_got, obs_expected)
self.assertTrue(np.allclose(obs_got, obs_expected))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | Python | |
c468f49e50b7a55995ef1fc447c04993c2171ca2 | Add lc0242_valid_anagram.py | bowen0701/algorithms_data_structures | lc0242_valid_anagram.py | lc0242_valid_anagram.py | """Leetcode 242. Valid Anagram
Easy
URL: https://leetcode.com/problems/valid-anagram/
Given two strings s and t,
write a function to determine if t is an anagram of s.
Example 1:
Input: s = "anagram", t = "nagaram"
Output: true
Example 2:
Input: s = "rat", t = "car"
Output: false
Note:
You may assume the string contains only lowercase alphabets.
Follow up:
What if the inputs contain unicode characters? How would you adapt your solution to such case?
"""
class Solution(object):
def isAnagram(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python | |
dbb812621c58d92bc3c2aed6135ddb9b91cd9181 | implement bucket compaction test | EricACooper/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,EricACooper/perfrunner,hsharsha/perfrunner,mikewied/perfrunner,mikewied/perfrunner,hsharsha/perfrunner,PaintScratcher/perfrunner,vmx/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,PaintScratcher/perfrunner,vmx/perfrunner,pavel-paulau/perfrunner,dkao-cb/perfrunner,EricACooper/perfrunner,pavel-paulau/perfrunner,thomas-couchbase/perfrunner,thomas-couchbase/perfrunner,EricACooper/perfrunner,couchbase/perfrunner,dkao-cb/perfrunner,couchbase/perfrunner | perfrunner/tests/compaction.py | perfrunner/tests/compaction.py | from perfrunner.tests import TargetIterator
from perfrunner.tests.kv import KVTest
class DbCompactionTest(KVTest):
def compact(self):
self.reporter.start()
for target_settings in TargetIterator(self.cluster_spec,
self.test_config):
self.rest.trigger_bucket_compaction(target_settings.node,
target_settings.bucket)
self.monitor.monitor_bucket_fragmentation(target_settings)
self.reporter.finish('Bucket compaction')
def run(self):
self._run_load_phase() # initial load
self._run_load_phase() # extra mutations for fragmentation
| apache-2.0 | Python | |
c2982060ff7ffbbf784a37675f2caec381e9aa48 | Create quicksort.py | saru95/DSA,saru95/DSA,saru95/DSA,saru95/DSA,saru95/DSA | Python/quicksort.py | Python/quicksort.py | def quickSort(arr):
less = []
pivotList = []
more = []
if len(arr) <= 1:
return arr
else:
pivot = arr[0]
for i in arr:
if i < pivot:
less.append(i)
elif i > pivot:
more.append(i)
else:
pivotList.append(i)
less = quickSort(less)
more = quickSort(more)
return less + pivotList + more
if __name__ == "__main__":
arr=[int(x) for x in input("Enter the array elements : ").split()]
a = quickSort(arr)
print(a)
| mit | Python | |
592f9901f9125534f59efc2cb36bb4fb2bab351e | Fix typo (#8754) | jnewland/home-assistant,persandstrom/home-assistant,nugget/home-assistant,LinuxChristian/home-assistant,fbradyirl/home-assistant,GenericStudent/home-assistant,tchellomello/home-assistant,nugget/home-assistant,mKeRix/home-assistant,persandstrom/home-assistant,mezz64/home-assistant,mKeRix/home-assistant,DavidLP/home-assistant,tboyce1/home-assistant,jawilson/home-assistant,balloob/home-assistant,tboyce021/home-assistant,stefan-jonasson/home-assistant,tboyce1/home-assistant,stefan-jonasson/home-assistant,ct-23/home-assistant,aequitas/home-assistant,HydrelioxGitHub/home-assistant,mezz64/home-assistant,jamespcole/home-assistant,kennedyshead/home-assistant,soldag/home-assistant,jabesq/home-assistant,leppa/home-assistant,nkgilley/home-assistant,leppa/home-assistant,jawilson/home-assistant,nugget/home-assistant,partofthething/home-assistant,toddeye/home-assistant,kennedyshead/home-assistant,joopert/home-assistant,PetePriority/home-assistant,Cinntax/home-assistant,tchellomello/home-assistant,jamespcole/home-assistant,robbiet480/home-assistant,adrienbrault/home-assistant,turbokongen/home-assistant,HydrelioxGitHub/home-assistant,jnewland/home-assistant,auduny/home-assistant,sander76/home-assistant,tboyce1/home-assistant,toddeye/home-assistant,molobrakos/home-assistant,Teagan42/home-assistant,DavidLP/home-assistant,aronsky/home-assistant,MungoRae/home-assistant,soldag/home-assistant,rohitranjan1991/home-assistant,ct-23/home-assistant,ewandor/home-assistant,jabesq/home-assistant,stefan-jonasson/home-assistant,titilambert/home-assistant,balloob/home-assistant,postlund/home-assistant,Teagan42/home-assistant,ewandor/home-assistant,molobrakos/home-assistant,auduny/home-assistant,LinuxChristian/home-assistant,jnewland/home-assistant,nkgilley/home-assistant,MungoRae/home-assistant,lukas-hetzenecker/home-assistant,sdague/home-assistant,robbiet480/home-assistant,tinloaf/home-assistant,pschmitt/home-assistant,fbradyirl/home-ass
istant,stefan-jonasson/home-assistant,MartinHjelmare/home-assistant,home-assistant/home-assistant,ct-23/home-assistant,ct-23/home-assistant,joopert/home-assistant,balloob/home-assistant,rohitranjan1991/home-assistant,PetePriority/home-assistant,adrienbrault/home-assistant,Danielhiversen/home-assistant,titilambert/home-assistant,aequitas/home-assistant,FreekingDean/home-assistant,pschmitt/home-assistant,ct-23/home-assistant,tboyce1/home-assistant,Danielhiversen/home-assistant,MungoRae/home-assistant,rohitranjan1991/home-assistant,auduny/home-assistant,FreekingDean/home-assistant,molobrakos/home-assistant,qedi-r/home-assistant,ewandor/home-assistant,MungoRae/home-assistant,w1ll1am23/home-assistant,fbradyirl/home-assistant,MartinHjelmare/home-assistant,aequitas/home-assistant,LinuxChristian/home-assistant,Cinntax/home-assistant,home-assistant/home-assistant,jamespcole/home-assistant,LinuxChristian/home-assistant,LinuxChristian/home-assistant,DavidLP/home-assistant,MartinHjelmare/home-assistant,persandstrom/home-assistant,mKeRix/home-assistant,GenericStudent/home-assistant,w1ll1am23/home-assistant,mKeRix/home-assistant,sdague/home-assistant,HydrelioxGitHub/home-assistant,tinloaf/home-assistant,partofthething/home-assistant,PetePriority/home-assistant,postlund/home-assistant,sander76/home-assistant,MungoRae/home-assistant,tinloaf/home-assistant,jabesq/home-assistant,lukas-hetzenecker/home-assistant,tboyce021/home-assistant,aronsky/home-assistant,qedi-r/home-assistant,turbokongen/home-assistant | homeassistant/scripts/__init__.py | homeassistant/scripts/__init__.py | """Home Assistant command line scripts."""
import argparse
import importlib
import logging
import os
import sys
from typing import List
from homeassistant.bootstrap import mount_local_lib_path
from homeassistant.config import get_default_config_dir
from homeassistant.const import CONSTRAINT_FILE
from homeassistant.util.package import (
install_package, running_under_virtualenv)
def run(args: List) -> int:
"""Run a script."""
scripts = []
path = os.path.dirname(__file__)
for fil in os.listdir(path):
if fil == '__pycache__':
continue
elif os.path.isdir(os.path.join(path, fil)):
scripts.append(fil)
elif fil != '__init__.py' and fil.endswith('.py'):
scripts.append(fil[:-3])
if not args:
print('Please specify a script to run.')
print('Available scripts:', ', '.join(scripts))
return 1
if args[0] not in scripts:
print('Invalid script specified.')
print('Available scripts:', ', '.join(scripts))
return 1
script = importlib.import_module('homeassistant.scripts.' + args[0])
config_dir = extract_config_dir()
deps_dir = mount_local_lib_path(config_dir)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
for req in getattr(script, 'REQUIREMENTS', []):
if running_under_virtualenv():
returncode = install_package(req, constraints=os.path.join(
os.path.dirname(__file__), os.pardir, CONSTRAINT_FILE))
else:
returncode = install_package(
req, target=deps_dir, constraints=os.path.join(
os.path.dirname(__file__), os.pardir, CONSTRAINT_FILE))
if not returncode:
print('Aborting script, could not install dependency', req)
return 1
return script.run(args[1:]) # type: ignore
def extract_config_dir(args=None) -> str:
"""Extract the config dir from the arguments or get the default."""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-c', '--config', default=None)
args = parser.parse_known_args(args)[0]
return (os.path.join(os.getcwd(), args.config) if args.config
else get_default_config_dir())
| """Home Assistant command line scripts."""
import argparse
import importlib
import logging
import os
import sys
from typing import List
from homeassistant.bootstrap import mount_local_lib_path
from homeassistant.config import get_default_config_dir
from homeassistant.const import CONSTRAINT_FILE
from homeassistant.util.package import (
install_package, running_under_virtualenv)
def run(args: List) -> int:
"""Run a script."""
scripts = []
path = os.path.dirname(__file__)
for fil in os.listdir(path):
if fil == '__pycache__':
continue
elif os.path.isdir(os.path.join(path, fil)):
scripts.append(fil)
elif fil != '__init__.py' and fil.endswith('.py'):
scripts.append(fil[:-3])
if not args:
print('Please specify a script to run.')
print('Available scripts:', ', '.join(scripts))
return 1
if args[0] not in scripts:
print('Invalid script specified.')
print('Available scripts:', ', '.join(scripts))
return 1
script = importlib.import_module('homeassistant.scripts.' + args[0])
config_dir = extract_config_dir()
deps_dir = mount_local_lib_path(config_dir)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
for req in getattr(script, 'REQUIREMENTS', []):
if running_under_virtualenv():
returncode = install_package(req, constraints=os.path.join(
os.path.dirname(__file__), os.pardir, CONSTRAINT_FILE))
else:
returncode = install_package(
req, target=deps_dir, constraints=os.path.join(
os.path.dirname(__file__), os.pardir, CONSTRAINT_FILE))
if not returncode:
print('Aborting scipt, could not install dependency', req)
return 1
return script.run(args[1:]) # type: ignore
def extract_config_dir(args=None) -> str:
"""Extract the config dir from the arguments or get the default."""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-c', '--config', default=None)
args = parser.parse_known_args(args)[0]
return (os.path.join(os.getcwd(), args.config) if args.config
else get_default_config_dir())
| apache-2.0 | Python |
0aaed7764d743afe46af503fe5938fa718fe3abc | Set up contextmanager for db cals | nathanljustin/teamwork-analysis,nathanljustin/teamwork-analysis,nathanljustin/teamwork-analysis,nathanljustin/teamwork-analysis | teamworkApp/lib/dbCalls.py | teamworkApp/lib/dbCalls.py | # muddersOnRails()
# Sara McAllister November 5, 2-17
# Last updated: 11-5-2017
# library for SQLite database calls for teamwork analysis app
import contextlib
import sqlite3
DB = 'db/development.sqlite3'
def connect(sqlite_file):
""" Make connection to an SQLite database file """
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
return conn, c
def close(conn):
""" Commit changes and close connection to the database """
conn.commit()
conn.close()
@contextlib.contextmanager
def dbconnect(sqlite_file=DB):
conn, cursor = connect(sqlite_file)
try:
yield cursor
finally:
close(conn)
def getAllStyles():
"""Get all style entries in db ordered based on entry in db"""
with dbconnect() as cursor:
scores = cursor.execute('SELECT * FROM styles').fetchall()
return scores | mit | Python | |
6e5074cf969f8667e633ab2fa3373e83402e7610 | Add DigitalOcean | jpaugh/agithub,mozilla/agithub | agithub/DigitalOcean.py | agithub/DigitalOcean.py | # Copyright 2012-2016 Jonathan Paugh and contributors
# See COPYING for license details
from base import *
class DigitalOcean(API):
'''
Digital Ocean API
'''
def __init__(self, token=None, *args, **kwargs):
props = ConnectionProperties(
api_url = 'api.digitalocean.com',
url_prefix = '/v2',
secure_http = True,
extra_headers = {
'authorization' : self.generateAuthHeader(token)
})
self.setClient(Client(*args, **kwargs))
self.setConnectionProperties(props)
def generateAuthHeader(self, token):
if token is not None:
return "Bearer " + token
return None
| mit | Python | |
18aeea496175cb73ccf0d9f164359f75f854b512 | add background_helper | DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj | portality/background_helper.py | portality/background_helper.py | """ collections of wrapper function for helping you to create BackgroundTask
"""
from typing import Callable, Type
from portality import models
from portality.background import BackgroundApi, BackgroundTask
from portality.core import app
def execute_by_job_id(job_id,
task_factory: Callable[[models.BackgroundJob], BackgroundTask]):
""" Common way to execute BackgroundTask by job_id
"""
job = models.BackgroundJob.pull(job_id)
task = task_factory(job)
BackgroundApi.execute(task)
def submit_task_basic(background_task: Type[BackgroundTask]):
""" Common way to submit task by BackgroundTask Class
"""
user = app.config.get("SYSTEM_USERNAME")
job = background_task.prepare(user)
background_task.submit(job)
def create_job(username, action):
""" Common way to create BackgroundJob
"""
job = models.BackgroundJob()
job.user = username
job.action = action
return job
| apache-2.0 | Python | |
caef73c6d7d9853e32f5a75b321f515f3c138b6d | Create nzbget.py | Comandarr/Comandarr | comandarr/nzbget.py | comandarr/nzbget.py | apache-2.0 | Python | ||
853972cac73c3837c37a5682c2057a0aab500961 | Add tests for VAE framework | lisa-lab/pylearn2,nouiz/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,JesseLivezey/pylearn2,fyffyt/pylearn2,hyqneuron/pylearn2-maxsom,CIFASIS/pylearn2,abergeron/pylearn2,lunyang/pylearn2,TNick/pylearn2,kastnerkyle/pylearn2,JesseLivezey/pylearn2,goodfeli/pylearn2,goodfeli/pylearn2,woozzu/pylearn2,se4u/pylearn2,lancezlin/pylearn2,kose-y/pylearn2,fyffyt/pylearn2,woozzu/pylearn2,w1kke/pylearn2,chrish42/pylearn,sandeepkbhat/pylearn2,pombredanne/pylearn2,chrish42/pylearn,JesseLivezey/plankton,lamblin/pylearn2,fulmicoton/pylearn2,Refefer/pylearn2,TNick/pylearn2,junbochen/pylearn2,matrogers/pylearn2,cosmoharrigan/pylearn2,mclaughlin6464/pylearn2,JesseLivezey/plankton,jeremyfix/pylearn2,jamessergeant/pylearn2,mclaughlin6464/pylearn2,fyffyt/pylearn2,kastnerkyle/pylearn2,mkraemer67/pylearn2,fulmicoton/pylearn2,lamblin/pylearn2,JesseLivezey/pylearn2,Refefer/pylearn2,mclaughlin6464/pylearn2,alexjc/pylearn2,mclaughlin6464/pylearn2,w1kke/pylearn2,lisa-lab/pylearn2,ddboline/pylearn2,lamblin/pylearn2,goodfeli/pylearn2,caidongyun/pylearn2,lamblin/pylearn2,msingh172/pylearn2,theoryno3/pylearn2,pombredanne/pylearn2,hyqneuron/pylearn2-maxsom,theoryno3/pylearn2,chrish42/pylearn,se4u/pylearn2,hyqneuron/pylearn2-maxsom,pombredanne/pylearn2,pkainz/pylearn2,bartvm/pylearn2,caidongyun/pylearn2,fishcorn/pylearn2,TNick/pylearn2,TNick/pylearn2,se4u/pylearn2,fishcorn/pylearn2,hantek/pylearn2,hantek/pylearn2,ddboline/pylearn2,ashhher3/pylearn2,bartvm/pylearn2,pombredanne/pylearn2,aalmah/pylearn2,sandeepkbhat/pylearn2,jamessergeant/pylearn2,jeremyfix/pylearn2,fishcorn/pylearn2,fulmicoton/pylearn2,pkainz/pylearn2,junbochen/pylearn2,jamessergeant/pylearn2,kose-y/pylearn2,aalmah/pylearn2,w1kke/pylearn2,matrogers/pylearn2,sandeepkbhat/pylearn2,JesseLivezey/plankton,fyffyt/pylearn2,ddboline/pylearn2,hantek/pylearn2,theoryno3/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,alexjc/pylearn2,bartvm/pylearn2,junbochen/pylearn2,matrogers/pyl
earn2,mkraemer67/pylearn2,hantek/pylearn2,alexjc/pylearn2,lunyang/pylearn2,daemonmaker/pylearn2,abergeron/pylearn2,daemonmaker/pylearn2,hyqneuron/pylearn2-maxsom,se4u/pylearn2,fishcorn/pylearn2,lunyang/pylearn2,Refefer/pylearn2,msingh172/pylearn2,cosmoharrigan/pylearn2,caidongyun/pylearn2,daemonmaker/pylearn2,kastnerkyle/pylearn2,lisa-lab/pylearn2,lancezlin/pylearn2,aalmah/pylearn2,bartvm/pylearn2,lancezlin/pylearn2,chrish42/pylearn,w1kke/pylearn2,mkraemer67/pylearn2,pkainz/pylearn2,mkraemer67/pylearn2,kastnerkyle/pylearn2,JesseLivezey/plankton,nouiz/pylearn2,ashhher3/pylearn2,woozzu/pylearn2,caidongyun/pylearn2,abergeron/pylearn2,alexjc/pylearn2,nouiz/pylearn2,ashhher3/pylearn2,jeremyfix/pylearn2,sandeepkbhat/pylearn2,theoryno3/pylearn2,jeremyfix/pylearn2,nouiz/pylearn2,CIFASIS/pylearn2,lunyang/pylearn2,JesseLivezey/pylearn2,ashhher3/pylearn2,jamessergeant/pylearn2,lisa-lab/pylearn2,kose-y/pylearn2,fulmicoton/pylearn2,lancezlin/pylearn2,abergeron/pylearn2,daemonmaker/pylearn2,pkainz/pylearn2,Refefer/pylearn2,aalmah/pylearn2,cosmoharrigan/pylearn2,cosmoharrigan/pylearn2,woozzu/pylearn2,goodfeli/pylearn2,matrogers/pylearn2,junbochen/pylearn2,kose-y/pylearn2,ddboline/pylearn2 | pylearn2/models/tests/test_vae.py | pylearn2/models/tests/test_vae.py | import numpy
import theano
import theano.tensor as T
from pylearn2.models.mlp import MLP
from pylearn2.models.mlp import Linear, ConvRectifiedLinear
from pylearn2.models.vae import VAE
from pylearn2.models.vae.visible import BinaryVisible
from pylearn2.models.vae.latent import DiagonalGaussianPrior
from pylearn2.space import Conv2DSpace
def test_one_sample_allowed():
"""
VAE allows one sample per data point
"""
encoding_model = MLP(nvis=10, layers=[Linear(layer_name='h', dim=10,
irange=0.01)])
decoding_model = MLP(nvis=5, layers=[Linear(layer_name='h', dim=10,
irange=0.01)])
visible = BinaryVisible(decoding_model=decoding_model)
latent = DiagonalGaussianPrior(encoding_model=encoding_model,
num_samples=1)
vae = VAE(nvis=10, visible=visible, latent=latent, nhid=5)
X = T.matrix('X')
lower_bound = vae.log_likelihood_lower_bound(X)
f = theano.function(inputs=[X], outputs=lower_bound)
f(numpy.random.uniform(size=(10, 10)))
def test_multiple_samples_allowed():
"""
VAE allows multiple samples per data point
"""
encoding_model = MLP(nvis=10, layers=[Linear(layer_name='h', dim=10,
irange=0.01)])
decoding_model = MLP(nvis=5, layers=[Linear(layer_name='h', dim=10,
irange=0.01)])
visible = BinaryVisible(decoding_model=decoding_model)
latent = DiagonalGaussianPrior(encoding_model=encoding_model,
num_samples=10)
vae = VAE(nvis=10, visible=visible, latent=latent, nhid=5)
X = T.matrix('X')
lower_bound = vae.log_likelihood_lower_bound(X)
f = theano.function(inputs=[X], outputs=lower_bound)
f(numpy.random.uniform(size=(10, 10)))
def test_convolutional_compatible():
"""
VAE allows convolutional encoding networks
"""
encoding_model = MLP(
input_space=Conv2DSpace(shape=[4, 4], num_channels=1),
layers=[ConvRectifiedLinear(
layer_name='h',
output_channels=2,
kernel_shape=[2, 2],
kernel_stride=[1, 1],
pool_shape=[1, 1],
pool_stride=[1, 1],
pool_type='max',
irange=0.01
)]
)
decoding_model = MLP(nvis=5, layers=[Linear(layer_name='h', dim=16,
irange=0.01)])
visible = BinaryVisible(decoding_model=decoding_model)
latent = DiagonalGaussianPrior(encoding_model=encoding_model,
num_samples=10)
vae = VAE(nvis=16, visible=visible, latent=latent, nhid=5)
X = T.matrix('X')
lower_bound = vae.log_likelihood_lower_bound(X)
f = theano.function(inputs=[X], outputs=lower_bound)
f(numpy.random.uniform(size=(10, 16)))
| bsd-3-clause | Python | |
bf1e9434afc03a21cab5b274401a755c3b84196c | add event.brochure migration | Ircam-Web/mezzanine-organization,Ircam-Web/mezzanine-organization | migrations/mezzanine_agenda/0014_event_brochure.py | migrations/mezzanine_agenda/0014_event_brochure.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-10 13:56
from __future__ import unicode_literals
from django.db import migrations
import mezzanine.core.fields
class Migration(migrations.Migration):
dependencies = [
('mezzanine_agenda', '0013_auto_20160510_1542'),
]
operations = [
migrations.AddField(
model_name='event',
name='brochure',
field=mezzanine.core.fields.FileField(blank=True, max_length=1024, verbose_name='brochure'),
),
]
| agpl-3.0 | Python | |
fa95e31128e7dcfbbbbab5f48675536ba1b5ebf2 | Add amazon/ec2_instance_status_checks | sailthru/ansible-oss,sailthru/ansible-oss | modules/cloud/amazon/ec2_instance_status_checks.py | modules/cloud/amazon/ec2_instance_status_checks.py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
module: ec2_instance_status_checks
short description: Perform instance status checks with wait option
description:
- Perform instance status checks
- Wait for x seconds for the status to be in 'ok' state
- Check for system status and / or instance status
- Returns list of instances that passed and failed the checks
options:
id:
description:
- list of instance ID's to check
required: True
wait_timeout:
description:
- Amount of seconds to wait for instance status to chage to 'ok' state
required: False
default: 0
system_status:
description:
- Perform system_status check
required: False
default: True
instance_status:
description:
- Perform instance_status check
required: False
default: True
fail:
description:
- Fail if any instances checks did not pass
required: False
default: False
author: "Taras Lipatov tlipatov@sailthru.com"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Get instance status
- ec2_instance_status_check
id:
- i-123456
# Wait for both instnce checks to be in 'ok' state for 300 seconds
- ec2_instance_status_check
id:
- i-123456
wait_timeout: 300
# Wait 300 seconds for system status check to be in 'ok' state and fail if it is not
# after the wait_timeout is exceeded
- ec2_instance_status_check
id:
- i-123456
wait_timeout: 300
instance_status: False
system_status: True
fail: True
'''
import boto.ec2
from boto.exception import BotoServerError
# import json
import time
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
def get_status(instance):
status = {}
system_status = instance.system_status.status
instance_status = instance.instance_status.status
instance_id = instance.id
status = {
'id': instance_id,
'instance_status': instance_status,
'system_status': system_status
}
return status
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
id = dict(type='list', default=[]),
wait_timeout = dict(type='int', default=0),
system_status = dict(type='bool', default=True),
instance_status = dict(type='bool', default=True),
fail = dict(type='bool', default=False)
)
)
module = AnsibleModule(
argument_spec=argument_spec
)
id = module.params.get('id')
wait_timeout = module.params.get('wait_timeout')
filters = module.params.get("filters")
system_status_check = module.params.get("system_status")
instance_status_check = module.params.get("instance_status")
fail = module.params.get("fail")
try:
ec2_conn = ec2_connect(module)
except BotoServerError as e:
module.fail_json(msg=e.message)
status = {}
status['passed'] = []
status['failed'] = []
timeout = time.time() + wait_timeout
while True:
if len(id) == 0:
break
try:
instances = ec2_conn.get_all_instance_status(instance_ids=id, filters=filters)
except BotoServerError as e:
module.fail_json(msg=e.message)
for i in instances[:]:
instance_status = get_status(i)
if system_status_check and instance_status_check:
if instance_status['system_status'] == 'ok' and instance_status['instance_status'] == 'ok':
status['passed'].append(instance_status)
id.remove(i.id)
instances.remove(i)
if system_status_check and not instance_status_check:
if instance_status['system_status'] == 'ok':
status['passed'].append(instance_status)
id.remove(i.id)
instances.remove(i)
if not system_status_check and instance_status_check:
if instance_status['instance_status'] == 'ok':
status['passed'].append(instance_status)
id.remove(i.id)
instances.remove(i)
# test = 0
if time.time() >= timeout:
break
# test = test - 1
# Sleep for 1 second so we dont hammer the AWS API
time.sleep(1)
for i in instances[:]:
instance_status = get_status(i)
status['failed'].append(instance_status)
if len(instances) > 0 and fail is True:
module.fail_json(msg="Timeout when waiting for instance status checks", status=status)
else:
module.exit_json(status=status)
if __name__ == '__main__':
main() | apache-2.0 | Python | |
cfa474b489bc02307cc4944d30fbb34b57f843f6 | Add base connection tests | tych0/pylxd,lxc/pylxd,ivuk/pylxd,Saviq/pylxd,javacruft/pylxd,lxc/pylxd | pylxd/tests/test_connection.py | pylxd/tests/test_connection.py | # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ddt import ddt
import mock
from six.moves import http_client
import socket
import unittest
from pylxd import connection
from pylxd.tests import annotated_data
@ddt
class LXDInitConnectionTest(unittest.TestCase):
@mock.patch('socket.socket')
@mock.patch.object(http_client.HTTPConnection, '__init__')
def test_http_connection(self, mc, ms):
conn = connection.UnixHTTPConnection('/', 'host', 1234)
mc.assert_called_once_with(
conn, 'host', port=1234, strict=None, timeout=None)
conn.connect()
ms.assert_called_once_with(socket.AF_UNIX, socket.SOCK_STREAM)
ms.return_value.connect.assert_called_once_with('/')
@mock.patch('os.environ', {'HOME': '/home/foo'})
@mock.patch('ssl.wrap_socket')
@mock.patch('socket.create_connection')
def test_https_connection(self, ms, ml):
conn = connection.HTTPSConnection('host', 1234)
with mock.patch.object(conn, '_tunnel') as mc:
conn.connect()
self.assertFalse(mc.called)
ms.assert_called_once_with(
('host', 1234), socket._GLOBAL_DEFAULT_TIMEOUT, None)
ml.assert_called_once_with(
ms.return_value,
certfile='/home/foo/.config/lxc/client.crt',
keyfile='/home/foo/.config/lxc/client.key'
)
@mock.patch('os.environ', {'HOME': '/home/foo'})
@mock.patch('ssl.wrap_socket')
@mock.patch('socket.create_connection')
def test_https_proxy_connection(self, ms, ml):
conn = connection.HTTPSConnection('host', 1234)
conn._tunnel_host = 'host'
with mock.patch.object(conn, '_tunnel') as mc:
conn.connect()
mc.assert_called()
ms.assert_called_once_with(
('host', 1234), socket._GLOBAL_DEFAULT_TIMEOUT, None)
ml.assert_called_once_with(
ms.return_value,
certfile='/home/foo/.config/lxc/client.crt',
keyfile='/home/foo/.config/lxc/client.key'
)
@mock.patch('pylxd.connection.HTTPSConnection')
@mock.patch('pylxd.connection.UnixHTTPConnection')
@annotated_data(
('unix', (None,), {}, '/var/lib/lxd/unix.socket'),
('unix_path', (None,),
{'LXD_DIR': '/fake/'}, '/fake/unix.socket'),
('https', ('host',), {}, ''),
('https_port', ('host', 1234), {}, ''),
)
def test_get_connection(self, mode, args, env, path, mc, ms):
with mock.patch('os.environ', env):
conn = connection.LXDConnection(*args).get_connection()
if mode.startswith('unix'):
self.assertEqual(mc.return_value, conn)
mc.assert_called_once_with(path)
elif mode.startswith('https'):
self.assertEqual(ms.return_value, conn)
ms.assert_called_once_with(
args[0], len(args) == 2 and args[1] or 8443)
| apache-2.0 | Python | |
d0afa54fa01a2981c10f0bbdbc0f5eab4b5ad710 | test that test fullscreen/resize with 3d actions | google-code-export/los-cocos,google-code-export/los-cocos | test/test_3d_fullscreen.py | test/test_3d_fullscreen.py | # This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
import pyglet
import cocos
from cocos.director import director
from cocos.actions import *
from cocos.layer import *
class BackgroundLayer( cocos.layer.Layer ):
def __init__(self):
super( BackgroundLayer, self ).__init__()
self.img = pyglet.resource.image('background_image.png')
def on_draw( self ):
self.img.blit(0,0)
def toggle_fullscreen():
director.window.set_fullscreen( not director.window.fullscreen )
if __name__ == "__main__":
director.init( resizable=True )
director.set_depth_test()
main_scene = cocos.scene.Scene()
main_scene.add( BackgroundLayer(), z=0 )
# set a 3d grid with a grid3d action
e = WavesTiles3D( amplitude=60, waves=2, grid=(32,24), duration=3)
f = ShuffleTiles( duration=3, grid=(32,24) )
main_scene.do( e + \
CallFunc( toggle_fullscreen ) + \
Reverse(e) + \
CallFunc(toggle_fullscreen) + \
f + \
CallFunc(toggle_fullscreen) + \
Reverse(f) + \
StopGrid() \
)
director.run (main_scene)
| bsd-3-clause | Python | |
7b4a23758e40dbbd131df930b617a5c08659cca9 | Create app.py | msmexplorer/msmexplorer-d3,msmexplorer/msmexplorer-d3,cxhernandez/msmexplorer-d3,cxhernandez/msmexplorer-d3,msmexplorer/msmexplorer-d3,cxhernandez/msmexplorer-d3 | app.py | app.py | #!/Users/cu3alibre/anaconda/bin/python
import os, optparse, uuid, urlparse, time
from threading import Lock
from urllib import urlencode
from pymongo import Connection
import tornado.ioloop
from tornado.web import (RequestHandler, StaticFileHandler, Application,asynchronous)
from tornado.websocket import WebSocketHandler
from tornado.httpclient import AsyncHTTPClient
__UPLOADS__ = "./public/uploads/"
HTTP_CLIENT = AsyncHTTPClient()
def urldecode(s):
return dict(urlparse.parse_qsl(s))
def connect_to_mongo():
if 'MONGOHQ_URL' in os.environ:
c = Connection(os.environ['MONGOHQ_URL'])
else:
print "if youre developing locally, you ned to get the MONGOHQ_URL"
print 'env variable. run "heroku config" at the command line and'
print 'it should give you the right string'
c = Connection()
return c.app14240963
DATABASE = connect_to_mongo()
print DATABASE.collection_names()
def parse_cmdln():
parser=optparse.OptionParser()
parser.add_option('-p','--port',dest='port',type='int', default=8000)
(options, args) = parser.parse_args()
return (options, args)
class Session(object):
"""REALLLY CRAPPY SESSIONS FOR TORNADO VIA MONGODB
"""
collection = DATABASE.sessions
# mongo db database
def __init__(self, request):
data = {
'ip_address': request.remote_ip,
'user_agent': request.headers.get('User-Agent')
}
result = self.collection.find_one(data)
if result is None:
# create new data
self.collection.insert(data)
self.data = data
else:
self.data = result
def get(self, attr, default=None):
return self.data.get(attr, default)
def put(self, attr, value):
self.collection.remove(self.data)
self.data[attr] = value
self.collection.insert(self.data)
def __repr__(self):
return str(self.data)
class RunHandler(RequestHandler):
# how often should we allow execution
max_request_frequency = 10 # seconds
def log(self, msg):
print msg
def get(self):
if self.validate_request_frequency():
request_id = str(uuid.uuid4())
HTTP_CLIENT.fetch('localhost', method='POST', callback=self.log)
self.write()
def validate_request_frequency(self):
"""Check that the user isn't requesting to run too often"""
session = Session(self.request)
last_run = session.get('last_run')
if last_run is not None:
if (time.time() - last_run) < self.max_request_frequency:
self.write("You're being a little too eager, no?")
return False
session.put('last_run', time.time())
return True
class IndexHandler(StaticFileHandler):
def get(self):
session = Session(self.request)
session.put('indexcounts', session.get('indexcounts', 0) + 1)
return super(IndexHandler, self).get('index.html')
class UploadHandler(tornado.web.RequestHandler):
    """Accept a multipart upload ('filearg') and store it under a random name.

    The original extension is preserved; the basename is replaced by a
    UUID so uploads can never collide or traverse paths via the filename.
    """
    def post(self):
        fileinfo = self.request.files['filearg'][0]
        fname = fileinfo['filename']
        extn = os.path.splitext(fname)[1]
        cname = str(uuid.uuid4()) + extn
        # Fix: the original opened the file in text mode ('w') and never
        # closed the handle. The upload body is raw bytes, so write in
        # binary mode, and let the context manager close/flush reliably.
        with open(__UPLOADS__ + cname, 'wb') as fh:
            fh.write(fileinfo['body'])
        self.finish(cname + " has uploaded. Check %s folder" %__UPLOADS__)
# URL routing table: dynamic prototype handlers first, then static asset
# trees served straight off the public/ directory.
application = tornado.web.Application([
    (r'/run', RunHandler),
    (r"/upload", UploadHandler),
    # Index goes through the session-counting handler rather than plain static.
    (r'/', IndexHandler, {'path': 'public'}),
    (r'/js/(.*)', StaticFileHandler, {'path': 'public/js'}),
    (r'/css/(.*)', StaticFileHandler, {'path': 'public/css'}),
    (r'/assets/(.*)', StaticFileHandler, {'path': 'public/assets'}),
    (r'/uploads/(.*)', StaticFileHandler, {'path': 'public/uploads'}),
], debug=True)  # debug=True: autoreload + verbose tracebacks; not for production
if __name__ == "__main__":
    (options,args)=parse_cmdln()
    # A PORT environment variable (e.g. set by Heroku) overrides -p/--port.
    port = int(os.environ.get('PORT', options.port))
    application.listen(port)
    # NOTE(review): this logs options.port, but the server actually listens
    # on `port`, which differs whenever the PORT env var is set.
    print "MSMExplorer is starting on port %s" % options.port
    tornado.ioloop.IOLoop.instance().start()
| mit | Python | |
126af71599e14866501e2e9f479b2658fff56526 | Create ipc_lista2.06.py | any1m1c/ipc20161 | lista2/ipc_lista2.06.py | lista2/ipc_lista2.06.py | apache-2.0 | Python | ||
dac5f9e406f3c205d6ed212d4414ca55c94b8f15 | Add test for exact search with album query | pacificIT/mopidy,hkariti/mopidy,bencevans/mopidy,jmarsik/mopidy,swak/mopidy,swak/mopidy,diandiankan/mopidy,vrs01/mopidy,dbrgn/mopidy,jodal/mopidy,tkem/mopidy,quartz55/mopidy,hkariti/mopidy,ali/mopidy,swak/mopidy,mokieyue/mopidy,mokieyue/mopidy,swak/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,tkem/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,bacontext/mopidy,dbrgn/mopidy,kingosticks/mopidy,hkariti/mopidy,pacificIT/mopidy,rawdlite/mopidy,jcass77/mopidy,jodal/mopidy,kingosticks/mopidy,quartz55/mopidy,pacificIT/mopidy,jcass77/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,mokieyue/mopidy,bencevans/mopidy,mopidy/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,adamcik/mopidy,vrs01/mopidy,jodal/mopidy,ali/mopidy,vrs01/mopidy,ali/mopidy,mopidy/mopidy,quartz55/mopidy,rawdlite/mopidy,bacontext/mopidy,bacontext/mopidy,diandiankan/mopidy,jmarsik/mopidy,dbrgn/mopidy,tkem/mopidy,jmarsik/mopidy,pacificIT/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,bacontext/mopidy,diandiankan/mopidy,adamcik/mopidy,quartz55/mopidy,tkem/mopidy,vrs01/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,ali/mopidy,hkariti/mopidy,jmarsik/mopidy,bencevans/mopidy,ZenithDK/mopidy,jcass77/mopidy,mopidy/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,bencevans/mopidy,adamcik/mopidy | tests/local/test_search.py | tests/local/test_search.py | from __future__ import unicode_literals
import unittest
from mopidy.local import search
from mopidy.models import Album, Track
class LocalLibrarySearchTest(unittest.TestCase):
    def test_find_exact_with_album_query(self):
        """An exact album query returns only the tracks on that album."""
        on_album = [Track(album=Album(name='foo'))]
        library = [Track(), Track(album=Album(name='bar'))]
        library.extend(on_album)
        result = search.find_exact(library, {'album': ['foo']})
        self.assertEqual(result.tracks, tuple(on_album))
| apache-2.0 | Python | |
a99e92756a10529ca1e52d1d351bc43fea067b35 | Add fabfile for API | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | packaging/radar-api/fabfile.py | packaging/radar-api/fabfile.py | import os
import re
from fabric.api import task, put, run, cd
@task
def deploy(archive=None, name='radar-api'):
    """Upload a release archive to the remote host and switch `current` to it.

    archive: path to a local *.tar.gz release; when omitted, the
        lexicographically last .tar.gz in the working directory is used.
    name: deployment name, used for the /srv/<name>/ layout and temp paths.
    """
    if archive is None:
        # Use the latest archive by default
        archive = sorted(x for x in os.listdir('.') if x.endswith('.tar.gz'))[-1]
    # Fix: raw string so the backslash escapes reach the regex engine
    # verbatim ('\.' in a plain literal is an invalid escape on modern
    # Python). Expects names like <name>-<version>-<build>.tar.gz.
    version = re.search(r'-([^-]+-[^-]+)\.tar\.gz$', archive).group(1)
    tmp_archive_path = '/tmp/%s.tar.gz' % name
    put(archive, tmp_archive_path)
    tmp_path = '/tmp/%s' % name
    run('rm -rf {path} && mkdir -p {path}'.format(path=tmp_path))
    current_version = '/srv/{name}/current'.format(name=name)
    new_version = '/srv/{name}/{version}'.format(name=name, version=version)
    with cd(tmp_path):
        run('tar --strip-components=1 -xzf %s' % tmp_archive_path)
        run('./install.sh %s' % new_version)
    # Atomically repoint the `current` symlink, then clean up the upload.
    run('ln -sf %s %s' % (new_version, current_version))
    run('rm -rf %s' % tmp_archive_path)
| agpl-3.0 | Python | |
246ed2ba33f21b696af2a00793a521bc77da2a45 | add excel-sheet-column-title | zeyuanxy/leet-code,zeyuanxy/leet-code,EdisonAlgorithms/LeetCode,EdisonAlgorithms/LeetCode,EdisonAlgorithms/LeetCode,zeyuanxy/leet-code | vol4/excel-sheet-column-title/excel-sheet-column-title.py | vol4/excel-sheet-column-title/excel-sheet-column-title.py | import string
class Solution(object):
    def convertToTitle(self, n):
        """
        Convert a positive column number to its Excel column title
        (1 -> 'A', 26 -> 'Z', 27 -> 'AA', 703 -> 'AAA').

        :type n: int
        :rtype: str
        """
        # Fix: string.ascii_uppercase exists on both Python 2 and 3
        # (string.uppercase is Python-2-only and locale dependent).
        alphabet = string.ascii_uppercase
        ret = ''
        while n > 0:
            # Bijective base-26: shift to 0-based before taking the digit.
            ret = alphabet[(n - 1) % 26] + ret
            # Fix: floor division -- plain '/' yields a float on Python 3.
            n = (n - 1) // 26
        return ret
| mit | Python | |
2674b886b786086ec62a18b953e80ec6fceaa59d | Bump subminor version (2.0.5 -> 2.0.6) | inklesspen/endpoints-python,inklesspen/endpoints-python,cloudendpoints/endpoints-python,cloudendpoints/endpoints-python | endpoints/__init__.py | endpoints/__init__.py | #!/usr/bin/python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Endpoints module."""
# pylint: disable=wildcard-import
from api_config import api
from api_config import API_EXPLORER_CLIENT_ID
from api_config import AUTH_LEVEL
from api_config import EMAIL_SCOPE
from api_config import Issuer
from api_config import method
from api_exceptions import *
from apiserving import *
from endpoints_dispatcher import *
import message_parser
from resource_container import ResourceContainer
from users_id_token import get_current_user
from users_id_token import InvalidGetUserCall
from users_id_token import SKIP_CLIENT_ID_CHECK
__version__ = '2.0.6'
| #!/usr/bin/python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Endpoints module."""
# pylint: disable=wildcard-import
from api_config import api
from api_config import API_EXPLORER_CLIENT_ID
from api_config import AUTH_LEVEL
from api_config import EMAIL_SCOPE
from api_config import Issuer
from api_config import method
from api_exceptions import *
from apiserving import *
from endpoints_dispatcher import *
import message_parser
from resource_container import ResourceContainer
from users_id_token import get_current_user
from users_id_token import InvalidGetUserCall
from users_id_token import SKIP_CLIENT_ID_CHECK
__version__ = '2.0.5'
| apache-2.0 | Python |
0ca298f6706706637dccd4f27c56eed6e91c98ba | Rename new test class correctly and flesh out first passing tests | mattrobenolt/invoke,pyinvoke/invoke,mattrobenolt/invoke,pyinvoke/invoke,kejbaly2/invoke,frol/invoke,tyewang/invoke,pfmoore/invoke,mkusz/invoke,mkusz/invoke,pfmoore/invoke,singingwolfboy/invoke,kejbaly2/invoke,frol/invoke | tests/runners.py | tests/runners.py | import sys
from spec import Spec, trap, eq_
from invoke import Local, Context
from _utils import mock_subprocess
class Local_(Spec):
    class run:
        # @trap captures sys.stdout/stderr so the assertions can read what
        # the runner wrote; @mock_subprocess stubs the spawned process, so
        # "command" is never actually executed.
        @trap
        @mock_subprocess(out="sup")
        def out_stream_defaults_to_sys_stdout(self):
            "out_stream defaults to sys.stdout"
            Local(Context()).run("command")
            eq_(sys.stdout.getvalue(), "sup")
        @trap
        @mock_subprocess(err="sup")
        def err_stream_defaults_to_sys_stderr(self):
            "err_stream defaults to sys.stderr"
            Local(Context()).run("command")
            eq_(sys.stderr.getvalue(), "sup")
        # Placeholder specs: docstring-only bodies, not yet implemented.
        def out_stream_can_be_overridden(self):
            "out_stream can be overridden"
        def err_stream_can_be_overridden(self):
            "err_stream can be overridden"
| from spec import Spec
class Runner_(Spec):
    # Skeleton spec class: every method below is a docstring-only stub
    # awaiting implementation.
    class run:
        def out_stream_defaults_to_sys_stdout(self):
            "out_stream defaults to sys.stdout"
        def err_stream_defaults_to_sys_stderr(self):
            "err_stream defaults to sys.stderr"
        def out_stream_can_be_overridden(self):
            "out_stream can be overridden"
        def err_stream_can_be_overridden(self):
            "err_stream can be overridden"
| bsd-2-clause | Python |
f6bd8bcf45d182e3aa8edd3cf0fef5aa35125e31 | create ccc.py | piraaa/VideoDigitalWatermarking | src/ccc.py | src/ccc.py | #
# ccc.py
# Created by pira on 2017/07/28.
#
| mit | Python | |
0843eba3476809e833ea52611d9e193bf0872dbd | Add Back Builder Code | littleskunk/dataserv,F483/dataserv,Storj/dataserv | tools/Builder.py | tools/Builder.py | import os
import hashlib
import RandomIO
# config vars
my_address = "1CutsncbjcCtZKeRfvQ7bnYFVj28zeU6fo"  # payout address; root of the deterministic seed chain
my_store_path = "C://Farm/"  # directory where generated shards are written
my_shard_size = 1024*1024*128 # 128 MB
my_max_size = 1024*1024*640 # 640 MB
class Builder:
    """Deterministically fills a farmer's store with pseudo-random shards."""

    def __init__(self, address, shard_size, max_size):
        self.address = address
        self.shard_size = shard_size
        self.max_size = max_size

    @staticmethod
    def sha256(content):
        """Return the hex SHA-256 digest of a text string."""
        return hashlib.sha256(content.encode('utf-8')).hexdigest()

    def build_seed(self, height):
        """Derive the seed at `height` by hash-chaining from the address."""
        digest = self.sha256(self.address)
        for _ in range(height):
            digest = self.sha256(digest)
        return digest

    def generate_shard(self, seed, store_path, cleanup=False):
        """Write one shard derived from `seed`; return its SHA-256 hex hash."""
        blob = RandomIO.RandomIO(seed).read(self.shard_size)  # temporarily generate file
        file_hash = hashlib.sha256(blob).hexdigest()  # get SHA-256 hash
        target = store_path + file_hash
        RandomIO.RandomIO(seed).genfile(self.shard_size, target)  # save the shard
        if cleanup:
            os.remove(target)
        return file_hash

    def build(self, store_path, debug=False, cleanup=False):
        """Fill the farmer with data up to their max size."""
        shard_count = int(self.max_size / self.shard_size)
        for shard_num in range(shard_count):
            seed = self.build_seed(shard_num)
            file_hash = self.generate_shard(seed, store_path, cleanup)
            if debug:
                print("Saving seed {0} with SHA-256 hash {1}.".format(seed, file_hash))
if __name__ == "__main__": # pragma: no cover
    bucket = Builder(my_address, my_shard_size, my_max_size)
    # Second positional argument is `debug`: prints each seed/hash as saved.
    bucket.build(my_store_path, True)
| mit | Python | |
8e74d76a96b0b259bf7d8a4022fae8293749f37d | Add module uniprocessing.py | kolyat/chainsyn,kolyat/chainsyn | uniprocessing.py | uniprocessing.py | # Copyright (c) 2016 Kirill 'Kolyat' Kiselnikov
# This file is the part of chainsyn, released under modified MIT license
# See the file LICENSE.txt included in this distribution
"""
Module "uniprocessing" with patterns and universal processing function
process() - function that process one chain to another using given pattern
Valid patterns:
pattern_dna - to replicate another DNA chain
pattern_mrna - to transcript mRNA from DNA
pattern_dna_rev - to transcript DNA from mRNA (reverted transcription)
"""
pattern_dna = {
    'A': 'T',  # Adenine associates with thymine (A-T)
    'T': 'A',  # Thymine associates with adenine (T-A)
    'C': 'G',  # Cytosine associates with guanine (C-G)
    'G': 'C'   # Guanine associates with cytosine (G-C)
}
pattern_mrna = {
    'A': 'U',  # Adenine associates with uracil (A-U)
    'T': 'A',  # Thymine associates with adenine (T-A)
    'C': 'G',  # Cytosine associates with guanine (C-G)
    'G': 'C'   # Guanine associates with cytosine (G-C)
}
pattern_dna_rev = {
    'A': 'T',  # Adenine associates with thymine (A-T)
    'U': 'A',  # Uracil associates with adenine (U-A)
    'C': 'G',  # Cytosine associates with guanine (C-G)
    'G': 'C'   # Guanine associates with cytosine (G-C)
}
valid_patterns = (pattern_dna, pattern_mrna, pattern_dna_rev)


def process(chain, pattern):
    """
    Universal chain-processing function.

    Arguments:
        chain -- list of codons (3-character strings of nucleotides)
        pattern -- one of the pattern dictionaries above, mapping each
                   nucleotide of the input chain onto its complement

    Returns a list of codons (strings) forming the complementary chain.

    Raises:
        TypeError -- chain is not a list, a codon is not a string, or
                     pattern is not a dictionary
        ValueError -- chain or pattern is empty, or a codon's length != 3
        KeyError -- a codon contains an unknown nucleotide, or pattern is
                    not one of the valid patterns
    """
    # Check that the input chain is the correct type and not empty.
    if type(chain) != list:
        raise TypeError('Input chain must be list of codons')
    if not chain:
        raise ValueError('Input chain is empty')
    # Check that the input pattern is the correct type and valid.
    if type(pattern) != dict:
        raise TypeError('Input pattern must be dictionary type')
    if not pattern:
        raise ValueError('Input pattern is empty')
    if pattern not in valid_patterns:
        raise KeyError('Input pattern is not valid')
    # Validate every codon of the input chain.
    for i, codon in enumerate(chain):
        if type(codon) != str:
            # BUG FIX: the original concatenated str + type, which itself
            # raised an unhelpful TypeError; format the type explicitly.
            raise TypeError('Error in codon {0}: codon must be string, '
                            'not {1}'.format(i + 1, type(codon)))
        if len(codon) != 3:
            raise ValueError('Error in codon {0}: number of nucleotides '
                             'equal to {1}, must be 3'.format(i + 1,
                                                              len(codon)))
        for j, nucleotide in enumerate(codon):
            # BUG FIX: validate against the chosen pattern's nucleotide
            # keys. The original tested membership in valid_patterns (a
            # tuple of dicts), which is never true for a single character,
            # so every call raised KeyError.
            if nucleotide.upper() not in pattern:
                raise KeyError('Error in codon {0}, nucleotide {1}: '
                               'unexpected nucleotide - {2}'.format(
                                   i + 1, j + 1, nucleotide))
    # Map every nucleotide onto its complement, codon by codon.
    return [''.join(pattern[n.upper()] for n in codon) for codon in chain]
| mit | Python | |
8b7d1ffb2461e12b5cbce6873e51ca14f9d8cf90 | Revert "Accidentally deleted manage.py" | eSmelser/SnookR,eSmelser/SnookR,eSmelser/SnookR,eSmelser/SnookR | SnookR/manage.py | SnookR/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to the project settings; an externally-set
    # DJANGO_SETTINGS_MODULE still wins because of setdefault.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "SnookR.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| mit | Python | |
202c8472298780cbf80bf3253550e4277236c0c9 | add simple noise fitting code | ColumbiaCMB/kid_readout,ColumbiaCMB/kid_readout | kid_readout/analysis/noise_fit.py | kid_readout/analysis/noise_fit.py | import numpy as np
import lmfit
from kid_readout.analysis import fitter
print "k"  # FIXME(review): stray Python 2 debug print executed at import time; remove
def lorenz(f, fc, a):
    """Lorentzian profile with amplitude `a` and corner frequency `fc`."""
    ratio = f / fc
    return a / (1 + ratio * ratio)
def simple_noise_model(params, f):
    """Lorentzian plus a white-noise floor, driven by lmfit-style params.

    Reads the 'A' (amplitude), 'fc' (corner frequency) and 'nw' (white
    noise level) entries of `params`.
    """
    amplitude = params['A'].value
    corner = params['fc'].value
    white = params['nw'].value
    return lorenz(f, corner, amplitude) + white
def simple_noise_guess(f, S):
    """Build starting-point lmfit Parameters for the single-Lorentzian model.

    Amplitude starts at the spectrum's span, the white-noise floor at the
    midpoint of the data range, and fc at a fixed 500 Hz guess.
    """
    params = lmfit.Parameters()
    span = S.max() - S.min()
    params.add('A', span, min=0, max=S.max())
    midpoint = (S.max() + S.min()) / 2.0
    params.add('nw', midpoint, min=S.min() / 2, max=S.max())
    params.add('fc', 500.0, min=10, max=1e4)
    return params
class SingleLorenzModel(fitter.Fitter):
    """Fitter specialized for a single Lorentzian plus a white-noise floor."""

    def __init__(self, f, data, model=simple_noise_model, guess=simple_noise_guess,
                 functions=None, mask=None, errors=None, weight_by_errors=True):
        # Fix: functions defaulted to a mutable {}, which Python evaluates
        # once -- every instance created without an explicit argument would
        # share (and potentially mutate) the same dict. Use a None sentinel.
        if functions is None:
            functions = {}
        super(SingleLorenzModel, self).__init__(
            f, data, model=model, guess=guess, functions=functions,
            mask=mask, errors=errors, weight_by_errors=weight_by_errors)
| bsd-2-clause | Python | |
5879e59f34e31707f207c588143711dbdf18ee8b | remove login_required for register page | gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin | app/main/views/index.py | app/main/views/index.py | from flask import render_template
from flask_login import login_required
from app.main import main
# --- Prototype UI routes ----------------------------------------------------
# Each handler simply renders a static template; @login_required marks the
# pages that need an authenticated session. Registration is deliberately
# reachable while signed out (no @login_required on `register`).
@main.route('/')
def index():
    return render_template('signedout.html')
@main.route("/govuk")
def govuk():
    return render_template('govuk_template.html')
@main.route("/register")
def register():
    return render_template('register.html')
@main.route("/register-from-invite")
@login_required
def registerfrominvite():
    return render_template('register-from-invite.html')
# Verification flow (email / mobile).
@main.route("/verify")
@login_required
def verify():
    return render_template('verify.html')
@main.route("/verify-mobile")
@login_required
def verifymobile():
    return render_template('verify-mobile.html')
@main.route("/text-not-received-2")
def textnotreceived2():
    return render_template('text-not-received-2.html')
# Signed-in service pages.
@main.route("/dashboard")
@login_required
def dashboard():
    return render_template('dashboard.html')
@main.route("/add-service")
@login_required
def addservice():
    return render_template('add-service.html')
@main.route("/two-factor")
@login_required
def twofactor():
    return render_template('two-factor.html')
# Message sending / checking flows.
@main.route("/send-sms")
def sendsms():
    return render_template('send-sms.html')
@main.route("/check-sms")
def checksms():
    return render_template('check-sms.html')
@main.route("/email-not-received")
def emailnotreceived():
    return render_template('email-not-received.html')
@main.route("/text-not-received")
def textnotreceived():
    return render_template('text-not-received.html')
@main.route("/send-email")
def sendemail():
    return render_template('send-email.html')
@main.route("/check-email")
def checkemail():
    return render_template('check-email.html')
# Jobs and notifications.
@main.route("/jobs")
def showjobs():
    return render_template('jobs.html')
@main.route("/jobs/job")
def showjob():
    return render_template('job.html')
@main.route("/jobs/job/notification")
def shownotification():
    return render_template('notification.html')
# Account management.
@main.route("/forgot-password")
def forgotpassword():
    return render_template('forgot-password.html')
@main.route("/new-password")
def newpassword():
    return render_template('new-password.html')
@main.route("/user-profile")
def userprofile():
    return render_template('user-profile.html')
@main.route("/manage-users")
def manageusers():
    return render_template('manage-users.html')
@main.route("/service-settings")
def servicesettings():
    return render_template('service-settings.html')
@main.route("/api-keys")
def apikeys():
    return render_template('api-keys.html')
@main.route("/verification-not-received")
def verificationnotreceived():
    return render_template('verification-not-received.html')
| from flask import render_template
from flask_login import login_required
from app.main import main
@main.route('/')
def index():
return render_template('signedout.html')
@main.route("/govuk")
def govuk():
return render_template('govuk_template.html')
@main.route("/register")
@login_required
def register():
return render_template('register.html')
@main.route("/register-from-invite")
@login_required
def registerfrominvite():
return render_template('register-from-invite.html')
@main.route("/verify")
@login_required
def verify():
return render_template('verify.html')
@main.route("/verify-mobile")
@login_required
def verifymobile():
return render_template('verify-mobile.html')
@main.route("/text-not-received-2")
def textnotreceived2():
return render_template('text-not-received-2.html')
@main.route("/dashboard")
@login_required
def dashboard():
return render_template('dashboard.html')
@main.route("/add-service")
@login_required
def addservice():
return render_template('add-service.html')
@main.route("/two-factor")
@login_required
def twofactor():
return render_template('two-factor.html')
@main.route("/send-sms")
def sendsms():
return render_template('send-sms.html')
@main.route("/check-sms")
def checksms():
return render_template('check-sms.html')
@main.route("/email-not-received")
def emailnotreceived():
return render_template('email-not-received.html')
@main.route("/text-not-received")
def textnotreceived():
return render_template('text-not-received.html')
@main.route("/send-email")
def sendemail():
return render_template('send-email.html')
@main.route("/check-email")
def checkemail():
return render_template('check-email.html')
@main.route("/jobs")
def showjobs():
return render_template('jobs.html')
@main.route("/jobs/job")
def showjob():
return render_template('job.html')
@main.route("/jobs/job/notification")
def shownotification():
return render_template('notification.html')
@main.route("/forgot-password")
def forgotpassword():
return render_template('forgot-password.html')
@main.route("/new-password")
def newpassword():
return render_template('new-password.html')
@main.route("/user-profile")
def userprofile():
return render_template('user-profile.html')
@main.route("/manage-users")
def manageusers():
return render_template('manage-users.html')
@main.route("/service-settings")
def servicesettings():
return render_template('service-settings.html')
@main.route("/api-keys")
def apikeys():
return render_template('api-keys.html')
@main.route("/verification-not-received")
def verificationnotreceived():
return render_template('verification-not-received.html')
| mit | Python |
42c496c78fe2dcc06df65641ba1df33c02e41533 | Revert "updates" | jamesacampbell/python-examples,jamesacampbell/python-examples | cvlib_example.py | cvlib_example.py | """Example using cvlib."""
import cvlib as cv
from cvlib.object_detection import draw_bbox
import cv2
# Load the demo image (both names alias the same array).
img = image = cv2.imread("assets/sv.jpg")
# NOTE(review): 'largess' does not look like a documented cvlib model name
# (e.g. 'yolov3' / 'yolov4-tiny' are typical) -- confirm against cvlib docs.
bbox, label, conf = cv.detect_common_objects(img, model="largess")
print(label)
# Draw the detected bounding boxes/labels and save the annotated copy.
output_image = draw_bbox(img, bbox, label, conf)
cv2.imwrite("cvlib-example-out.jpg", output_image)
| mit | Python | |
5831dee7a6c14c85933658610ae991fbc0af9442 | Add basic tests for stream.me plugin (#391) | wlerin/streamlink,javiercantero/streamlink,gravyboat/streamlink,bastimeyer/streamlink,mmetak/streamlink,beardypig/streamlink,bastimeyer/streamlink,melmorabity/streamlink,chhe/streamlink,beardypig/streamlink,streamlink/streamlink,back-to/streamlink,javiercantero/streamlink,chhe/streamlink,gravyboat/streamlink,wlerin/streamlink,mmetak/streamlink,streamlink/streamlink,back-to/streamlink,melmorabity/streamlink | tests/test_plugin_streamme.py | tests/test_plugin_streamme.py | import unittest
from streamlink.plugins.streamme import StreamMe
class TestPluginStreamMe(unittest.TestCase):
    def test_can_handle_url(self):
        """stream.me URLs match; look-alike hosts and other sites do not."""
        # should match
        self.assertTrue(StreamMe.can_handle_url("http://www.stream.me/nameofstream"))
        # shouldn't match
        rejected = [
            "http://www.livestream.me/nameofstream",
            "http://www.streamme.com/nameofstream",
            "http://www.streamme.me/nameofstream",
            "http://www.youtube.com/",
        ]
        for url in rejected:
            self.assertFalse(StreamMe.can_handle_url(url))
| bsd-2-clause | Python | |
2b9909004a761047fd935ad51b06102032dbe30a | Create __init__.py | numenta/nupic.research,numenta/nupic.research | src/nupic/research/frameworks/htm/temporal_memory/__init__.py | src/nupic/research/frameworks/htm/temporal_memory/__init__.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2022, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .temporal_memory_apical_tiebreak import TemporalMemoryApicalTiebreak
from .sequence_memory_apical_tiebreak import SequenceMemoryApicalTiebreak
#from temporal_memory.pair_memory_apical_tiebreak import PairMemoryApicalTiebreak
| agpl-3.0 | Python | |
09ad948086712793cc0bbf81d5515ef31823a085 | test for committee attendance | Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2 | tests/unit/test_attendance.py | tests/unit/test_attendance.py | from tests import PMGTestCase
from pmg.models import db, CommitteeMeeting, CommitteeMeetingAttendance, Committee, House, Province, Party, Member
class TestCommitteeMeetingAttendance(PMGTestCase):
    """Fixtures and tests for committee-meeting attendance aggregation."""
    def setUp(self):
        # Build a minimal graph: one committee with three meetings (two in
        # the "old" parliament, one in the "new") and two members whose
        # attendance records feed the trend calculations under test.
        super(TestCommitteeMeetingAttendance, self).setUp()
        province = Province(name='Western Cape')
        db.session.add(province)
        party = Party(name='Global Party')
        db.session.add(party)
        house = House(
            name='National Assembly', sphere='national', name_short='na')
        db.session.add(house)
        db.session.commit()
        committee = Committee(name='Arts and Culture', house=house)
        db.session.add(committee)
        db.session.commit()
        old_parliament_meeting = CommitteeMeeting(
            title='Jan Arts 1', date='2019-01-01', committee=committee)
        db.session.add(old_parliament_meeting)
        old_parliament_meeting_two = CommitteeMeeting(
            title='Feb Arts 2', date='2019-02-01', committee=committee)
        db.session.add(old_parliament_meeting_two)
        new_parliament_meeting = CommitteeMeeting(
            title='Arts 2', date='2019-08-01', committee=committee)
        db.session.add(new_parliament_meeting)
        db.session.commit()
        jabu = Member(
            name='Jabu',
            current=True,
            party=party,
            house=house,
            province=province)
        mike = Member(
            name='Mike',
            current=True,
            party=party,
            house=house,
            province=province)
        db.session.add(jabu)
        db.session.add(mike)
        db.session.commit()
        # January: both present ('P'); February: both absent ('A');
        # August (new parliament): Jabu present, Mike absent.
        attendance_one_jabu = CommitteeMeetingAttendance(
            attendance='P',
            member=jabu,
            meeting=old_parliament_meeting,
            created_at='2019-01-01')
        db.session.add(attendance_one_jabu)
        attendance_one_mike = CommitteeMeetingAttendance(
            attendance='P',
            member=mike,
            meeting=old_parliament_meeting,
            created_at='2019-01-01')
        db.session.add(attendance_one_mike)
        feb_attend_jabu = CommitteeMeetingAttendance(
            attendance='A',
            member=jabu,
            meeting=old_parliament_meeting_two,
            created_at='2019-02-01')
        db.session.add(feb_attend_jabu)
        feb_attend_mike = CommitteeMeetingAttendance(
            attendance='A',
            member=mike,
            meeting=old_parliament_meeting_two,
            created_at='2019-02-01')
        db.session.add(feb_attend_mike)
        attendance_two_jabu = CommitteeMeetingAttendance(
            attendance='P',
            member=jabu,
            meeting=new_parliament_meeting,
            created_at='2019-08-01')
        db.session.add(attendance_two_jabu)
        attendance_two_mike = CommitteeMeetingAttendance(
            attendance='A',
            member=mike,
            meeting=new_parliament_meeting,
            created_at='2019-08-01')
        db.session.add(attendance_two_mike)
        db.session.commit()
    def test_attendance_rank_for_committee(self):
        # NOTE(review): placeholder -- `return True` asserts nothing, so
        # this test always passes; implement or remove.
        return True
    def test_committee_attendance_trends(self):
        # NOTE(review): `committee_attendence_trends` [sic] must match the
        # model's method name; `1L`/`2L` literals make this Python-2-only.
        committee = Committee.query.filter_by(name='Arts and Culture').first()
        current_attendance = CommitteeMeetingAttendance.committee_attendence_trends(
            committee.id, 'current')
        historical_attendance = CommitteeMeetingAttendance.committee_attendence_trends(
            committee.id, 'historical')
        self.assertEqual([(2019.0, 1L, 0.5, 2.0)], current_attendance)
        self.assertEqual([(2019.0, 2L, 0.5, 2.0)], historical_attendance)
| apache-2.0 | Python | |
4ebfc2e6ffb21fd55ef1fc4f1fd836153b2da545 | Add tests for all exceptions | rainmattertech/pykiteconnect | tests/unit/test_exceptions.py | tests/unit/test_exceptions.py | # coding: utf-8
import pytest
import responses
import kiteconnect.exceptions as ex
@responses.activate
def test_wrong_json_response(kiteconnect):
    """Malformed JSON from the API must surface as a DataException."""
    responses.add(
        responses.GET,
        "%s%s" % (kiteconnect.root, kiteconnect._routes["portfolio.positions"]),
        body="{a:b}",
        content_type="application/json"
    )
    with pytest.raises(ex.DataException) as exc:
        positions = kiteconnect.positions()
    # NOTE(review): pytest's ExceptionInfo exposes the exception via
    # exc.value; exc.message may not exist -- verify against the pytest
    # version in use. `positions` is also assigned but never read.
    assert exc.message == "Couldn't parse the JSON response "\
        "received from the server: {a:b}"
@responses.activate
def test_wrong_content_type(kiteconnect):
    """A non-JSON Content-Type must surface as a DataException."""
    rdf_data = "<rdf:Description rdf:about=''><rdfs:label>zerodha</rdfs:label></rdf:Description"
    responses.add(
        responses.GET,
        "%s%s" % (kiteconnect.root, kiteconnect._routes["portfolio.positions"]),
        body=rdf_data,
        content_type="application/rdf+xml"
    )
    with pytest.raises(ex.DataException) as exc:
        positions = kiteconnect.positions()
    # NOTE(review): see test_wrong_json_response -- exc.message vs
    # exc.value needs confirming for the pinned pytest version.
    assert exc.message == "Unknown Content-Type ({content_type}) with response: ({content})".format(
        content_type='application/rdf+xml',
        content=rdf_data
    )
# Each error_type returned by the API should map onto the matching
# exception class in kiteconnect.exceptions; unknown types fall back to
# GeneralException (via getattr's default below).
@pytest.mark.parametrize("error_type,message", [
    ('PermissionException', 'oops! permission issue'),
    ('OrderException', 'oops! cannot place order'),
    ('InputException', 'missing or invalid params'),
    ('NetworkException', 'oopsy doopsy network issues damn!'),
    ('CustomException', 'this is an exception i just created')
])
@responses.activate
def test_native_exceptions(error_type, message, kiteconnect):
    responses.add(
        responses.GET,
        "%s%s" % (kiteconnect.root, kiteconnect._routes["portfolio.positions"]),
        body='{"error_type": "%s", "message": "%s"}' % (error_type, message),
        content_type="application/json"
    )
    with pytest.raises(getattr(ex, error_type, ex.GeneralException)) as exc:
        positions = kiteconnect.positions()
    # NOTE(review): exc.message vs exc.value -- confirm for this pytest.
    assert exc.message == message
| mit | Python | |
cec436eba6174fbf52dc7908e1d5218cd9bea1e7 | add tests around internal classes of Vcf class | ernfrid/svtools,abelhj/svtools,ernfrid/svtools,abelhj/svtools,abelhj/svtools,abelhj/svtools,hall-lab/svtools,hall-lab/svtools,hall-lab/svtools | tests/vcf_tests/file_tests.py | tests/vcf_tests/file_tests.py | from unittest import TestCase, main
from svtools.vcf.file import Vcf
class Test_Format(TestCase):
    def test_init(self):
        """Vcf.Format stores its fields and renders the FORMAT header line."""
        fmt = Vcf.Format('GT', 1, 'String', '"Genotype"')
        self.assertEqual(fmt.id, 'GT')
        self.assertEqual(fmt.number, '1')
        self.assertEqual(fmt.type, 'String')
        self.assertEqual(fmt.desc, 'Genotype')
        self.assertEqual(
            fmt.hstring,
            '##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">')
class Test_Info(TestCase):
    def test_init(self):
        """Vcf.Info stores its fields and renders the INFO header line."""
        info = Vcf.Info('NS', 1, 'Integer', '"Number of Samples With Data"')
        self.assertEqual(info.id, 'NS')
        self.assertEqual(info.number, '1')
        self.assertEqual(info.type, 'Integer')
        self.assertEqual(info.desc, 'Number of Samples With Data')
        self.assertEqual(
            info.hstring,
            '##INFO=<ID=NS,Number=1,Type=Integer,Description="Number of Samples With Data">')
class Test_Alt(TestCase):
    def test_init(self):
        """Vcf.Alt stores its fields and renders the ALT header line."""
        alt = Vcf.Alt('DEL:ME:ALU', '"Deletion of ALU element"')
        self.assertEqual(alt.id, 'DEL:ME:ALU')
        self.assertEqual(alt.desc, 'Deletion of ALU element')
        self.assertEqual(
            alt.hstring,
            '##ALT=<ID=DEL:ME:ALU,Description="Deletion of ALU element">')
if __name__ == "__main__":
    main()  # unittest.main: discover and run the TestCases above
| mit | Python | |
f131cd221b2ce6fc144b2aa9882cb0ad1b116675 | Add (failing) tests for the dashboard | SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange | tests/views/test_dashboard.py | tests/views/test_dashboard.py | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for dashboard view.
"""
__authors__ = [
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
]
import httplib
from tests.profile_utils import GSoCProfileHelper
from tests.test_utils import DjangoTestCase
# TODO: perhaps we should move this out?
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class DashboardTest(DjangoTestCase):
    """Exercises the GSoC dashboard page."""

    def setUp(self):
        # Imported locally so the model is only loaded once Django is ready.
        from soc.modules.gsoc.models.program import GSoCProgram
        self.gsoc = seeder_logic.seed(GSoCProgram)
        self.data = GSoCProfileHelper(self.gsoc)

    def assertDashboardTemplatesUsed(self, response):
        """Asserts that all the templates from the dashboard were used."""
        self.assertGSoCTemplatesUsed(response)
        self.assertTemplateUsed(response, 'v2/modules/gsoc/dashboard/base.html')

    def testDasbhoardNoRole(self):
        dashboard_url = '/gsoc/dashboard/' + self.gsoc.key().name()
        resp = self.client.get(dashboard_url)
        self.assertDashboardTemplatesUsed(resp)

    def testDashboardWithProfile(self):
        self.data.createProfile()
        dashboard_url = '/gsoc/dashboard/' + self.gsoc.key().name()
        resp = self.client.get(dashboard_url)
        self.assertDashboardTemplatesUsed(resp)
| apache-2.0 | Python | |
920870a310a3c32b851dfe5927aad48d7b86b0c0 | Update model verbose names | dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api | project/bhs/migrations/0008_auto_20190514_0904.py | project/bhs/migrations/0008_auto_20190514_0904.py | # Generated by Django 2.1.8 on 2019-05-14 16:04
from django.db import migrations
class Migration(migrations.Migration):
    """Sets explicit verbose_name_plural options on the BHS models.

    Auto-generated by Django (2.1.8); only model *options* change, so this
    migration touches no database tables.
    """

    dependencies = [
        ('bhs', '0007_auto_20190513_1349'),
    ]

    operations = [
        # Groups additionally keep their tree-based default ordering.
        migrations.AlterModelOptions(
            name='group',
            options={'ordering': ['tree_sort'], 'verbose_name_plural': 'Groups'},
        ),
        migrations.AlterModelOptions(
            name='member',
            options={'verbose_name_plural': 'Members'},
        ),
        migrations.AlterModelOptions(
            name='officer',
            options={'verbose_name_plural': 'Officers'},
        ),
        migrations.AlterModelOptions(
            name='person',
            options={'verbose_name_plural': 'Persons'},
        ),
    ]
| bsd-2-clause | Python | |
226c2f36b9cc8257ce99bd15648be4aba2ccb606 | Move utility functions for checking passported benefits into separate module | ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public | cla_public/apps/checker/utils.py | cla_public/apps/checker/utils.py | from cla_public.apps.checker.constants import PASSPORTED_BENEFITS, \
NASS_BENEFITS
def passported(benefits):
    """Return True if any of *benefits* is a passported benefit."""
    # isdisjoint short-circuits on the first common element.
    return not set(benefits).isdisjoint(PASSPORTED_BENEFITS)
def nass(benefits):
    """Return True if any of *benefits* is a NASS benefit."""
    # isdisjoint short-circuits on the first common element.
    return not set(benefits).isdisjoint(NASS_BENEFITS)
| mit | Python | |
6185e7bbc12c3bc9aba1efcfd53275cc109f2e91 | Add a snippet. | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/pyqt/pyqt5/widget_QPainter_draw_polygon.py | python/pyqt/pyqt5/widget_QPainter_draw_polygon.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# See https://www.youtube.com/watch?v=96FBrNR3XOY
import sys
from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QLabel, QVBoxLayout
from PyQt5.QtGui import QPainter, QBrush, QColor, QPen, QPolygon
from PyQt5.QtCore import Qt, QPoint
class MyPaintWidget(QWidget):
    """Widget painting a red, black-outlined polygon on a white background."""

    def __init__(self):
        super().__init__()

        # Give the widget a solid white background.
        self.setAutoFillBackground(True)
        palette = self.palette()
        palette.setColor(self.backgroundRole(), Qt.white)
        self.setPalette(palette)

    def paintEvent(self, event):
        painter = QPainter(self)
        # Smooth the polygon edges.
        # See https://wiki.python.org/moin/PyQt/Painting%20and%20clipping%20demonstration
        painter.setRenderHint(QPainter.Antialiasing)
        painter.setPen(QPen(Qt.black, 5, Qt.SolidLine))
        painter.setBrush(QBrush(Qt.red, Qt.SolidPattern))

        vertices = [(10, 10), (20, 100), (100, 50), (150, 10), (100, 100)]
        polygon = QPolygon([QPoint(x, y) for (x, y) in vertices])
        painter.drawPolygon(polygon)
if __name__ == '__main__':
    app = QApplication(sys.argv)

    widget = MyPaintWidget()
    widget.show()

    # The mainloop of the application. The event handling starts from this point.
    # exec_() carries a trailing underscore because "exec" was a Python 2 keyword.
    exit_code = app.exec_()

    # sys.exit() ensures a clean exit; the environment is informed how the
    # application ended.
    sys.exit(exit_code)
| mit | Python | |
d56b94ef5acaafcaf11ebcb4ccb5b61390448974 | Update loading to use results.AddValue(...) | M4sse/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,chuan9/chromium-crosswalk,jaruba/chromium.src,littlstar/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,de
dnal/chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,dednal/chromium.src,Just-D/chromium-1,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,axinging/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Just-D/chromium-1,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,dednal/chromium.src,Pluto-tv/c
hromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,hgl888/chr
omium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl | tools/perf/metrics/loading.py | tools/perf/metrics/loading.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import Metric
from telemetry.value import scalar
class LoadingMetric(Metric):
  """A metric for page loading time based entirely on window.performance."""

  def Start(self, page, tab):
    raise NotImplementedError()

  def Stop(self, page, tab):
    raise NotImplementedError()

  def AddResults(self, tab, results):
    """Reads window.performance.timing and reports navigation/phase values.

    All values are reported in milliseconds. Marker values are relative to
    navigationStart; phase values are durations between two timing events.
    """
    load_timings = tab.EvaluateJavaScript('window.performance.timing')

    def _add(name, value_ms, important=True):
      # Wraps the repeated ScalarValue boilerplate in one place.
      results.AddValue(scalar.ScalarValue(
          results.current_page, name, 'ms', value_ms, important=important))

    nav_start = load_timings['navigationStart']

    # NavigationStart relative markers in milliseconds.
    _add('load_start', float(load_timings['loadEventStart']) - nav_start)
    _add('dom_content_loaded_start',
         float(load_timings['domContentLoadedEventStart']) - nav_start)
    _add('fetch_start', float(load_timings['fetchStart']) - nav_start,
         important=False)
    _add('request_start', float(load_timings['requestStart']) - nav_start,
         important=False)

    # Phase measurements in milliseconds.
    _add('domain_lookup_duration',
         float(load_timings['domainLookupEnd']) -
         load_timings['domainLookupStart'],
         important=False)
    _add('connect_duration',
         float(load_timings['connectEnd']) - load_timings['connectStart'],
         important=False)
    _add('request_duration',
         float(load_timings['responseStart']) - load_timings['requestStart'],
         important=False)
    _add('response_duration',
         float(load_timings['responseEnd']) - load_timings['responseStart'],
         important=False)
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import Metric
class LoadingMetric(Metric):
  """A metric for page loading time based entirely on window.performance.

  Older variant using the results.Add(name, units, value) API; all values
  below are reported in milliseconds.
  """

  def Start(self, page, tab):
    # Subclasses are expected to implement measurement start.
    raise NotImplementedError()

  def Stop(self, page, tab):
    # Subclasses are expected to implement measurement stop.
    raise NotImplementedError()

  def AddResults(self, tab, results):
    load_timings = tab.EvaluateJavaScript('window.performance.timing')

    # NavigationStart relative markers in milliseconds.
    load_start = (
        float(load_timings['loadEventStart']) - load_timings['navigationStart'])
    results.Add('load_start', 'ms', load_start)

    dom_content_loaded_start = (
        float(load_timings['domContentLoadedEventStart']) -
        load_timings['navigationStart'])
    results.Add('dom_content_loaded_start', 'ms', dom_content_loaded_start)

    fetch_start = (
        float(load_timings['fetchStart']) - load_timings['navigationStart'])
    results.Add('fetch_start', 'ms', fetch_start, data_type='unimportant')

    request_start = (
        float(load_timings['requestStart']) - load_timings['navigationStart'])
    results.Add('request_start', 'ms', request_start, data_type='unimportant')

    # Phase measurements in milliseconds.
    domain_lookup_duration = (
        float(load_timings['domainLookupEnd']) -
        load_timings['domainLookupStart'])
    results.Add('domain_lookup_duration', 'ms', domain_lookup_duration,
                data_type='unimportant')

    connect_duration = (
        float(load_timings['connectEnd']) - load_timings['connectStart'])
    results.Add('connect_duration', 'ms', connect_duration,
                data_type='unimportant')

    request_duration = (
        float(load_timings['responseStart']) - load_timings['requestStart'])
    results.Add('request_duration', 'ms', request_duration,
                data_type='unimportant')

    response_duration = (
        float(load_timings['responseEnd']) - load_timings['responseStart'])
    results.Add('response_duration', 'ms', response_duration,
                data_type='unimportant')
| bsd-3-clause | Python |
84009965977b9b0508ae5ec50b28acd0c2828ae2 | Add untested script to download fasta files from ncbi with the GI | maubarsom/biotico-tools,maubarsom/biotico-tools,maubarsom/biotico-tools,maubarsom/biotico-tools,maubarsom/biotico-tools | python/download_fastas_ncbi.py | python/download_fastas_ncbi.py | #!/usr/bin/env python
"""
Download fastas from a list of gi identifiers of the form
gi|xxxx| from a specified NCBI database
Author: Mauricio Barrientos-Somarribas
Email: mauricio.barrientos@ki.se
Copyright 2014 Mauricio Barrientos-Somarribas
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import requests
import argparse
import os.path
import sys
#Time of script execution and logging module
import time
import logging
import re
#****************Begin of Main ***************
def main(args):
    """Fetch FASTA records for the GIs listed in ``args.input_file``.

    Accepts either bare numeric GIs (one per line) or ids containing a
    ``gi|xxxx|`` token; duplicate GIs are collapsed before issuing a single
    batched request to the NCBI efetch service, whose body is written to
    ``args.output_file``.
    """
    gi_regex = re.compile(r"gi\|(.+?)\|")
    numeric_gi_regex = re.compile(r"^[0-9]+$")

    processed_gi_list = []
    for line in args.input_file:
        # If the id is just a number, assume it is already a GI.
        if numeric_gi_regex.match(line):
            processed_gi_list.append(line.rstrip("\n"))
        else:
            # Otherwise extract the GI number from a gi|xxx| structure.
            regex_hit = gi_regex.search(line)
            if regex_hit:
                processed_gi_list.append(regex_hit.group(1))

    # Send the request to the NCBI via eutils.
    gis_to_get = ",".join(set(processed_gi_list))
    logging.info("Extracting [{}] from NCBI {}".format(gis_to_get, args.db))
    r = requests.get(
        "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi"
        "?db={}&id={}&rettype=fasta&retmode=text".format(args.db, gis_to_get))

    if r.status_code == 200:
        try:
            args.output_file.write(str(r.content.decode()))
        except Exception:
            # BUG FIX: the original called e.str(), which does not exist on
            # exception objects; log the message plus traceback instead.
            logging.exception("There was an error writing the output")
    else:
        # BUG FIX: the original read r.statuscode, which is not an attribute
        # of a requests Response; the correct name is status_code.
        logging.error("The request was returned with status code {}".format(
            r.status_code))
def validate_args(args):
    """Placeholder for CLI argument validation; currently accepts everything."""
    return True
if __name__ == '__main__':
    # Process command line arguments.
    parser = argparse.ArgumentParser(description="WRITE DESCRIPTION HERE",epilog= "TEXT AFTER THE HELP")
    # Input defaults to stdin so the script can be used in a pipeline.
    parser.add_argument("input_file",help="Input file",nargs="?", type=argparse.FileType('r'), default=sys.stdin)
    parser.add_argument("-o","--output-file", type=argparse.FileType('w'), default=sys.stdout, help="Name of the output file" )
    parser.add_argument("-l","--log-file", default=None, help="Name of the log file")
    parser.add_argument("--db", default="nuccore", help="Name of the log file")
    args = parser.parse_args()

    if validate_args(args):
        # Initialize log: write to the requested file, otherwise to stderr.
        log_level = logging.INFO
        if args.log_file:
            logging.basicConfig(filename=args.log_file,level=log_level)
        else:
            logging.basicConfig(stream=sys.stderr,level=log_level)

        # Time the whole run and report the elapsed wall-clock time.
        time_start = time.time()
        main( args )
        logging.info("Time elapsed: "+str(time.time() - time_start)+"\n")
    else:
        logging.error("Invalid arguments. Exiting script\n")
        sys.exit(1)
| apache-2.0 | Python | |
7d917f7cbf6f00eec93d97adcdb545ae55c5b345 | Add core actions file | opps/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps | opps/core/actions.py | opps/core/actions.py | # coding: utf-8
import csv
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
def export_to_csv(modeladmin, request, queryset):
    """Admin action: export the selected queryset rows as a CSV download.

    Only staff and superuser accounts may export; for other users (and for
    empty querysets) the action implicitly returns None, as before.
    """
    # Limit only staff and super_user accounts.
    if request.user.is_staff or request.user.is_superuser:
        if queryset.count() > 0:
            # Generate response and a timestamped filename.
            response = HttpResponse(mimetype="text/csv")
            # BUG FIX: the date part used %M (minutes) where the month
            # directive %m was intended.
            today = timezone.now().strftime("%Y-%m-%d_%H:%M:%S")
            filename = "{}-{}.csv".format(queryset.model, today)
            response["Content-Disposition"] = ('attachment; filename="%s"' %
                                               filename)
            writer = csv.writer(response)

            # Header row: the model's column names.
            columns = [field.name for field in queryset[0]._meta.fields]
            writer.writerow(columns)

            # Data rows.
            for obj in queryset:
                writer.writerow([_generate_value(obj, col) for col in columns])
            return response

export_to_csv.short_description = _(u"Export results in CSV")
def _generate_value(obj, column):
    """Return *obj*'s value for *column*, ASCII-encoded when it is a string."""
    value = getattr(obj, column)
    if not isinstance(value, basestring):
        return value
    # Non-ASCII characters are silently dropped (Python 2 csv limitation).
    return value.encode('ascii', 'ignore')
| mit | Python | |
65245e2ef91952f3d9383f520f8e875b8a2a2648 | add translate type | CenterForOpenScience/modular-odm,sloria/modular-odm,chrisseto/modular-odm,icereval/modular-odm | modularodm/fields/BooleanField.py | modularodm/fields/BooleanField.py | from . import Field
from ..validators import validate_boolean
class BooleanField(Field):
    """Field whose stored value is a boolean."""

    # default = False
    # Validator rejecting any value that is not a bool.
    validate = validate_boolean
    # Values are coerced with bool() when translated to/from storage backends.
    translate_type = bool

    def __init__(self, *args, **kwargs):
        super(BooleanField, self).__init__(*args, **kwargs)
from ..validators import validate_boolean
class BooleanField(Field):
    """Field whose stored value is a boolean (pre-translate_type variant)."""

    # default = False
    # Validator rejecting any value that is not a bool.
    validate = validate_boolean

    def __init__(self, *args, **kwargs):
        super(BooleanField, self).__init__(*args, **kwargs)
3fb4146b53c88695a9941b591c22362ccd5b211d | Create Flyweight.py | QuantumFractal/Python-Scripts,QuantumFractal/Python-Scripts | DesignPatterns/Flyweight.py | DesignPatterns/Flyweight.py | '''
Flyweight. A design pattern used solely for efficiency with
large arrays of nearly indentical objects.
A simple world generator
Written in python for simplicity sake.
For more info >> http://gameprogrammingpatterns.com/command.html
'''
from random import *
class World:
    """A WIDTH x HEIGHT grid whose cells reference shared Terrain flyweights.

    Each cell stores a reference to one of three shared Terrain instances
    instead of owning its own copy — the point of the flyweight pattern.
    """

    def __init__(self, width, height):
        # The three shared flyweight instances.
        self.grass = Terrain(1, False, "grass.png")
        self.hill = Terrain(3, False, "hill.png")
        self.water = Terrain(2, True, "water.png")
        self.WIDTH, self.HEIGHT = width, height
        self.grid = [[None for i in range(height)] for j in range(width)]

    def __str__(self):
        """Render the grid as rows of [movement cost] cells."""
        output = ""
        for x in xrange(0, self.WIDTH):
            for y in xrange(0, self.HEIGHT):
                output += "[" + str(self.grid[x][y].get_movement_cost()) + "]"
            output += "\n"
        return output

    def paint_terrain(self):
        """Fill the grid randomly: roughly 10% hills, the rest grass."""
        for x in range(self.WIDTH):
            for y in range(self.HEIGHT):
                if randint(1, 10) == 10:
                    self.grid[x][y] = self.hill
                else:
                    self.grid[x][y] = self.grass

    def get_movement_cost(self, x, y):
        # BUG FIX: the original indexed self.grid[x][x], ignoring y entirely.
        return self.grid[x][y].get_movement_cost()
class Terrain:
    """Flyweight holding the intrinsic state shared by many grid cells."""

    def __init__(self, movement_cost, is_water, texture):
        self.movement_cost = movement_cost
        self.texture = texture
        # Stored under a private name: the original assigned self.is_water,
        # which shadowed (and broke) the accessor method of the same name.
        self._is_water = is_water

    def get_movement_cost(self):
        return self.movement_cost

    def is_water(self):
        # BUG FIX: the original returned the bare name ``is_water``, which
        # raised NameError, and the method itself was shadowed by the
        # instance attribute assigned in __init__.
        return self._is_water
# Demo: build a world, paint random terrain, and print its movement costs.
new_world = World(20, 15)
new_world.paint_terrain()
print new_world
'''
Output:
[1][1][3][1][3][1][1][3][1][3][1][1][3][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[1][1][1][1][3][1][1][1][1][1][3][1][1][1][1]
[1][3][3][1][3][1][1][1][1][1][1][1][1][3][1]
[1][1][1][1][1][1][1][3][1][1][1][1][1][1][1]
[1][1][1][1][3][1][3][1][1][1][1][3][1][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][3]
[1][1][1][3][1][1][1][1][1][1][1][3][1][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[3][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[3][1][3][1][1][3][1][1][1][1][1][1][1][1][1]
[1][3][3][1][3][3][1][1][1][1][1][1][1][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[1][1][1][3][3][3][1][1][1][1][1][1][1][1][1]
[1][1][1][3][1][3][1][1][3][1][1][1][1][1][1]
[1][1][1][1][1][1][1][1][1][1][1][1][1][1][1]
[1][1][1][1][1][1][1][3][1][1][1][1][1][1][1]
[1][1][1][3][1][1][1][1][1][3][1][1][1][1][1]
'''
| mit | Python | |
ef2bad889159941b344808cb88179135d3908f19 | Add missing file | FabriceSalvaire/grouped-purchase-order,FabriceSalvaire/grouped-purchase-order,FabriceSalvaire/grouped-purchase-order,FabriceSalvaire/grouped-purchase-order | GroupedPurchaseOrder/api.py | GroupedPurchaseOrder/api.py | ####################################################################################################
#
# GroupedPurchaseOrder - A Django Application.
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
from tastypie.authentication import SessionAuthentication, ApiKeyAuthentication
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization, ReadOnlyAuthorization
####################################################################################################
from .models import Order
####################################################################################################
class OrderResource(ModelResource):
    """Read-only Tastypie REST resource exposing Order objects."""

    class Meta:
        queryset = Order.objects.all()
        resource_name = 'order'
        # authentication = ApiKeyAuthentication()
        # Session-based authentication reuses the Django login session.
        authentication = SessionAuthentication()
        authorization = ReadOnlyAuthorization()

    ##############################################

    def dehydrate(self, bundle):
        # Add the model's computed name() to the serialized payload.
        bundle.data['name'] = bundle.obj.name()
        return bundle
####################################################################################################
#
# End
#
####################################################################################################
| agpl-3.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.