commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
2ee6c3c890f236eb7dff0a8094ca3207df119b49 | add new unittest for issue #28 | NLeSC/pyxenon,NLeSC/pyxenon | tests/test_issue28.py | tests/test_issue28.py | import pytest
from xenon import Path
from xenon.exceptions import NoSuchPathException
def test_file_does_not_exist(local_filesystem, tmpdir):
    """Reading a non-existent file must raise NoSuchPathException (issue #28)."""
    tmpdir = Path(str(tmpdir))
    with pytest.raises(NoSuchPathException):
        filename = tmpdir / 'this-file-does-not-exist'
        result = bytearray()
        # The exception is expected to surface while consuming the chunks.
        for chunk in local_filesystem.read_from_file(filename):
            result.extend(chunk)
| apache-2.0 | Python | |
5e0b80cada5ebae0412347471723af9b444007a6 | Add sticker tests | carpedm20/fbchat | tests/test_sticker.py | tests/test_sticker.py | import pytest
from fbchat._sticker import Sticker
def test_from_graphql_none():
    """A missing GraphQL payload deserializes to ``None``."""
    sticker = Sticker._from_graphql(None)
    assert sticker is None
def test_from_graphql_minimal():
    """Only the ``id`` field is required to build a Sticker."""
    assert Sticker(uid=1) == Sticker._from_graphql({"id": 1})
def test_from_graphql_normal():
    """A full, non-animated sticker payload maps onto the Sticker fields."""
    assert Sticker(
        uid="369239383222810",
        pack="227877430692340",
        is_animated=False,
        medium_sprite_image=None,
        large_sprite_image=None,
        frames_per_row=None,
        frames_per_col=None,
        frame_rate=None,
        url="https://scontent-arn2-1.xx.fbcdn.net/v/redacted.png",
        width=274,
        height=274,
        label="Like, thumbs up",
    ) == Sticker._from_graphql(
        {
            "id": "369239383222810",
            "pack": {"id": "227877430692340"},
            "label": "Like, thumbs up",
            "frame_count": 1,
            "frame_rate": 83,
            "frames_per_row": 1,
            "frames_per_column": 1,
            "sprite_image_2x": None,
            "sprite_image": None,
            "padded_sprite_image": None,
            "padded_sprite_image_2x": None,
            "url": "https://scontent-arn2-1.xx.fbcdn.net/v/redacted.png",
            "height": 274,
            "width": 274,
        }
    )
def test_from_graphql_animated():
    """Animated sticker: sprite images are picked up; the padded sprite
    variants in the payload are not reflected in the Sticker object."""
    assert Sticker(
        uid="144885035685763",
        pack="350357561732812",
        is_animated=True,
        medium_sprite_image="https://scontent-arn2-1.xx.fbcdn.net/v/redacted2.png",
        large_sprite_image="https://scontent-arn2-1.fbcdn.net/v/redacted3.png",
        frames_per_row=2,
        frames_per_col=2,
        frame_rate=142,
        url="https://scontent-arn2-1.fbcdn.net/v/redacted1.png",
        width=240,
        height=293,
        label="Love, cat with heart",
    ) == Sticker._from_graphql(
        {
            "id": "144885035685763",
            "pack": {"id": "350357561732812"},
            "label": "Love, cat with heart",
            "frame_count": 4,
            "frame_rate": 142,
            "frames_per_row": 2,
            "frames_per_column": 2,
            "sprite_image_2x": {
                "uri": "https://scontent-arn2-1.fbcdn.net/v/redacted3.png"
            },
            "sprite_image": {
                "uri": "https://scontent-arn2-1.xx.fbcdn.net/v/redacted2.png"
            },
            "padded_sprite_image": {
                "uri": "https://scontent-arn2-1.xx.fbcdn.net/v/unused1.png"
            },
            "padded_sprite_image_2x": {
                "uri": "https://scontent-arn2-1.xx.fbcdn.net/v/unused2.png"
            },
            "url": "https://scontent-arn2-1.fbcdn.net/v/redacted1.png",
            "height": 293,
            "width": 240,
        }
    )
| bsd-3-clause | Python | |
6cd7bd0d304c751bd40ce292074a034676ce0a30 | Add setupeggscons script, to use scons build under setuptools. | sonnyhu/scipy,grlee77/scipy,jor-/scipy,ChanderG/scipy,e-q/scipy,ndchorley/scipy,WillieMaddox/scipy,lhilt/scipy,josephcslater/scipy,anntzer/scipy,vanpact/scipy,ogrisel/scipy,aman-iitj/scipy,person142/scipy,hainm/scipy,pnedunuri/scipy,aeklant/scipy,pyramania/scipy,gertingold/scipy,behzadnouri/scipy,person142/scipy,sargas/scipy,argriffing/scipy,fernand/scipy,efiring/scipy,cpaulik/scipy,Gillu13/scipy,Gillu13/scipy,mingwpy/scipy,jakevdp/scipy,andyfaff/scipy,zxsted/scipy,nonhermitian/scipy,jakevdp/scipy,Gillu13/scipy,jsilter/scipy,fredrikw/scipy,giorgiop/scipy,newemailjdm/scipy,minhlongdo/scipy,ortylp/scipy,maciejkula/scipy,pyramania/scipy,jamestwebber/scipy,pbrod/scipy,witcxc/scipy,mdhaber/scipy,fredrikw/scipy,sauliusl/scipy,njwilson23/scipy,vanpact/scipy,pnedunuri/scipy,Dapid/scipy,behzadnouri/scipy,arokem/scipy,person142/scipy,nvoron23/scipy,njwilson23/scipy,gertingold/scipy,zerothi/scipy,rgommers/scipy,perimosocordiae/scipy,zerothi/scipy,vhaasteren/scipy,Dapid/scipy,mortada/scipy,Shaswat27/scipy,endolith/scipy,zxsted/scipy,anielsen001/scipy,dch312/scipy,mingwpy/scipy,ortylp/scipy,mtrbean/scipy,tylerjereddy/scipy,nvoron23/scipy,hainm/scipy,perimosocordiae/scipy,grlee77/scipy,niknow/scipy,ilayn/scipy,Newman101/scipy,nonhermitian/scipy,futurulus/scipy,Srisai85/scipy,petebachant/scipy,mingwpy/scipy,dominicelse/scipy,mdhaber/scipy,giorgiop/scipy,jonycgn/scipy,woodscn/scipy,bkendzior/scipy,surhudm/scipy,pnedunuri/scipy,ChanderG/scipy,felipebetancur/scipy,WarrenWeckesser/scipy,sonnyhu/scipy,lhilt/scipy,ogrisel/scipy,Dapid/scipy,endolith/scipy,futurulus/scipy,jseabold/scipy,behzadnouri/scipy,richardotis/scipy,larsmans/scipy,Stefan-Endres/scipy,anielsen001/scipy,Gillu13/scipy,gfyoung/scipy,nmayorov/scipy,aeklant/scipy,matthewalbani/scipy,Newman101/scipy,pnedunuri/scipy,nmayorov/scipy,apbard/scipy,sargas/scipy,ndchorley/scipy,Eric89GXL/scipy,mortada/scipy,befelix/scipy,Sh
aswat27/scipy,aarchiba/scipy,maciejkula/scipy,lhilt/scipy,jonycgn/scipy,grlee77/scipy,apbard/scipy,mhogg/scipy,arokem/scipy,sriki18/scipy,fernand/scipy,WarrenWeckesser/scipy,newemailjdm/scipy,vhaasteren/scipy,befelix/scipy,jsilter/scipy,gef756/scipy,nmayorov/scipy,sriki18/scipy,mhogg/scipy,matthewalbani/scipy,pschella/scipy,tylerjereddy/scipy,ogrisel/scipy,mgaitan/scipy,cpaulik/scipy,ilayn/scipy,pizzathief/scipy,haudren/scipy,giorgiop/scipy,mtrbean/scipy,rgommers/scipy,petebachant/scipy,endolith/scipy,apbard/scipy,kleskjr/scipy,vberaudi/scipy,andyfaff/scipy,piyush0609/scipy,maniteja123/scipy,mhogg/scipy,piyush0609/scipy,cpaulik/scipy,mortonjt/scipy,trankmichael/scipy,efiring/scipy,perimosocordiae/scipy,scipy/scipy,vberaudi/scipy,rmcgibbo/scipy,WillieMaddox/scipy,pschella/scipy,mgaitan/scipy,vanpact/scipy,Kamp9/scipy,mikebenfield/scipy,rmcgibbo/scipy,FRidh/scipy,jonycgn/scipy,Newman101/scipy,petebachant/scipy,vigna/scipy,trankmichael/scipy,Srisai85/scipy,giorgiop/scipy,teoliphant/scipy,jamestwebber/scipy,pnedunuri/scipy,maniteja123/scipy,jseabold/scipy,perimosocordiae/scipy,vanpact/scipy,giorgiop/scipy,jamestwebber/scipy,mortonjt/scipy,efiring/scipy,gdooper/scipy,mhogg/scipy,ogrisel/scipy,giorgiop/scipy,pnedunuri/scipy,zerothi/scipy,haudren/scipy,matthew-brett/scipy,e-q/scipy,niknow/scipy,vberaudi/scipy,maniteja123/scipy,trankmichael/scipy,dch312/scipy,aeklant/scipy,mortonjt/scipy,kleskjr/scipy,vberaudi/scipy,rgommers/scipy,gef756/scipy,pizzathief/scipy,sonnyhu/scipy,scipy/scipy,raoulbq/scipy,jjhelmus/scipy,zxsted/scipy,jor-/scipy,arokem/scipy,chatcannon/scipy,mhogg/scipy,mortada/scipy,surhudm/scipy,anielsen001/scipy,Dapid/scipy,befelix/scipy,arokem/scipy,ndchorley/scipy,woodscn/scipy,niknow/scipy,zerothi/scipy,aeklant/scipy,jseabold/scipy,grlee77/scipy,jjhelmus/scipy,Gillu13/scipy,nvoron23/scipy,dch312/scipy,perimosocordiae/scipy,pizzathief/scipy,ales-erjavec/scipy,chatcannon/scipy,cpaulik/scipy,piyush0609/scipy,hainm/scipy,hainm/scipy,anielsen001/scipy,zaxliu/scipy
,tylerjereddy/scipy,WillieMaddox/scipy,pyramania/scipy,mtrbean/scipy,zaxliu/scipy,andim/scipy,raoulbq/scipy,mdhaber/scipy,vanpact/scipy,jseabold/scipy,kalvdans/scipy,befelix/scipy,ales-erjavec/scipy,gfyoung/scipy,maniteja123/scipy,Kamp9/scipy,aman-iitj/scipy,haudren/scipy,anntzer/scipy,aman-iitj/scipy,fredrikw/scipy,piyush0609/scipy,ilayn/scipy,lukauskas/scipy,richardotis/scipy,jonycgn/scipy,Kamp9/scipy,aarchiba/scipy,woodscn/scipy,WarrenWeckesser/scipy,andim/scipy,surhudm/scipy,sauliusl/scipy,ortylp/scipy,teoliphant/scipy,jsilter/scipy,zaxliu/scipy,dominicelse/scipy,WarrenWeckesser/scipy,futurulus/scipy,pbrod/scipy,fredrikw/scipy,dominicelse/scipy,rmcgibbo/scipy,petebachant/scipy,befelix/scipy,maniteja123/scipy,nvoron23/scipy,hainm/scipy,minhlongdo/scipy,chatcannon/scipy,rmcgibbo/scipy,sriki18/scipy,argriffing/scipy,mortonjt/scipy,kalvdans/scipy,zaxliu/scipy,haudren/scipy,matthewalbani/scipy,dominicelse/scipy,mtrbean/scipy,futurulus/scipy,lukauskas/scipy,lukauskas/scipy,sauliusl/scipy,felipebetancur/scipy,gdooper/scipy,ndchorley/scipy,teoliphant/scipy,gef756/scipy,jjhelmus/scipy,felipebetancur/scipy,apbard/scipy,juliantaylor/scipy,ales-erjavec/scipy,vigna/scipy,vigna/scipy,efiring/scipy,gertingold/scipy,hainm/scipy,sargas/scipy,bkendzior/scipy,mortada/scipy,niknow/scipy,newemailjdm/scipy,futurulus/scipy,rmcgibbo/scipy,maciejkula/scipy,mortada/scipy,mikebenfield/scipy,aman-iitj/scipy,jor-/scipy,mtrbean/scipy,FRidh/scipy,anielsen001/scipy,felipebetancur/scipy,sonnyhu/scipy,dominicelse/scipy,larsmans/scipy,pyramania/scipy,rgommers/scipy,mingwpy/scipy,vigna/scipy,minhlongdo/scipy,sriki18/scipy,Srisai85/scipy,vhaasteren/scipy,vigna/scipy,jor-/scipy,gfyoung/scipy,mdhaber/scipy,mgaitan/scipy,argriffing/scipy,jsilter/scipy,jamestwebber/scipy,arokem/scipy,surhudm/scipy,sonnyhu/scipy,Dapid/scipy,fredrikw/scipy,Stefan-Endres/scipy,trankmichael/scipy,sriki18/scipy,Stefan-Endres/scipy,sargas/scipy,mgaitan/scipy,efiring/scipy,jseabold/scipy,Srisai85/scipy,nvoron23/scipy,andyfaff
/scipy,gfyoung/scipy,haudren/scipy,person142/scipy,mikebenfield/scipy,larsmans/scipy,anntzer/scipy,argriffing/scipy,kleskjr/scipy,Stefan-Endres/scipy,mortonjt/scipy,WarrenWeckesser/scipy,cpaulik/scipy,e-q/scipy,richardotis/scipy,ilayn/scipy,pbrod/scipy,ales-erjavec/scipy,petebachant/scipy,juliantaylor/scipy,fernand/scipy,aarchiba/scipy,witcxc/scipy,kleskjr/scipy,raoulbq/scipy,aeklant/scipy,kalvdans/scipy,e-q/scipy,mikebenfield/scipy,sriki18/scipy,anntzer/scipy,ndchorley/scipy,zxsted/scipy,andyfaff/scipy,zerothi/scipy,sargas/scipy,fernand/scipy,njwilson23/scipy,mdhaber/scipy,njwilson23/scipy,anielsen001/scipy,ilayn/scipy,nvoron23/scipy,kalvdans/scipy,sauliusl/scipy,efiring/scipy,endolith/scipy,juliantaylor/scipy,maciejkula/scipy,surhudm/scipy,Gillu13/scipy,vhaasteren/scipy,rmcgibbo/scipy,jakevdp/scipy,vberaudi/scipy,jseabold/scipy,pizzathief/scipy,felipebetancur/scipy,juliantaylor/scipy,aarchiba/scipy,nonhermitian/scipy,gef756/scipy,scipy/scipy,Eric89GXL/scipy,minhlongdo/scipy,andim/scipy,pbrod/scipy,surhudm/scipy,mdhaber/scipy,petebachant/scipy,argriffing/scipy,pyramania/scipy,raoulbq/scipy,witcxc/scipy,futurulus/scipy,matthew-brett/scipy,jakevdp/scipy,ogrisel/scipy,andyfaff/scipy,Shaswat27/scipy,ChanderG/scipy,zaxliu/scipy,mgaitan/scipy,grlee77/scipy,jamestwebber/scipy,nonhermitian/scipy,Shaswat27/scipy,pschella/scipy,WarrenWeckesser/scipy,juliantaylor/scipy,larsmans/scipy,vhaasteren/scipy,andyfaff/scipy,scipy/scipy,rgommers/scipy,lukauskas/scipy,josephcslater/scipy,FRidh/scipy,zxsted/scipy,Srisai85/scipy,perimosocordiae/scipy,teoliphant/scipy,Eric89GXL/scipy,endolith/scipy,njwilson23/scipy,mortada/scipy,mhogg/scipy,witcxc/scipy,Newman101/scipy,ChanderG/scipy,ales-erjavec/scipy,witcxc/scipy,piyush0609/scipy,anntzer/scipy,ilayn/scipy,FRidh/scipy,fernand/scipy,aarchiba/scipy,scipy/scipy,bkendzior/scipy,gef756/scipy,andim/scipy,fernand/scipy,matthew-brett/scipy,nonhermitian/scipy,ChanderG/scipy,larsmans/scipy,Newman101/scipy,jakevdp/scipy,pschella/scipy,bkendzior/scip
y,minhlongdo/scipy,bkendzior/scipy,matthew-brett/scipy,ortylp/scipy,kleskjr/scipy,jsilter/scipy,chatcannon/scipy,dch312/scipy,Dapid/scipy,jjhelmus/scipy,pbrod/scipy,njwilson23/scipy,minhlongdo/scipy,sauliusl/scipy,gdooper/scipy,ChanderG/scipy,lukauskas/scipy,matthew-brett/scipy,pbrod/scipy,argriffing/scipy,Kamp9/scipy,Srisai85/scipy,Shaswat27/scipy,kleskjr/scipy,Kamp9/scipy,WillieMaddox/scipy,andim/scipy,Eric89GXL/scipy,gdooper/scipy,mingwpy/scipy,Stefan-Endres/scipy,trankmichael/scipy,piyush0609/scipy,e-q/scipy,raoulbq/scipy,jonycgn/scipy,pizzathief/scipy,woodscn/scipy,mortonjt/scipy,newemailjdm/scipy,lukauskas/scipy,fredrikw/scipy,josephcslater/scipy,josephcslater/scipy,jor-/scipy,chatcannon/scipy,maniteja123/scipy,dch312/scipy,tylerjereddy/scipy,vanpact/scipy,raoulbq/scipy,cpaulik/scipy,Kamp9/scipy,endolith/scipy,jonycgn/scipy,felipebetancur/scipy,behzadnouri/scipy,scipy/scipy,josephcslater/scipy,tylerjereddy/scipy,Shaswat27/scipy,gdooper/scipy,ndchorley/scipy,niknow/scipy,teoliphant/scipy,aman-iitj/scipy,ortylp/scipy,andim/scipy,anntzer/scipy,gertingold/scipy,zerothi/scipy,zaxliu/scipy,maciejkula/scipy,richardotis/scipy,zxsted/scipy,vhaasteren/scipy,WillieMaddox/scipy,mikebenfield/scipy,lhilt/scipy,nmayorov/scipy,aman-iitj/scipy,apbard/scipy,gertingold/scipy,matthewalbani/scipy,ales-erjavec/scipy,Newman101/scipy,sauliusl/scipy,gfyoung/scipy,richardotis/scipy,Eric89GXL/scipy,Eric89GXL/scipy,haudren/scipy,chatcannon/scipy,richardotis/scipy,nmayorov/scipy,FRidh/scipy,behzadnouri/scipy,mingwpy/scipy,matthewalbani/scipy,woodscn/scipy,jjhelmus/scipy,woodscn/scipy,sonnyhu/scipy,newemailjdm/scipy,vberaudi/scipy,larsmans/scipy,person142/scipy,gef756/scipy,WillieMaddox/scipy,niknow/scipy,kalvdans/scipy,pschella/scipy,behzadnouri/scipy,lhilt/scipy,trankmichael/scipy,Stefan-Endres/scipy,newemailjdm/scipy,FRidh/scipy,mtrbean/scipy,ortylp/scipy,mgaitan/scipy | setupeggscons.py | setupeggscons.py | #!/usr/bin/env python
"""
A setup.py script to use setuptools, which gives egg goodness, etc.
"""
from setuptools import setup
# NOTE(review): execfile() is Python 2 only; importing setuptools first makes
# its setup() (with egg support) active when setupscons.py runs.
execfile('setupscons.py')
| bsd-3-clause | Python | |
163b29a07c4d25ee9d3157eb5c517f06a170f42c | Update autocons.py | xcat2/confluent,jjohnson42/confluent,xcat2/confluent,jjohnson42/confluent,xcat2/confluent,xcat2/confluent,jjohnson42/confluent,jjohnson42/confluent,jjohnson42/confluent,xcat2/confluent | misc/autocons.py | misc/autocons.py | import os
import struct
import termios
# Legacy PC UART I/O port addresses -> Linux serial device nodes.
addrtoname = {
    0x3f8: '/dev/ttyS0',
    0x2f8: '/dev/ttyS1',
    0x3e8: '/dev/ttyS2',
    0x2e8: '/dev/ttyS3',
}
# SPCR baud-rate code (byte 58 of the table) -> baud rate; 0 means
# "use the firmware-configured speed as-is" (None here).
speedmap = {
    0: None,
    3: 9600,
    4: 19200,
    6: 57600,
    7: 115200,
}
# Baud rate -> termios speed constant for tcsetattr.
termiobaud = {
    9600: termios.B9600,
    19200: termios.B19200,
    57600: termios.B57600,
    115200: termios.B115200,
}
def do_serial_config():
    """Derive serial-console settings from the ACPI SPCR table.

    Returns a dict with 'tty' (device node) and 'speed' (baud rate or
    None), after programming the baud rate on the device.  Returns None
    when a console is already configured on the kernel command line, or
    when the SPCR table is absent/unusable.
    """
    with open('/proc/cmdline') as cmdline:
        if 'console=ttyS' in cmdline.read():
            return None  # Do not do autoconsole if manually configured
    # Original leaked this file handle; use a context manager instead.
    with open("/sys/firmware/acpi/tables/SPCR", "rb") as spcrfile:
        spcr = bytearray(spcrfile.read())
    # Require a 16550-compatible interface (spcr[8] == 2), I/O-port address
    # space (spcr[36] == 0) and the expected flow control byte (spcr[40] == 1).
    if spcr[8] != 2 or spcr[36] != 0 or spcr[40] != 1:
        return None
    address = struct.unpack('<Q', spcr[44:52])[0]
    try:
        tty = addrtoname[address]
    except KeyError:
        return None
    retval = {'tty': tty}
    try:
        retval['speed'] = speedmap[spcr[58]]
    except KeyError:
        return None
    if retval['speed']:
        # Program the requested baud rate on the device; close the raw fd
        # afterwards (it was leaked in the original implementation).
        ttyf = os.open(tty, os.O_RDWR | os.O_NOCTTY)
        try:
            currattr = termios.tcgetattr(ttyf)
            currattr[4:6] = [0, termiobaud[retval['speed']]]
            termios.tcsetattr(ttyf, termios.TCSANOW, currattr)
        finally:
            os.close(ttyf)
    return retval
if __name__ == '__main__':
    serialinfo = do_serial_config()
    if serialinfo:
        # Replace this process with a detached shell that attaches a shared
        # screen session ("console") to the discovered serial device.
        os.execl(
            '/bin/setsid', 'setsid', 'sh', '-c',
            'exec screen -x console <> {0} >&0 2>&1'.format(serialinfo['tty']))
| apache-2.0 | Python | |
871405e3e4721ae4f31efb5add8dc0e6d48500df | add test cases for inference new X for bayesian GPLVM | strongh/GPy,mikecroucher/GPy,jameshensman/GPy,dhhjx880713/GPy,PredictiveScienceLab/GPy,befelix/GPy,ysekky/GPy,mikecroucher/GPy,Dapid/GPy,AlexGrig/GPy,esiivola/GPYgradients,AlexGrig/GPy,beckdaniel/GPy,ysekky/GPy,mikecroucher/GPy,beckdaniel/GPy,TianpeiLuke/GPy,buntyke/GPy,beckdaniel/GPy,AlexGrig/GPy,jameshensman/GPy,ptonner/GPy,avehtari/GPy,ysekky/GPy,maaskola/GPy,esiivola/GPYgradients,dhhjx880713/GPy,SheffieldML/GPy,esiivola/GPYgradients,dhhjx880713/GPy,ysekky/GPy,esiivola/GPYgradients,mikecroucher/GPy,befelix/GPy,jameshensman/GPy,dhhjx880713/GPy,buntyke/GPy,TianpeiLuke/GPy,strongh/GPy,Dapid/GPy,gusmaogabriels/GPy,Dapid/GPy,avehtari/GPy,befelix/GPy,SheffieldML/GPy,gusmaogabriels/GPy,Dapid/GPy,PredictiveScienceLab/GPy,maaskola/GPy,ptonner/GPy,jameshensman/GPy,PredictiveScienceLab/GPy,buntyke/GPy,beckdaniel/GPy,fivejjs/GPy,AlexGrig/GPy,fivejjs/GPy,avehtari/GPy,ptonner/GPy,buntyke/GPy,TianpeiLuke/GPy,avehtari/GPy,fivejjs/GPy,ptonner/GPy,befelix/GPy,SheffieldML/GPy,maaskola/GPy,gusmaogabriels/GPy,PredictiveScienceLab/GPy,strongh/GPy,SheffieldML/GPy,maaskola/GPy | GPy/testing/inference_tests.py | GPy/testing/inference_tests.py |
"""
The test cases for various inference algorithms
"""
import unittest, itertools
import numpy as np
import GPy
class InferenceXTestCase(unittest.TestCase):
def genData(self):
D1,D2,N = 12,12,50
np.random.seed(1234)
x = np.linspace(0, 4 * np.pi, N)[:, None]
s1 = np.vectorize(lambda x: np.sin(x))
s2 = np.vectorize(lambda x: np.cos(x)**2)
s3 = np.vectorize(lambda x:-np.exp(-np.cos(2 * x)))
sS = np.vectorize(lambda x: np.cos(x))
s1 = s1(x)
s2 = s2(x)
s3 = s3(x)
sS = sS(x)
s1 -= s1.mean(); s1 /= s1.std(0)
s2 -= s2.mean(); s2 /= s2.std(0)
s3 -= s3.mean(); s3 /= s3.std(0)
sS -= sS.mean(); sS /= sS.std(0)
S1 = np.hstack([s1, sS])
S2 = np.hstack([s3, sS])
P1 = np.random.randn(S1.shape[1], D1)
P2 = np.random.randn(S2.shape[1], D2)
Y1 = S1.dot(P1)
Y2 = S2.dot(P2)
Y1 += .01 * np.random.randn(*Y1.shape)
Y2 += .01 * np.random.randn(*Y2.shape)
Y1 -= Y1.mean(0)
Y2 -= Y2.mean(0)
Y1 /= Y1.std(0)
Y2 /= Y2.std(0)
slist = [s1, s2, s3, sS]
slist_names = ["s1", "s2", "s3", "sS"]
Ylist = [Y1, Y2]
return Ylist
def test_inferenceX_BGPLVM(self):
Ys = self.genData()
m = GPy.models.BayesianGPLVM(Ys[0],5,kernel=GPy.kern.Linear(5,ARD=True))
x,mi = m.infer_newX(m.Y, optimize=False)
self.assertTrue(mi.checkgrad())
m.optimize(max_iters=10000)
x,mi = m.infer_newX(m.Y)
self.assertTrue(np.allclose(m.X.mean, mi.X.mean))
self.assertTrue(np.allclose(m.X.variance, mi.X.variance))
if __name__ == "__main__":
unittest.main() | bsd-3-clause | Python | |
883cd30860d881a9d201c088210deb4ee0d6f6d0 | add an example file to show off colorbar types in vispy.plot | jdreaver/vispy,drufat/vispy,ghisvail/vispy,dchilds7/Deysha-Star-Formation,srinathv/vispy,jdreaver/vispy,kkuunnddaannkk/vispy,Eric89GXL/vispy,Eric89GXL/vispy,srinathv/vispy,kkuunnddaannkk/vispy,dchilds7/Deysha-Star-Formation,ghisvail/vispy,QuLogic/vispy,julienr/vispy,QuLogic/vispy,julienr/vispy,inclement/vispy,RebeccaWPerry/vispy,bollu/vispy,julienr/vispy,dchilds7/Deysha-Star-Formation,ghisvail/vispy,RebeccaWPerry/vispy,kkuunnddaannkk/vispy,QuLogic/vispy,inclement/vispy,michaelaye/vispy,Eric89GXL/vispy,RebeccaWPerry/vispy,michaelaye/vispy,jdreaver/vispy,srinathv/vispy,drufat/vispy,inclement/vispy,bollu/vispy,michaelaye/vispy,drufat/vispy,bollu/vispy | examples/basics/plotting/colorbar_types.py | examples/basics/plotting/colorbar_types.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# vispy: gallery 1
"""
Plot different styles of ColorBar
"""
import numpy as np
from vispy import plot as vp
fig = vp.Fig(size=(800, 400), show=False)
plot = fig[0, 0]
# One colorbar per orientation, placed at the matching center point.
centers = [(0, 0), (0, 200), (200, 0), (200, 200)]
# Half-dimensions (half-width, half-height) of each colorbar.
dimensions = [(50, 10), (50, 10), (5, 50), (5, 50)]
orientations = ["bottom", "top", "left", "right"]
for i in range(0, len(centers)):
    cbar = plot.colorbar(pos=centers[i],
                         halfdim=dimensions[i],
                         orientation=orientations[i],
                         label=orientations[i],
                         clim=(0, 100),
                         cmap="winter",
                         border_width=4,
                         border_color="#212121")
if __name__ == '__main__':
    fig.show(run=True)
| bsd-3-clause | Python | |
e73b31fb03c42873ad553891d3b643c9c9196a62 | add migration file | jamesbeebop/evennia,jamesbeebop/evennia,jamesbeebop/evennia | evennia/typeclasses/migrations/0013_auto_20191015_1922.py | evennia/typeclasses/migrations/0013_auto_20191015_1922.py | # Generated by Django 2.2.6 on 2019-10-15 19:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adjust the options of the Tag.db_tagtype CharField."""
    dependencies = [
        ('typeclasses', '0012_attrs_to_picklev4_may_be_slow'),
    ]
    operations = [
        migrations.AlterField(
            model_name='tag',
            name='db_tagtype',
            field=models.CharField(blank=True, db_index=True, help_text='overall type of Tag', max_length=16, null=True, verbose_name='tagtype'),
        ),
    ]
| bsd-3-clause | Python | |
36975cb23aa628f4346095c180b627505d1a692e | Add spider for Barnes & Noble | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/barnesandnoble.py | locations/spiders/barnesandnoble.py | import scrapy
import re
import json
from urllib.parse import urlencode
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
# Three-letter day names (as used by the site) -> OpeningHours day codes.
DAY_MAPPING = {
    'Sun': 'Su',
    'Mon': 'Mo',
    'Tue': 'Tu',
    'Wed': 'We',
    'Thu': 'Th',
    'Fri': 'Fr',
    'Sat': 'Sa'
}
# Week order used to expand day ranges such as "Sun-Thu".
DAY_ORDER = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']
class BarnesAndNobleSpider(scrapy.Spider):
    """Scrapes Barnes & Noble store locations from the store-map endpoint."""
    name = "barnesandnoble"
    download_delay = 0.5
    allowed_domains = [
        "stores.barnesandnoble.com",
    ]

    def start_requests(self):
        """Issue one map query per US centroid point (50-mile-radius grid)."""
        base_url = "https://stores.barnesandnoble.com/stores?"
        params = {
            "storeFilter": "all",
            "v": "1",
            "view": "map"
        }
        with open('./locations/searchable_points/us_centroids_50mile_radius.csv') as points:
            next(points)  # skip the CSV header row
            for point in points:
                _, lat, lon = point.strip().split(',')
                params.update({"lat": lat, "lng": lon})
                yield scrapy.Request(url=base_url + urlencode(params))

    def parse_hours(self, hours):
        """Parse strings such as "Sun-Thu 9-9, Fri&Sat 9-10" into the
        opening-hours format."""
        o = OpeningHours()
        ranges = hours.split(',')
        for day_range in ranges:  # renamed: `range` shadowed the builtin
            pattern = r'(.{3})[-&](.{3})\s([\d:]+)-([\d:]+)'
            start_day, end_day, start_time, end_time = re.search(pattern, day_range.strip()).groups()
            # Bare hours are assumed to open in the AM and close in the PM.
            if ':' not in start_time:
                start_time += ":00 AM"
            if ':' not in end_time:
                end_time += ":00 PM"
            for day in DAY_ORDER[DAY_ORDER.index(start_day): DAY_ORDER.index(end_day) + 1]:
                o.add_range(day=DAY_MAPPING[day],
                            open_time=start_time,
                            close_time=end_time,
                            time_format='%I:%M %p')
        return o.as_opening_hours()

    def parse(self, response):
        """Parse the storesJson blob embedded in the map page and yield one
        GeojsonPointItem per store."""
        data = response.xpath('//div[@id="mapDiv"]/script/text()').re(r"storesJson\s=\s(.*?);")
        if not data:
            # NOTE(review): if the "No results" banner is also absent this
            # falls through and raises on data[0] -- presumably intentional
            # so unexpected page layouts fail loudly; confirm.
            if "No results found" in response.xpath('//div[@class="content"]/h3/text()').extract_first():
                return
        stores = json.loads(data[0])
        for store in stores:
            # address2 holds the street address; address1 is usually the
            # venue/mall name and is only used as a fallback.
            address = store.get("address2", None)
            if not address:
                address = store.get("address1")
            properties = {
                'name': store["name"],
                'addr_full': address,
                'city': store["city"],
                'state': store["state"],
                'postcode': store["zip"],
                'phone': store["phone"],
                'ref': store['storeId'],
                'website': "https://stores.barnesandnoble.com/store/{}".format(store["storeId"]),
                'lat': float(store["location"][1]),
                'lon': float(store["location"][0]),
            }
            try:
                opening_hours = self.parse_hours(store['hours'])
            except Exception:  # was a bare except; don't swallow SystemExit/KeyboardInterrupt
                opening_hours = None
            if opening_hours:
                properties["opening_hours"] = opening_hours
            yield GeojsonPointItem(**properties)
| mit | Python | |
ff03cab2f3164af68719466df7049ec4cd272c72 | Implement idbe client | Kinnay/NintendoClients | nintendo/idbe.py | nintendo/idbe.py |
from Crypto.Cipher import AES
from nintendo.common.streams import StreamIn
import requests
import hashlib
class IDBEStrings:
    """Localized name/publisher strings for one language slot."""
    def __init__(self, stream):
        # Fixed-width wide-char fields, padded with NUL characters.
        self.short_name = stream.wchars(64).rstrip("\0")
        self.long_name = stream.wchars(128).rstrip("\0")
        self.publisher = stream.wchars(64).rstrip("\0")
class IDBEFile:
    """Parsed (already decrypted) IDBE title/icon metadata file."""
    def __init__(self, data):
        self.load(data)
    def is_wiiu(self, data):
        # The two platforms use fixed, distinct file sizes.
        if len(data) == 0x36D0: return False
        if len(data) == 0x12080: return True
        raise ValueError("IDBE file has unexpected size")
    def load(self, data):
        # The first 32 bytes are a SHA-256 digest over the rest of the file.
        sha = data[:32]
        if sha != hashlib.sha256(data[32:]).digest():
            raise ValueError("Incorrect SHA256 hash")
        wup = self.is_wiiu(data)
        # Wii U data is big endian, 3DS data little endian.
        endian = ">" if wup else "<"
        stream = StreamIn(data[32:], endian)
        self.title_id = stream.u64()
        self.title_version = stream.u32()
        self.unk1 = stream.u32()
        self.unk2 = stream.u32()
        self.unk3 = stream.read(16)
        self.unk4 = stream.u32()
        self.unk5 = stream.u64()
        # 16 language slots of localized strings.
        self.strings = []
        for i in range(16):
            self.strings.append(IDBEStrings(stream))
        if wup:
            self.tga = stream.read(0x1002C)
            self.unk6 = stream.u32()
        else:
            self.unk6 = stream.read(0x1680)
# AES-128-CBC keys for IDBE files; byte 1 of the file selects the index.
KEYS = [
    bytes.fromhex("4ab9a40e146975a84bb1b4f3ecefc47b"),
    bytes.fromhex("90a0bb1e0e864ae87d13a6a03d28c9b8"),
    bytes.fromhex("ffbb57c14e98ec6975b384fcf40786b5"),
    bytes.fromhex("80923799b41f36a6a75fb8b48c95f66f")
]
# Shared CBC initialization vector.
IV = bytes.fromhex("a46987ae47d82bb4fa8abc0450285fa4")
def get_platform(title_id):
    """Return the CDN platform code for a title id: "ctr" (3DS) or "wup" (Wii U)."""
    codes = {4: "ctr", 5: "wup"}
    try:
        return codes[title_id >> 48]
    except KeyError:
        raise ValueError("Invalid title id")
def download(title_id, title_version=None):
    """Download the (still encrypted) IDBE file for a title from the CDN.

    Raises requests.HTTPError on a non-success status.
    """
    platform = get_platform(title_id)
    base_id = (title_id >> 8) & 0xFF
    url = "https://idbe-%s.cdn.nintendo.net/icondata/%02X/%016X" %(platform, base_id, title_id)
    if title_version is not None:
        url += "-%i" %title_version
    url += ".idbe"
    # NOTE(review): verify=False disables TLS certificate validation --
    # presumably because the CDN uses a Nintendo-specific certificate; confirm.
    r = requests.get(url, verify=False)
    r.raise_for_status()
    return r.content
def check(data):
    """Return True if data looks like a valid encrypted IDBE blob."""
    return (
        len(data) % 16 == 2  # 2-byte header followed by block-aligned payload
        and data[0] == 0
        and data[1] < 4  # key index must be in range
    )
def decrypt(data):
    """Decrypt an encrypted IDBE blob and return the plaintext bytes."""
    if not check(data):
        raise ValueError("IDBE data is invalid")
    # Byte 1 selects which of the fixed AES keys was used.
    cipher = AES.new(KEYS[data[1]], AES.MODE_CBC, IV)
    return cipher.decrypt(data[2:])
| mit | Python | |
cdbddcfff74cffe7ca32de64068a5404ae9eae3f | Add tests for i3bar input processing | tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status | tests/test_i3barinput.py | tests/test_i3barinput.py | # pylint: disable=C0103,C0111
import json
import mock
import unittest
import mocks
from bumblebee.input import I3BarInput, LEFT_MOUSE, RIGHT_MOUSE
class TestI3BarInput(unittest.TestCase):
    """Tests for i3bar click-event input processing and callback dispatch."""
    def setUp(self):
        # Patch stdin and select so synthetic events can be injected.
        self.input = I3BarInput()
        self.input.need_event = True
        self._stdin = mock.patch("bumblebee.input.sys.stdin")
        self.stdin = self._stdin.start()
        self._select = mock.patch("bumblebee.input.select")
        self.select = self._select.start()
        self.popen = mocks.MockPopen()
        self.stdin.fileno.return_value = 1
        epoll = mock.Mock()
        self.select.epoll.return_value = epoll
        # Pretend stdin is always ready for reading.
        epoll.poll.return_value = [(self.stdin.fileno.return_value, 2)]
        self.anyModule = mocks.MockModule()
        self.anotherModule = mocks.MockModule()
        self.anyWidget = mocks.MockWidget("some-widget")
        self.anotherWidget = mocks.MockWidget("another-widget")
        self.anyData = self.invalidData = "any data"
        self.invalidEvent = json.dumps({"name": None, "instance": None, "button": 1})
        self.incompleteEvent = json.dumps({"button": 1})
        self.anyCommand = "this is a command with arguments"
        self._called = 0
    def tearDown(self):
        self._stdin.stop()
        self._select.stop()
        self.popen.cleanup()
    def callback(self, event):
        # Test callback: counts invocations.
        self._called += 1
    def calls(self):
        # Return the invocation count since the last check, then reset it.
        rv = self._called
        self._called = 0
        return rv
    def test_read_event(self):
        self.stdin.readline.return_value = self.anyData
        self.input.start()
        self.input.stop()
        self.stdin.readline.assert_any_call()
    def test_ignore_invalid_input(self):
        # Garbage, incomplete and null-field events must not kill the loop.
        for data in [ self.invalidData, self.incompleteEvent, self.invalidEvent ]:
            self.stdin.readline.return_value = data
            self.input.start()
            self.assertEquals(self.input.alive(), True)
            self.assertEquals(self.input.stop(), True)
            self.stdin.readline.assert_any_call()
    def test_global_callback(self):
        # A callback registered with obj=None fires for any event source.
        self.input.register_callback(None, button=LEFT_MOUSE, cmd=self.callback)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin)
        self.assertTrue(self.calls() > 0)
    def test_remove_global_callback(self):
        self.test_global_callback()
        self.input.deregister_callbacks(None)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin)
        self.assertTrue(self.calls() == 0)
    def test_global_callback_wrong_button(self):
        self.input.register_callback(None, button=LEFT_MOUSE, cmd=self.callback)
        mocks.mouseEvent(button=RIGHT_MOUSE, inp=self.input, stdin=self.stdin)
        self.assertTrue(self.calls() == 0)
    def test_module_callback(self):
        # Module-scoped callbacks fire only for events from that module.
        self.input.register_callback(self.anyModule, button=LEFT_MOUSE, cmd=self.callback)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin, module=self.anyModule)
        self.assertTrue(self.calls() > 0)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin, module=self.anotherModule)
        self.assertTrue(self.calls() == 0)
    def test_remove_module_callback(self):
        self.test_module_callback()
        self.input.deregister_callbacks(self.anyModule)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin, module=self.anyModule)
        self.assertTrue(self.calls() == 0)
    def test_widget_callback(self):
        # Widget-scoped callbacks fire only for events from that widget.
        self.input.register_callback(self.anyWidget, button=LEFT_MOUSE, cmd=self.callback)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin, module=self.anyWidget)
        self.assertTrue(self.calls() > 0)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin, module=self.anotherWidget)
        self.assertTrue(self.calls() == 0)
    def test_widget_cmd_callback(self):
        # String commands are executed via Popen (mocked here).
        self.input.register_callback(self.anyWidget, button=LEFT_MOUSE, cmd=self.anyCommand)
        mocks.mouseEvent(button=LEFT_MOUSE, inp=self.input, stdin=self.stdin, module=self.anyWidget)
        self.popen.assert_call(self.anyCommand)
| mit | Python | |
d048d02cde1c4eea536bc6348757389e2ff0f994 | add test to load ophiuchus potential | adrn/ophiuchus,adrn/ophiuchus,adrn/ophiuchus,adrn/ophiuchus | ophiuchus/potential/tests/test_load.py | ophiuchus/potential/tests/test_load.py | # coding: utf-8
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os
import sys
import logging
# Third-party
from astropy import log as logger
import matplotlib.pyplot as pl
import numpy as np
# Project
from ..load import load_potential
def test_load():
    """Smoke test: the bundled 'barred_mw' potential loads without error."""
    p = load_potential('barred_mw')
| mit | Python | |
def checkio(d):
    """Return the sum of the numbers in *d* without calling sum().

    The original version evaluated the expression but never returned it,
    so the function always yielded None.  An empty list now sums to 0
    (eval('') would otherwise raise SyntaxError).
    """
    if not d:
        return 0
    return eval('+'.join(map(str, d)))
| mit | Python | |
3063044995a14921fd0da2ebbbd57942bb5ca24d | Add the skeleton and docs | basepi/hubble,basepi/hubble | hubblestack/extmods/modules/safecommand.py | hubblestack/extmods/modules/safecommand.py | # -*- encoding: utf-8 -*-
'''
Safe Command
============
The idea behind this module is to allow an arbitrary command to be executed
safely, with the arguments to the specified binary (optionally) coming from
the fileserver.
For example, you might have some internal license auditing application for
which you need the ability to modify the command line arguments from
hubblestack_data. But what you don't want is the ability to execute arbitrary
commands from hubblestack_data. You also want to avoid command injection.
This module allows for this functionality.
'''
from __future__ import absolute_import
import logging
from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
def run(command, args=None, override_file=None):
    '''
    This function allows a specific command to be run, with the option to have
    command-line arguments for the command to be defined in hubblestack_data.
    The command is run with python_shell=False, which will prevent command
    injection.
    command
        The command to be run. Usually just the binary name, but can also
        include arguments/flags that need to be inserted to make the command
        safe, such as sandbox flags.
    args
        The rest of the args for the command. Can be a string or a list.
    override_file
        A fileserver location (``salt://this/is/a/path.txt``). The contents
        of the file at this location will be used *instead of* ``args``
    '''
    # TODO(review): skeleton only -- argument handling, override_file fetch
    # and the python_shell=False execution are not implemented yet.
    pass
| apache-2.0 | Python | |
4a73c4faecf3584e6a18861fec8c7c97b1b72e1c | add initial migration | kojdjak/django-reservations,kojdjak/django-reservations | reservations/migrations/0001_initial.py | reservations/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.dev20160603044730 on 2016-06-03 11:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the reservations app: Venue -> Field -> Reservation,
    # with each Reservation owned by a user (swappable AUTH_USER_MODEL).
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Field',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Reservation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reservations.Field')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Venue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        # Venue FK is added after creation because Venue is created last.
        migrations.AddField(
            model_name='field',
            name='venue',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reservations.Venue'),
        ),
    ]
| mit | Python | |
d1f595068cad18695c048a7620cc16098281bc9e | fix option and working on stand-alone python demo | UM-ARM-Lab/sdf_tools | scripts/3d_sdf_demo_rviz.py | scripts/3d_sdf_demo_rviz.py | """
Demonstrates creating a 3d SDF
displays it in rviz
"""
import ros_numpy
import rospy
import numpy as np
from geometry_msgs.msg import Point
from sdf_tools.utils_3d import compute_sdf_and_gradient
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import MarkerArray, Marker
def create_point_cloud():
    """Sample a deterministic 200x3 demo point cloud.

    Two axis-aligned boxes of 100 points each, drawn from a fixed
    RandomState(0) so every call returns the exact same array.
    """
    sampler = np.random.RandomState(0)
    upper_box = sampler.uniform([0.5, 0.5, 0], [0.7, 0.6, 0.5], [100, 3])
    lower_box = sampler.uniform([0.5, 0.2, 0.25], [0.75, 0.4, 0.5], [100, 3])
    return np.vstack((upper_box, lower_box))
def point_cloud_to_voxel_grid(pc: np.ndarray, shape, res, origin_point):
    """Rasterize an (N, 3) point cloud into a float32 occupancy grid.

    A cell is 1.0 if at least one point falls inside it, else 0.0.
    Cell index is floor((p - origin_point) / res) per axis.

    NOTE(review): indices are not bounds-checked -- points outside the grid
    would raise or wrap to negative indices; confirm inputs are in range.
    """
    grid = np.zeros(shape, dtype=np.float32)
    cell_idx = ((pc - origin_point) / res).astype(np.int64)
    grid[cell_idx[:, 0], cell_idx[:, 1], cell_idx[:, 2]] = 1.0
    return grid
def visualize_point_cloud(pub: rospy.Publisher, pc: np.ndarray):
    """Publish an (N, 3) point cloud as a PointCloud2 in the 'world' frame."""
    # ros_numpy.msgify needs a structured (record) array, not a plain (N, 3).
    list_of_tuples = [tuple(point) for point in pc]
    dtype = [('x', np.float32), ('y', np.float32), ('z', np.float32)]
    np_record_array = np.array(list_of_tuples, dtype=dtype)
    msg = ros_numpy.msgify(PointCloud2, np_record_array, frame_id='world', stamp=rospy.Time.now())
    pub.publish(msg)
def visualize_sdf(pub, sdf: np.ndarray, shape, res, origin_point):
    """Publish the SDF as a PointCloud2 with an extra 'distance' channel.

    Each grid cell becomes one point located at its world coordinate,
    carrying its signed distance so rviz can color by distance.
    """
    points = get_grid_points(origin_point, res, shape)
    list_of_tuples = [(p[0], p[1], p[2], d) for p, d in zip(points.reshape([-1, 3]), sdf.flatten())]
    dtype = [('x', np.float32), ('y', np.float32), ('z', np.float32), ('distance', np.float32)]
    np_record_array = np.array(list_of_tuples, dtype=dtype)
    msg = ros_numpy.msgify(PointCloud2, np_record_array, frame_id='world', stamp=rospy.Time.now())
    pub.publish(msg)
def get_grid_points(origin_point, res, shape):
    """Return the world coordinate of every voxel center/corner of the grid.

    Inverse of the index mapping used by point_cloud_to_voxel_grid
    (idx = (p - origin) / res), so: p = origin + idx * res.

    Returns an array of shape (shape[0], shape[1], shape[2], 3).

    Fixes two defects in the original:
    - np.meshgrid defaults to 'xy' indexing, which swaps the first two axes
      (giving (shape[1], shape[0], shape[2], 3)) and so mispairs points with
      the (shape[0], shape[1], shape[2]) sdf/gradient arrays when both are
      flattened; 'ij' keeps them aligned.
    - world position is origin + idx * res, not idx * res - origin (the
      demo's zero origin hid this sign error).
    """
    indices = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]),
                          np.arange(shape[2]), indexing='ij')
    indices = np.stack(indices, axis=-1)
    return origin_point + indices * res
def rviz_arrow(position: np.ndarray, target_position: np.ndarray, label: str = 'arrow', **kwargs):
    """Build an rviz ARROW Marker from *position* to *target_position*.

    Recognized kwargs: 'idx' (marker id, default 0) and 'scale' (multiplier
    for shaft/head sizes, default 1.0). Any other kwargs (e.g. frame_id
    passed by callers) are silently ignored; the frame is hardcoded 'world'.
    """
    idx = kwargs.get("idx", 0)
    arrow = Marker()
    arrow.action = Marker.ADD  # create or modify
    arrow.type = Marker.ARROW
    arrow.header.frame_id = "world"
    arrow.header.stamp = rospy.Time.now()
    arrow.ns = label
    arrow.id = idx
    # Shaft diameter / head diameter / head length, scaled together.
    arrow.scale.x = kwargs.get('scale', 1.0) * 0.0025
    arrow.scale.y = kwargs.get('scale', 1.0) * 0.004
    arrow.scale.z = kwargs.get('scale', 1.0) * 0.006
    arrow.pose.orientation.w = 1
    start = Point()
    start.x = position[0]
    start.y = position[1]
    start.z = position[2]
    end = Point()
    end.x = target_position[0]
    end.y = target_position[1]
    end.z = target_position[2]
    # With two points set, rviz draws the arrow between them.
    arrow.points.append(start)
    arrow.points.append(end)
    # Only alpha is set; r/g/b default to 0, so the arrow renders opaque black.
    arrow.color.a = 1
    return arrow
def plot_arrows_rviz(pub, positions, directions):
    """Publish one arrow Marker per (position, direction) pair.

    Each arrow points from position to position + direction; 'idx' keeps
    marker ids unique so rviz updates rather than stacks them.
    """
    msg = MarkerArray()
    for i, (position, direction) in enumerate(zip(positions, directions)):
        msg.markers.append(rviz_arrow(position, position + direction, frame_id='world', idx=i, label='sdf_grad'))
    pub.publish(msg)
def main():
    """Build a demo SDF from a synthetic point cloud and publish it to rviz."""
    rospy.init_node("sdf_demo_rviz")
    pc_pub = rospy.Publisher("points", PointCloud2, queue_size=10)
    sdf_pub = rospy.Publisher("sdf", PointCloud2, queue_size=10)
    sdf_grad_pub = rospy.Publisher("sdf_grad", MarkerArray, queue_size=10)
    # Give the publishers a moment to register before publishing.
    rospy.sleep(0.1)
    pc = create_point_cloud()
    res = 0.04
    shape = [25, 20, 15]
    origin_point = np.array([0, 0, 0], dtype=np.float32)
    vg = point_cloud_to_voxel_grid(pc, shape, res, origin_point)
    sdf, sdf_grad = compute_sdf_and_gradient(vg, res, origin_point)
    grid_points = get_grid_points(origin_point, res, shape)
    # Draw only every 8th gradient arrow, shrunk so they stay readable.
    subsample = 8
    grad_scale = 0.02
    # Re-publish a few times so late rviz subscribers still receive the data.
    for i in range(5):
        visualize_point_cloud(pc_pub, pc)
        visualize_sdf(sdf_pub, sdf, shape, res, origin_point)
        plot_arrows_rviz(sdf_grad_pub, grid_points.reshape([-1, 3])[::subsample], sdf_grad.reshape([-1, 3])[::subsample] * grad_scale)
        rospy.sleep(0.1)
if __name__ == '__main__':
    main()
| bsd-2-clause | Python | |
4e1062ea02ccd99940da18a887e2092b0a9e5650 | add basic test | vyos/vyos-1x,vyos/vyos-1x,vyos/vyos-1x,vyos/vyos-1x | scripts/cli/test_service_mdns-repeater.py | scripts/cli/test_service_mdns-repeater.py | #!/usr/bin/env python3
#
# Copyright (C) 2020 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import unittest
from psutil import process_iter
from vyos.configsession import ConfigSession
base_path = ['service', 'mdns', 'repeater']
intf_base = ['interfaces', 'dummy']
class TestServiceMDNSrepeater(unittest.TestCase):
    """Smoke test for the VyOS mDNS repeater service configuration."""
    def setUp(self):
        # One config session per test, keyed by this process id.
        self.session = ConfigSession(os.getpid())
    def tearDown(self):
        # Remove the service and the dummy interfaces created by the test,
        # then commit so the running system is left clean.
        self.session.delete(base_path)
        self.session.delete(intf_base + ['dum10'])
        self.session.delete(intf_base + ['dum20'])
        self.session.commit()
        del self.session
    def test_service(self):
        # Service required a configured IP address on the interface
        self.session.set(intf_base + ['dum10', 'address', '192.0.2.1/30'])
        self.session.set(intf_base + ['dum20', 'address', '192.0.2.5/30'])
        self.session.set(base_path + ['interface', 'dum10'])
        self.session.set(base_path + ['interface', 'dum20'])
        self.session.commit()
        # Check for running process
        self.assertTrue("mdns-repeater" in (p.name() for p in process_iter()))
if __name__ == '__main__':
    unittest.main()
| lgpl-2.1 | Python | |
abac63b3ac4646af52ae7cc2a3cf90c180db9ff1 | Create views2.py | SNAPPETITE/backend,SNAPPETITE/backend,SNAPPETITE/backend,SNAPPETITE/backend,SNAPPETITE/backend | app/views2.py | app/views2.py | from flask import Flask
from flask import render_template
from flask import request, redirect
from flask import json, jsonify
from flask import make_response
app = Flask(__name__)
# NOTE(review): a `global` statement at module level is a no-op -- it declares
# nothing, and `email` inside signup() below is still a plain local variable.
global email
#renders the main website using the index template.html template
@app.route('/')
def hello_world():
    return render_template('index.html')
#using a post method to sign up
@app.route('/signup', methods = ['POST'])
def signup():
    # `email` is local to this function (no `global email` here); the value is
    # only printed before redirecting back to the index page.
    email = request.form['email']
    print("most recent email address is '" + email + "'")
    return redirect('/')
#Making the JSON with fake data.
@app.route('/JSON')
def dispJSON():
    # Placeholder payload; the 'request_type:____' values are still blanks.
    varlist = {'email': 'fake-email@example.com'}
    methodlist={'signup': 'type: POST',
    'hello_world':'request_type:____',
    'dispJSON': 'request_type:____'}
    return jsonify(Variables=varlist, methods=methodlist)
if __name__ == '__main__':
    app.run(debug=True)
| mit | Python | |
f770299a4de9e18c84fe67c3235b82066a9a98c2 | Create boids_init.py | marios-tsiliakos/AgentsGHPython | src/boids_init.py | src/boids_init.py | def Agents_Init(n):
Boids_population=[]#agent population list
for i in range(len(n)):
#instantiate the boid class
#float(u0)
#float(v0)
pos_init= n[i]
t,u0,v0 = Terrain.ClosestPoint(pos_init)
pos_init = Terrain.PointAt(u0,v0)
#print pos_init
vec_init = random_vel()
#print vec_init
Boids_population.append(Boid(pos_init,vec_init,N_distance,N_angle,Al_value,Se_value,Co_value))
return Boids_population
| mit | Python | |
53d0d5886670ba33a645fd8c82479fb4495d25d1 | Add new migrations (use "" as default for hash) | sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz,sqlviz/sqlviz | website/migrations/0002_auto_20150118_2210.py | website/migrations/0002_auto_20150118_2210.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds query-result caching support: a 'cacheable' toggle on Query and a
    # 'hash' key on QueryCache; also makes run_time auto-update on save.
    dependencies = [
        ('website', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='query',
            name='cacheable',
            field=models.BooleanField(default=True, help_text=b'allows this query result to be cached'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='querycache',
            name='hash',
            # default='' only backfills existing rows; preserve_default=False
            # drops it from the final field definition.
            field=models.CharField(default='', max_length=1024),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='querycache',
            name='run_time',
            field=models.DateTimeField(auto_now=True),
            preserve_default=True,
        ),
    ]
| mit | Python | |
c8a8944f97b746a42d18aee8718b6ac90e0b883a | Introduce prepare_pub_packages.py helper script for uploading pub packages | chinmaygarde/mojo,afandria/mojo,jianglu/mojo,afandria/mojo,jianglu/mojo,afandria/mojo,chinmaygarde/mojo,afandria/mojo,jianglu/mojo,jianglu/mojo,chinmaygarde/mojo,jianglu/mojo,afandria/mojo,chinmaygarde/mojo,afandria/mojo,chinmaygarde/mojo,chinmaygarde/mojo,chinmaygarde/mojo,jianglu/mojo,afandria/mojo,chinmaygarde/mojo,chinmaygarde/mojo,afandria/mojo,jianglu/mojo,jianglu/mojo | mojo/tools/prepare_pub_packages.py | mojo/tools/prepare_pub_packages.py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prepare pub packages for upload"""
# NOTE: Requires the following build artifacts:
# *) out/Config/gen/dart-pkg
# *) out/Config/apks/
# By default Config is 'android_Release'
import argparse
import os
import shutil
import sys
import tempfile
def remove_empty_dirs(root_dir):
    """Delete empty directories beneath *root_dir*.

    os.removedirs also prunes ancestors that become empty, so chains of
    nested empty directories disappear even though the walk is top-down.
    """
    for parent, subdirs, _files in os.walk(root_dir):
        for sub in subdirs:
            candidate = os.path.join(parent, sub)
            if not os.listdir(candidate):
                os.removedirs(candidate)
def copy_package(src_dir, dst_dir, ignore=None):
    """Replace *dst_dir* with a fresh copy of *src_dir*.

    Any existing destination tree is removed first (missing destinations are
    tolerated); symlinks are followed, and *ignore* is forwarded to
    shutil.copytree (see shutil.ignore_patterns).
    """
    shutil.rmtree(dst_dir, ignore_errors=True)
    shutil.copytree(src_dir, dst_dir, symlinks=False, ignore=ignore)
def install_mojo_license_and_authors_files(sdk_root, dst_dir):
    """Copy the Mojo LICENSE and AUTHORS files from *sdk_root* into *dst_dir*."""
    for filename in ('LICENSE', 'AUTHORS'):
        shutil.copy(os.path.join(sdk_root, filename), dst_dir)
def main():
    """Stage pub packages (and optionally the Sky APK) for upload.

    Returns -1 on failure (missing SkyDemo.apk), None on success; the
    __main__ guard below turns that into the process exit status.
    """
    parser = argparse.ArgumentParser(
        description='Prepare pub packages for upload')
    parser.add_argument('--config',
                        type=str,
                        default='android_Release')
    parser.add_argument('--sdk-root',
                        type=str,
                        default='.')
    parser.add_argument('--packages',
                        default=['mojo', 'sky', 'mojom'])
    parser.add_argument('--out-dir',
                        default=None)
    parser.add_argument('build_dir',
                        type=str)
    args = parser.parse_args()

    rel_build_dir = os.path.join(args.build_dir, args.config)
    build_dir = os.path.abspath(rel_build_dir)
    sdk_dir = os.path.abspath(args.sdk_root)
    print('Using SDK in %s' % sdk_dir)
    print('Using build in %s' % build_dir)

    preparing_sky_package = 'sky' in args.packages

    # The 'sky' package additionally needs the prebuilt demo APK.
    apks_dir = os.path.join(build_dir, 'apks')
    sky_apk_filename = 'SkyDemo.apk'
    sky_apk = os.path.join(apks_dir, sky_apk_filename)
    if preparing_sky_package and (not os.path.exists(sky_apk)):
        print('Required file %s not found.' % sky_apk)
        return -1  # propagated as the exit status by the __main__ guard

    temp_dir = args.out_dir
    if temp_dir:
        # Start from a clean output directory; tolerate it not existing yet.
        shutil.rmtree(temp_dir, ignore_errors=True)
        os.makedirs(temp_dir)
    else:
        # Create a temporary directory to copy files into.
        temp_dir = tempfile.mkdtemp(prefix='pub_packages-')
    print('Packages ready to be uploaded in %s' % temp_dir)

    # Copy each requested package out of the generated dart-pkg tree.
    dart_pkg_dir = os.path.join(build_dir, 'gen', 'dart-pkg')
    for package in args.packages:
        print('Preparing package %s' % package)
        src_dir = os.path.join(dart_pkg_dir, package)
        dst_dir = os.path.join(temp_dir, package)
        ignore = None
        # Special case 'mojom' package to not copy generated mojom.dart files.
        if package == 'mojom':
            ignore = shutil.ignore_patterns('*.mojom.dart')
        copy_package(src_dir, dst_dir, ignore)
        # Special case 'mojom' package to remove empty directories.
        if package == 'mojom':
            remove_empty_dirs(dst_dir)
        install_mojo_license_and_authors_files(sdk_dir, dst_dir)

    # Copy Sky apk.
    if preparing_sky_package:
        prepared_apks_dir = os.path.join(temp_dir, 'sky', 'apks')
        os.makedirs(prepared_apks_dir)
        shutil.copyfile(sky_apk,
                        os.path.join(prepared_apks_dir, sky_apk_filename))


if __name__ == '__main__':
    # Previously main() was called bare, discarding the -1 failure return and
    # always exiting 0; sys.exit(None) still exits 0 on success.
    sys.exit(main())
| bsd-3-clause | Python | |
a91e4313a7cd7d0089dc9d2cef9c77d3f928c1bf | add tests for naff | adrn/SuperFreq | tests/test_naff.py | tests/test_naff.py | # coding: utf-8
""" Test action-angle stuff """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os
import sys
import logging
# Third-party
from astropy import log as logger
from astropy.utils.console import color_print
import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np
import pytest
from scipy.signal import argrelmax
# Project
from ..naff import NAFF
from ...integrate import DOPRI853Integrator
from ... import potential as gp
from ... import dynamics as gd
logger.setLevel(logging.DEBUG)
# Plots produced by these tests are written here for manual inspection.
plot_path = "plots/tests/dynamics/naff"
if not os.path.exists(plot_path):
    os.makedirs(plot_path)
color_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "yellow")
color_print("To view plots:", "green")
print(" open {}".format(plot_path))
color_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "yellow")
def estimate_axisym_freqs(t, w):
    """Estimate (radial, azimuthal, vertical) frequencies of an orbit.

    t is the time array; w is the orbit array with w[:, 0, 0:3] = (x, y, z).
    Each frequency is the mean of 2*pi over the spacing between successive
    relative maxima of R, phi and z respectively.
    """
    radius = np.sqrt(w[:, 0, 0]**2 + w[:, 0, 1]**2)
    azimuth = np.arctan2(w[:, 0, 1], w[:, 0, 0])
    height = w[:, 0, 2]
    freqs = []
    for series in (radius, azimuth, height):
        peaks = argrelmax(series)[0]
        freqs.append(np.mean(2*np.pi / (t[peaks[1:]] - t[peaks[:-1]])))
    return tuple(freqs)
class TestNAFF(object):
    """Shared fixture: subclasses set self.t, self.w (and self.potential for
    6-D orbits) in their own setup(), then call super().setup()."""
    def setup(self):
        name = self.__class__.__name__
        logger.debug("-"*79)
        logger.debug(name)
        # plot and save orbit
        if self.w.shape[-1] == 6:
            fig = gd.plot_orbits(self.w, marker='.', linestyle='none', alpha=0.2)
            fig.savefig(os.path.join(plot_path,"orbit_{}.png".format(name)))
        # plot energy conservation
        # NOTE(review): TestHandConstructed never sets self.potential, so this
        # line looks like it would raise AttributeError there -- confirm.
        E = self.potential.total_energy(self.w[:,0,:3],self.w[:,0,3:])
        plt.semilogy(self.t[1:], np.abs(E[1:]-E[:-1]), marker=None)
    def test_naff(self):
        # Run the NAFF frequency-finding on whatever orbit the subclass built.
        naff = NAFF(self.t)
        f,d,ixes = naff.find_fundamental_frequencies(self.w, nvec=15)
# -----------------------------------------------------------------------
# Hand-constructed time-series
#
class TestHandConstructed(TestNAFF):
    """Synthetic complex time series: a sum of three sinusoids with known
    periods, stored as a real (N, 2) array of (Re, Im) columns."""
    def setup(self):
        Ts = np.array([1., 1.2, 1.105])
        As = (1., 0.5, 0.2)
        self.t = np.linspace(0,100,50000)
        self.true_freqs = (2*np.pi) / Ts
        f = np.sum([A*(np.cos(2*np.pi*self.t/T) + 1j*np.sin(2*np.pi*self.t/T)) for T,A in zip(Ts,As)], axis=0)
        self.w = np.vstack((f.real, f.imag)).T
        super(TestHandConstructed,self).setup()
# -----------------------------------------------------------------------
# Harmonic Oscillator
#
class TestHarmonicOscillator(TestNAFF):
    """Orbit integrated in a 3-D harmonic oscillator with known frequencies."""
    def setup(self):
        Ts = np.array([1., 1.2, 1.105])
        self.true_freqs = 2*np.pi/Ts
        # 50000 steps over t in [0, 100].
        t2 = 100
        nsteps = 50000
        dt = t2/float(nsteps)
        self.potential = gp.HarmonicOscillatorPotential(self.true_freqs)
        self.t,self.w = self.potential.integrate_orbit([1,0,0.2,0.,0.1,-0.8],
                                                       dt=dt, nsteps=nsteps,
                                                       Integrator=DOPRI853Integrator)
        super(TestHarmonicOscillator,self).setup()
| mit | Python | |
7e8f110610c6c4d02b042a1f47a7385c0d18c3bb | Create LeetCode-ReverseBits.py | lingcheng99/Algorithm | LeetCode-ReverseBits.py | LeetCode-ReverseBits.py | """
Reverse bits of a given 32 bits unsigned integer.
For example, given input 43261596 (represented in binary as 00000010100101000001111010011100),
return 964176192 (represented in binary as 00111001011110000010100101000000).
"""
class Solution(object):
    def reverseBits(self, n):
        """Reverse the bit order of a 32-bit unsigned integer.

        :type n: int
        :rtype: int
        """
        # Render n as a zero-padded 32-character binary string, flip it,
        # and read the result back as base-2.
        forward = format(n, '032b')
        return int(forward[::-1], 2)
| mit | Python | |
322a7907c6dbd6f742b19161869d46a13fb691d8 | convert pkl to raw | mjirik/lisa,mjirik/lisa | src/pkl_to_raw.py | src/pkl_to_raw.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module is used for converting data from pkl format to raw.
"""
import argparse
import misc
import qmisc
def main():
    """Load a pickled volume, un-crop it to its original shape and write it
    out with SimpleITK.

    Fix: removed a leftover `import ipdb; ipdb.set_trace()` debugger
    breakpoint that halted every run before the conversion step.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-i', '--inputfile', default=None,
                        help='File as .pkl')
    parser.add_argument('-o', '--outputfile', default=None,
                        help='File as raw')
    args = parser.parse_args()

    data = misc.obj_from_file(args.inputfile, filetype='pickle')
    # Restore the cropped volume into its original bounding shape.
    data3d_uncrop = qmisc.uncrop(data['data3d'], data['crinfo'],
                                 data['orig_shape'])

    # Imported locally so importing this module does not require SimpleITK.
    import SimpleITK as sitk
    # NOTE(review): isVector=True treats the last axis as vector components
    # rather than a spatial dimension -- confirm this is intended for 3-D data.
    sitk_img = sitk.GetImageFromArray(data3d_uncrop, isVector=True)
    sitk.WriteImage(sitk_img, args.outputfile)
main()
| bsd-3-clause | Python | |
a76a9bf10450eb7f5f69eb2264f75c0cd3a4d283 | Add example | votti/PyLaTeX,sebastianhaas/PyLaTeX,ovaskevich/PyLaTeX,JelteF/PyLaTeX,JelteF/PyLaTeX,votti/PyLaTeX,bjodah/PyLaTeX,ovaskevich/PyLaTeX,jendas1/PyLaTeX,sebastianhaas/PyLaTeX,bjodah/PyLaTeX,jendas1/PyLaTeX | examples/plt.py | examples/plt.py | #!/usr/bin/python
import matplotlib.pyplot as pyplot
from pylatex import Document, Section, Plt
if __name__ == '__main__':
    # Draw a simple line plot with matplotlib...
    x = [0, 1, 2, 3, 4, 5, 6]
    y = [15, 2, 7, 1, 5, 6, 9]
    pyplot.plot(x, y)
    # ...then embed the current pyplot figure into a PyLaTeX document.
    doc = Document('matplolib_pdf')
    doc.append('Introduction.')
    with doc.create(Section('I am a section')):
        doc.append('Take a look at this beautiful plot:')
        with doc.create(Plt(position='htbp')) as plot:
            plot.add_plot(pyplot)
            plot.add_caption('I am a caption.')
        doc.append('Created using matplotlib.')
        doc.append('Conclusion.')
    # Renders matplolib_pdf.tex and compiles it to PDF.
    doc.generate_pdf()
| mit | Python | |
a57157352e40439ba4155eaa4a62ba7d62c793dc | Add ielex/lexicon/migrations/0090_issue_236.py | lingdb/CoBL-public,lingdb/CoBL-public,lingdb/CoBL-public,lingdb/CoBL-public | ielex/lexicon/migrations/0090_issue_236.py | ielex/lexicon/migrations/0090_issue_236.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Max
from datetime import datetime
def forwards_func(apps, schema_editor):
    '''
    Computes statistics for https://github.com/lingdb/CoBL/issues/236

    This "migration" only prints statistics and then deliberately raises
    ZeroDivisionError via print(1/0) so it is never recorded as applied.
    '''
    # Models to work with:
    LanguageList = apps.get_model('lexicon', 'LanguageList')
    MeaningList = apps.get_model('lexicon', 'MeaningList')
    Lexeme = apps.get_model('lexicon', 'Lexeme')
    CognateClass = apps.get_model('lexicon', 'CognateClass')
    CognateJudgement = apps.get_model('lexicon', 'CognateJudgement')
    LanguageClade = apps.get_model('lexicon', 'LanguageClade')
    Clade = apps.get_model('lexicon', 'Clade')
    # Data to work with:
    current = LanguageList.objects.get(name='Current')
    jena200 = MeaningList.objects.get(name='Jena200')
    languageIds = current.languages.values_list('id', flat=True)
    meaningIds = jena200.meanings.values_list('id', flat=True)
    lexemeIds = Lexeme.objects.filter(
        language_id__in=languageIds,
        meaning_id__in=meaningIds).values_list('id', flat=True)
    cognateClassIds = CognateJudgement.objects.filter(
        lexeme_id__in=lexemeIds).values_list(
        'cognate_class_id', flat=True)
    cognateClasses = CognateClass.objects.filter(
        id__in=cognateClassIds,
        root_form='').all()  # Only without root_form is wanted.
    def compute(lowerBranchBound):
        # The computation we want to perform twice:
        # yield "<cognate class id>: <clade names>" for every cognate class
        # whose lexemes span more than lowerBranchBound distinct clade sets.
        for cognateClass in cognateClasses:
            lexemeIds = CognateJudgement.objects.filter(
                cognate_class_id=cognateClass.id).values_list(
                'lexeme_id', flat=True)
            # Need to investigate lexemes:
            cladeNamesSet = set()
            for lexeme in Lexeme.objects.filter(
                    id__in=lexemeIds,
                    meaning_id__in=meaningIds).all():
                # Need to investigate clades:
                clades = Clade.objects.filter(
                    id__in=LanguageClade.objects.filter(
                        language_id=lexeme.language_id).values_list(
                        'clade_id', flat=True)).all()
                cladeNamesSet.add(', '.join([c.cladeName for c in clades]))
            # Yield interesting clades:
            if len(cladeNamesSet) > lowerBranchBound:
                bNames = ', '.join('"%s"' % n for n in cladeNamesSet)
                yield("%s: %s" % (cognateClass.id, bNames))
        yield('')  # EOG
    print('Task 1')
    wanted = list(compute(2))
    print('Number of classes: %s' % len(wanted))
    for w in wanted:
        print(w)
    print('Task 2')
    wanted = list(compute(1))
    print('Number of classes: %s' % len(wanted))
    for w in wanted:
        print(w)
    print(1/0)  # Break migration.
def reverse_func(apps, schema_editor):
    # Nothing to undo -- forwards_func never changes data.
    pass
class Migration(migrations.Migration):
    dependencies = [('lexicon', '0089_fix_citation')]
    operations = [
        migrations.RunPython(forwards_func, reverse_func),
    ]
| bsd-2-clause | Python | |
d6e12d64341fbdc4fc0fdfc9792de9310ac6d2ff | Add "dsl" library. | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge | src/puzzle/dsl.py | src/puzzle/dsl.py | """This module is automatically imported into jupyter sessions."""
from puzzle.puzzlepedia import puzzlepedia
# Convenience aliases for notebook users; both names delegate to
# puzzlepedia.parse.
solve = puzzlepedia.parse
parse = puzzlepedia.parse
| mit | Python | |
05f9717d4f7ef1f2a4bfeec382cc30d311b1fd21 | Create cat_dog.py | dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey | Python/CodingBat/cat_dog.py | Python/CodingBat/cat_dog.py | # http://codingbat.com/prob/p164876
def cat_dog(str):
cat_count = 0
dog_count = 0
for i in range(len(str)-2):
if str[i:i+3] == "cat":
cat_count += 1
elif str[i:i+3] == "dog":
dog_count += 1
return (cat_count == dog_count)
| mit | Python | |
8422fc90b804e250c3e918f028271cdb0b95d076 | test case for operation access | mission-liao/pyopenapi | pyopenapi/tests/v2_0/test_op_access.py | pyopenapi/tests/v2_0/test_op_access.py | from pyopenapi import SwaggerApp, utils
from ..utils import get_test_data_folder
import unittest
def _check(u, op):
    # Shared assertion: whatever access path produced `op`, it must be the
    # 'addPet' operation.
    u.assertEqual(op.operationId, 'addPet')
class OperationAccessTestCase(unittest.TestCase):
    """ test for methods to access Operation """
    @classmethod
    def setUpClass(kls):
        kls.app = SwaggerApp._create_(get_test_data_folder(version='2.0', which='wordnik'))
    def test_resolve(self):
        """ resolve an operation by its full JSON pointer """
        _check(self, self.app.resolve(utils.jp_compose(['#', 'paths', '/pet', 'post'])))
    def test_cascade_resolve(self):
        """ resolve the path item first, then reach the operation from it """
        path = self.app.resolve(utils.jp_compose(['#', 'paths', '/pet']))
        _check(self, path.resolve('post'))
        _check(self, path.post)
    def test_tag_operationId(self):
        """ access an operation via app.op, with and without its tag """
        _check(self, self.app.op['pet', 'addPet'])
        _check(self, self.app.op['addPet'])
| mit | Python | |
fb2a8c9b30360f185f3d56504222efc43c6cc1a0 | Add constants.py module | jamesgk/ufo2ft,googlei18n/ufo2ft,moyogo/ufo2ft,googlefonts/ufo2ft,jamesgk/ufo2fdk | Lib/ufo2ft/constants.py | Lib/ufo2ft/constants.py | from __future__ import absolute_import, unicode_literals
# Reverse-DNS key prefixes, presumably used for tool-specific entries in UFO
# lib data -- confirm against the consumers of these constants.
UFO2FT_PREFIX = 'com.github.googlei18n.ufo2ft.'
GLYPHS_PREFIX = 'com.schriftgestaltung.'
# ufo2ft flag enabling production glyph names.
USE_PRODUCTION_NAMES = UFO2FT_PREFIX + "useProductionNames"
# Glyphs.app custom parameter with the inverse meaning.
GLYPHS_DONT_USE_PRODUCTION_NAMES = GLYPHS_PREFIX + "Don't use Production Names"
| mit | Python | |
63f6b3e8b9febd298a8c8b3b0db467c47b7ec5d1 | Create valid_phone_number.py | Kunalpod/codewars,Kunalpod/codewars | valid_phone_number.py | valid_phone_number.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Valid Phone Number
#Problem level: 6 kyu
import re
def validPhoneNumber(phoneNumber):
    """Return True iff phoneNumber has exactly the form '(123) 456-7890'.

    The original paired an unanchored re.match with a len == 14 check;
    since the pattern consumes exactly 14 characters, that is equivalent
    to a fullmatch. Raw string + \\d{n} replace the long character-class runs.
    """
    return bool(re.fullmatch(r'\(\d{3}\) \d{3}-\d{4}', phoneNumber))
| mit | Python | |
37b5531e2cc969e1ee73a46bf372d89871f922a7 | Add array of prime number generator code | everyevery/programming_study,everyevery/algorithm_code,everyevery/algorithm_code,everyevery/programming_study,everyevery/algorithm_code,everyevery/algorithm_code,everyevery/algorithm_code,everyevery/programming_study,everyevery/algorithm_code,everyevery/programming_study,everyevery/algorithm_code,everyevery/programming_study,everyevery/programming_study,everyevery/programming_study,everyevery/algorithm_code,everyevery/programming_study,everyevery/algorithm_code | tools/gen_prime.py | tools/gen_prime.py | import argparse
import sys
# Sieve of Eratosthenes
# Code by David Eppstein, UC Irvine, 28 Feb 2002
# http://code.activestate.com/recipes/117119/
def gen_primes():
    """Generate an infinite, increasing sequence of prime numbers.

    Incremental sieve of Eratosthenes (recipe by David Eppstein,
    http://code.activestate.com/recipes/117119/): `witnesses` maps each
    known composite to the primes that divide it, and is only populated as
    far as the current candidate, keeping memory proportional to the primes
    found so far.
    """
    witnesses = {}
    candidate = 2
    while True:
        factors = witnesses.pop(candidate, None)
        if factors is None:
            # No recorded witness: candidate is prime. Mark its square as the
            # first composite it will witness (smaller multiples already have
            # smaller prime witnesses).
            yield candidate
            witnesses[candidate * candidate] = [candidate]
        else:
            # Composite: push each witnessing prime forward to the next
            # multiple that still needs marking.
            for prime in factors:
                witnesses.setdefault(prime + candidate, []).append(prime)
        candidate += 1
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate prime number array")
    parser.add_argument('n', metavar='N', nargs=1, type=int, help="Limit value")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--count', action='store_const', const=True,
        default=False, help='limit number of generated prime number (default)')
    group.add_argument('--value', action='store_const', const=True,
        default=False, help='limit max value of generated prime number')
    args = parser.parse_args()

    if args.value:
        # Value mode: emit all primes <= N.
        limit = args.n[0]
    else:
        # Count mode: the first prime is written separately below and the
        # enumerate() loop then emits indices 0..limit, so limit = N - 2
        # yields exactly N primes in total.
        limit = args.n[0] - 2

    prime = iter(gen_primes())
    # Fix: `prime.next()` is Python-2-only and crashed under Python 3;
    # the built-in next() works on 2.6+ and 3.x alike.
    sys.stdout.write("{" + str(next(prime)))
    for idx, val in enumerate(prime):
        if args.value and limit < val:
            break
        elif not args.value and limit < idx:
            # Guarded with `not args.value` to make the count-mode cutoff
            # explicit (pi(N) < N meant it could never fire in value mode,
            # but only by accident).
            break
        sys.stdout.write(", " + str(val))
    print("}")
| mit | Python | |
0dfdf0c10be48c220b8c2fd7c18b3a04dbf639ba | add debugging slash.py script | ros/ros,ros/ros,ros/ros | tools/rosbag/scripts/slash.py | tools/rosbag/scripts/slash.py | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
PKG = 'rosbag'
import roslib; roslib.load_manifest(PKG)
import optparse
import os
import shutil
import sys
import rosbag
from rosbag import bag
if __name__ == '__main__':
parser = optparse.OptionParser(usage='slash.py [options] BAGFILE1 [BAGFILE2 ...]',
description='Slash one or more bag files.')
(options, args) = parser.parse_args(sys.argv[1:])
if len(args) < 1:
parser.error('You must specify at least one bag file.')
for filename in args:
b = rosbag.Bag(filename)
index_pos = b._index_data_pos
b.close()
(root, ext) = os.path.splitext(filename)
slash_filename = '%s.slash%s' % (root, ext)
shutil.copy(filename, slash_filename)
f = open(slash_filename, 'r+b')
f.seek(b._file_header_pos)
header = {
'op': bag._pack_uint8(bag._OP_FILE_HEADER),
'index_pos': bag._pack_uint64(0),
'conn_count': bag._pack_uint32(0),
'chunk_count': bag._pack_uint32(0)
}
bag._write_record(f, header, padded_size=bag._FILE_HEADER_LENGTH)
f.truncate(index_pos / 100)
f.close()
print '%s slashed.' % slash_filename
(root, ext) = os.path.splitext(filename)
reindex_filename = '%s.reindex%s' % (root, ext)
shutil.copy(slash_filename, reindex_filename)
bv = rosbag.Bag(slash_filename, allow_unindexed=True)
version = bv.version
bv.close()
if version == 102:
b = rosbag.Bag(slash_filename, allow_unindexed=True)
reindexed = rosbag.Bag(reindex_filename, 'w')
b.reindex()
try:
for (topic, msg, t) in b.read_messages():
print topic, t
reindexed.write(topic, msg, t)
except:
pass
reindexed.close()
b.close()
else:
try:
b = rosbag.Bag(reindex_filename, 'a', allow_unindexed=True)
except Exception, ex:
print str(ex)
try:
b.reindex()
except:
pass
b.close()
| bsd-3-clause | Python | |
5f58f4cec89570a1b884847aad84bca7a88d5a29 | Create ballDetect.py | ilyajob05/ballDetect | ballDetect.py | ballDetect.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import cv2
import numpy as np
from math import *
import time
# Input video plus an MJPG writer for the annotated debug mosaic; the
# (1280, 720) output size must match the concatenated frames built in the
# main loop below.
cap = cv2.VideoCapture('video.3gp')
fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
writer = cv2.VideoWriter('output.avi', fourcc, 30.0, (1280, 720))
# ball tracking class
class CBall:
ballPosition = {'x': 0, 'y': 0, 'r': 0}
distance = 0.0
countFit = 0
distanceThreshold = 15
repeatThreshold = 3
def setPos(self, x, y, r):
newDistance = sqrt(pow(x, 2) + pow(y, 2))
if abs(self.distance - newDistance) < self.distanceThreshold:
self.countFit += 1
print 'addFit'
else:
self.countFit = 0
print 'resetFit'
# update position
self.distance = newDistance
self.ballPosition['x'], self.ballPosition['y'], self.ballPosition['r'] = x, y, r
# is over limit
if self.countFit > self.repeatThreshold:
return True
else:
return False
ball = CBall()
tracker = None
while (True):
    ret, frame = cap.read()
    if ret:
        startTime = time.time()
        # Work at half resolution to speed up detection.
        scale = 0.5
        inputImg = cv2.resize(frame, (0, 0), fx=scale, fy=scale)
        # NOTE(review): frames from VideoCapture are BGR; COLOR_RGB2HSV swaps
        # R and B, so the 104-112 hue window is presumably tuned for that --
        # confirm before changing the conversion.
        HSVImg = cv2.cvtColor(inputImg, cv2.COLOR_RGB2HSV)
        thresholdImg = cv2.inRange(HSVImg[:, :, 0], 104, 112)
        blurImg = cv2.blur(thresholdImg, (7, 7))
        circles = cv2.HoughCircles(blurImg, cv2.HOUGH_GRADIENT, 2, 20, param1=400, param2=90)
        blurImg = cv2.cvtColor(blurImg, cv2.COLOR_GRAY2BGR)
        # draw circles
        if circles is not None:
            for (x, y, r) in circles[0, :]:
                if ball.setPos(x, y, r):
                    # update tracker
                    # Detection stabilized: (re)start a KCF tracker on the
                    # bounding box of the detected circle.
                    del tracker
                    tracker = cv2.TrackerKCF_create()
                    ok = tracker.init(inputImg, (int(x - r), int(y - r), int(r * 2), int(r * 2)))
                cv2.rectangle(HSVImg, (int(x - r), int(y - r)), (int(x + r), int(y + r)),
                              (200, 200, 200), 5, 1)
                cv2.circle(blurImg, (x, y), r, (200, 200, 200), 5)
        # if not ballFit:
        if tracker:
            # Advance the tracker on the current frame and draw its box.
            ok, roi = tracker.update(inputImg)
            tracker.clear()
            if ok:
                p1 = (int(roi[0]), int(roi[1]))
                p2 = (int(roi[0]) + int(roi[2]), int(roi[1]) + int(roi[3]))
                cv2.rectangle(inputImg, p1, p2, (0, 200, 200), 5, 1)
        # hsvv = HSVImg[:, :, 0]
        # Assemble a 2x2 mosaic: input | HSV on top, hue | threshold below.
        h, s, v = cv2.split(HSVImg)
        hsvvn = cv2.cvtColor(h, cv2.COLOR_GRAY2BGR)
        imccn1 = np.concatenate((inputImg, HSVImg), axis=1)
        imccn2 = np.concatenate((hsvvn, blurImg), axis=1)
        imccn3 = np.concatenate((imccn1, imccn2), axis=0)
        cv2.imshow('imgOut', imccn3)
        writer.write(imccn3)
        cv2.waitKey(1)
        endTime = time.time()
        print 'time:', endTime - startTime
    else:
        break
cap.release()
writer.release()
| mit | Python | |
fbe270c39a355a62630244aa19c16c74368275f1 | add BackupJSON, a BaseJSONMonitor to read the output of a backup script | Audish/sd-agent-plugins | BackupJSON.py | BackupJSON.py | #! /usr/bin/env python
# This is a Server Density plugin which reports what it founds in a JSON file created by a backup script.
# Author: yaniv.aknin@audish.com
import sys
assert sys.version_info[0] == 2 and sys.version_info[1] >= 6 or sys.version_info[0] > 2, 'needs Python >= v2.6'
from datetime import timedelta
from base import BaseJSONMonitor, REQUIRED
class BackupJSON(BaseJSONMonitor):
    """Server Density plugin that reports the metrics found in the JSON
    file created by a backup script (see the module header comments).
    """

    # (config section, option name, default) triples consumed by the
    # BaseJSONMonitor machinery; the path to the backup JSON is mandatory.
    confValues = (
        ('backup_json', 'filename', REQUIRED),
    )
    # Metric values reported when the key is absent from the JSON file.
    # NOTE(review): totalDuration defaults to -1 while the others default
    # to 0 -- presumably to flag a missing backup run; confirm.
    defaultValues = (
        ('mediaBackedUpFiles', 0),
        ('mediaBackedUpBytes', 0),
        ('databaseDumpDuration', 0),
        ('databaseDumpSize', 0),
        ('totalDuration', -1),
    )
if __name__ == '__main__':
try:
import argparse
except ImportError:
print('you could run this script to test it, if you had argparse installed')
sys.exit(1)
import logging
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-a', '--alt-maximum-age', default=60*60*2, type=int) # 60*60*2 == 2 hours
options = parser.parse_args(sys.argv[1:])
logging.basicConfig()
BackupJSON.maximumAge = timedelta(seconds=options.alt_maximum_age)
plugin = BackupJSON(None, logging, dict(Main=dict(backup_json=options.filename)))
print(plugin.run())
| mit | Python | |
27a1632a768028477f54450ccf7aaa7fba8bc428 | Initialize P09_countdown | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter15/P09_countdown.py | books/AutomateTheBoringStuffWithPython/Chapter15/P09_countdown.py | #! python3
# P09_countdown.py - A simple countdown script.
#
# Note:
# - sound file can be downloaded from http://nostarch.com/automatestuff/
import time, subprocess
timeLeft = 60  # seconds to count down
# Print the remaining seconds once per second.
# NOTE(review): end='' prints the numbers with no separator at all
# ("605958..."); confirm whether a space or newline was intended.
while timeLeft > 0:
    print(timeLeft, end='')
    time.sleep(1)
    timeLeft = timeLeft - 1

# At the end of the countdown, play a sound file.
# NOTE(review): 'see' must be an executable on PATH able to open
# alarm.wav -- confirm the intended opener for this platform.
subprocess.Popen(['see', 'alarm.wav'])
| mit | Python | |
47775471e2e8f0d88cb79d362114b5f49128a492 | Add example of factory usage | duboviy/zca | factory/main.py | factory/main.py | import random
def get_digit():
    """Return a uniformly random digit in the inclusive range 1..9."""
    return random.randrange(1, 10)
from zope.component.factory import Factory
factory = Factory(get_digit, 'random_digit', 'Gives a random digit')
from zope.component import getGlobalSiteManager
from zope.component.interfaces import IFactory
gsm = getGlobalSiteManager()
gsm.registerUtility(factory, IFactory, 'random_digit')
from zope.component import getUtility
assert 1 <= getUtility(IFactory, 'random_digit')() <= 9 # creates digit
from zope.component import createObject
assert 1 <= createObject('random_digit') <= 9 # also creates a digit
| mit | Python | |
49da38f9c3514c8d886e1b6f6b42a29fccaf9e7d | Add split_mdf.py, used for exploring | ajd4096/inject_gba | tools/split_mdf.py | tools/split_mdf.py | #!/bin/env python3
# vim: set fileencoding=latin-1
import sys
import binascii
import getopt
import struct
import zlib
def write_mdf_section(filename, data):
    """Write one raw MDF section (a bytes object) to *filename*.

    Fix: use a context manager so the file handle is closed (and the
    data flushed) deterministically -- the original left the handle to
    the garbage collector.
    """
    with open(filename, 'wb') as section_file:
        section_file.write(data)
def split_mdf_file(filename):
    # Split *filename* into its MDF sections: scan the file in 1024-byte
    # pages and start a new section wherever a page begins with an
    # "MDF\0"/"mdf\0" magic; each section is written to filename.0000,
    # filename.0001, ... via write_mdf_section().
    # Read in the whole file (zelda alldata.bin is 49M)
    data = bytes(open(filename, 'rb').read())
    section = 0
    page_size = 1024
    start_offset = 0
    next_offset = start_offset + page_size
    while next_offset < len(data):
        # Peek at the 4-byte magic at the start of the next page.
        magic = struct.unpack('>4s', data[next_offset: next_offset +4])[0]
        if (magic == b'MDF\0' or magic == b'mdf\0'):
            # Write out the current section
            print("Section", section, "Offset", start_offset, "Length", next_offset - start_offset)
            write_mdf_section(filename + '.%4.4d' % section, data[start_offset : next_offset])
            # Increment the file section #
            section += 1
            # Move our start pointer to the start of the next MDF section
            start_offset = next_offset
        # Check the next page
        next_offset += page_size
    else:
        # while/else: runs once the scan walks past the end of the file;
        # flush the final (possibly short) trailing section.
        # NOTE(review): indentation was reconstructed -- this reads as a
        # while/else (any other pairing would never advance the scan);
        # confirm against the original file.
        print("Section", section, "Offset", start_offset, "Length", next_offset - start_offset)
        write_mdf_section(filename + '.%4.4d' % section, data[start_offset : next_offset])
def main():
    # Command-line entry point: parse -h/--help, then split every file
    # named on the command line into its MDF sections.
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h", ["help"])
    except getopt.GetoptError as err:
        print(str(err))
        sys.exit(2)
    for o, a in opts:
        if o in ("-h", "--help"):
            # NOTE(review): the usage text's internal blank lines could
            # not be recovered exactly from the stored copy.
            print("""
Usage: split_mdf.py [-h] [path/to/alldata.bin]
Splits alldata.bin into each MDF section, written to alldata.bin.0000 etc
""")
            sys.exit(2)
        else:
            # getopt only yields options declared above, so this branch
            # is effectively unreachable.
            assert False, "unhandled option"
    for filename in args:
        split_mdf_file(filename)
if __name__ == "__main__":
main()
| bsd-2-clause | Python | |
091c82579d52cba080c0e2f4d5852884815551a1 | Create RechercheTD1.py | trinhtuananh/AlgoBioinfo | RechercheTexte/RechercheTD1.py | RechercheTexte/RechercheTD1.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def naif(motif, texte):
    """Naive exact string search.

    Returns the list of every start index where *motif* occurs in
    *texte*.  Fix: the original built ``reponse`` but fell off the end
    without returning it, so callers always got ``None``.
    """
    reponse = []
    for i in range(len(texte)):
        # Compare the pattern against the window starting at i.
        if motif == texte[i:len(motif) + i]:
            reponse.append(i)
    return reponse
def RK(motif,texte):
    # Rabin-Karp style search: compare a hash of the pattern against a
    # hash of every text window, then confirm candidate matches
    # character-by-character.  (Python 2 -- uses the print statement.)
    Lmotif=len(motif)
    Ltexte=len(texte)
    # Encode the pattern as a digit string, then reduce it modulo 10.
    motifConverti=conversion(motif)
    motifConverti=hash(motifConverti,10)
    for i in range (Ltexte-Lmotif+1):
        # NOTE(review): the window hash is recomputed from scratch on
        # every iteration instead of being rolled, so this is O(n*m)
        # like the naive search -- confirm whether a rolling hash was
        # intended.
        texteConverti=conversion(texte[i:Lmotif+i])
        texteConverti=hash(texteConverti,10)
        if texteConverti==motifConverti:
            # Hash collision guard: verify the actual characters.
            if texte[i:Lmotif+i]==motif:
                print "Gagne à la position ",i
def hash(texte, base):
    """Interpret the digit string *texte* as an integer and reduce it
    modulo *base* (the toy hash used by the Rabin-Karp search).

    Note: intentionally keeps the original name even though it shadows
    the ``hash`` builtin within this module.
    """
    return int(texte) % base
def conversion(texte):
    """Encode a DNA string as a digit string.

    'a'->'1', 't'->'2', 'c'->'3', 'g'->'4'; any other character maps to
    the error marker '5'.
    """
    codes = {'a': '1', 't': '2', 'c': '3', 'g': '4'}
    return ''.join(codes.get(lettre, '5') for lettre in texte)
def KMP(motif, texte):
    """Knuth-Morris-Pratt search: print every start position of *motif*
    in *texte*.

    Fixes vs the original: the mismatch loop compared ``motif[q+1]``
    (an off-by-one that raises IndexError once q reaches
    ``len(motif)-1``) and the reset after a full match indexed
    ``pi[q-1]`` with the old, pre-reset q.  Both now follow the standard
    0-indexed formulation.
    """
    pi = fonctionPrefixe(motif)
    q = 0  # number of pattern characters currently matched
    for i in range(len(texte)):
        # Fall back through the prefix table while there is a mismatch.
        while q > 0 and motif[q] != texte[i]:
            q = pi[q - 1]
        if motif[q] == texte[i]:
            q = q + 1
        if q == len(motif):
            print("Le motif apparait en position", i - len(motif) + 1)
            # Continue searching for overlapping occurrences.
            q = pi[q - 1]
def fonctionPrefixe(motif):
    """Compute the KMP prefix (failure) table for *motif*.

    pi[q] is the length of the longest proper prefix of motif[:q+1]
    that is also a suffix of it.  Fix: the original loop body referenced
    an undefined variable ``q`` (NameError) and started at index 2,
    never filling pi[1]; this is the standard 0-indexed formulation.
    """
    pi = [0] * len(motif)
    k = 0  # length of the current matched prefix
    for q in range(1, len(motif)):
        while k > 0 and motif[k] != motif[q]:
            k = pi[k - 1]
        if motif[k] == motif[q]:
            k = k + 1
        pi[q] = k
    return pi
KMP ("a","atca");
| apache-2.0 | Python | |
b9d167cf1eba2d55ab7710e78f38c3fa010d21ef | Change init to add inverse strategy | marcharper/Axelrod,ranjinidas/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod | axelrod/strategies/__init__.py | axelrod/strategies/__init__.py | from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
from inverse import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
Inverse
]
| from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
]
| mit | Python |
a15eb286e3235ff627bf9d18a4b4b64845ba00c1 | Create Days_Alive.py | kkkkkbruce/MyMiscPiCode | Days_Alive.py | Days_Alive.py | #-------------------------------------------------------------------------------
# Name: Days_Alive
# Purpose:
#
# Author: KKKKKBruce
#
# Created: 13/02/2013
# Copyright: (c) Kevin 2013
# Licence: The MIT License (MIT)
#-------------------------------------------------------------------------------
import sys
from datetime import date
def days_alive(birthdate):
    """Return the number of whole days elapsed between *birthdate*
    (a datetime.date) and today."""
    return (date.today() - birthdate).days
def main():
    """Entry point: read a birth date from argv (year month day) or,
    failing that, from interactive prompts, then print the day count.

    Fix: wrap the interactive ``input()`` results in ``int()`` -- on
    Python 3 ``input`` returns a string, which made ``date(...)`` raise
    TypeError.  (Harmless on Python 2, where input already evaluated to
    an int.)  Also drops a stray trailing ``pass``.
    """
    if len(sys.argv) == 4:
        birthyear = int(sys.argv[1])
        birthmonth = int(sys.argv[2])
        birthday = int(sys.argv[3])
    else:
        print('LETS FIND OUT HOW MANY DAYS OLD YOU ARE')
        birthyear = int(input("Enter your birthday year (use 4 digits):"))
        birthmonth = int(input("Enter your birthday month (1-12):"))
        birthday = int(input("Enter your birthday day (1-31):"))
    print('\n You are {:,} days old today. How are you going to spend today? \n'.format(days_alive(date(birthyear, birthmonth, birthday))))
if __name__ == '__main__':
main()
| mit | Python | |
00f0ddced329724ce1b6bf49e86520ce47eaa10a | Add migration for Registry | lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin | src/ansible/migrations/0004_registry.py | src/ansible/migrations/0004_registry.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-15 15:28
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ansible', '0003_auto_20170510_2007'),
]
operations = [
migrations.CreateModel(
name='Registry',
fields=[
('project', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='ansible.Project')),
('name', models.CharField(max_length=200)),
],
),
]
| bsd-3-clause | Python | |
61c3b4f6a0afb1743faa6b3174f0e15ef8d8c043 | add unit tests for the relocate_urls function | camptocamp/c2c.recipe.cssmin,sbrunner/c2c.cssmin | tests/c2c_recipe_cssmin_tests.py | tests/c2c_recipe_cssmin_tests.py | import unittest
from c2c.recipe.cssmin.buildout import relocate_urls
class TestCssminRecipe(unittest.TestCase):
def test_relocate_urls(self):
# Same directory:
self.assertEqual(relocate_urls("url( 'foo.png')", "/a/b/src.css", "/a/b/dest.css"), "url('foo.png')")
# Same directory, relative path:
self.assertEqual(relocate_urls("url( 'foo.png' )", "b/src.css", "b/dest.css"), "url('foo.png')")
# Different directory:
self.assertEqual(relocate_urls("url( 'foo.png' )", "/a/b/src.css", "/a/c/dest.css"), "url('../b/foo.png')")
# Only handle url rules:
self.assertEqual(relocate_urls("foo('foo.png' )", "/a/b/src.css", "/a/c/dest.css"), "foo('foo.png' )")
if __name__ == '__main__':
unittest.main()
| mit | Python | |
4990022a7ee0ee3a99414984f7bc54c1f4fee2f3 | Add Python classification template | a-holm/MachinelearningAlgorithms,a-holm/MachinelearningAlgorithms | Classification/classificationTemplate.py | Classification/classificationTemplate.py | # -*- coding: utf-8 -*-
"""Classification template for machine learning."""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import confusion_matrix
# importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
features = dataset.iloc[:, [2, 3]].values # Country, Age, Salary
labels = dataset.iloc[:, 4].values # Purchased
# Splitting the Dataset into a Training set and a Test set
feature_train, feature_test, label_train, label_test = train_test_split(
features, labels, test_size=0.25)
# Feature scaling, normalize scale is important. Especially on algorithms
# involving euclidian distance. Two main feature scaling formulas are:
# Standardisation: x_stand = (x-mean(x))/(standard_deviation(x))
# Normalisation: x_norm = (x-min(x))/(max(x)-min(x))
sc_feature = StandardScaler()
feature_train = sc_feature.fit_transform(feature_train)
feature_test = sc_feature.transform(feature_test)
# Fitting the Classification model to the dataset
classifier = None # Create
# Predicting the results of the Test set
y_pred = classifier.predict(feature_test)
# Creating the Confusion Matrix
cm = confusion_matrix(label_test, y_pred)
# Visualize the Training set results
"""X_set, y_set = feature_train, label_train
X1, X2 = np.meshgrid(
np.arange(
start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01
),
np.arange(
start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01
)
)
plt.contourf(
X1, X2, classifier.predict(
np.array([X1.ravel(), X2.ravel()]).T
).reshape(X1.shape), alpha=0.75, cmap=ListedColormap(('red', 'blue')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c=ListedColormap(('red', 'blue'))(i), label=j)
plt.title('Classification model (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()"""
# Visualize the Test set results
X_set, y_set = feature_test, label_test
X1, X2 = np.meshgrid(
np.arange(
start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01
),
np.arange(
start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01
)
)
plt.contourf(
X1, X2, classifier.predict(
np.array([X1.ravel(), X2.ravel()]).T
).reshape(X1.shape), alpha=0.75, cmap=ListedColormap(('red', 'blue')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c=ListedColormap(('red', 'blue'))(i), label=j)
plt.title('Classification model (Testing set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
| mit | Python | |
de5cce3d06f20d21fde766bcb85d128d89df7bf7 | add new example for traffic matrix | pupeng/hone,pupeng/hone,bolshoibooze/hone,bolshoibooze/hone,pupeng/hone,bolshoibooze/hone,pupeng/hone,bolshoibooze/hone | Controller/exp_calculateTrafficMatrix.py | Controller/exp_calculateTrafficMatrix.py | # Copyright (c) 2011-2013 Peng Sun. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the COPYRIGHT file.
# HONE application
# calculate traffic matrix
from hone_lib import *
import time
def TrafficMatrixQuery():
return (Select(['srcIP','srcPort','dstIP','dstPort','BytesSentOut']) *
From('HostConnection') *
Groupby(['srcIP','dstIP']) *
Every(5000))
def PrintHelper(x):
print time.time()
print x
print '*******************\n'
def main():
stream = TrafficMatrixQuery() >> Print(PrintHelper)
return stream | bsd-3-clause | Python | |
ed2af274741d2227c9e6c3a95dee3672e3acb4cf | Add the disabled two views test. | probcomp/cgpm,probcomp/cgpm | tests/disabled_test_two_views.py | tests/disabled_test_two_views.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
from cgpm.crosscat.engine import Engine
from cgpm.utils import general as gu
from cgpm.utils import plots as pu
from cgpm.utils import test as tu
D, Zv, Zc = tu.gen_data_table(
n_rows=150,
view_weights=None,
cluster_weights=[[.2,.2,.2,.4], [.3,.2,.5],],
cctypes=['normal']*6,
distargs=[None]*6,
separation=[0.95]*6,
view_partition=[0,0,0,1,1,1],
rng=gu.gen_rng(12))
@pytest.mark.parametrize('lovecat', [True, False])
def test_two_views_row_partition__ci_(lovecat):
engine = Engine(
D.T, cctypes=['normal']*len(D),
Zv={0:0, 1:0, 2:0, 3:1, 4:1, 5:1},
rng=gu.gen_rng(12), num_states=64)
if lovecat:
engine.transition_lovecat(
N=100,
kernels=[
'row_partition_hyperparameters'
'row_partition_assignments',
'column_hyperparameters',
])
else:
engine.transition(
N=100,
kernels=[
'view_alphas',
'rows',
'column_hypers',
])
R1 = engine.row_similarity_pairwise(cols=[0,1,2])
R2 = engine.row_similarity_pairwise(cols=[3,4,5])
# XXX TODO: Find a way to test the actual row similarity matrices with the
# theoretical Zc structure.
@pytest.mark.parametrize('lovecat', [True, False])
def test_two_views_column_partition__ci_(lovecat):
engine = Engine(
D.T, cctypes=['normal']*len(D), rng=gu.gen_rng(12), num_states=64)
if lovecat:
engine.transition_lovecat(N=200)
else:
engine.transition(N=200, multiprocess=1)
P = engine.dependence_probability_pairwise()
R1 = engine.row_similarity_pairwise(cols=[0,1,2])
R2 = engine.row_similarity_pairwise(cols=[3,4,5])
pu.plot_clustermap(P)
pu.plot_clustermap(R1)
pu.plot_clustermap(R2)
P_THEORY = [
[1,1,1,0,0,0],
[1,1,1,0,0,0],
[1,1,1,0,0,0],
[0,0,0,1,1,1],
[0,0,0,1,1,1],
[0,0,0,1,1,1],
]
# XXX TODO: Find a way to test the actual dependence probability matrix
# with the THEORY matrix.
| apache-2.0 | Python | |
3c5b1d439748363e755c257d75d25e40585410ca | Create process_reads.py | iandriver/RNA-sequence-tools,idbedead/RNA-sequence-tools,iandriver/RNA-sequence-tools,iandriver/RNA-sequence-tools,idbedead/RNA-sequence-tools,idbedead/RNA-sequence-tools | process_reads.py | process_reads.py | import os
from subprocess import call
def tophat_and_cuff(path, out='./'):
    """Walk *path* for fastq sample directories and run tophat followed
    by cufflinks on each, writing results under *out*/<sample-name>.

    Fixes vs the stored copy: the final ``cuff_to_call`` string was
    missing a ``+`` after ``annotation_file`` (a SyntaxError), and the
    Python 2 ``print`` statement is now a function call (valid on both
    2 and 3).

    NOTE(review): annotation_file / index_gen_loc are placeholder
    strings that must be replaced with real paths before use.
    """
    annotation_file = 'path to gtf annotation file'
    index_gen_loc = 'path to index genome'
    # Collect every directory whose path mentions 'fastq', with its files.
    pathlist = []
    for root, dirs, files in os.walk(path):
        if 'fastq' in root:
            pathlist.append([root, files])
    for p in pathlist:
        # Sample name is the first '_'-separated token of the directory.
        n = p[0].strip('./').split('_')
        name = n[0]
        result_file = os.path.join(out, name)
        try:
            os.mkdir(result_file)
        except OSError:
            # Output directory already exists -- reuse it.
            pass
        # Build a space-separated list of absolute paths to the .gz reads.
        input_files = ''
        for f in p[1]:
            if 'gz' in f:
                f_split = f.split('_')
                r_name = f_split[3]
                en_split = f_split[4].split('.')
                r_num = en_split[0]
                input_files += os.path.join(os.getcwd(), p[0].strip('./'), f) + ' '
        if len(p[1]) > 2:
            # More than one pair of read files: join the first half with
            # commas, separate the halves with a space (tophat's
            # mate-pair syntax).
            in_split = input_files.split(' ')
            name_build = ''
            for i, mul_f in enumerate(in_split):
                if 'gz' in mul_f:
                    if i == len(p[1]) - 1:
                        name_build += mul_f
                    elif i < (len(p[1]) / 2) - 1 or i > (len(p[1]) / 2) - 1:
                        name_build += mul_f + ','
                    elif i == (len(p[1]) / 2) - 1:
                        name_build += mul_f + ' '
        if len(p[1]) > 2:
            final_files = name_build
        else:
            final_files = input_files
        # If tophat already produced accepted_hits.bam, skip straight to
        # cufflinks; otherwise run the full tophat -> cufflinks pipeline.
        # NOTE(review): for/else reconstructed from the stored copy --
        # the else runs only when no accepted_hits.bam was found.
        for h, k, l in os.walk(result_file):
            if 'accepted_hits.bam' in l:
                cuff_to_call = 'cufflinks -p 4 -G ' + annotation_file + ' -o ' + result_file + ' ' + result_file + '/' + 'accepted_hits.bam'
                call(cuff_to_call, shell=True)
                break
        else:
            to_call = 'tophat -p 4 -r 50 -G ' + annotation_file + ' -o ' + result_file + ' ' + index_gen_loc + ' ' + final_files
            print(to_call)
            call(to_call, shell=True)
            cuff_to_call = 'cufflinks -p 4 -G ' + annotation_file + ' -o ' + result_file + ' ' + result_file + '/' + 'accepted_hits.bam'
            call(cuff_to_call, shell=True)
get_files('./', out = '/Users/idriver/RockLab-files/test')
| mit | Python | |
054941e5251a6ce9477f2ce5cf0c458a5b7faa34 | add tests for zpm.create_project | zerovm/zerovm-cli,zerovm/zerovm-cli,zerovm/zerovm-cli,zerovm/zpm,zerovm/zpm,zerovm/zpm,zerovm/zpm,zerovm/zerovm-cli,zerovm/zpm,zerovm/zerovm-cli,zerovm/zpm,zerovm/zerovm-cli | zpmlib/tests/test_zpm.py | zpmlib/tests/test_zpm.py | # Copyright 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import mock
import os
import pytest
import shutil
import tempfile
from zpmlib import zpm
class TestCreateProject:
"""
Tests for :func:`zpmlib.zpm.create_project`.
"""
def test_path_exists_not_dir(self):
# A RuntimeError should be thrown if the target path exists and is
# not a dir.
_, tf = tempfile.mkstemp()
with mock.patch('zpmlib.zpm._create_zar_json') as czj:
with pytest.raises(RuntimeError):
zpm.create_project(tf)
assert czj.call_count == 0
def test_path_does_not_exist(self):
# If the path does not exist, `create_project` should create the
# directory (including intermediate directories) and bootstrap an empty
# project.
tempdir = tempfile.mkdtemp()
target_dir = os.path.join(tempdir, 'foo', 'bar')
try:
with mock.patch('zpmlib.zpm._create_zar_json') as czj:
zpm.create_project(target_dir)
assert czj.call_count == 1
finally:
shutil.rmtree(tempdir)
def test_target_is_dir(self):
# In this case, the target is a dir and it exists already.
tempdir = tempfile.mkdtemp()
try:
with mock.patch('zpmlib.zpm._create_zar_json') as czj:
zpm.create_project(tempdir)
assert czj.call_count == 1
finally:
shutil.rmtree(tempdir)
class TestCreateZarJSON:
"""
Tests for :func:`zpmlib.zpm._create_zar_json`.
"""
def test_file_already_exists(self):
tempdir = tempfile.mkdtemp()
filepath = os.path.join(tempdir, 'zar.json')
# "touch" the file
open(filepath, 'w').close()
try:
with pytest.raises(RuntimeError):
zpm._create_zar_json(tempdir)
finally:
shutil.rmtree(tempdir)
def test_create_zar_json(self):
# Test the creation of zar.json.
tempdir = tempfile.mkdtemp()
filepath = os.path.join(tempdir, 'zar.json')
try:
assert not os.path.exists(filepath)
zpm._create_zar_json(tempdir)
assert os.path.exists(filepath)
with open(filepath) as fp:
assert zpm.DEFAULT_ZAR_JSON == json.load(fp)
finally:
shutil.rmtree(tempdir)
| apache-2.0 | Python | |
dcbc9ca37406c1d2f25656e11c8a5ad170acd827 | Create Observable feature | PaulieC/sprint1_Council,PaulieC/sprint5-Council,PaulieC/sprint2-Council,PaulieC/rls-c,PaulieC/sprint1_Council_b,PaulieC/sprint1_Council_a,PaulieC/sprint3-Council,geebzter/game-framework | Observable.py | Observable.py | -__author__= 'Dan and Pat'
-#Describes an Observable object.
-#includes methods to notify all observers
-#and to add/delete them
-
-#abstract class for Observable
-class Observable(object):
-
- #list of all observers
- observer_list = []
-
- #notify all observers
- def notify_all(self, msg):
- for obs in self.observer_list:
- obs.notify(msg)
-
- #add observer to the list
- def add_observer(self, observer):
- if observer not in self.observer_list:
- self.observer_list.append(observer)
-
- #delete all observers
- def delete_all_observers(self):
- del self.observer_list[:]
-
| apache-2.0 | Python | |
6fb3e783c25b973f7b49cd3f53274a2c11981192 | Add js integration tests. | chevah/pocket-lint,chevah/pocket-lint | pocketlint/tests/test_javascript.py | pocketlint/tests/test_javascript.py | # Copyright (C) 2011 - Curtis Hovey <sinzui.is at verizon.net>
# This software is licensed under the MIT license (see the file COPYING).
from pocketlint.formatcheck import(
JavascriptChecker,
JS,
)
from pocketlint.tests import CheckerTestCase
from pocketlint.tests.test_text import TestAnyTextMixin
good_js = """\
var a = 1;
"""
invalid_js = """\
a = 1
"""
class TestJavascript(CheckerTestCase):
"""Verify Javascript integration."""
def test_good_js(self):
checker = JavascriptChecker('bogus', good_js, self.reporter)
checker.check()
self.assertEqual([], self.reporter.messages)
def test_invalid_value(self):
if JS is None:
return
checker = JavascriptChecker('bogus', invalid_js, self.reporter)
checker.check()
self.assertEqual(
[('2', "Expected ';' and instead saw '(end)'.")],
self.reporter.messages)
class TestText(CheckerTestCase, TestAnyTextMixin):
"""Verify text integration."""
def create_and_check(self, file_name, text):
"""Used by the TestAnyTextMixin tests."""
checker = JavascriptChecker(file_name, text, self.reporter)
checker.check_text()
| mit | Python | |
7bc5d0eda2c8cbc2b89b71c2510fe9f0d60dce4f | Add forgotten module 'exceptions' | reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations | website/exceptions.py | website/exceptions.py | class ValidationError(Exception):
pass
@property
def message(self):
return self.args[0]
| lgpl-2.1 | Python | |
725ddbb207f2b1f93be4f8c38504cb41c7515009 | add logentries integration to diamond handler | saucelabs/Diamond,ramjothikumar/Diamond,python-diamond/Diamond,joel-airspring/Diamond,CYBERBUGJR/Diamond,TinLe/Diamond,janisz/Diamond-1,signalfx/Diamond,krbaker/Diamond,Slach/Diamond,ramjothikumar/Diamond,thardie/Diamond,rtoma/Diamond,Netuitive/netuitive-diamond,bmhatfield/Diamond,codepython/Diamond,russss/Diamond,Ensighten/Diamond,jumping/Diamond,bmhatfield/Diamond,janisz/Diamond-1,rtoma/Diamond,stuartbfox/Diamond,Slach/Diamond,jriguera/Diamond,bmhatfield/Diamond,skbkontur/Diamond,acquia/Diamond,EzyInsights/Diamond,Ensighten/Diamond,Nihn/Diamond-1,zoidbergwill/Diamond,janisz/Diamond-1,Netuitive/Diamond,gg7/diamond,signalfx/Diamond,socialwareinc/Diamond,tuenti/Diamond,socialwareinc/Diamond,tellapart/Diamond,Nihn/Diamond-1,socialwareinc/Diamond,actmd/Diamond,works-mobile/Diamond,mzupan/Diamond,TinLe/Diamond,anandbhoraskar/Diamond,CYBERBUGJR/Diamond,h00dy/Diamond,russss/Diamond,dcsquared13/Diamond,MichaelDoyle/Diamond,saucelabs/Diamond,eMerzh/Diamond-1,dcsquared13/Diamond,Precis/Diamond,Netuitive/Diamond,jaingaurav/Diamond,zoidbergwill/Diamond,mfriedenhagen/Diamond,tellapart/Diamond,jriguera/Diamond,cannium/Diamond,dcsquared13/Diamond,joel-airspring/Diamond,Ssawa/Diamond,russss/Diamond,Ormod/Diamond,jumping/Diamond,rtoma/Diamond,codepython/Diamond,anandbhoraskar/Diamond,MichaelDoyle/Diamond,mzupan/Diamond,sebbrandt87/Diamond,jumping/Diamond,sebbrandt87/Diamond,krbaker/Diamond,EzyInsights/Diamond,codepython/Diamond,Ssawa/Diamond,thardie/Diamond,mzupan/Diamond,Precis/Diamond,szibis/Diamond,hvnsweeting/Diamond,skbkontur/Diamond,hamelg/Diamond,Precis/Diamond,sebbrandt87/Diamond,TAKEALOT/Diamond,disqus/Diamond,eMerzh/Diamond-1,cannium/Diamond,MichaelDoyle/Diamond,disqus/Diamond,saucelabs/Diamond,timchenxiaoyu/Diamond,tusharmakkar08/Diamond,Netuitive/netuitive-diamond,Netuitive/netuitive-diamond,jaingaurav/Diamond,tusharmakkar08/Diamond,Basis/Diamond,TAKEALOT/Diamond,Ssawa/Diamond,tim
chenxiaoyu/Diamond,codepython/Diamond,works-mobile/Diamond,python-diamond/Diamond,anandbhoraskar/Diamond,szibis/Diamond,skbkontur/Diamond,krbaker/Diamond,cannium/Diamond,signalfx/Diamond,dcsquared13/Diamond,Clever/Diamond,tellapart/Diamond,stuartbfox/Diamond,tusharmakkar08/Diamond,tuenti/Diamond,cannium/Diamond,Clever/Diamond,mfriedenhagen/Diamond,thardie/Diamond,acquia/Diamond,CYBERBUGJR/Diamond,signalfx/Diamond,jaingaurav/Diamond,joel-airspring/Diamond,mzupan/Diamond,acquia/Diamond,timchenxiaoyu/Diamond,gg7/diamond,Nihn/Diamond-1,acquia/Diamond,timchenxiaoyu/Diamond,jumping/Diamond,Slach/Diamond,szibis/Diamond,actmd/Diamond,actmd/Diamond,TAKEALOT/Diamond,Clever/Diamond,russss/Diamond,eMerzh/Diamond-1,Netuitive/Diamond,zoidbergwill/Diamond,tuenti/Diamond,CYBERBUGJR/Diamond,works-mobile/Diamond,MichaelDoyle/Diamond,Ensighten/Diamond,h00dy/Diamond,ramjothikumar/Diamond,hamelg/Diamond,zoidbergwill/Diamond,hvnsweeting/Diamond,szibis/Diamond,Ensighten/Diamond,stuartbfox/Diamond,tellapart/Diamond,eMerzh/Diamond-1,janisz/Diamond-1,gg7/diamond,ramjothikumar/Diamond,stuartbfox/Diamond,joel-airspring/Diamond,TinLe/Diamond,Ormod/Diamond,tuenti/Diamond,jriguera/Diamond,Ssawa/Diamond,works-mobile/Diamond,Basis/Diamond,mfriedenhagen/Diamond,Ormod/Diamond,jriguera/Diamond,Basis/Diamond,Basis/Diamond,TinLe/Diamond,socialwareinc/Diamond,hvnsweeting/Diamond,h00dy/Diamond,EzyInsights/Diamond,python-diamond/Diamond,saucelabs/Diamond,Precis/Diamond,hvnsweeting/Diamond,jaingaurav/Diamond,thardie/Diamond,hamelg/Diamond,gg7/diamond,tusharmakkar08/Diamond,h00dy/Diamond,sebbrandt87/Diamond,krbaker/Diamond,disqus/Diamond,hamelg/Diamond,skbkontur/Diamond,actmd/Diamond,Clever/Diamond,anandbhoraskar/Diamond,bmhatfield/Diamond,rtoma/Diamond,TAKEALOT/Diamond,Netuitive/netuitive-diamond,Netuitive/Diamond,mfriedenhagen/Diamond,Ormod/Diamond,Nihn/Diamond-1,EzyInsights/Diamond,Slach/Diamond | src/diamond/handler/logentries_diamond.py | src/diamond/handler/logentries_diamond.py | # coding=utf-8
"""
[Logentries: Log Management & Analytics Made Easy ](https://logentries.com/).
Send Diamond stats to your Logentries Account where you can monitor and alert
based on data in real time.
#### Dependencies
#### Configuration
Enable this handler
* handers = diamond.handler.logentries.logentriesHandler
"""
from Handler import Handler
import logging
import time
import urllib2
import json
class LogentriesDiamondHandler(Handler):
    """
    Diamond handler that POSTs each metric to the Logentries ingestion
    endpoint as a small JSON event, authenticated by a per-log token.
    """

    def __init__(self, config=None):
        """
        New instance of LogentriesDiamondHandler class.

        Raises if the mandatory ``log_token`` config value is missing.
        """
        Handler.__init__(self, config)
        self.log_token = self.config.get('log_token', None)
        if self.log_token is None:
            # NOTE(review): a bare ``raise Exception`` gives the
            # operator no hint about what is misconfigured.
            raise Exception

    def get_default_config_help(self):
        """
        Help text
        """
        config = super(LogentriesDiamondHandler,
                       self).get_default_config_help()
        config.update({
            'log_token': None,
        })
        return config

    def get_default_config(self):
        """
        Return default config for the handler
        """
        config = super(LogentriesDiamondHandler, self).get_default_config()
        config.update({
            'log_token': None,
        })
        return config

    def process(self, metric):
        """
        Process a metric by sending it to the Logentries API.

        (The stored docstring said "datadog api" -- a copy/paste from
        the Datadog handler.)
        """
        # NOTE(review): sleeps one second here *and* again in _send,
        # throttling every metric by ~2s -- confirm this is intentional.
        time.sleep(1)
        self._send(metric)

    def _send(self, metric):
        """
        Serialise one metric and POST it to the Logentries endpoint.

        (The stored docstring referenced the Datadog API.)
        """
        logging.debug("Sending logs.")
        # Metrics stringify as "path value timestamp"; this assumes
        # exactly three whitespace-separated fields -- TODO confirm.
        topic, value, timestamp = str(metric).split()
        msg = json.dumps({"event": {topic: value}})
        req = urllib2.Request("https://js.logentries.com/v1/logs/"
                              + self.log_token, msg)
        urllib2.urlopen(req)
        time.sleep(1)
| mit | Python | |
118f412777a2c65763cfa4bd0d45ca197f9865ff | Create andropy.py | ccjj/andropy | andropy.py | andropy.py | import argparser
import sys
import ConfigClass
import createfiles
import OnlineNotifierClass
import machineClass
import apkparse
import subprocess
import time
import thread
import threading
import adbcommands
def create_and_config(a, b, c ,d, e, isrunning):
config = ConfigClass.Config(a, b, c, d, e)
createfiles.create_avd_files(config.name, config.avddir, config.configdir, config.newavddir, config.outputpath, config.avdini, config.sdcard, isrunning)
f, g = apkparse.get_package_infos(config.samplepath)
config.set_apk_infos(f, g)
return config
def main(argv):
#inserts in the config-constructor args.samplepath, args.interval, args.sdcard, args.outputpath, args.customconfig
a, b, c, d, e = argparser.get_args()
adbcommands.kill_blocking_processes()
if adbcommands.check_online_devices() == False: #no other device running
config = create_and_config(a, b, c ,d, e, False)
machine = OnlineNotifierClass.OnlineNotifier(config.name, config.filedir)
machine.start_machine()
else: #some other device is running
config = create_and_config(a, b, c ,d, e, True)
start_online_timer()
android = machineClass.Device(config)
android.start()
def start_online_timer():
s = threading.Thread(target=timer_count).start()
def timer_count():
ti = 0
while True:
print "AVD running since " + str(ti/60) + " minutes"
ti += 60
time.sleep(60.0)
if __name__ == '__main__':
main(sys.argv[1:])
| mit | Python | |
f86fe6b9d1014c8115d776f7e03353e273142607 | Create SubwayFeed.py | dunnette/SubwayTimes,dunnette/st-gae | SubwayFeed.py | SubwayFeed.py | from google.transit import gtfs_realtime_pb2
import urllib
import datetime
# http://datamine.mta.info/sites/all/files/pdfs/GTFS-Realtime-NYC-Subway%20version%201%20dated%207%20Sep.pdf
# http://datamine.mta.info/list-of-feeds
class SubwayFeed:
    """Fetch one MTA GTFS-realtime subway feed and index its arrival
    predictions by route, stop and direction.

    Python 2 code (``urllib.urlencode`` / print statement).  See the
    GTFS-realtime NYC subway spec linked in the module comments.
    """

    # MTA realtime endpoint, queried with ?key=...&feed_id=...
    endpoint_url = 'http://datamine.mta.info/mta_esi.php'

    def __init__(self, key_str, feed_id_int):
        # Download the protobuf feed, then build the derived indexes.
        self.refresh_feed(key_str, feed_id_int)
        self.process_feed()
        self.process_stations()

    def refresh_feed(self, key_str, feed_id_int):
        # Fetch and parse the raw GTFS-realtime FeedMessage.
        payload = urllib.urlencode({'key': key_str, 'feed_id': feed_id_int})
        response = urllib.urlopen('{}?{}'.format(self.endpoint_url, payload))
        self.feed = gtfs_realtime_pb2.FeedMessage()
        self.feed.ParseFromString(response.read())

    def process_feed(self):
        # Partition feed entities into trip updates and vehicle positions.
        self.trip_updates = [tu for tu in self.feed.entity if tu.HasField('trip_update')]
        self.vehicles = [tu for tu in self.feed.entity if tu.HasField('vehicle')]

    def process_stations(self):
        # Build a nested index: route_id -> stop_id -> direction ->
        # [arrival datetimes].
        station_arrivals = dict()
        for entity in self.trip_updates:
            for stu in entity.trip_update.stop_time_update:
                route_id = entity.trip_update.trip.route_id
                # The last character of stop_id encodes the direction
                # (presumably 'N'/'S' -- confirm against the MTA spec).
                direction_id = stu.stop_id[-1]
                stop_id = stu.stop_id[:-1]
                # Prefer the departure time when present (nonzero), else
                # fall back to the arrival time.
                arrival_time = datetime.datetime.fromtimestamp(stu.departure.time) if stu.departure.time else datetime.datetime.fromtimestamp(stu.arrival.time)
                # Fetch-or-create each level of the nested dict, append,
                # and write the updated sub-dicts back.
                temp_stops = station_arrivals.get(route_id, dict())
                temp_directions = temp_stops.get(stop_id, dict())
                temp_arrivals = temp_directions.get(direction_id, list())
                temp_arrivals.append(arrival_time)
                temp_directions[direction_id] = temp_arrivals
                temp_stops[stop_id] = temp_directions
                station_arrivals[route_id] = temp_stops
        self.station_arrivals = station_arrivals

    def print_time_to_station(self, route_id, stop_id, direction_id):
        # Print minutes-until-arrival for every predicted arrival at the
        # given route/stop/direction (KeyError if none are indexed).
        for arrival_time in self.station_arrivals[route_id][stop_id][direction_id]:
            print '{0:.1f} minutes to arrival'.format((arrival_time-datetime.datetime.now()).total_seconds()/60)
| apache-2.0 | Python | |
973405a852779f443c8c967a5e23ac40f5a0e4a2 | Add 217-contains-duplicate.py | mvj3/leetcode | 217-contains-duplicate.py | 217-contains-duplicate.py | """
Question:
Contains Duplicate
Given an array of integers, find if the array contains any duplicates. Your function should return true if any value appears at least twice in the array, and it should return false if every element is distinct.
Performance:
1. Total Accepted: 39720 Total Submissions: 105895 Difficulty: Easy
2. Your runtime beats 68.81% of python submissions.
"""
class Solution(object):
    def containsDuplicate(self, nums):
        """Return True iff some value appears more than once in nums.

        :type nums: List[int]
        :rtype: bool
        """
        seen = set()
        for value in nums:
            if value in seen:
                return True
            seen.add(value)
        return False
assert Solution().containsDuplicate([]) is False
assert Solution().containsDuplicate([1]) is False
assert Solution().containsDuplicate([1, 1]) is True
assert Solution().containsDuplicate([1, 1, 2]) is True
| mit | Python | |
426f90ba500aa5d213a8b130e1841806e2dae388 | Implement roulette wheel selection algorithm | nemanja-m/gaps,nemanja-m/genetic-jigsaw-solver | solver/operators.py | solver/operators.py | import random
import bisect
def select(population):
    """Roulette wheel selection.

    Each individual is selected to reproduce, with probability directly
    proportional to its fitness score.

    :params population: Collection of the individuals for selecting.

    Usage::

        >>> from operators import select
        >>> population = [ 'create population' ]
        >>> parent = select(population)
    """
    fitnesses = [individual.fitness for individual in population]
    # Build the cumulative-fitness ladder in one O(n) pass; the previous
    # version recomputed sum(fitnesses[:i+1]) for every i, which was O(n^2).
    probability_intervals = []
    cumulative = 0
    for fitness in fitnesses:
        cumulative += fitness
        probability_intervals.append(cumulative)
    # Spin the wheel: a uniform draw in [0, total] lands in exactly one slot.
    random_select = random.uniform(0, probability_intervals[-1])
    selected_index = bisect.bisect_left(probability_intervals, random_select)
    return population[selected_index]
| mit | Python | |
435f55e8e25f51b9622ece010fa132383bfdfd31 | Add Support for /r/wasletztepreis | nsiregar/reddit2telegram,nsiregar/reddit2telegram,Fillll/reddit2telegram,Fillll/reddit2telegram | channels/wasletztepreis/app.py | channels/wasletztepreis/app.py | #encoding:utf-8
from utils import get_url, weighted_random_subreddit
from utils import SupplyResult
# Subreddit that will be a source of content
subreddit = weighted_random_subreddit({
'wasletztepreis': 1.0,
# If we want get content from several subreddits
# please provide here 'subreddit': probability
# 'any_other_subreddit': 0.02
})
# Telegram channel with @reddit2telegram_bot as an admin
t_channel = '@r_wasletztepreis'
def send_post(submission, r2t):
    """Forward one reddit submission to the Telegram channel via ``r2t``.

    Only media posts (albums, gifs, images) are forwarded; text posts and
    other link types are skipped.  Returns a ``SupplyResult`` member (or the
    result of ``r2t.send_gif_img``) telling the caller how to proceed.
    """
    what, url, ext = get_url(submission)
    # If this func returns:
    # False – it means that we will not send
    # this submission, let's move to the next.
    # True – everything is ok, we send the submission
    # None – we do not want to send anything this time,
    # let's just sleep.
    # Get all data from submission that we need
    title = submission.title
    link = submission.shortlink
    text = '{}\n{}'.format(title, link)
    if what == 'text':
        # If it is text submission, it is not really funny.
        # return r2t.send_text(submission.selftext)
        return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
    elif what == 'other':
        # Also we are not interesting in any other content.
        return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
    elif what == 'album':
        # It is ok if it is an album.
        base_url = submission.url
        text = '{}\n{}\n\n{}'.format(title, base_url, link)
        r2t.send_text(text)
        r2t.send_album(url)
        return SupplyResult.SUCCESSFULLY
    elif what in ('gif', 'img'):
        # Also it is ok if it is gif or any kind of image.
        # Check if content has already appeared in
        # out telegram channel.
        if r2t.dup_check_and_mark(url) is True:
            return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
        return r2t.send_gif_img(what, url, ext, text)
    else:
        return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
| mit | Python | |
bd9641ae4f96fd15e3fe02b969d157e69962308c | Create solution2.py | lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges | leetcode/easy/remove_duplicates_from_sorted_array/py/solution2.py | leetcode/easy/remove_duplicates_from_sorted_array/py/solution2.py | #
# The lazybones' approach. In order to eliminate duplicates, conver the array
# to a set. Sort the values in the set and assign the sorted array back to "nums".
# The [:] subscript operator is important in emulating assignment by reference.
# In reality, given no arguments as it is shown in the code, it simply copies
# the values and alters its length in accordance with the number of values copied
# from the source.
#
# Returning the length of the array as required by the problem statement.
#
# O(n) time with the possibility of degrading to O(n^2) due to possible collisions
# that may arise in the set. O(n) space to hold the set and the temporary buffer
# for sorted values.
#
class Solution(object):
    def removeDuplicates(self, nums):
        """Replace ``nums`` in place with its sorted unique values; return the count.

        :type nums: List[int]
        :rtype: int
        """
        unique_sorted = sorted(set(nums))
        nums[:] = unique_sorted
        return len(unique_sorted)
| mit | Python | |
f09b51df190bcf0d22349de7a09560cb3069f402 | Add merge migration | mfraezz/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,mattclark/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,felliott/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,adlius/osf.io,baylee-d/osf.io,felliott/osf.io,icereval/osf.io,mfraezz/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,erinspace/osf.io,sloria/osf.io,cslzchen/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,felliott/osf.io,sloria/osf.io,caseyrollins/osf.io,felliott/osf.io,mfraezz/osf.io,aaxelb/osf.io,baylee-d/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,erinspace/osf.io,mattclark/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,brianjgeiger/osf.io,erinspace/osf.io,icereval/osf.io,Johnetordoff/osf.io,adlius/osf.io,mfraezz/osf.io,caseyrollins/osf.io,sloria/osf.io,brianjgeiger/osf.io,adlius/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,aaxelb/osf.io | osf/migrations/0107_merge_20180604_1232.py | osf/migrations/0107_merge_20180604_1232.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-06-04 17:32
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated merge migration: it carries no schema operations and
    # exists only to join the 0105 and 0106 branches of the 'osf' migration
    # graph back into a single leaf.
    dependencies = [
        ('osf', '0105_merge_20180525_1529'),
        ('osf', '0106_set_preprint_identifier_category'),
    ]
    operations = [
    ]
| apache-2.0 | Python | |
31fe72931d29d81088f23c7609aa612d4735814b | Add new sample files for Python3-py grammar | antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4,antlr/grammars-v4 | python3-py/examples/new_features.py | python3-py/examples/new_features.py | def ls(self, msg, match):
"""
A sample function to test the parsing of ** resolution
"""
langs = list(map(lambda x: x.lower(), match.group(1).split()))
bears = client.list.bears.get().json()
bears = [{**{'name': bear}, **content}
for bear, content in bears.items()]
# Asyncio example from https://stackabuse.com/python-async-await-tutorial/
import asyncio
async def ping_server(ip):
pass
@asyncio.coroutine
def load_file(path):
pass
| mit | Python | |
95769dcf378dd81d0ccf14cf1f1f380efcb602cd | Create PedidoDeletar.py | AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb | backend/Models/Matricula/PedidoDeletar.py | backend/Models/Matricula/PedidoDeletar.py | from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP
class PedidoDeletar(Pedido):
    """Request object for a delete operation; requires an 'id' in the body."""
    def __init__(self, variaveis_do_ambiente):
        super(PedidoDeletar, self).__init__(variaveis_do_ambiente)
        try:
            self.id = self.corpo['id']
        except (KeyError, TypeError):
            # Body missing the 'id' field (or not a mapping at all): answer
            # 400 Bad Request.  Narrowed from a bare ``except:`` which also
            # swallowed SystemExit/KeyboardInterrupt.
            raise ErroNoHTTP(400)
    def getId(self):
        """Return the id extracted from the request body."""
        return self.id
| mit | Python | |
2ed7b7b6a1bda9028b93120d410d3c4de850d6fe | Support unicode. | brockwhittaker/zulip,showell/zulip,verma-varsha/zulip,tommyip/zulip,mahim97/zulip,jackrzhang/zulip,tommyip/zulip,rishig/zulip,punchagan/zulip,amanharitsh123/zulip,rishig/zulip,zulip/zulip,rishig/zulip,shubhamdhama/zulip,verma-varsha/zulip,zulip/zulip,verma-varsha/zulip,rht/zulip,andersk/zulip,vabs22/zulip,showell/zulip,amanharitsh123/zulip,amanharitsh123/zulip,brockwhittaker/zulip,zulip/zulip,synicalsyntax/zulip,synicalsyntax/zulip,rishig/zulip,rishig/zulip,kou/zulip,vaidap/zulip,eeshangarg/zulip,eeshangarg/zulip,vabs22/zulip,timabbott/zulip,vaidap/zulip,vabs22/zulip,hackerkid/zulip,brockwhittaker/zulip,hackerkid/zulip,kou/zulip,eeshangarg/zulip,mahim97/zulip,dhcrzf/zulip,showell/zulip,zulip/zulip,brainwane/zulip,punchagan/zulip,kou/zulip,Galexrt/zulip,andersk/zulip,shubhamdhama/zulip,dhcrzf/zulip,showell/zulip,rht/zulip,andersk/zulip,tommyip/zulip,jackrzhang/zulip,mahim97/zulip,jackrzhang/zulip,vaidap/zulip,showell/zulip,rht/zulip,vabs22/zulip,shubhamdhama/zulip,mahim97/zulip,hackerkid/zulip,Galexrt/zulip,punchagan/zulip,jackrzhang/zulip,hackerkid/zulip,tommyip/zulip,showell/zulip,andersk/zulip,shubhamdhama/zulip,timabbott/zulip,brockwhittaker/zulip,dhcrzf/zulip,brainwane/zulip,hackerkid/zulip,kou/zulip,Galexrt/zulip,amanharitsh123/zulip,tommyip/zulip,synicalsyntax/zulip,dhcrzf/zulip,timabbott/zulip,hackerkid/zulip,punchagan/zulip,rht/zulip,zulip/zulip,brainwane/zulip,dhcrzf/zulip,shubhamdhama/zulip,shubhamdhama/zulip,Galexrt/zulip,jackrzhang/zulip,dhcrzf/zulip,hackerkid/zulip,shubhamdhama/zulip,brockwhittaker/zulip,brainwane/zulip,punchagan/zulip,andersk/zulip,vabs22/zulip,timabbott/zulip,brainwane/zulip,jackrzhang/zulip,synicalsyntax/zulip,timabbott/zulip,amanharitsh123/zulip,rht/zulip,rishig/zulip,kou/zulip,kou/zulip,brainwane/zulip,eeshangarg/zulip,eeshangarg/zulip,verma-varsha/zulip,andersk/zulip,showell/zulip,vaidap/zulip,Galexrt/zulip,mahim97/zulip,brainwane/zulip,zulip/zulip,vaidap/zulip,punchaga
n/zulip,jackrzhang/zulip,synicalsyntax/zulip,rishig/zulip,dhcrzf/zulip,Galexrt/zulip,synicalsyntax/zulip,rht/zulip,zulip/zulip,Galexrt/zulip,amanharitsh123/zulip,tommyip/zulip,verma-varsha/zulip,vaidap/zulip,vabs22/zulip,tommyip/zulip,brockwhittaker/zulip,andersk/zulip,verma-varsha/zulip,synicalsyntax/zulip,kou/zulip,eeshangarg/zulip,timabbott/zulip,eeshangarg/zulip,rht/zulip,timabbott/zulip,mahim97/zulip,punchagan/zulip | zerver/webhooks/slack/view.py | zerver/webhooks/slack/view.py | from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message, create_stream_if_needed
from zerver.lib.response import json_success, json_error
from zerver.lib.validator import check_string, check_int
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
ZULIP_MESSAGE_TEMPLATE = u"**{message_sender}**: `{text}`"
VALID_OPTIONS = {'SHOULD_NOT_BE_MAPPED': '0', 'SHOULD_BE_MAPPED': '1'}
@api_key_only_webhook_view('Slack')
@has_request_variables
def api_slack_webhook(request, user_profile,
                      user_name=REQ(),
                      text=REQ(),
                      channel_name=REQ(),
                      stream=REQ(default='slack'),
                      channels_map_to_topics=REQ(default='1')):
    # type: (HttpRequest, UserProfile, str, str, str, str, str) -> HttpResponse
    """Repost a message received from Slack's outgoing webhook into Zulip.

    ``channels_map_to_topics`` selects the mapping: '1' posts to the fixed
    ``stream`` with one topic per Slack channel; '0' posts to a Zulip stream
    named after the Slack channel under a fixed topic.
    """
    if channels_map_to_topics not in list(VALID_OPTIONS.values()):
        return json_error(_('Error: channels_map_to_topics parameter other than 0 or 1'))
    if channels_map_to_topics == VALID_OPTIONS['SHOULD_BE_MAPPED']:
        # One topic per Slack channel, all in the configured stream.
        subject = "channel: {}".format(channel_name)
    else:
        # One stream per Slack channel, all under a fixed topic.
        stream = channel_name
        subject = _("Message from Slack")
    content = ZULIP_MESSAGE_TEMPLATE.format(message_sender=user_name, text=text)
    check_send_message(user_profile, request.client, "stream", [stream], subject, content)
    return json_success()
| from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message, create_stream_if_needed
from zerver.lib.response import json_success, json_error
from zerver.lib.validator import check_string, check_int
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
ZULIP_MESSAGE_TEMPLATE = "**{message_sender}**: `{text}`"
VALID_OPTIONS = {'SHOULD_NOT_BE_MAPPED': '0', 'SHOULD_BE_MAPPED': '1'}
@api_key_only_webhook_view('Slack')
@has_request_variables
def api_slack_webhook(request, user_profile,
user_name=REQ(),
text=REQ(),
channel_name=REQ(),
stream=REQ(default='slack'),
channels_map_to_topics=REQ(default='1')):
# type: (HttpRequest, UserProfile, str, str, str, str, str) -> HttpResponse
if channels_map_to_topics not in list(VALID_OPTIONS.values()):
return json_error(_('Error: channels_map_to_topics parameter other than 0 or 1'))
if channels_map_to_topics == VALID_OPTIONS['SHOULD_BE_MAPPED']:
subject = "channel: {}".format(channel_name)
else:
stream = channel_name
subject = _("Message from Slack")
content = ZULIP_MESSAGE_TEMPLATE.format(message_sender=user_name, text=text)
check_send_message(user_profile, request.client, "stream", [stream], subject, content)
return json_success()
| apache-2.0 | Python |
2244885111fe505a757abf9bb528b8fd8ad346f6 | add rosenbrock task | aaronkl/RoBO,automl/RoBO,aaronkl/RoBO,aaronkl/RoBO,automl/RoBO,numairmansur/RoBO,numairmansur/RoBO | robo/task/synthetic_functions/rosenbrock.py | robo/task/synthetic_functions/rosenbrock.py | import numpy as np
from robo.task.base_task import BaseTask
class Rosenbrock(BaseTask):
    """The d-dimensional Rosenbrock benchmark function.

    Search domain is [-5, 10]^d; the global optimum is f(1, ..., 1) = 0.
    """
    def __init__(self, d=3):
        # d: dimensionality of the search space.
        self.d = d
        X_lower = np.ones([d]) * -5
        X_upper = np.ones([d]) * 10
        opt = np.ones([1, d])
        fopt = 0.0
        super(Rosenbrock, self).__init__(X_lower, X_upper, opt=opt, fopt=fopt)
    def objective_function(self, x):
        """Evaluate the function on a batch ``x`` of shape (n_points, d).

        Returns an array of shape (1, n_points).  Vectorised over both
        samples and dimensions, replacing the former per-dimension Python
        loop; this also fixes the d == 1 edge case, where the loop never ran
        and a bare scalar 0 was returned instead of one value per point.
        """
        head = x[:, :self.d - 1]
        tail = x[:, 1:self.d]
        f = np.sum(100 * (tail - head ** 2) ** 2 + (head - 1) ** 2, axis=1)
        return np.array([f])
def objective_function_test(self, x):
return self.objective_function(x) | bsd-3-clause | Python | |
aa8a0d3ce614a02233d31e97ce91eedc03727394 | add determine_endpoint_type helper | globus/globus-cli,globus/globus-cli | src/globus_cli/helpers/endpoint_type.py | src/globus_cli/helpers/endpoint_type.py | from enum import Enum, auto
class EndpointType(Enum):
    """Classification of a Globus Transfer endpoint/collection document."""
    # endpoint / collection types
    GCP = auto()
    GCSV5_ENDPOINT = auto()
    GUEST_COLLECTION = auto()
    MAPPED_COLLECTION = auto()
    SHARE = auto()
    NON_GCSV5_ENDPOINT = auto()  # most likely GCSv4, but not necessarily


def determine_endpoint_type(ep_doc: dict) -> EndpointType:
    """
    Given an endpoint document from transfer, determine what type of
    endpoint or collection it is for
    """
    if ep_doc["is_globus_connect"] is True:
        return EndpointType.GCP
    if ep_doc["non_functional"] is True:
        return EndpointType.GCSV5_ENDPOINT
    shared = ep_doc["host_endpoint_id"] is not None
    if ep_doc["gcs_version"]:
        # Only the major version matters for the v5 check.  Taking the first
        # dotted component (instead of unpacking into exactly three parts)
        # avoids raising on two- or four-component version strings, and drops
        # the unused ``minor``/``patch`` locals.
        major = ep_doc["gcs_version"].split(".")[0]
        gcsv5 = major == "5"
    else:
        gcsv5 = False
    if gcsv5:
        if shared:
            return EndpointType.GUEST_COLLECTION
        else:
            return EndpointType.MAPPED_COLLECTION
    elif shared:
        return EndpointType.SHARE
    return EndpointType.NON_GCSV5_ENDPOINT
| apache-2.0 | Python | |
c9266bd5cb11fd8f46b6f237f30d698048f88460 | Write test for dump_graph | okuta/chainer,jnishi/chainer,ktnyt/chainer,kashif/chainer,wkentaro/chainer,tkerola/chainer,delta2323/chainer,jnishi/chainer,niboshi/chainer,chainer/chainer,jnishi/chainer,hvy/chainer,wkentaro/chainer,okuta/chainer,rezoo/chainer,ktnyt/chainer,chainer/chainer,keisuke-umezawa/chainer,pfnet/chainer,keisuke-umezawa/chainer,wkentaro/chainer,wkentaro/chainer,hvy/chainer,keisuke-umezawa/chainer,chainer/chainer,hvy/chainer,ronekko/chainer,aonotas/chainer,ktnyt/chainer,niboshi/chainer,hvy/chainer,anaruse/chainer,chainer/chainer,niboshi/chainer,kiyukuta/chainer,niboshi/chainer,okuta/chainer,jnishi/chainer,ktnyt/chainer,ysekky/chainer,keisuke-umezawa/chainer,okuta/chainer | tests/chainer_tests/training_tests/extensions_tests/test_computational_graph.py | tests/chainer_tests/training_tests/extensions_tests/test_computational_graph.py | import tempfile
import os
import shutil
import unittest
import numpy
import chainer
from chainer import configuration
from chainer import functions
from chainer import links
from chainer import testing
from chainer import training
from chainer.training.extensions import computational_graph as c
class Dataset(chainer.dataset.DatasetMixin):
    """Minimal dataset: example i is (float32 array [values[i]], int32 i % 2)."""
    def __init__(self, values):
        self.values = values
    def __len__(self):
        return len(self.values)
    def get_example(self, i):
        # The label alternates 0/1 so the classifier loss is well defined.
        return numpy.array([self.values[i]], numpy.float32), numpy.int32(i % 2)
class Model(chainer.Link):
    """Toy two-layer link that records the ``keep_graph_on_report`` flag.

    The first forward pass uses Sigmoid and every later pass uses Tanh, so
    the test below can verify that only the *first* iteration's graph is
    dumped (the dot file mentions Sigmoid but not Tanh).
    """
    def __init__(self):
        super(Model, self).__init__()
        # One flag value is appended per forward call (inspected by the test).
        self.flag_history = []
        self.l1 = links.Linear(2)
        self.l2 = links.Linear(2)
        # Number of forward passes performed so far.
        self.i = 0
    def __call__(self, x):
        self.flag_history.append(
            configuration.config.keep_graph_on_report)
        h = self.l1(x)
        # Sigmoid only on the very first pass; Tanh afterwards.
        if self.i == 0:
            h = functions.Sigmoid()(h)
        else:
            h = functions.Tanh()(h)
        h = self.l2(h)
        self.i += 1
        return h
class TestGraphBuilderKeepGraphOnReport(unittest.TestCase):
    """End-to-end check of the ``dump_graph`` trainer extension.

    Verifies that (a) ``keep_graph_on_report`` is forced on only for the very
    first iteration, and (b) the dumped dot file describes only the first
    iteration's graph.
    """
    def _run_test(self, tempdir, initial_flag):
        # Train a tiny classifier for a few epochs with dump_graph attached.
        n_data = 4
        n_epochs = 3
        outdir = os.path.join(tempdir, 'testresult')
        # Prepare
        model = Model()
        classifier = links.Classifier(model)
        optimizer = chainer.optimizers.Adam()
        optimizer.setup(classifier)
        dataset = Dataset([i for i in range(n_data)])
        iterator = chainer.iterators.SerialIterator(dataset, 1, shuffle=False)
        updater = training.StandardUpdater(iterator, optimizer)
        trainer = training.Trainer(updater, (n_epochs, 'epoch'), out=outdir)
        extension = c.dump_graph('main/loss', out_name='test.dot')
        trainer.extend(extension)
        # Run
        with chainer.using_config('keep_graph_on_report', initial_flag):
            trainer.run()
        # Check flag history
        # dump_graph must force the flag on for iteration 1 only; afterwards
        # the user-supplied value has to be restored.
        self.assertEqual(model.flag_history,
                         [True] + [initial_flag] * (n_data*n_epochs-1))
        # Check the dumped graph
        graph_path = os.path.join(outdir, 'test.dot')
        with open(graph_path) as f:
            graph_dot = f.read()
        # Check that only the first iteration is dumped
        # (the model uses Sigmoid on the first pass and Tanh afterwards).
        self.assertTrue('Sigmoid' in graph_dot)
        self.assertTrue('Tanh' not in graph_dot)
    def _check(self, initial_flag):
        # Run in a throwaway directory and always clean it up.
        tempdir = tempfile.mkdtemp()
        try:
            self._run_test(tempdir, initial_flag)
        finally:
            shutil.rmtree(tempdir)
    def test_keep_graph_on_report_flag(self):
        # The behaviour must hold whatever the user's initial flag value is.
        self._check(True)
        self._check(False)
testing.run_module(__name__, __file__)
| mit | Python | |
2b74b894333557ab0aa9054c3d8af6b321d8e85e | Add background extractor | zephinzer/cs4243 | background_extractor.py | background_extractor.py | import cv2
import cv2.cv as cv
import numpy as np
import os
def bg_extract(video):
    """Estimate the static background of a video as the mean of all frames.

    :param video: path to the input video file
    :return: uint8 image holding the per-pixel mean over the whole clip
    """
    cap = cv2.VideoCapture(video)
    fcount = int(cap.get(cv.CV_CAP_PROP_FRAME_COUNT))
    _, img = cap.read()
    avgImg = np.float32(img)
    for fr in range(1, fcount):
        _, img = cap.read()
        img = np.float32(img)
        # Running mean after fr+1 frames: old mean weighted fr/(fr+1), new
        # frame weighted 1/(fr+1).  The previous code divided the new frame
        # by fr, so the weights did not sum to 1 and the "average" was badly
        # skewed toward recent frames.
        avgImg = np.add(np.multiply(avgImg, fr/(fr+1.0)), np.divide(img, fr + 1.0))
    normImg = cv2.convertScaleAbs(avgImg) # convert into uint8 image
    cap.release()
    return normImg
def main():
    # Extract the background from ./input.avi and save it as ./bg.png.
    bg = bg_extract(os.getcwd() + "/input.avi")
    cv2.imwrite(os.getcwd() + "/bg.png", bg)
if __name__ == "__main__":
    main()
| mit | Python | |
8149048a78ebbcdb46719eab5209e51bdeb3f86d | add new package (#25747) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-colorclass/package.py | var/spack/repos/builtin/packages/py-colorclass/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyColorclass(PythonPackage):
    """Colorful worry-free console applications for Linux, Mac OS X, and Windows."""
    homepage = "https://github.com/Robpol86/colorclass"
    pypi = "colorclass/colorclass-2.2.0.tar.gz"
    version('2.2.0', sha256='b05c2a348dfc1aff2d502527d78a5b7b7e2f85da94a96c5081210d8e9ee8e18b')
    # Minimum interpreter declared here as 3.3 -- TODO confirm against upstream.
    depends_on('python@3.3.0:')
    depends_on('py-setuptools', type='build')
    # docopt is needed both to build and at runtime.
    depends_on('py-docopt', type=('build', 'run'))
| lgpl-2.1 | Python | |
9264d92444049b3aa2d7acbca27c2082aecac63c | Add a command line interface for bna | jleclanche/python-bna,Adys/python-bna | bin/cli.py | bin/cli.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import bna
import keyring
import sys
from binascii import hexlify, unhexlify
SERVICE = "trogdor"
def ERROR(txt):
	"""Print an error message to stderr and terminate with exit status 1."""
	sys.stderr.write("Error: %s\n" % (txt))
	exit(1)
def normalizeSerial(serial):
	"""Canonicalize an authenticator serial: lowercase, dashes removed, trimmed."""
	lowered = serial.lower()
	undashed = lowered.replace("-", "")
	return undashed.strip()

def prettifySerial(serial):
	"""Format a serial in its displayed 'XX-NNNN-NNNN-NNNN' form."""
	serial = normalizeSerial(serial)
	if len(serial) != 14:
		raise ValueError("serial %r should be 14 characters long" % (serial))
	groups = []
	for start in (2, 6, 10):
		chunk = serial[start:start + 4]
		if not chunk.isdigit():
			raise ValueError("bad serial %r" % (serial))
		groups.append("%04i" % int(chunk))
	return "%s-%s-%s-%s" % (serial[0:2].upper(), groups[0], groups[1], groups[2])
def getDefaultSerial():
	# Should return the user's stored default serial (None when unset);
	# currently a hardcoded placeholder.
	return "us100604693849" # XXX
def setDefaultSerial(serial):
	# Persisting the default serial is not implemented yet.
	pass # TODO
def runAuthenticatorQuery(args):
	"""Request a brand-new authenticator and store its secret in the keyring.

	The secret is saved hex-encoded under the normalized serial, and the
	server-formatted serial is printed for the user.  Python 2 code
	(``except X, e`` syntax, ``print`` statement).
	"""
	try:
		authenticator = bna.requestNewSerial(args.region)
	except bna.HTTPError, e:
		ERROR("Could not connect: %s" % (e))
	serial = normalizeSerial(authenticator["serial"])
	secret = hexlify(authenticator["secret"])
	keyring.set_password(SERVICE, serial, secret)
	# We set the authenticator as default if we don't have one set already
	# Otherwise, we check for --set-default
	if args.setdefault or not getDefaultSerial():
		setDefaultSerial(serial)
	print authenticator["serial"]
def runLive(secret):
	"""Continuously print the current token, refreshing as each one expires."""
	from time import sleep
	print "Ctrl-C to exit"
	while 1:
		token, timeRemaining = bna.getToken(secret=unhexlify(secret))
		# Trailing comma keeps the cursor on the same line (Python 2 print),
		# and "\r" rewrites it in place.
		print "\r%08i" % (token),
		sys.stdout.flush()
		sleep(timeRemaining)
def main():
	"""CLI entry point: request, configure, or display authenticator tokens."""
	import signal
	from optparse import OptionParser
	# Restore default SIGINT handling so Ctrl-C exits cleanly.
	signal.signal(signal.SIGINT, signal.SIG_DFL)
	options = OptionParser()
	options.add_option("-u", "--update", action="store_true", dest="update", help="update token every time")
	options.add_option("-n", "--new", action="store_true", dest="new", help="request a new authenticator")
	options.add_option("-r", "--region", type="string", dest="region", default="US", help="desired region for new authenticators")
	options.add_option("--set-default", action="store_true", dest="setdefault", help="set authenticator as default (also works when requesting a new authenticator)")
	args, serial = options.parse_args(sys.argv[1:])
	# Are we requesting a new authenticator?
	if args.new:
		runAuthenticatorQuery(args)
		exit()
	# If not, we need a serial
	if not serial:
		serial = getDefaultSerial()
		if serial is None:
			ERROR("You must provide an authenticator serial")
	else:
		serial = serial[0]
	serial = normalizeSerial(serial)
	# Are we setting a serial as default?
	if args.setdefault:
		setDefaultSerial(serial)
	# Get the secret from the keyring
	secret = keyring.get_password(SERVICE, serial)
	if secret is None: # No such serial
		ERROR("%r: No such serial" % (serial))
	# And print the token
	if args.update:
		runLive(secret)
	else:
		token, timeRemaining = bna.getToken(secret=unhexlify(secret))
		print token
if __name__ == "__main__":
main()
| mit | Python | |
3aab9ef96c7b8b0c4c48be4cca2d8f8813099110 | Make zsh/zpython also call .shutdown correctly | dragon788/powerline,russellb/powerline,keelerm84/powerline,DoctorJellyface/powerline,DoctorJellyface/powerline,areteix/powerline,junix/powerline,bezhermoso/powerline,kenrachynski/powerline,russellb/powerline,blindFS/powerline,EricSB/powerline,wfscheper/powerline,blindFS/powerline,lukw00/powerline,xfumihiro/powerline,bezhermoso/powerline,QuLogic/powerline,xxxhycl2010/powerline,kenrachynski/powerline,bartvm/powerline,QuLogic/powerline,russellb/powerline,xfumihiro/powerline,firebitsbr/powerline,lukw00/powerline,dragon788/powerline,wfscheper/powerline,S0lll0s/powerline,lukw00/powerline,xxxhycl2010/powerline,IvanAli/powerline,DoctorJellyface/powerline,S0lll0s/powerline,darac/powerline,IvanAli/powerline,areteix/powerline,cyrixhero/powerline,darac/powerline,S0lll0s/powerline,EricSB/powerline,seanfisk/powerline,Liangjianghao/powerline,IvanAli/powerline,junix/powerline,areteix/powerline,magus424/powerline,xfumihiro/powerline,keelerm84/powerline,cyrixhero/powerline,magus424/powerline,firebitsbr/powerline,junix/powerline,QuLogic/powerline,Luffin/powerline,kenrachynski/powerline,blindFS/powerline,bezhermoso/powerline,prvnkumar/powerline,darac/powerline,Luffin/powerline,EricSB/powerline,bartvm/powerline,prvnkumar/powerline,magus424/powerline,prvnkumar/powerline,wfscheper/powerline,s0undt3ch/powerline,dragon788/powerline,firebitsbr/powerline,cyrixhero/powerline,seanfisk/powerline,xxxhycl2010/powerline,seanfisk/powerline,bartvm/powerline,Liangjianghao/powerline,s0undt3ch/powerline,s0undt3ch/powerline,Liangjianghao/powerline,Luffin/powerline | powerline/bindings/zsh/__init__.py | powerline/bindings/zsh/__init__.py | # vim:fileencoding=utf-8:noet
import zsh
import atexit
from powerline.shell import ShellPowerline
from powerline.lib import parsedotval
used_powerlines = []
def shutdown():
	"""Shut down the renderer of every powerline instance still registered."""
	for instance in used_powerlines:
		instance.renderer.shutdown()
def get_var_config(var):
	"""Parse the zsh variable ``var`` into powerline configuration items.

	Returns None when the variable is unset or cannot be parsed.
	"""
	try:
		return [parsedotval(i) for i in zsh.getvalue(var).items()]
	except Exception:
		# Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
		# are no longer swallowed; any real lookup/parse failure still
		# degrades to "no configuration".
		return None
class Args(object):
	"""Adapter exposing zsh session state in the shape powerline expects."""
	ext = ['shell']
	renderer_module = 'zsh_prompt'
	@property
	def last_exit_code(self):
		return zsh.last_exit_code()
	@property
	def last_pipe_status(self):
		return zsh.pipestatus()
	@property
	def config(self):
		# POWERLINE_CONFIG may be unset; treat a failed lookup as "no override".
		try:
			return get_var_config('POWERLINE_CONFIG')
		except IndexError:
			return None
	@property
	def theme_option(self):
		try:
			return get_var_config('POWERLINE_THEME_CONFIG')
		except IndexError:
			return None
	@property
	def config_path(self):
		try:
			return zsh.getvalue('POWERLINE_CONFIG_PATH')
		except IndexError:
			return None
def string(s):
	"""Coerce *s* to text: decode bytes as UTF-8, stringify anything else."""
	if type(s) is not bytes:
		return str(s)
	return s.decode('utf-8', errors='replace')
class Environment(object):
	"""Read-only, dict-like view over zsh shell variables."""
	@staticmethod
	def __getitem__(key):
		try:
			return string(zsh.getvalue(key))
		except IndexError as e:
			# zsh.getvalue raises IndexError for a missing variable; surface
			# it as KeyError to match mapping semantics.
			raise KeyError(*e.args)
	@staticmethod
	def get(key, default=None):
		try:
			return string(zsh.getvalue(key))
		except IndexError:
			return default
class Prompt(object):
	"""String-like object rendering one side of the prompt on demand.

	``set_prompt`` points the prompt variable at ``${ZPYTHON_POWERLINE_*}``,
	so ``__str__`` runs (and re-renders) each time zsh draws the prompt.
	On destruction the saved prompt value is restored and the shared
	powerline is shut down once no other Prompt still references it.
	"""
	__slots__ = ('powerline', 'side', 'savedpsvar', 'savedps', 'args')
	def __init__(self, powerline, side, savedpsvar=None, savedps=None):
		self.powerline = powerline
		self.side = side
		self.savedpsvar = savedpsvar
		self.savedps = savedps
		self.args = powerline.args
	def __str__(self):
		r = self.powerline.renderer.render(width=zsh.columns(), side=self.side, segment_info=self.args)
		# Normalise to the native ``str`` type (decode on py3, encode on py2).
		if type(r) is not str:
			if type(r) is bytes:
				return r.decode('utf-8')
			else:
				return r.encode('utf-8')
		return r
	def __del__(self):
		if self.savedps:
			zsh.setvalue(self.savedpsvar, self.savedps)
		# Drop one reference; only shut the renderer down when the last
		# Prompt sharing this powerline disappears.
		used_powerlines.remove(self.powerline)
		if self.powerline not in used_powerlines:
			self.powerline.renderer.shutdown()
def set_prompt(powerline, psvar, side):
	"""Replace the zsh prompt variable ``psvar`` with a live powerline prompt.

	The current value is saved (and later restored by ``Prompt.__del__``);
	``psvar`` is pointed at a special string that renders on every draw.
	"""
	savedps = zsh.getvalue(psvar)
	zpyvar = 'ZPYTHON_POWERLINE_' + psvar
	prompt = Prompt(powerline, side, psvar, savedps)
	zsh.set_special_string(zpyvar, prompt)
	zsh.setvalue(psvar, '${' + zpyvar + '}')
def setup():
	"""Install powerline prompts for both PS1 and RPS1 in the current zsh."""
	environ = Environment()
	powerline = ShellPowerline(Args(), environ=environ, getcwd=lambda: environ['PWD'])
	# The powerline is registered twice -- apparently once per Prompt (PS1 and
	# RPS1), since each Prompt.__del__ removes one entry and the renderer is
	# shut down when the last goes away.  NOTE(review): if that reference
	# counting is not intended, the duplicated append below is a bug; confirm.
	used_powerlines.append(powerline)
	used_powerlines.append(powerline)
	set_prompt(powerline, 'PS1', 'left')
	set_prompt(powerline, 'RPS1', 'right')
	# Fallback cleanup in case the Prompt objects are never garbage collected.
	atexit.register(shutdown)
| # vim:fileencoding=utf-8:noet
import zsh
from powerline.shell import ShellPowerline
from powerline.lib import parsedotval
def get_var_config(var):
try:
return [parsedotval(i) for i in zsh.getvalue(var).items()]
except:
return None
class Args(object):
ext = ['shell']
renderer_module = 'zsh_prompt'
@property
def last_exit_code(self):
return zsh.last_exit_code()
@property
def last_pipe_status(self):
return zsh.pipestatus()
@property
def config(self):
try:
return get_var_config('POWERLINE_CONFIG')
except IndexError:
return None
@property
def theme_option(self):
try:
return get_var_config('POWERLINE_THEME_CONFIG')
except IndexError:
return None
@property
def config_path(self):
try:
return zsh.getvalue('POWERLINE_CONFIG_PATH')
except IndexError:
return None
def string(s):
if type(s) is bytes:
return s.decode('utf-8', errors='replace')
else:
return str(s)
class Environment(object):
@staticmethod
def __getitem__(key):
try:
return string(zsh.getvalue(key))
except IndexError as e:
raise KeyError(*e.args)
@staticmethod
def get(key, default=None):
try:
return string(zsh.getvalue(key))
except IndexError:
return default
class Prompt(object):
__slots__ = ('render', 'side', 'savedpsvar', 'savedps', 'args')
def __init__(self, powerline, side, savedpsvar=None, savedps=None):
self.render = powerline.renderer.render
self.side = side
self.savedpsvar = savedpsvar
self.savedps = savedps
self.args = powerline.args
def __str__(self):
r = self.render(width=zsh.columns(), side=self.side, segment_info=self.args)
if type(r) is not str:
if type(r) is bytes:
return r.decode('utf-8')
else:
return r.encode('utf-8')
return r
def __del__(self):
if self.savedps:
zsh.setvalue(self.savedpsvar, self.savedps)
def set_prompt(powerline, psvar, side):
savedps = zsh.getvalue(psvar)
zpyvar = 'ZPYTHON_POWERLINE_' + psvar
prompt = Prompt(powerline, side, psvar, savedps)
zsh.set_special_string(zpyvar, prompt)
zsh.setvalue(psvar, '${' + zpyvar + '}')
def setup():
environ = Environment()
powerline = ShellPowerline(Args(), environ=environ, getcwd=lambda: environ['PWD'])
set_prompt(powerline, 'PS1', 'left')
set_prompt(powerline, 'RPS1', 'right')
| mit | Python |
2d562c2642dc874d6734843a18073a1c7feaf066 | Add RandomTreesEmbedding for robust clustering | Don86/microscopium,Don86/microscopium,microscopium/microscopium,microscopium/microscopium,jni/microscopium,jni/microscopium,starcalibre/microscopium | husc/cluster.py | husc/cluster.py | from sklearn.ensemble import RandomTreesEmbedding
def rt_embedding(X, n_estimators=100, max_depth=10, n_jobs=-1, **kwargs):
    """Embed data matrix X in a random forest.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        The data matrix.
    n_estimators : int, optional
        The number of trees in the embedding.
    max_depth : int, optional
        The maximum depth of each tree.
    n_jobs : int, optional
        Number of compute jobs when fitting the trees. -1 means number
        of processors on the current computer.
    **kwargs : dict
        Keyword arguments to be passed to
        `sklearn.ensemble.RandomTreesEmbedding`

    Returns
    -------
    rt : RandomTreesEmbedding object
        The embedding object.
    X_transformed : sparse matrix
        The transformed data.
    """
    # Bug fix: n_estimators, max_depth and n_jobs were documented but never
    # forwarded, so RandomTreesEmbedding silently ran with its own defaults.
    rt = RandomTreesEmbedding(n_estimators=n_estimators, max_depth=max_depth,
                              n_jobs=n_jobs, **kwargs)
    X_transformed = rt.fit_transform(X)
    return rt, X_transformed
| bsd-3-clause | Python | |
af1262d175e2a3b2371df77685483c8e5074d90e | Create most-wanted-letter.py | Pouf/CodingCompetition,Pouf/CodingCompetition | CiO/most-wanted-letter.py | CiO/most-wanted-letter.py | def checkio(l):
l = sorted(l.lower())
return max(filter(str.isalpha, l), key=l.count)
| mit | Python | |
fac97130396057802f1ebf21928667a971395ba9 | Add a basic example of the Tabler API. | bschmeck/tabler | examples/ex_tabler.py | examples/ex_tabler.py | from tabler import Tabler
# Sample HTML table with one header row and two data rows.
table = """<table>
    <thead>
        <tr>
            <th>Number</th>
            <th>First Name</th>
            <th>Last Name</th>
            <th>Phone Number</th>
        </tr>
        <tr>
            <td>1</td>
            <td>Bob</td>
            <td>Evans</td>
            <td>(847) 332-0461</td>
        </tr>
        <tr>
            <td>2</td>
            <td>Mary</td>
            <td>Newell</td>
            <td>(414) 617-9516</td>
        </tr>
    </thead>
</table>"""
# Parse the markup; rows appear keyed by snake_cased header names
# ("First Name" -> row["first_name"]).  Python 2 print statements below.
parser = Tabler(table)
print "There are", len(parser.rows), "rows."
print "First names:"
for row in parser.rows:
    print row["first_name"]
| bsd-3-clause | Python | |
e279f8a046d2d9b985df2b01abe23dbe154da188 | Add a simple test for version finder. | punchagan/cinspect,punchagan/cinspect | cinspect/tests/test_version.py | cinspect/tests/test_version.py | from __future__ import absolute_import, print_function
# Standard library
import unittest
# Local library
from cinspect.index.serialize import _get_most_similar
class TestVersions(unittest.TestCase):
    # Exercises cinspect's version-to-index-file matching.
    def test_most_similar(self):
        """_get_most_similar should pick the index closest to the version."""
        # Given
        names = ['index-2.7.3.json', 'index-3.4.json']
        version = '2.7.8'
        # When
        name = _get_most_similar(version, names)
        # Then: 2.7.3 is a closer match to 2.7.8 than 3.4 is.
        self.assertEqual('index-2.7.3.json', name)
| bsd-3-clause | Python | |
43d3d0351df4be3354519a6e5b7e1f630d5ede74 | Add testjars.py to tools - tests jars in plugins/ directory for CraftBukkit dependencies. Requires Solum (https://github.com/TkTech/Solum). | jimmikaelkael/GlowstonePlusPlus,keke142/GlowstonePlusPlus,keke142/GlowstonePlusPlus,Postremus/GlowstonePlusPlus,Tonodus/GlowSponge,kukrimate/Glowstone,GlowstoneMC/GlowstonePlusPlus,GreenBeard/GlowstonePlusPlus,karlthepagan/Glowstone,jimmikaelkael/GlowstonePlusPlus,Tonodus/GlowSponge,LukBukkit/GlowstonePlusPlus,BlazePowered/Blaze-Server,LukBukkit/GlowstonePlusPlus,keke142/GlowstonePlusPlus,GlowstoneMC/GlowstonePlusPlus,kukrimate/Glowstone,Tonodus/WaterGleam,GlowstoneMC/GlowstonePlusPlus,kukrimate/Glowstone,Postremus/GlowstonePlusPlus,jimmikaelkael/GlowstonePlusPlus,BlazePowered/Blaze-Server,Tonodus/WaterGleam,keke142/GlowstonePlusPlus,karlthepagan/Glowstone,GreenBeard/GlowstonePlusPlus,karlthepagan/Glowstone,GlowstonePlusPlus/GlowstonePlusPlus,kukrimate/Glowstone,Tonodus/GlowSponge,Tonodus/WaterGleam,BlazePowered/Blaze-Server,LukBukkit/GlowstonePlusPlus,GlowstonePlusPlus/GlowstonePlusPlus,Postremus/GlowstonePlusPlus,GlowstonePlusPlus/GlowstonePlusPlus,GlowstonePlusPlus/GlowstonePlusPlus,Postremus/GlowstonePlusPlus,Tonodus/GlowSponge,BlazePowered/Blaze-Server,jimmikaelkael/GlowstonePlusPlus,GreenBeard/GlowstonePlusPlus,GlowstoneMC/GlowstonePlusPlus,LukBukkit/GlowstonePlusPlus,Tonodus/WaterGleam,GreenBeard/GlowstonePlusPlus | etc/tools/testjars.py | etc/tools/testjars.py | # testjars.py
#
# Scans all .jar files in plugins/ directory of current
# WD for dependencies on CraftBukkit-only methods and
# classes (org.bukkit.craftbukkit and net.minecraft).
# Prints SAFE if no dependencies are found, or UNSAFE
# if the plugin requires CraftBukkit.
#
# Requires Solum (https://github.com/TkTech/Solum)
from glob import glob
from solum import JarFile, ClassFile, ConstantType
from os import path
def info(filename, plugin):
    """
    Extracts name and author info from the given jar.
    """
    # NOTE(review): this is an unimplemented stub — it always returns None
    # and is not called by the __main__ block below.
    return
def narrow(constant):
    """Return True when *constant* references a CraftBukkit-only class.

    A constant is considered CraftBukkit-dependent when its class name
    lives under the org/bukkit/craftbukkit or net/minecraft package trees.
    """
    class_name = constant["class"]["name"]["value"]
    unsafe_prefixes = ("org/bukkit/craftbukkit", "net/minecraft")
    return class_name.startswith(unsafe_prefixes)
def examine(buffer_):
    """
    Returns True if the class is considered "Safe",
    else False.
    """
    # Parse the raw class bytes with Solum.
    cf = ClassFile(buffer_, str_as_buffer=True)
    # Any method reference into a CraftBukkit/Minecraft class is unsafe.
    if cf.constants.find(tag=ConstantType.METHOD_REF, f=narrow):
        return False
    # Likewise for field references.
    if cf.constants.find(tag=ConstantType.FIELD_REF, f=narrow):
        return False
    return True
if __name__ == "__main__":
    # Check every plugin jar in the working directory's plugins/ folder.
    for jar in glob("plugins/*.jar"):
        plugin = JarFile(jar)
        # examine() runs per class file; any False means the jar depends
        # on CraftBukkit internals. (Python 2 print statements below.)
        if False in plugin.map(examine, parallel=False):
            print "UNSAFE %s" % path.basename(jar)
        else:
            print "SAFE %s" % path.basename(jar)
| mit | Python | |
fcbfaded67747984899dbbabb2cdcdefe00002df | Add example script for 'simpleflow.download.with_binaries' decorator | botify-labs/simpleflow,botify-labs/simpleflow | examples/download1.py | examples/download1.py | import subprocess
from simpleflow.download import with_binaries
# Download the named binary from S3 and put it on PATH before the task runs.
@with_binaries({
    "how-is-simpleflow": "s3://botify-labs-simpleflow/binaries/latest/how_is_simpleflow",
})
def a_task():
    # Show where the decorator installed the binary, then run it.
    # (Python 2 print statements.)
    print "command: which how-is-simpleflow"
    print subprocess.check_output(["which", "how-is-simpleflow"])
    print "command: how-is-simpleflow"
    print subprocess.check_output(["how-is-simpleflow"])
# Run once at import time to demonstrate the decorator.
a_task()
| mit | Python | |
1904e1987114d9c57602b5c1fdb41a8725cdb090 | Add LED dance example | JoeGlancy/micropython,JoeGlancy/micropython,JoeGlancy/micropython | examples/led_dance.py | examples/led_dance.py | # Light LEDs at random and make them fade over time
#
# Usage:
#
# led_dance(delay)
#
# 'delay' is the time between each new LED being turned on.
#
# TODO The random number generator is not great. Perhaps the accelerometer
# or compass could be used to add entropy.
import microbit
def led_dance(delay):
    """Light random LEDs and fade them; *delay* is the ms between frames."""
    # 5x5 grid of brightness values, one per LED.
    dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
    # Mode 1 enables greyscale (per-pixel brightness) display.
    microbit.display.set_display_mode(1)
    while True:
        # Turn on one random pixel at half brightness each frame.
        dots[microbit.random(5)][microbit.random(5)] = 128
        for i in range(5):
            for j in range(5):
                microbit.display.image.set_pixel(i, j, dots[i][j])
                # Halve the stored brightness so pixels fade over time.
                dots[i][j] = int(dots[i][j]/2)
        microbit.sleep(delay)
# Runs forever at import; 100 ms between new pixels.
led_dance(100)
| mit | Python | |
7e90e76c0fed3abeb5c163b96ac203b251ab5b81 | Add functions to simplify graphs. | musec/py-cdg | cgnet/simplify.py | cgnet/simplify.py | import networkx as nx
def is_simple_node( graph, node ):
"""A node is "Simple" if none of the following is true
- it has multiple inputs (it joins chains together)
- it has no inputs (it's a root node)
- it has multiple outputs (it splits chains apart)
- it has no outputs (it's a leaf node)
Keyword arguments:
node -- A networkx DiGraph Node
"""
return graph.in_degree(node) == 1 \
and graph.out_degree(node) == 1
def simplify_graph( graph ):
"""Simplify a CallGraph by collapsing call chains and dropping
any unreferenced calls.
Keyword arguments:
graph -- A networkx DiGraph
"""
g = graph.copy()
for n in graph:
if is_simple_node(graph, n):
pre = graph.predecessors(n)[0]
suc = graph.successors(n)[0]
g.add_edge(pre, suc)
g.remove_node(n)
return g
| apache-2.0 | Python | |
e840a73991cde9d291aa61989d8b59d07fe949f2 | add ressource tree entry point | NaturalSolutions/NsPortal,NaturalSolutions/NsPortal,NaturalSolutions/NsPortal | Back/ns_portal/routes/__init__.py | Back/ns_portal/routes/__init__.py | from ns_portal.resources import root_factory
def includeme(config):
'''
every resources or actions in this API will start by this object
be careful if you try to mix urlDispatch and traversal algorithm
keep it in mind
'''
config.set_root_factory(root_factory)
config.add_static_view('static', 'static', cache_max_age=3600)
| mit | Python | |
665f53c2b91ca974a6ee2f3ac4c494e9543af3cd | Add test for 0 version txs | qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum | qa/rpc-tests/qtum-no-exec-call-disabled.py | qa/rpc-tests/qtum-no-exec-call-disabled.py | #!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.address import *
from test_framework.qtum import *
from test_framework.blocktools import *
import sys
import random
import time
import io
def rpc_sign_transaction(node, tx):
    """Sign *tx* via the node's RPC and return it as a CTransaction."""
    tx_signed_raw_hex = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
    # Round-trip the signed hex back into a mininode transaction object.
    f = io.BytesIO(hex_str_to_bytes(tx_signed_raw_hex))
    tx_signed = CTransaction()
    tx_signed.deserialize(f)
    return tx_signed
class QtumNoExecCallDisabledTest(BitcoinTestFramework):
    """Checks that OP_CALL outputs with version 0 (no-exec) are rejected."""
    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 1
    def setup_network(self, split=False):
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        self.is_network_split = False
        self.node = self.nodes[0]
    def run_test(self):
        # Mature enough coinbases to spend from.
        self.node.generate(10 + COINBASE_MATURITY)
        """
        pragma solidity ^0.4.12;
        contract Test {
            function () payable {}
        }
        """
        # Deploy a trivial payable contract to call against.
        bytecode = "60606040523415600e57600080fd5b5b603580601c6000396000f30060606040525b5b5b0000a165627a7a723058208938cb174ee70dbb41b522af1feac2c7e0e252b7bc9ecb92c8d87a50c445a26c0029"
        contract_address = self.node.createcontract(bytecode)['address']
        self.node.generate(1)
        self.node.sendtocontract(contract_address, "00", 1)
        self.node.generate(1)
        # Build an OP_CALL output with version/gas fields of 0 — it must be
        # rejected by the mempool.
        tx = CTransaction()
        tx.vin = [make_vin(self.node, int(COIN+1000000))]
        tx.vout = [CTxOut(int(COIN), scriptPubKey=CScript([b"\x00", CScriptNum(0), CScriptNum(0), b"\x00", hex_str_to_bytes(contract_address), OP_CALL]))]
        tx = rpc_sign_transaction(self.node, tx)
        assert_raises(JSONRPCException, self.node.sendrawtransaction, bytes_to_hex_str(tx.serialize()))
        # Even with plausible gas values the zero-version script stays invalid.
        tx.vout = [CTxOut(int(COIN), scriptPubKey=CScript([b"\x00", CScriptNum(100000), CScriptNum(1), b"\x00", hex_str_to_bytes(contract_address), OP_CALL]))]
        tx = rpc_sign_transaction(self.node, tx)
        assert_raises(JSONRPCException, self.node.sendrawtransaction, bytes_to_hex_str(tx.serialize()))
        # Mine the invalid tx into a block by hand; the node must reject the
        # block (chain height must not advance).
        tip = self.node.getblock(self.node.getbestblockhash())
        block = create_block(int(tip['hash'], 16), create_coinbase(tip['height']+1), tip['time']+1)
        block.vtx.append(tx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        blockcount = self.node.getblockcount()
        self.node.submitblock(bytes_to_hex_str(block.serialize()))
        assert_equal(self.node.getblockcount(), blockcount)
        # Same check with the original all-zero OP_CALL output.
        block.vtx[1].vout = [CTxOut(int(COIN), scriptPubKey=CScript([b"\x00", CScriptNum(0), CScriptNum(0), b"\x00", hex_str_to_bytes(contract_address), OP_CALL]))]
        block.vtx[1] = rpc_sign_transaction(self.node, block.vtx[1])
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        blockcount = self.node.getblockcount()
        self.node.submitblock(bytes_to_hex_str(block.serialize()))
        assert_equal(self.node.getblockcount(), blockcount)
if __name__ == '__main__':
QtumNoExecCallDisabledTest().main()
| mit | Python | |
639143e3145682d776251f39f0a791f0c77e5169 | ADD Domain association_requests unit tests | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft | apps/domain/tests/test_routes/test_association_requests.py | apps/domain/tests/test_routes/test_association_requests.py |
# Route tests for the Domain app's association-request endpoints.
def test_send_association_request(client):
    """POST /association-requests/request should acknowledge the send."""
    result = client.post("/association-requests/request", data={"id": "54623156", "address": "159.15.223.162"})
    assert result.status_code == 200
    assert result.get_json() == {"msg": "Association request sent!"}
def test_receive_association_request(client):
    """POST /association-requests/receive should acknowledge receipt."""
    result = client.post("/association-requests/receive", data={"id": "54623156", "address": "159.15.223.162"})
    assert result.status_code == 200
    assert result.get_json() == {"msg": "Association request received!"}
def test_reply_association_request(client):
    """POST /association-requests/respond should acknowledge the reply."""
    result = client.post("/association-requests/respond", data={"id": "54623156", "address": "159.15.223.162"})
    assert result.status_code == 200
    assert result.get_json() == {"msg": "Association request was replied!"}
# NOTE(review): the three functions below lack the ``test_`` prefix, so
# pytest will not collect them — rename to test_* for them to run.
def get_all_association_requests(client):
    result = client.get("/association-requests/")
    assert result.status_code == 200
    assert result.get_json() == {"association-requests": ["Network A", "Network B", "Network C"]}
def get_specific_association_requests(client):
    result = client.get("/association-requests/51613546")
    assert result.status_code == 200
    assert result.get_json() == {
        "association-request": {
            "ID": "51613546",
            "address": "156.89.33.200",
        }
    }
def delete_association_requests(client):
    # NOTE(review): this uses client.get although the name says delete —
    # confirm whether client.delete was intended.
    result = client.get("/association-requests/51661659")
    assert result.status_code == 200
    assert result.get_json() == {"msg": "Association request deleted!"}
| apache-2.0 | Python | |
576d4090cb087a60c3a996430fef5b1550798e6c | Create mean_vol.py | algoix/blog | mean_vol.py | mean_vol.py | """Compute mean volume"""
import pandas as pd
def get_mean_volume(symbol):
    """Return the mean trading volume for the stock *symbol*.

    The data for a stock is expected in the CSV file data/<symbol>.csv,
    which must contain a ``Volume`` column.
    """
    frame = pd.read_csv("data/{}.csv".format(symbol))
    return frame["Volume"].mean()
def test_run():
    """Function called by Test Run."""
    # Print the mean volume for two hard-coded tickers.
    # (Python 2 print statements.)
    for symbol in ['AAPL', 'IBM']:
        print "Mean Volume"
        print symbol, get_mean_volume(symbol)
if __name__ == "__main__":
    test_run()
| mit | Python | |
1d7d86ba12fd00d388b939206abf305a6db569db | Add trainer for siamese models | Flowerfan524/TriClustering,Flowerfan524/TriClustering,zydou/open-reid,Cysu/open-reid | reid/train_siamese.py | reid/train_siamese.py | from __future__ import print_function
import time
from torch.autograd import Variable
from .evaluation import accuracy
from .utils.meters import AverageMeter
class Trainer(object):
    """One-epoch trainer for a siamese model on same/different-ID pairs."""
    def __init__(self, model, criterion, args):
        super(Trainer, self).__init__()
        self.model = model
        self.criterion = criterion
        # args must provide print_freq (iterations between log lines).
        self.args = args
    def train(self, epoch, data_loader, optimizer):
        # Running averages for timing, loss and top-1 accuracy.
        batch_time = AverageMeter()
        data_time = AverageMeter()
        losses = AverageMeter()
        top1 = AverageMeter()
        self.model.train()
        end = time.time()
        for i, pair in enumerate(data_loader):
            # Time spent waiting for data loading.
            data_time.update(time.time() - end)
            inputs1, inputs2, targets = self._parse_data(pair)
            # The siamese model scores whether the two inputs share an ID.
            outputs = self.model(inputs1, inputs2)
            loss = self.criterion(outputs, targets)
            prec1, = accuracy(outputs.data, targets.data)
            losses.update(loss.data[0], inputs1.size(0))
            top1.update(prec1[0], inputs1.size(0))
            # Standard SGD step.
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            batch_time.update(time.time() - end)
            end = time.time()
            if (i + 1) % self.args.print_freq == 0:
                print('Epoch: [{}][{}/{}]\t'
                      'Time {:.3f} ({:.3f})\t'
                      'Data {:.3f} ({:.3f})\t'
                      'Loss {:.3f} ({:.3f})\t'
                      'Top1 {:.2%} ({:.2%})\t'.format(
                    epoch, i + 1, len(data_loader),
                    batch_time.val, batch_time.avg,
                    data_time.val, data_time.avg,
                    losses.val, losses.avg, top1.val, top1.avg))
    def _parse_data(self, pair):
        # Each half of the pair is (images, fname, person_ids, cam_ids);
        # the target is 1 when both samples carry the same person id.
        (imgs1, _, pids1, _), (imgs2, _, pids2, _) = pair
        inputs1, inputs2 = Variable(imgs1), Variable(imgs2)
        targets = (pids1 == pids2).long().cuda()
        return inputs1, inputs2, targets
| mit | Python | |
34be87320de5d5d10e55e8c91506dee230afb1b6 | Add cli menu | HugoPouliquen/lzw-tools | menu_cli.py | menu_cli.py | # -*- coding: utf-8 -*-
import curses
def init_curses():
    """Initialise curses (raw key input, no echo) and return the screen."""
    stdsrc = curses.initscr()
    curses.noecho()
    curses.cbreak()
    # Enable keypad mode so special keys produce distinct codes.
    stdsrc.keypad(1)
    return stdsrc
def close_curses(stdsrc):
    """Undo init_curses() and restore the terminal to normal mode."""
    stdsrc.keypad(0)
    curses.nocbreak()
    curses.echo()
    curses.endwin()
def init_colors():
    """Register the two color pairs used by the menu (title, highlight)."""
    curses.start_color()
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
    curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_RED)
def display_menu(title, menu, window, active=1):
    """Draw *title* and the *menu* items, highlighting the *active* one (1-based)."""
    window.addstr(1, 1, title, curses.color_pair(1))
    window.addstr(3, 1, "Choisissez ↓ ↑ : ")
    item_pos = 4
    pos = 1
    for item in menu:
        # Highlight the currently selected entry.
        if pos == active:
            color = curses.color_pair(2)
        else:
            color = curses.A_NORMAL
        window.addstr(item_pos, 1, " %d. %s" % (pos, item), color)
        item_pos += 1
        pos += 1
    window.refresh()
def getKey(final, title, menu, window, active_pos=1):
    """Loop over key presses, moving the selection, until Enter; return it.

    *final* is the number of menu entries; selection wraps at both ends.
    """
    c = None
    while c != 10:  # 10 == LF, emitted by the Enter key
        c = window.getch()
        # 66/65 are 'B'/'A', the last bytes of the Down/Up arrow escape
        # sequences when keypad translation does not intercept them.
        if c == 66:
            if active_pos != final:
                active_pos += 1
            else:
                active_pos = 1
        elif c == 65:
            if active_pos != 1:
                active_pos -= 1
            else:
                active_pos = final
        display_menu(title, menu, window, active_pos)
    return active_pos
if __name__ == "__main__":
    # Bug fix: the guard was typo'd ("__main___") and commented out, so the
    # interactive menu ran on import. Also, init_curses() now runs before
    # the try block so ``stdsrc`` is always bound when ``finally`` fires.
    stdsrc = init_curses()
    try:
        init_colors()
        window = curses.newwin(40, 79, 3, 5)
        # window = stdsrc.subwin(40, 79, 3, 5)
        window.border(0)
        menu_list = ('Fichier', 'Phrase', 'Quitter')
        title = 'Que voulez vous compresser ?'
        display_menu(title, menu_list, window)
        choice = getKey(len(menu_list), title, menu_list, window)
        # Echo the selection and wait for a final key press before leaving.
        window.addstr(
            len(menu_list) + 5, 1, "Choix : %s (%d)"
            % (menu_list[choice-1], choice)
        )
        window.addstr(
            len(menu_list) + 6, 1,
            "Ce n'est qu'un au-revoir ! (Appuyez sur une touche tu dois)"
        )
        window.refresh()
        c = window.getch()
    finally:
        # Always restore the terminal, even if the UI code raised.
        close_curses(stdsrc)
| mit | Python | |
3f92a621c35d84c4d1dffd3333e015cb5ca8c0d8 | add osvReader.py | buguen/pyosv | src/osvReader.py | src/osvReader.py | from OCC.STEPControl import STEPControl_Reader
from OCC.IFSelect import IFSelect_RetDone, IFSelect_ItemsByEntity
from OCC.Display.SimpleGui import init_display
step_reader = STEPControl_Reader()
status = step_reader.ReadFile('./TABBY_EVO_step_asm.stp')
if status == IFSelect_RetDone: # check status
failsonly = False
step_reader.PrintCheckLoad(failsonly, IFSelect_ItemsByEntity)
step_reader.PrintCheckTransfer(failsonly, IFSelect_ItemsByEntity)
ok = step_reader.TransferRoot(1)
_nbs = step_reader.NbShapes()
aResShape = step_reader.Shape(1)
else:
print("Error: can't read file.")
sys.exit(0)
display, start_display, add_menu, add_function_to_menu = init_display()
display.DisplayShape(aResShape, update=True)
start_display() | mit | Python | |
dec97d72f034826ec88de6de1609bb19bb2a410f | test file commit | mir-cat/BST234-FP,mir-cat/BST234-FP | just_for_git.py | just_for_git.py | # -*- coding: utf-8 -*-
"""
Created on Fri Apr 21 11:25:45 2017
@author: Katharine
"""
| mit | Python | |
528afdc0f00b958f6920bd6e66c3bac841b3a8b8 | Add a test for mission. | kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa | server/kcaa/kcsapi/mission_test.py | server/kcaa/kcsapi/mission_test.py | #!/usr/bin/env python
import pytest
import mission
class TestMissionList(object):
    """Tests for MissionList lookup and per-maparea indexing."""
    # pytest "funcarg" fixture (legacy naming): builds a list with two
    # missions in the base maparea and three in the southwestern islands.
    def pytest_funcarg__mission_list(self):
        mission_list = mission.MissionList()
        mission_list.missions.extend([
            mission.Mission(
                id=1,
                name=u'Mission1',
                maparea=mission.Mission.MAPAREA_BASE),
            mission.Mission(
                id=2,
                name=u'Mission2',
                maparea=mission.Mission.MAPAREA_BASE),
            mission.Mission(
                id=3,
                name=u'Mission3',
                maparea=mission.Mission.MAPAREA_SOUTHWESTERN_ISLANDS),
            mission.Mission(
                id=4,
                name=u'Mission4',
                maparea=mission.Mission.MAPAREA_SOUTHWESTERN_ISLANDS),
            mission.Mission(
                id=5,
                name=u'Mission5',
                maparea=mission.Mission.MAPAREA_SOUTHWESTERN_ISLANDS)])
        return mission_list
    def test_get_mission(self, mission_list):
        """get_mission returns by id, or None for unknown ids."""
        assert mission_list.get_mission(0) is None
        assert mission_list.get_mission(1) == mission_list.missions[0]
        assert mission_list.get_mission(2) == mission_list.missions[1]
        assert mission_list.get_mission(3) == mission_list.missions[2]
        assert mission_list.get_mission(4) == mission_list.missions[3]
        assert mission_list.get_mission(5) == mission_list.missions[4]
        assert mission_list.get_mission(6) is None
    def test_get_index_in_mapaea(self, mission_list):
        """Index restarts at 0 within each maparea."""
        assert mission_list.get_index_in_maparea(mission_list.missions[0]) == 0
        assert mission_list.get_index_in_maparea(mission_list.missions[1]) == 1
        assert mission_list.get_index_in_maparea(mission_list.missions[2]) == 0
        assert mission_list.get_index_in_maparea(mission_list.missions[3]) == 1
        assert mission_list.get_index_in_maparea(mission_list.missions[4]) == 2
def main():
    # Run the module doctests, then the pytest suite for this file.
    import doctest
    doctest.testmod(mission)
    import sys
    sys.exit(pytest.main(args=[__file__.replace('.pyc', '.py')]))
if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
cba6fbf305beb3cb1c90c071339822a9f7ee7179 | Create __init__.py | seedinvest/base-crm-api-client | base_api/__init__.py | base_api/__init__.py | apache-2.0 | Python | ||
38e294e7d8e8053ac604fdbcdcaeed59fecae1e9 | Test libewf-less calls in ewf module | akaIDIOT/Digestive | tests/test_ewf.py | tests/test_ewf.py | from os import path
from digestive.ewf import EWFSource, format_supported, list_ewf_files
here = path.dirname(path.abspath(__file__))
def test_format_supported():
    """EWF-family extensions are accepted; everything else is rejected."""
    supported = ['file.S01', 'file.E01', 'file.e01', 'file.L01', 'file.Ex01', 'file.Lx01', 'file.tar.E01']
    not_supported = ['file.dd', 'file.raw', 'file.E1', 'file.Ex1', 'file.tar.gz', 'file.E01.raw']
    # NOTE(review): the loop variables shadow the list names; it works
    # because each list is fully bound before its loop starts.
    for supported in supported:
        assert format_supported(supported)
    for not_supported in not_supported:
        assert not format_supported(not_supported)
def test_list_ewf_files():
    """Listing from the first segment returns the whole two-part set."""
    files = [path.join(here, 'files/random.E01'), path.join(here, 'files/random.E02')]
    assert list_ewf_files(path.join(here, 'files/random.E01')) == files
    # non-primary file is not handled as the beginning of a set
    assert list_ewf_files(path.join(here, 'files/random.E02')) == [path.join(here, 'files/random.E02')]
def test_ewf_source_simple():
    """EWFSource's string form shows the segment range of the set."""
    source = EWFSource(path.join(here, 'files/random.E01'))
    # random.E01 has two parts, should be visible
    assert str(source) == path.join(here, 'files/random.E01') + '..E02'
| isc | Python | |
fbc375a51aca560554f3dd28fa212d6d877449f2 | Add tests for observers and observable. | VISTAS-IVES/pyvistas | source/tests/core/test_observer.py | source/tests/core/test_observer.py | from copy import copy
from pytest import fixture
from vistas.core.observers.interface import *
# The fixture is session-scoped, so the same TestObserver instance (and its
# mutated state) is shared by every test below — test order matters here.
@fixture(scope='session')
def observer():
    class TestObserver(Observer):
        def __init__(self):
            self.x = 5
        def update(self, observable):
            # Square on every notification so effects are detectable.
            self.x **= 2
    obs = TestObserver()
    yield obs
def test_add_observer(observer):
    """Adding the same observer twice must not create a duplicate entry."""
    obs = Observable()
    obs.add_observer(observer)
    assert len(obs.observers) == 1
    obs.add_observer(observer)
    assert len(obs.observers) == 1
def test_cls_observers():
    """The observer list is class-level state shared across instances."""
    # NOTE(review): relies on test_add_observer having run first.
    assert len(Observable.observers) == 1
def test_notify_observers(observer):
    """notify_observers() triggers update(), squaring x: 5 -> 25."""
    obs = Observable()
    obs.notify_observers()
    assert observer.x == 25
def test_remove_observer(observer):
    """Removal takes out exactly one entry and is a no-op when absent."""
    observer2 = copy(observer)
    obs = Observable()
    obs.add_observer(observer2)
    assert len(obs.observers) == 2
    # Test removal
    obs.remove_observer(observer)
    assert len(obs.observers) == 1
    # Test unique removal
    obs.remove_observer(observer)
    assert len(obs.observers) == 1
    obs.remove_observer(observer2)
    assert len(obs.observers) == 0
| bsd-3-clause | Python | |
3c5116f3a26fb93ab85fd973462a582a0fa5d877 | Add script to validate web/db/handlerpresets.json file | nkapu/handlers,ouspg/urlhandlers,ouspg/urlhandlers,nkapu/handlers,nkapu/handlers,nkapu/handlers,ouspg/urlhandlers,ouspg/urlhandlers | bin/validate-presets.py | bin/validate-presets.py | #!/usr/bin/python
import json
import sys
def validate_presets(presets_file):
    """Report preset entries whose value does not start with their handler.

    (Python 2 code: dict.iterkeys() and print statements.)
    """
    with open(presets_file) as jsonfile:
        presets_dict = json.load(jsonfile)
    for handler in presets_dict.iterkeys():
        for entry in presets_dict.get(handler).get("presets"):
            value = entry.get("value")
            # Every preset value must begin with its handler scheme.
            if not value.startswith(handler):
                print "ERROR: \"{0}\" handler with \"{1}\" value".format(handler, value)
if __name__ == '__main__':
    args = sys.argv[1:]
    try:
        validate_presets(*args)
    # Wrong argument count surfaces as TypeError from the *args call.
    except TypeError:
        print "{0} <handlerpresets.json>".format(sys.argv[0])
        sys.exit(1)
| mit | Python | |
35bc8d1d262d658dc1d75d20cc46f853245c4d2f | Add test | comandrei/celery-janitor | celery_janitor/tests.py | celery_janitor/tests.py | import unittest
import mock
from celery_janitor.utils import Config
class ConfigTest(unittest.TestCase):
    # Patch the broker URL so Config sees an SQS-style broker string.
    @mock.patch('celery_janitor.utils.conf.BROKER_URL', 'sqs://aws_access_key_id:aws_secret@')
    def test_sqs_backend(self):
        """An sqs:// broker URL selects the SQS backend and yields its credentials."""
        self.config = Config()
        backend = self.config.get_backend()
        self.assertEqual(backend, 'celery_janitor.backends.sqs.SQSBackend')
        # Credentials are parsed out of the broker URL's userinfo part.
        key, secret = self.config.get_credentials()
        self.assertEqual(key, 'aws_access_key_id')
        self.assertEqual(secret, 'aws_secret')
| mit | Python | |
61b91f6541457834f6442a44d7e3f97630be931b | Split the passwords off to a seperate file | c00w/bitHopper,c00w/bitHopper | password.py | password.py |
#SET THESE
bclc_user = "FSkyvM"
bclc_pass = "xndzEU"
mtred_user = 'scarium'
mtred_pass = 'x'
eligius_address = '1AofHmwVef5QkamCW6KqiD4cRqEcq5U7hZ'
btcguild_user = 'c00w_test'
btcguild_pass = '1234'
bitclockers_user = 'flargle'
bitclockers_pass = 'x'
mineco_user = 'c00w.test'
mineco_pass = 'x'
#REALLY
| mit | Python | |
a840ca18158d63e0359d7316354e9833e0608712 | Add __init__.py | Geosyntec/gisutils,phobson/gisutils | Mapping/__init__.py | Mapping/__init__.py | '''Initialization File so that modules in this directory are accessible to outside modules'''
| bsd-3-clause | Python | |
b1a8c5d05fcc6bde19f7159d88f46da3fbadff6f | Add sub /r/arma | Fillll/reddit2telegram,Fillll/reddit2telegram | channels/arma/app.py | channels/arma/app.py | #encoding:utf-8
from utils import get_url, weighted_random_subreddit
from utils import SupplyResult
# Subreddit that will be a source of content
subreddit = weighted_random_subreddit({
'arma': 1.0,
# If we want get content from several subreddits
# please provide here 'subreddit': probability
# 'any_other_subreddit': 0.02
})
# Telegram channel with @reddit2telegram_bot as an admin
t_channel = '@r_arma'
def send_post(submission, r2t):
    """Forward a reddit *submission* to the Telegram channel via *r2t*.

    Returns a SupplyResult telling the supplier what happened.
    """
    what, url, ext = get_url(submission)
    # Return value convention of this function:
    # False – it means that we will not send
    # this submission, let's move to the next.
    # True – everything is ok, we send the submission
    # None – we do not want to send anything this time,
    # let's just sleep.
    # Get all data from submission that we need
    title = submission.title
    link = submission.shortlink
    text = '{}\n{}'.format(title, link)
    if what == 'text':
        # Text submissions are skipped for this channel.
        # return r2t.send_text(submission.selftext)
        return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
    elif what == 'other':
        # Nor are we interested in any other content type.
        return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
    elif what == 'album':
        # Albums are fine: announce them, then send the album itself.
        base_url = submission.url
        text = '{}\n{}\n\n{}'.format(title, base_url, link)
        r2t.send_text(text)
        r2t.send_album(url)
        return SupplyResult.SUCCESSFULLY
    elif what in ('gif', 'img'):
        # Gifs and images are fine too.
        # Check if content has already appeared in
        # our telegram channel (dedup by URL).
        if r2t.dup_check_and_mark(url) is True:
            return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
        return r2t.send_gif_img(what, url, ext, text)
    else:
        return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
| mit | Python | |
3a5e1b80a2d2242bee4e0ba524fc91883e131088 | Add Closure pattern | jackaljack/design-patterns | closure.py | closure.py | """Closure pattern
A closure is a record storing a function together with an environment.
"""
def outer(x):
    """Return a closure: ``inner`` keeps ``x`` as a free variable."""
    def inner(y):
        return x + y
    return inner
def outer2(x):
    """Return a nested function that is NOT a closure: the default
    argument rebinds ``x`` locally, so nothing stays free."""
    def inner2(y, x=x):
        return x + y
    return inner2
# NOTE: main() asserts on the exact names 'inner', 'inner2' and the free
# variable 'x' — do not rename locals in the functions above.
def main():
    # inner is defined in the local scope of outer, so we can't access it
    try:
        inner()
    except NameError as e:
        print(e)
    # a closure
    func = outer(3)
    print(func(2))
    # func stores inner and the environment where inner was defined
    assert func.__name__ == 'inner'
    # in inner's scope x was not defined, but it was - and still is - available
    # in its environment, so we can access x
    assert func.__code__.co_freevars[0] == 'x'
    # so func is a closure
    assert func.__closure__ is not None
    # just a nested function, not a closure
    func2 = outer2(3)
    print(func2(2))
    # func2 stores inner2 and the environment where inner2 was defined
    assert func2.__name__ == 'inner2'
    # in inner2's scope x was (re)defined (variable shadowing), so it's not a
    # free variable
    assert not func2.__code__.co_freevars
    # so func2 is NOT a closure
    assert func2.__closure__ is None
if __name__ == '__main__':
    main()
| mit | Python | |
d3d813ca174924e2424bdafcb613aace8b7a5324 | Create publicip.py | liorvh/pythonpentest,liorvh/pythonpentest,funkandwagnalls/pythonpentest,funkandwagnalls/pythonpentest,funkandwagnalls/pythonpentest,liorvh/pythonpentest | publicip.py | publicip.py | #!/usr/bin/env python
# Author: Christopher Duffy
# Date: February 2, 2015
# Purpose: To grab your current public IP address
import urllib2
def get_public_ip(request_target):
grabber = urllib2.build_opener()
grabber.addheaders = [('User-agent','Mozilla/5.0')]
try:
public_ip_address = grabber.open(target_url).read()
except urllib2.HTTPError, error:
print("There was an error trying to get your Public IP: %s") % (error)
except urllib2.URLError, error:
print("There was an error trying to get your Public IP: %s") % (error)
return public_ip_address
public_ip = "None"
target_url = "http://ip.42.pl/raw"
public_ip = get_public_ip(target_url)
if not "None" in public_ip:
print("Your Public IP address is: %s") % (str(public_ip))
else:
print("Your Public IP address was not found")
| bsd-3-clause | Python | |
00a545e9ddc53ed99da59bdb99db728ff448fde4 | Add proof-of-concept implementation. | mbr/wrimg | wr.py | wr.py | #!/usr/bin/env python
import os
import sys
import time
# NOTE(review): destination is a hard-coded raw block device — running this
# will overwrite /dev/sdg. (Python 2 print statements throughout.)
source = '/dev/zero'
dest = '/dev/sdg'
# Chunk size adapts between 4 KiB and 16 MiB based on write latency.
min_chunk_size = 4 * 1024
chunk_size = min_chunk_size
max_chunk_size = min_chunk_size * 1024 * 4
adaptive = True
# Total number of bytes to copy (40 MiB).
total = 4 * 10 * 1024 * 1024
with open(source, 'rb') as src, open(dest, 'wb') as dst:
    while total:
        # measure time
        start = time.time()
        buf = src.read(min(total, chunk_size))
        dst.write(buf)
        dst.flush()
        # fsync so the measured time reflects the actual device write.
        os.fsync(dst.fileno())
        end = time.time()
        # adjust chunk size if needed
        if adaptive:
            if end - start > 2 and chunk_size > min_chunk_size:
                # took longer then two seconds, halve chunk_size
                chunk_size //= 2
                print '-',
            elif end - start < 0.5 and chunk_size < max_chunk_size:
                chunk_size *= 2
                print '+',
        total -= len(buf)
        print '.',
        sys.stdout.flush()
print src
print dst
| mit | Python | |
1c7e7be7132c3495802a066bc81fe63286b029cb | add 155 | wait4pumpkin/leetcode,wait4pumpkin/leetcode | 155.py | 155.py | class MinStack:
    def __init__(self):
        """
        initialize your data structure here.
        """
        # data holds the stack values; minVals[i] is the minimum of
        # data[:i+1], kept in lockstep so getMin() is O(1).
        self.data = []
        self.minVals = []
    def push(self, x):
        """
        :type x: int
        :rtype: void
        """
        self.data.append(x)
        # Record the running minimum alongside the pushed value.
        if self.minVals:
            self.minVals.append(min(self.minVals[-1], x))
        else:
            self.minVals.append(x)
def pop(self):
"""
:rtype: void
"""
if self.data:
del self.minVals[-1]
self.data
    def top(self):
        """
        :rtype: int
        """
        # Returns None implicitly when the stack is empty.
        if self.data:
            return self.data[-1]
    def getMin(self):
        """
        :rtype: int
        """
        # minVals mirrors data, so its last entry is the current minimum.
        # Returns None implicitly when the stack is empty.
        if self.data:
            return self.minVals[-1]
| mit | Python | |
24b6126871a5378faa2e8f9848c279999e50cb96 | Check issue numbers to find out of order listings | xchewtoyx/comicmgt,xchewtoyx/comicmgt | ooo.py | ooo.py | #!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#(\d+)')
def lines(todofile):
    """Yield (raw line, title, issue number) for each comic line in *todofile*."""
    with open(todofile) as todolines:
        for line in todolines:
            title_match = COMIC_RE.match(line)
            if title_match:
                # (title, issue)
                yield line.strip(), title_match.group(1), int(title_match.group(2))
def issues(todofile):
    """Yield (line, previous issue) for issues that are out of sequence."""
    # seen maps title -> last issue number encountered (0 before the first).
    seen = defaultdict(int)
    for line, title, issue in lines(todofile):
        # Flag any issue that is not exactly last-seen + 1 for its title.
        if issue and seen[title] and issue != seen[title]+1:
            yield line, seen[title]
        seen[title] = issue
def main(files):
    # (Python 2 print statement.)
    for todofile in files:
        for issue, lastissue in issues(todofile):
            print "%s (last seen %d)" % (issue, lastissue)
if __name__ == '__main__':
    main(sys.argv[1:])
| mit | Python | |
aaa64ca93372c8b9d534636482a2c6349b11b757 | Add run.py | illumenati/duwamish-sensor,tipsqueal/duwamish-sensor | run.py | run.py | import serial
import threading

print('Starting server...')

temperature_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
# Bug fix: the module is imported as ``serial``; ``ser.Serial`` raised
# NameError.
temperature_ser = serial.Serial(temperature_usb, BAUD_RATE)

def process_line(line):
    """Handle one complete, carriage-return-terminated sensor reading."""
    print('Need to process line: {}'.format(line))

def temperature_loop():
    """Accumulate serial bytes into lines and pass them to process_line."""
    # Bug fixes: ``line`` must be initialised before the first byte arrives,
    # and reads must go through the opened port, not the undefined ``ser``.
    line = ""
    while True:
        data = temperature_ser.read()
        if data == "\r":
            process_line(line)
            line = ""
        else:
            line = line + data

temperature_thread = threading.Thread(target=temperature_loop)
temperature_thread.start()
| mit | Python | |
c2cc91621535d121d2188f7e391f6c52728eeba1 | Create run.py for uwsgi setup | spoonref/haiku,spoonref/haiku,spoonref/litbit,spoonref/haiku,spoonref/haiku,spoonref/litbit,spoonref/haiku | run.py | run.py | from index import app
# Bug fix: the guard compared against "name" and was missing the colon
# (a SyntaxError); use the conventional entry-point check instead.
if __name__ == "__main__":
    app.run()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.